diff --git a/.flake8_packages b/.flake8_packages index d17a34951d1..fc2dbf98afd 100644 --- a/.flake8_packages +++ b/.flake8_packages @@ -9,7 +9,7 @@ # # Note that we also add *per-line* exemptions for certain patterns in the # `spack flake8` command. This is where F403 for `from spack import *` -# is added (beause we *only* allow that wildcard). +# is added (because we *only* allow that wildcard). # # See .flake8 for regular exceptions. # diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index b38a6774164..9a568285ea1 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,21 +1,11 @@ --- -name: "\U0001F41E Bug report" -about: Report a bug in the core of Spack (command not working as expected, etc.) +name: "\U0001F41E Bug report" +about: Report a bug in the core of Spack (command not working as expected, etc.) labels: "bug,triage" --- - - - -### Spack version - -```console -$ spack --version - -``` + ### Steps to reproduce the issue @@ -27,38 +17,26 @@ $ spack ### Error Message - ```console -$ spack -d --stacktrace -$ spack -d --stacktrace -... +$ spack --debug --stacktrace ``` -that activate the full debug output. ---> ### Information on your system - ---> + -### General information +### Additional information -- [ ] I have run `spack --version` and reported the version of Spack + +- [ ] I have run `spack debug report` and reported the version of Spack/Python/Platform - [ ] I have searched the issues of this repo and believe this is not a duplicate - [ ] I have run the failing commands in debug mode and reported the output - \ No newline at end of file +Other than that, thanks for taking the time to contribute to Spack! --> diff --git a/.github/workflows/linux_build_tests.yaml b/.github/workflows/linux_build_tests.yaml index 68c60c08edf..04119c5c6ed 100644 --- a/.github/workflows/linux_build_tests.yaml +++ b/.github/workflows/linux_build_tests.yaml @@ -42,6 +42,7 @@ jobs: python-version: 3.8 - name: Install System Packages run: | + sudo apt-get update sudo apt-get -yqq install ccache gfortran perl perl-base r-base r-base-core r-base-dev findutils openssl libssl-dev libpciaccess-dev R --version perl --version diff --git a/.github/workflows/minimum_python_versions.yaml b/.github/workflows/minimum_python_versions.yaml index c55042ffa11..95bc9af3582 100644 --- a/.github/workflows/minimum_python_versions.yaml +++ b/.github/workflows/minimum_python_versions.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Setup Python uses: actions/setup-python@v1 with: diff --git a/.gitignore b/.gitignore index f5169a54058..da05b2bcf04 100644 --- a/.gitignore +++ b/.gitignore @@ -22,8 +22,14 @@ .coverage \#* .#* +.cache lib/spack/spack/test/.cache /bin/spackc *.in.log *.out.log *.orig + +# Eclipse files +.project +.cproject +.pydevproject diff --git a/CHANGELOG.md b/CHANGELOG.md index 47657b98257..092959aff98 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +# v0.14.1 (2019-03-20) + +This is a bugfix release on top of `v0.14.0`. 
Specific fixes include: + +* several bugfixes for parallel installation (#15339, #15341, #15220, #15197) +* `spack load` now works with packages that have been renamed (#14348) +* bugfix for `suite-sparse` installation (#15326) +* deduplicate identical suffixes added to module names (#14920) +* fix issues with `configure_args` during module refresh (#11084) +* increased test coverage and test fixes (#15237, #15354, #15346) +* remove some unused code (#15431) + # v0.14.0 (2020-02-23) `v0.14.0` is a major feature release, with 3 highlighted features: @@ -13,7 +25,7 @@ 2. **Build pipelines.** You can also build in parallel through Gitlab CI. Simply create a Spack environment and push it to Gitlab to build - on Gitlab runners. Pipeline support is now integreated into a single + on Gitlab runners. Pipeline support is now integrated into a single `spack ci` command, so setting it up is easier than ever. See the [Pipelines section](https://spack.readthedocs.io/en/v0.14.0/pipelines.html) in the docs. @@ -113,8 +125,8 @@ RHEL8. * mirror bugfixes: symlinks, duplicate patches, and exception handling (#13789) * don't try to fetch `BundlePackages` (#13908) * avoid re-fetching patches already added to a mirror (#13908) -* avoid re-fetching alread added patches (#13908) -* avoid re-fetching alread added patches (#13908) +* avoid re-fetching already added patches (#13908) +* avoid re-fetching already added patches (#13908) * allow repeated invocations of `spack mirror create` on the same dir (#13908) * bugfix for RHEL8 when `python` is unavailable (#14252) * improve concretization performance in environments (#14190) diff --git a/bin/spack-python b/bin/spack-python index b81c8b7dbb9..22141b46d69 100755 --- a/bin/spack-python +++ b/bin/spack-python @@ -22,4 +22,4 @@ # # This is compatible across platforms. # -/usr/bin/env spack python "$@" +exec /usr/bin/env spack python "$@" diff --git a/etc/spack/defaults/modules.yaml b/etc/spack/defaults/modules.yaml index 77c976510bd..5ae1b50fdad 100644 --- a/etc/spack/defaults/modules.yaml +++ b/etc/spack/defaults/modules.yaml @@ -30,7 +30,11 @@ modules: lib64: - LIBRARY_PATH include: - - CPATH + - C_INCLUDE_PATH + - CPLUS_INCLUDE_PATH + # The INCLUDE env variable specifies paths to look for + # .mod file for Intel Fortran compilers + - INCLUDE lib/pkgconfig: - PKG_CONFIG_PATH lib64/pkgconfig: diff --git a/etc/spack/defaults/packages.yaml b/etc/spack/defaults/packages.yaml index fcc39f534ea..2c91b75e45d 100644 --- a/etc/spack/defaults/packages.yaml +++ b/etc/spack/defaults/packages.yaml @@ -27,6 +27,7 @@ packages: glx: [mesa+glx, opengl] glu: [mesa-glu, openglu] golang: [gcc] + iconv: [libiconv] ipp: [intel-ipp] java: [openjdk, jdk, ibm-java] jpeg: [libjpeg-turbo, libjpeg] @@ -40,6 +41,7 @@ packages: pil: [py-pillow] pkgconfig: [pkgconf, pkg-config] scalapack: [netlib-scalapack] + sycl: [hipsycl] szip: [libszip, libaec] tbb: [intel-tbb] unwind: [libunwind] diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index d63dec8827e..2f047c07853 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -925,7 +925,7 @@ contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``, ``cppflags='-O3'``, and ``cppflags="-O3 -fPIC"`` are acceptable, but ``cppflags=-O3 -fPIC`` is not. Additionally, if the value of the compiler flags is not the last thing on the line, it must be followed -by a space. The commmand ``spack install libelf cppflags="-O3"%intel`` +by a space. 
The command ``spack install libelf cppflags="-O3"%intel`` will be interpreted as an attempt to set ``cppflags="-O3%intel"``. The six compiler flags are injected in the order of implicit make commands diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index b141f2b717c..cfd850af28a 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -124,6 +124,39 @@ The ``buildable`` does not need to be paired with external packages. It could also be used alone to forbid packages that may be buggy or otherwise undesirable. +Virtual packages in Spack can also be specified as not buildable, and +external implementations can be provided. In the example above, +OpenMPI is configured as not buildable, but Spack will often prefer +other MPI implementations over the externally available OpenMPI. Spack +can be configured with every MPI provider not buildable individually, +but more conveniently: + +.. code-block:: yaml + + packages: + mpi: + buildable: False + openmpi: + paths: + openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3 + openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug + openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel + +Implementations can also be listed immediately under the virtual they provide: + +.. code-block:: yaml + + packages: + mpi: + buildable: False + openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3 + openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug + openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel + mpich@3.3 %clang@9.0.0 arch=linux-debian7-x86_64: /opt/mpich-3.3-intel + +Spack can then use any of the listed external implementations of MPI +to satisfy a dependency, and will choose depending on the compiler and +architecture. .. _concretization-preferences: diff --git a/lib/spack/docs/build_systems/cmakepackage.rst b/lib/spack/docs/build_systems/cmakepackage.rst index 0a771edad32..76e89c80b1a 100644 --- a/lib/spack/docs/build_systems/cmakepackage.rst +++ b/lib/spack/docs/build_systems/cmakepackage.rst @@ -128,17 +128,20 @@ Adding flags to cmake ^^^^^^^^^^^^^^^^^^^^^ To add additional flags to the ``cmake`` call, simply override the -``cmake_args`` function: +``cmake_args`` function. The following example defines values for the flags +``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with +and without the :py:meth:`~.CMakePackage.define` and +:py:meth:`~.CMakePackage.define_from_variant` helper functions: .. code-block:: python def cmake_args(self): - args = [] - - if '+hdf5' in self.spec: - args.append('-DDETECT_HDF5=ON') - else: - args.append('-DDETECT_HDF5=OFF') + args = [ + '-DWHATEVER:STRING=somevalue', + self.define('ENABLE_BROKEN_FEATURE', False), + self.define_from_variant('DETECT_HDF5', 'hdf5'), + self.define_from_variant('THREADS'), # True if +threads + ] return args diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst index c14fc34f519..153b41bdc75 100644 --- a/lib/spack/docs/build_systems/intelpackage.rst +++ b/lib/spack/docs/build_systems/intelpackage.rst @@ -553,7 +553,7 @@ follow `the next section `_ instead. f77: stub fc: stub - Replace ``18.0.3`` with the version that you determined in the preceeding + Replace ``18.0.3`` with the version that you determined in the preceding step. The contents under ``paths:`` do not matter yet. 
You are right to ask: "Why on earth is that necessary?" [fn8]_. @@ -696,7 +696,7 @@ follow `the next section `_ instead. - /home/$user/spack-stage Do not duplicate the ``config:`` line if it already is present. - Adapt the location, which here is the same as in the preceeding example. + Adapt the location, which here is the same as in the preceding example. 3. Retry installing the large package. @@ -965,7 +965,7 @@ a *virtual* ``mkl`` package is declared in Spack. Likewise, in a :ref:`MakefilePackage ` - or similiar package that does not use AutoTools you may need to provide include + or similar package that does not use AutoTools you may need to provide include and link options for use on command lines or in environment variables. For example, to generate an option string of the form ``-I``, use: diff --git a/lib/spack/docs/build_systems/sippackage.rst b/lib/spack/docs/build_systems/sippackage.rst index b8c08ec513e..ddf9a26ab9f 100644 --- a/lib/spack/docs/build_systems/sippackage.rst +++ b/lib/spack/docs/build_systems/sippackage.rst @@ -51,10 +51,8 @@ Build system dependencies ``SIPPackage`` requires several dependencies. Python is needed to run the ``configure.py`` build script, and to run the resulting Python libraries. Qt is needed to provide the ``qmake`` command. SIP is also -needed to build the package. SIP is an unusual dependency in that it -must be installed in the same installation directory as the package, -so instead of a ``depends_on``, we use a ``resource``. All of these -dependencies are automatically added via the base class +needed to build the package. All of these dependencies are automatically +added via the base class .. code-block:: python @@ -62,11 +60,7 @@ dependencies are automatically added via the base class depends_on('qt', type='build') - resource(name='sip', - url='https://www.riverbankcomputing.com/static/Downloads/sip/4.19.18/sip-4.19.18.tar.gz', - sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e', - destination='.') - + depends_on('py-sip', type='build') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Passing arguments to ``configure.py`` diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst index 18500ff7648..2ca25b7207b 100644 --- a/lib/spack/docs/containers.rst +++ b/lib/spack/docs/containers.rst @@ -44,7 +44,7 @@ Environments: && echo " install_tree: /opt/software" \ && echo " view: /opt/view") > /opt/spack-environment/spack.yaml - # Install the software, remove unecessary deps + # Install the software, remove unnecessary deps RUN cd /opt/spack-environment && spack install && spack gc -y # Strip all the binaries @@ -266,7 +266,7 @@ following ``Dockerfile``: && echo " install_tree: /opt/software" \ && echo " view: /opt/view") > /opt/spack-environment/spack.yaml - # Install the software, remove unecessary deps + # Install the software, remove unnecessary deps RUN cd /opt/spack-environment && spack install && spack gc -y # Strip all the binaries @@ -304,4 +304,4 @@ following ``Dockerfile``: .. note:: Spack can also produce Singularity definition files to build the image. The minimum version of Singularity required to build a SIF (Singularity Image Format) - from them is ``3.5.3``. \ No newline at end of file + from them is ``3.5.3``. diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst index 0b79141ee37..9935ec0c83a 100644 --- a/lib/spack/docs/contribution_guide.rst +++ b/lib/spack/docs/contribution_guide.rst @@ -385,8 +385,8 @@ coverage. 
This helps us tell what percentage of lines of code in Spack are covered by unit tests. Although code covered by unit tests can still contain bugs, it is much less error prone than code that is not covered by unit tests. -Codecov provides `browser extensions `_ -for Google Chrome, Firefox, and Opera. These extensions integrate with GitHub +Codecov provides `browser extensions `_ +for Google Chrome and Firefox. These extensions integrate with GitHub and allow you to see coverage line-by-line when viewing the Spack repository. If you are new to Spack, a great way to get started is to write unit tests to increase coverage! diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index 5ec1ec9032a..5d091dee30c 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -285,7 +285,7 @@ be called using the ``spack -E`` flag to specify the environment. .. code-block:: console - $ spack activate myenv + $ spack env activate myenv $ spack add mpileaks or diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index 3a9b2d42b2e..8de9d276623 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -851,7 +851,7 @@ from websites and from git. .. warning:: - This workaround should be used ONLY as a last resort! Wihout SSL + This workaround should be used ONLY as a last resort! Without SSL certificate verification, spack and git will download from sites you wouldn't normally trust. The code you download and run may then be compromised! While this is not a major issue for archives that will diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 3c588c240b8..88b78148389 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -3607,7 +3607,7 @@ the command line. For most compilers, ``$rpath_flag`` is ``-Wl,-rpath,``. However, NAG passes its flags to GCC instead of passing them directly to the linker. Therefore, its ``$rpath_flag`` is doubly wrapped: ``-Wl,-Wl,,-rpath,``. - ``$rpath_flag`` can be overriden on a compiler specific basis in + ``$rpath_flag`` can be overridden on a compiler specific basis in ``lib/spack/spack/compilers/$compiler.py``. The compiler wrappers also pass the compiler flags specified by the user from diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst index f70b39a16d8..a8f81988528 100644 --- a/lib/spack/docs/pipelines.rst +++ b/lib/spack/docs/pipelines.rst @@ -132,7 +132,7 @@ A pipeline-enabled spack environment ------------------------------------ Here's an example of a spack environment file that has been enhanced with -sections desribing a build pipeline: +sections describing a build pipeline: .. code-block:: yaml diff --git a/lib/spack/docs/workflows.rst b/lib/spack/docs/workflows.rst index b329a0205c8..9ce664b6cad 100644 --- a/lib/spack/docs/workflows.rst +++ b/lib/spack/docs/workflows.rst @@ -284,8 +284,10 @@ have some drawbacks: The ``spack load`` and ``spack module tcl loads`` commands, on the other hand, are not very smart: if the user-supplied spec matches more than one installed package, then ``spack module tcl loads`` will - fail. This may change in the future. For now, the workaround is to - be more specific on any ``spack load`` commands that fail. + fail. This default behavior may change in the future. 
For now, + the workaround is to either be more specific on any failing ``spack load`` + commands or to use ``spack load --first`` to allow spack to load the + first matching spec. """""""""""""""""""""" @@ -444,7 +446,7 @@ environment. A single-prefix filesystem view is a single directory tree that is the union of the directory hierarchies of a number of installed packages; -it is similar to the directory hiearchy that might exist under +it is similar to the directory hierarchy that might exist under ``/usr/local``. The files of the view's installed packages are brought into the view by symbolic or hard links, referencing the original Spack installation. @@ -1237,7 +1239,7 @@ you can also manually set them in your ``.bashrc``. 2. Other package managers like Homebrew will try to install things to the same directory. If you plan on using Homebrew in conjunction with Spack, don't symlink things to ``/usr/local``. - 3. If you are on a shared workstation, or don't have sudo priveleges, you + 3. If you are on a shared workstation, or don't have sudo privileges, you can't do this. If you still want to do this anyway, there are several ways around SIP. @@ -1467,7 +1469,7 @@ In order to build and run the image, execute: SPACK_ROOT=/usr/local \ FORCE_UNSAFE_CONFIGURE=1 - # install minimal spack depedencies + # install minimal spack dependencies RUN apt-get update \ && apt-get install -y --no-install-recommends \ autoconf \ diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index 27f00efb095..230ec170b26 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -88,10 +88,11 @@ * Homepage: https://pypi.python.org/pypi/pytest * Usage: Testing framework used by Spack. * Version: 3.2.5 (last version supporting Python 2.6) -* Note: This package has been slightly modified to improve - Python 2.6 compatibility. See the following commit if the - vendored copy ever needs to be updated again: - https://github.com/spack/spack/pull/6801/commits/ff513c39f2c67ff615de5cbc581dd69a8ec96526 +* Note: This package has been slightly modified: + * We improve Python 2.6 compatibility. See: + https://github.com/spack/spack/pull/6801. + * We have patched pytest not to depend on setuptools. See: + https://github.com/spack/spack/pull/15612 ruamel.yaml ------ @@ -125,4 +126,5 @@ * Homepage: https://altgraph.readthedocs.io/en/latest/index.html * Usage: dependency of macholib * Version: 0.16.1 + """ diff --git a/lib/spack/external/_pytest/config.py b/lib/spack/external/_pytest/config.py index 19835d2c39f..513478a972f 100644 --- a/lib/spack/external/_pytest/config.py +++ b/lib/spack/external/_pytest/config.py @@ -1028,34 +1028,13 @@ def _consider_importhook(self, args): except SystemError: mode = 'plain' else: - self._mark_plugins_for_rewrite(hook) + # REMOVED FOR SPACK: This routine imports `pkg_resources` from + # `setuptools`, but we do not need it for Spack. We have removed + # it from Spack to avoid a dependency on setuptools. + # self._mark_plugins_for_rewrite(hook) + pass self._warn_about_missing_assertion(mode) - def _mark_plugins_for_rewrite(self, hook): - """ - Given an importhook, mark for rewrite any top-level - modules or packages in the distribution package for - all pytest plugins. 
- """ - import pkg_resources - self.pluginmanager.rewrite_hook = hook - - # 'RECORD' available for plugins installed normally (pip install) - # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e) - # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa - # so it shouldn't be an issue - metadata_files = 'RECORD', 'SOURCES.txt' - - package_files = ( - entry.split(',')[0] - for entrypoint in pkg_resources.iter_entry_points('pytest11') - for metadata in metadata_files - for entry in entrypoint.dist._get_metadata(metadata) - ) - - for name in _iter_rewritable_modules(package_files): - hook.mark_rewrite(name) - def _warn_about_missing_assertion(self, mode): try: assert False @@ -1081,7 +1060,12 @@ def _preparse(self, args, addopts=True): self._checkversion() self._consider_importhook(args) self.pluginmanager.consider_preparse(args) - self.pluginmanager.load_setuptools_entrypoints('pytest11') + + # REMOVED FOR SPACK: This routine imports `pkg_resources` from + # `setuptools`, but we do not need it for Spack. We have removed + # it from Spack to avoid a dependency on setuptools. + # self.pluginmanager.load_setuptools_entrypoints('pytest11') + self.pluginmanager.consider_env() self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy()) if self.known_args_namespace.confcutdir is None and self.inifile: diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy.py b/lib/spack/external/_pytest/vendored_packages/pluggy.py index aebddad01da..6f26552d73b 100644 --- a/lib/spack/external/_pytest/vendored_packages/pluggy.py +++ b/lib/spack/external/_pytest/vendored_packages/pluggy.py @@ -497,26 +497,6 @@ def check_pending(self): "unknown hook %r in plugin %r" % (name, hookimpl.plugin)) - def load_setuptools_entrypoints(self, entrypoint_name): - """ Load modules from querying the specified setuptools entrypoint name. - Return the number of loaded plugins. """ - from pkg_resources import (iter_entry_points, DistributionNotFound, - VersionConflict) - for ep in iter_entry_points(entrypoint_name): - # is the plugin registered or blocked? - if self.get_plugin(ep.name) or self.is_blocked(ep.name): - continue - try: - plugin = ep.load() - except DistributionNotFound: - continue - except VersionConflict as e: - raise PluginValidationError( - "Plugin %r could not be loaded: %s!" % (ep.name, e)) - self.register(plugin, name=ep.name) - self._plugin_distinfo.append((plugin, ep.dist)) - return len(self._plugin_distinfo) - def list_plugin_distinfo(self): """ return list of distinfo/plugin tuples for all setuptools registered plugins. 
""" diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/altgraph/__init__.py index 289c6408d11..ee70a9c91bf 100644 --- a/lib/spack/external/altgraph/__init__.py +++ b/lib/spack/external/altgraph/__init__.py @@ -139,9 +139,12 @@ @contributor: U{Reka Albert } ''' -import pkg_resources -__version__ = pkg_resources.require('altgraph')[0].version - +# import pkg_resources +# __version__ = pkg_resources.require('altgraph')[0].version +# pkg_resources is not finding the altgraph import despite the fact that it is in sys.path +# there is no .dist-info or .egg-info for pkg_resources to query the version from +# so it must be set manually +__version__ = '0.16.1' class GraphError(ValueError): pass diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py index 10516e59d2e..e6089926425 100644 --- a/lib/spack/llnl/util/tty/log.py +++ b/lib/spack/llnl/util/tty/log.py @@ -13,12 +13,18 @@ import select import sys import traceback +import signal from contextlib import contextmanager from six import string_types from six import StringIO import llnl.util.tty as tty +try: + import termios +except ImportError: + termios = None + # Use this to strip escape sequences _escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h') @@ -31,12 +37,26 @@ control = re.compile('(\x11\n|\x13\n)') +@contextmanager +def background_safe(): + signal.signal(signal.SIGTTOU, signal.SIG_IGN) + yield + signal.signal(signal.SIGTTOU, signal.SIG_DFL) + + +def _is_background_tty(): + """Return True iff this process is backgrounded and stdout is a tty""" + if sys.stdout.isatty(): + return os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno()) + return False # not writing to tty, not background + + def _strip(line): """Strip color and control characters from a line.""" return _escape.sub('', line) -class keyboard_input(object): +class _keyboard_input(object): """Context manager to disable line editing and echoing. Use this with ``sys.stdin`` for keyboard input, e.g.:: @@ -81,32 +101,30 @@ def __enter__(self): if not self.stream or not self.stream.isatty(): return - try: - # If this fails, self.old_cfg will remain None - import termios - + # If this fails, self.old_cfg will remain None + if termios and not _is_background_tty(): # save old termios settings - fd = self.stream.fileno() - self.old_cfg = termios.tcgetattr(fd) + old_cfg = termios.tcgetattr(self.stream) - # create new settings with canonical input and echo - # disabled, so keypresses are immediate & don't echo. - self.new_cfg = termios.tcgetattr(fd) - self.new_cfg[3] &= ~termios.ICANON - self.new_cfg[3] &= ~termios.ECHO + try: + # create new settings with canonical input and echo + # disabled, so keypresses are immediate & don't echo. 
+ self.new_cfg = termios.tcgetattr(self.stream) + self.new_cfg[3] &= ~termios.ICANON + self.new_cfg[3] &= ~termios.ECHO - # Apply new settings for terminal - termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg) + # Apply new settings for terminal + termios.tcsetattr(self.stream, termios.TCSADRAIN, self.new_cfg) + self.old_cfg = old_cfg - except Exception: - pass # some OS's do not support termios, so ignore + except Exception: + pass # some OS's do not support termios, so ignore def __exit__(self, exc_type, exception, traceback): """If termios was avaialble, restore old settings.""" if self.old_cfg: - import termios - termios.tcsetattr( - self.stream.fileno(), termios.TCSADRAIN, self.old_cfg) + with background_safe(): # change it back even if backgrounded now + termios.tcsetattr(self.stream, termios.TCSADRAIN, self.old_cfg) class Unbuffered(object): @@ -426,45 +444,63 @@ def _writer_daemon(self, stdin): istreams = [in_pipe, stdin] if stdin else [in_pipe] log_file = self.log_file + + def handle_write(force_echo): + # Handle output from the with block process. + # If we arrive here it means that in_pipe was + # ready for reading : it should never happen that + # line is false-ish + line = in_pipe.readline() + if not line: + return (True, force_echo) # break while loop + + # find control characters and strip them. + controls = control.findall(line) + line = re.sub(control, '', line) + + # Echo to stdout if requested or forced + if echo or force_echo: + try: + if termios: + conf = termios.tcgetattr(sys.stdout) + tostop = conf[3] & termios.TOSTOP + else: + tostop = True + except Exception: + tostop = True + if not (tostop and _is_background_tty()): + sys.stdout.write(line) + sys.stdout.flush() + + # Stripped output to log file. + log_file.write(_strip(line)) + log_file.flush() + + if xon in controls: + force_echo = True + if xoff in controls: + force_echo = False + return (False, force_echo) + try: - with keyboard_input(stdin): + with _keyboard_input(stdin): while True: # No need to set any timeout for select.select # Wait until a key press or an event on in_pipe. rlist, _, _ = select.select(istreams, [], []) - # Allow user to toggle echo with 'v' key. # Currently ignores other chars. - if stdin in rlist: + # only read stdin if we're in the foreground + if stdin in rlist and not _is_background_tty(): if stdin.read(1) == 'v': echo = not echo - # Handle output from the with block process. if in_pipe in rlist: - # If we arrive here it means that in_pipe was - # ready for reading : it should never happen that - # line is false-ish - line = in_pipe.readline() - if not line: - break # EOF + br, fe = handle_write(force_echo) + force_echo = fe + if br: + break - # find control characters and strip them. - controls = control.findall(line) - line = re.sub(control, '', line) - - # Echo to stdout if requested or forced - if echo or force_echo: - sys.stdout.write(line) - sys.stdout.flush() - - # Stripped output to log file. 
- log_file.write(_strip(line)) - log_file.flush() - - if xon in controls: - force_echo = True - if xoff in controls: - force_echo = False except BaseException: tty.error("Exception occurred in writer daemon!") traceback.print_exc() diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index b18acb20942..112a98c01c1 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 14, 0) +spack_version_info = (0, 14, 1) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 0569bcc665f..0fb6feae028 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -10,6 +10,9 @@ import shutil import tempfile import hashlib +import glob +import platform + from contextlib import closing import ruamel.yaml as yaml @@ -53,7 +56,7 @@ BUILD_CACHE_INDEX_ENTRY_TEMPLATE = '
  <li><a href="{path}">{path}</a></li>
  • ' -class NoOverwriteException(Exception): +class NoOverwriteException(spack.error.SpackError): """ Raised when a file exists and must be overwritten. """ @@ -68,14 +71,18 @@ class NoGpgException(spack.error.SpackError): """ Raised when gpg2 is not in PATH """ - pass + + def __init__(self, msg): + super(NoGpgException, self).__init__(msg) class NoKeyException(spack.error.SpackError): """ Raised when gpg has no default key added. """ - pass + + def __init__(self, msg): + super(NoKeyException, self).__init__(msg) class PickKeyException(spack.error.SpackError): @@ -84,7 +91,7 @@ class PickKeyException(spack.error.SpackError): """ def __init__(self, keys): - err_msg = "Multi keys available for signing\n%s\n" % keys + err_msg = "Multiple keys available for signing\n%s\n" % keys err_msg += "Use spack buildcache create -k to pick a key." super(PickKeyException, self).__init__(err_msg) @@ -107,7 +114,9 @@ class NewLayoutException(spack.error.SpackError): """ Raised if directory layout is different from buildcache. """ - pass + + def __init__(self, msg): + super(NewLayoutException, self).__init__(msg) def build_cache_relative_path(): @@ -137,15 +146,21 @@ def read_buildinfo_file(prefix): return buildinfo -def write_buildinfo_file(prefix, workdir, rel=False): +def write_buildinfo_file(spec, workdir, rel=False): """ Create a cache file containing information required for the relocation """ + prefix = spec.prefix text_to_relocate = [] binary_to_relocate = [] link_to_relocate = [] blacklist = (".spack", "man") + prefix_to_hash = dict() + prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash() + deps = spack.build_environment.get_rpath_deps(spec.package) + for d in deps: + prefix_to_hash[str(d.prefix)] = d.dag_hash() # Do this at during tarball creation to save time when tarball unpacked. # Used by make_package_relative to determine binaries to change. for root, dirs, files in os.walk(prefix, topdown=True): @@ -162,8 +177,8 @@ def write_buildinfo_file(prefix, workdir, rel=False): link_to_relocate.append(rel_path_name) else: msg = 'Absolute link %s to %s ' % (path_name, link) - msg += 'outside of stage %s ' % prefix - msg += 'cannot be relocated.' + msg += 'outside of prefix %s ' % prefix + msg += 'should not be relocated.' 
tty.warn(msg) if relocate.needs_binary_relocation(m_type, m_subtype): @@ -184,6 +199,7 @@ def write_buildinfo_file(prefix, workdir, rel=False): buildinfo['relocate_textfiles'] = text_to_relocate buildinfo['relocate_binaries'] = binary_to_relocate buildinfo['relocate_links'] = link_to_relocate + buildinfo['prefix_to_hash'] = prefix_to_hash filename = buildinfo_file_name(workdir) with open(filename, 'w') as outfile: outfile.write(syaml.dump(buildinfo, default_flow_style=True)) @@ -356,7 +372,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, os.remove(temp_tarfile_path) # create info for later relocation and create tar - write_buildinfo_file(spec.prefix, workdir, rel=rel) + write_buildinfo_file(spec, workdir, rel) # optionally make the paths in the binaries relative to each other # in the spack install tree before creating tarball @@ -370,7 +386,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, tty.die(e) else: try: - make_package_placeholder(workdir, spec, allow_root) + check_package_relocatable(workdir, spec, allow_root) except Exception as e: shutil.rmtree(workdir) shutil.rmtree(tarfile_dir) @@ -400,6 +416,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, buildinfo = {} buildinfo['relative_prefix'] = os.path.relpath( spec.prefix, spack.store.layout.root) + buildinfo['relative_rpaths'] = rel spec_dict['buildinfo'] = buildinfo spec_dict['full_hash'] = spec.full_hash() @@ -481,100 +498,149 @@ def make_package_relative(workdir, spec, allow_root): """ prefix = spec.prefix buildinfo = read_buildinfo_file(workdir) - old_path = buildinfo['buildpath'] + old_layout_root = buildinfo['buildpath'] orig_path_names = list() cur_path_names = list() for filename in buildinfo['relocate_binaries']: orig_path_names.append(os.path.join(prefix, filename)) cur_path_names.append(os.path.join(workdir, filename)) - if spec.architecture.platform == 'darwin': + if (spec.architecture.platform == 'darwin' or + spec.architecture.platform == 'test' and + platform.system().lower() == 'darwin'): relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, - old_path, allow_root) - else: + old_layout_root) + if (spec.architecture.platform == 'linux' or + spec.architecture.platform == 'test' and + platform.system().lower() == 'linux'): relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, - old_path, allow_root) + old_layout_root) + relocate.check_files_relocatable(cur_path_names, allow_root) orig_path_names = list() cur_path_names = list() - for filename in buildinfo.get('relocate_links', []): - orig_path_names.append(os.path.join(prefix, filename)) - cur_path_names.append(os.path.join(workdir, filename)) + for linkname in buildinfo.get('relocate_links', []): + orig_path_names.append(os.path.join(prefix, linkname)) + cur_path_names.append(os.path.join(workdir, linkname)) relocate.make_link_relative(cur_path_names, orig_path_names) -def make_package_placeholder(workdir, spec, allow_root): +def check_package_relocatable(workdir, spec, allow_root): """ Check if package binaries are relocatable. Change links to placeholder links. 
""" - prefix = spec.prefix buildinfo = read_buildinfo_file(workdir) cur_path_names = list() for filename in buildinfo['relocate_binaries']: cur_path_names.append(os.path.join(workdir, filename)) relocate.check_files_relocatable(cur_path_names, allow_root) - cur_path_names = list() - for filename in buildinfo.get('relocate_links', []): - cur_path_names.append(os.path.join(workdir, filename)) - relocate.make_link_placeholder(cur_path_names, workdir, prefix) - -def relocate_package(workdir, spec, allow_root): +def relocate_package(spec, allow_root): """ Relocate the given package """ + workdir = str(spec.prefix) buildinfo = read_buildinfo_file(workdir) - new_path = str(spack.store.layout.root) - new_prefix = str(spack.paths.prefix) - old_path = str(buildinfo['buildpath']) - old_prefix = str(buildinfo.get('spackprefix', - '/not/in/buildinfo/dictionary')) - rel = buildinfo.get('relative_rpaths', False) + new_layout_root = str(spack.store.layout.root) + new_prefix = str(spec.prefix) + new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root)) + new_spack_prefix = str(spack.paths.prefix) + old_layout_root = str(buildinfo['buildpath']) + old_spack_prefix = str(buildinfo.get('spackprefix')) + old_rel_prefix = buildinfo.get('relative_prefix') + old_prefix = os.path.join(old_layout_root, old_rel_prefix) + rel = buildinfo.get('relative_rpaths') + prefix_to_hash = buildinfo.get('prefix_to_hash', None) + if (old_rel_prefix != new_rel_prefix and not prefix_to_hash): + msg = "Package tarball was created from an install " + msg += "prefix with a different directory layout and an older " + msg += "buildcache create implementation. It cannot be relocated." + raise NewLayoutException(msg) + # older buildcaches do not have the prefix_to_hash dictionary + # need to set an empty dictionary and add one entry to + # prefix_to_prefix to reproduce the old behavior + if not prefix_to_hash: + prefix_to_hash = dict() + hash_to_prefix = dict() + hash_to_prefix[spec.format('{hash}')] = str(spec.package.prefix) + new_deps = spack.build_environment.get_rpath_deps(spec.package) + for d in new_deps: + hash_to_prefix[d.format('{hash}')] = str(d.prefix) + prefix_to_prefix = dict() + for orig_prefix, hash in prefix_to_hash.items(): + prefix_to_prefix[orig_prefix] = hash_to_prefix.get(hash, None) + prefix_to_prefix[old_prefix] = new_prefix + prefix_to_prefix[old_layout_root] = new_layout_root - tty.msg("Relocating package from", - "%s to %s." % (old_path, new_path)) - path_names = set() + tty.debug("Relocating package from", + "%s to %s." % (old_layout_root, new_layout_root)) + + def is_backup_file(file): + return file.endswith('~') + + # Text files containing the prefix text + text_names = list() for filename in buildinfo['relocate_textfiles']: - path_name = os.path.join(workdir, filename) + text_name = os.path.join(workdir, filename) # Don't add backup files generated by filter_file during install step. 
- if not path_name.endswith('~'): - path_names.add(path_name) - relocate.relocate_text(path_names, oldpath=old_path, - newpath=new_path, oldprefix=old_prefix, - newprefix=new_prefix) - # If the binary files in the package were not edited to use - # relative RPATHs, then the RPATHs need to be relocated - if rel: - if old_path != new_path: - files_to_relocate = list(filter( - lambda pathname: not relocate.file_is_relocatable( - pathname, paths_to_relocate=[old_path, old_prefix]), - map(lambda filename: os.path.join(workdir, filename), - buildinfo['relocate_binaries']))) + if not is_backup_file(text_name): + text_names.append(text_name) - if len(old_path) < len(new_path) and files_to_relocate: - tty.debug('Cannot do a binary string replacement with padding ' - 'for package because %s is longer than %s.' % - (new_path, old_path)) - else: - for path_name in files_to_relocate: - relocate.replace_prefix_bin(path_name, old_path, new_path) - else: - path_names = set() - for filename in buildinfo['relocate_binaries']: - path_name = os.path.join(workdir, filename) - path_names.add(path_name) - if spec.architecture.platform == 'darwin': - relocate.relocate_macho_binaries(path_names, old_path, - new_path, allow_root) - else: - relocate.relocate_elf_binaries(path_names, old_path, - new_path, allow_root) - path_names = set() - for filename in buildinfo.get('relocate_links', []): - path_name = os.path.join(workdir, filename) - path_names.add(path_name) - relocate.relocate_links(path_names, old_path, new_path) +# If we are installing back to the same location don't replace anything + if old_layout_root != new_layout_root: + paths_to_relocate = [old_spack_prefix, old_layout_root] + paths_to_relocate.extend(prefix_to_hash.keys()) + files_to_relocate = list(filter( + lambda pathname: not relocate.file_is_relocatable( + pathname, paths_to_relocate=paths_to_relocate), + map(lambda filename: os.path.join(workdir, filename), + buildinfo['relocate_binaries']))) + # If the buildcache was not created with relativized rpaths + # do the relocation of path in binaries + if (spec.architecture.platform == 'darwin' or + spec.architecture.platform == 'test' and + platform.system().lower() == 'darwin'): + relocate.relocate_macho_binaries(files_to_relocate, + old_layout_root, + new_layout_root, + prefix_to_prefix, rel, + old_prefix, + new_prefix) + if (spec.architecture.platform == 'linux' or + spec.architecture.platform == 'test' and + platform.system().lower() == 'linux'): + relocate.relocate_elf_binaries(files_to_relocate, + old_layout_root, + new_layout_root, + prefix_to_prefix, rel, + old_prefix, + new_prefix) + # Relocate links to the new install prefix + link_names = [linkname + for linkname in buildinfo.get('relocate_links', [])] + relocate.relocate_links(link_names, + old_layout_root, + new_layout_root, + old_prefix, + new_prefix, + prefix_to_prefix) + + # For all buildcaches + # relocate the install prefixes in text files including dependencies + relocate.relocate_text(text_names, + old_layout_root, new_layout_root, + old_prefix, new_prefix, + old_spack_prefix, + new_spack_prefix, + prefix_to_prefix) + + # relocate the install prefixes in binary files including dependencies + relocate.relocate_text_bin(files_to_relocate, + old_layout_root, new_layout_root, + old_prefix, new_prefix, + old_spack_prefix, + new_spack_prefix, + prefix_to_prefix) def extract_tarball(spec, filename, allow_root=False, unsigned=False, @@ -610,7 +676,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, 
Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress) except Exception as e: shutil.rmtree(tmpdir) - tty.die(e) + raise e else: shutil.rmtree(tmpdir) raise NoVerifyException( @@ -639,22 +705,30 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, # if the original relative prefix is in the spec file use it buildinfo = spec_dict.get('buildinfo', {}) old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix) + rel = buildinfo.get('relative_rpaths') # if the original relative prefix and new relative prefix differ the # directory layout has changed and the buildcache cannot be installed - if old_relative_prefix != new_relative_prefix: - shutil.rmtree(tmpdir) - msg = "Package tarball was created from an install " - msg += "prefix with a different directory layout.\n" - msg += "It cannot be relocated." - raise NewLayoutException(msg) + # if it was created with relative rpaths + info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s' + tty.debug(info % + (old_relative_prefix, new_relative_prefix, rel)) +# if (old_relative_prefix != new_relative_prefix and (rel)): +# shutil.rmtree(tmpdir) +# msg = "Package tarball was created from an install " +# msg += "prefix with a different directory layout. " +# msg += "It cannot be relocated because it " +# msg += "uses relative rpaths." +# raise NewLayoutException(msg) # extract the tarball in a temp directory with closing(tarfile.open(tarfile_path, 'r')) as tar: tar.extractall(path=tmpdir) - # the base of the install prefix is used when creating the tarball - # so the pathname should be the same now that the directory layout - # is confirmed - workdir = os.path.join(tmpdir, os.path.basename(spec.prefix)) + # get the parent directory of the file .spack/binary_distribution + # this should the directory unpacked from the tarball whose + # name is unknown because the prefix naming is unknown + bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0] + workdir = re.sub('/.spack/binary_distribution$', '', bindist_file) + tty.debug('workdir %s' % workdir) # install_tree copies hardlinks # create a temporary tarfile from prefix and exract it to workdir # tarfile preserves hardlinks @@ -672,10 +746,10 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, os.remove(specfile_path) try: - relocate_package(spec.prefix, spec, allow_root) + relocate_package(spec, allow_root) except Exception as e: shutil.rmtree(spec.prefix) - tty.die(e) + raise e else: manifest_file = os.path.join(spec.prefix, spack.store.layout.metadata_dir, @@ -685,6 +759,8 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, tty.warn('No manifest file in tarball for spec %s' % spec_id) finally: shutil.rmtree(tmpdir) + if os.path.exists(filename): + os.remove(filename) # Internal cache for downloaded specs @@ -732,7 +808,7 @@ def get_spec(spec=None, force=False): tty.debug("No Spack mirrors are currently configured") return {} - if spec in _cached_specs: + if _cached_specs and spec in _cached_specs: return _cached_specs for mirror in spack.mirror.MirrorCollection().values(): @@ -817,7 +893,7 @@ def get_keys(install=False, trust=False, force=False): mirror_dir = url_util.local_file_path(fetch_url_build_cache) if mirror_dir: tty.msg("Finding public keys in %s" % mirror_dir) - files = os.listdir(mirror_dir) + files = os.listdir(str(mirror_dir)) for file in files: if re.search(r'\.key', file) or re.search(r'\.pub', file): link = url_util.join(fetch_url_build_cache, file) @@ -827,7 +903,7 
@@ def get_keys(install=False, trust=False, force=False): url_util.format(fetch_url_build_cache)) # For s3 mirror need to request index.html directly p, links = web_util.spider( - url_util.join(fetch_url_build_cache, 'index.html'), depth=1) + url_util.join(fetch_url_build_cache, 'index.html')) for link in links: if re.search(r'\.key', link) or re.search(r'\.pub', link): diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py index 14f33e94e67..d7da957a9d6 100644 --- a/lib/spack/spack/build_systems/cmake.py +++ b/lib/spack/spack/build_systems/cmake.py @@ -147,33 +147,129 @@ def _std_args(pkg): except KeyError: build_type = 'RelWithDebInfo' + define = CMakePackage.define args = [ '-G', generator, - '-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix), - '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type), + define('CMAKE_INSTALL_PREFIX', pkg.prefix), + define('CMAKE_BUILD_TYPE', build_type), ] if primary_generator == 'Unix Makefiles': - args.append('-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON') + args.append(define('CMAKE_VERBOSE_MAKEFILE', True)) if platform.mac_ver()[0]: args.extend([ - '-DCMAKE_FIND_FRAMEWORK:STRING=LAST', - '-DCMAKE_FIND_APPBUNDLE:STRING=LAST' + define('CMAKE_FIND_FRAMEWORK', "LAST"), + define('CMAKE_FIND_APPBUNDLE', "LAST"), ]) # Set up CMake rpath - args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE') - rpaths = ';'.join(spack.build_environment.get_rpaths(pkg)) - args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths)) + args.extend([ + define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False), + define('CMAKE_INSTALL_RPATH', + spack.build_environment.get_rpaths(pkg)), + ]) # CMake's find_package() looks in CMAKE_PREFIX_PATH first, help CMake # to find immediate link dependencies in right places: deps = [d.prefix for d in pkg.spec.dependencies(deptype=('build', 'link'))] deps = filter_system_paths(deps) - args.append('-DCMAKE_PREFIX_PATH:STRING={0}'.format(';'.join(deps))) + args.append(define('CMAKE_PREFIX_PATH', deps)) return args + @staticmethod + def define(cmake_var, value): + """Return a CMake command line argument that defines a variable. + + The resulting argument will convert boolean values to OFF/ON + and lists/tuples to CMake semicolon-separated string lists. All other + values will be interpreted as strings. + + Examples: + + .. code-block:: python + + [define('BUILD_SHARED_LIBS', True), + define('CMAKE_CXX_STANDARD', 14), + define('swr', ['avx', 'avx2'])] + + will generate the following configuration options: + + .. code-block:: console + + ["-DBUILD_SHARED_LIBS:BOOL=ON", + "-DCMAKE_CXX_STANDARD:STRING=14", + "-DSWR:STRING=avx;avx2] + + """ + # Create a list of pairs. Each pair includes a configuration + # option and whether or not that option is activated + if isinstance(value, bool): + kind = 'BOOL' + value = "ON" if value else "OFF" + else: + kind = 'STRING' + if isinstance(value, (list, tuple)): + value = ";".join(str(v) for v in value) + else: + value = str(value) + + return "".join(["-D", cmake_var, ":", kind, "=", value]) + + def define_from_variant(self, cmake_var, variant=None): + """Return a CMake command line argument from the given variant's value. + + The optional ``variant`` argument defaults to the lower-case transform + of ``cmake_var``. + + This utility function is similar to + :py:meth:`~.AutotoolsPackage.with_or_without`. + + Examples: + + Given a package with: + + .. 
code-block:: python + + variant('cxxstd', default='11', values=('11', '14'), + multi=False, description='') + variant('shared', default=True, description='') + variant('swr', values=any_combination_of('avx', 'avx2'), + description='') + + calling this function like: + + .. code-block:: python + + [define_from_variant('BUILD_SHARED_LIBS', 'shared'), + define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'), + define_from_variant('SWR')] + + will generate the following configuration options: + + .. code-block:: console + + ["-DBUILD_SHARED_LIBS:BOOL=ON", + "-DCMAKE_CXX_STANDARD:STRING=14", + "-DSWR:STRING=avx;avx2] + + for `` cxxstd=14 +shared swr=avx,avx2`` + """ + + if variant is None: + variant = cmake_var.lower() + + if variant not in self.variants: + raise KeyError( + '"{0}" is not a variant of "{1}"'.format(variant, self.name)) + + value = self.spec.variants[variant].value + if isinstance(value, (tuple, list)): + # Sort multi-valued variants for reproducibility + value = sorted(value) + + return self.define(cmake_var, value) + def flags_to_build_system_args(self, flags): """Produces a list of all command line arguments to pass the specified compiler flags to cmake. Note CMAKE does not have a cppflags option, diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py index 50a7002dbb3..cbee7100496 100644 --- a/lib/spack/spack/build_systems/cuda.py +++ b/lib/spack/spack/build_systems/cuda.py @@ -140,12 +140,12 @@ def cuda_flags(arch_list): conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43') conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60') conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9') - conflicts('%intel@19.0:', when='+cuda ^cuda@:10.2.89') + conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0') # XL is mostly relevant for ppc64le Linux conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1') conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2') - conflicts('%xl@17:', when='+cuda ^cuda@10.0.130:10.2.89') + conflicts('%xl@17:', when='+cuda ^cuda@:10.2.89') # Mac OS X # platform = ' platform=darwin' diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py index 314f91d5d2c..f814ef1837e 100644 --- a/lib/spack/spack/build_systems/sip.py +++ b/lib/spack/spack/build_systems/sip.py @@ -5,9 +5,10 @@ import inspect -from llnl.util.filesystem import working_dir -from spack.directives import depends_on, extends, resource -from spack.package import PackageBase, run_before, run_after +from llnl.util.filesystem import working_dir, join_path +from spack.directives import depends_on, extends +from spack.package import PackageBase, run_after +import os class SIPPackage(PackageBase): @@ -40,33 +41,12 @@ class SIPPackage(PackageBase): extends('python') depends_on('qt') - - resource(name='sip', - url='https://www.riverbankcomputing.com/static/Downloads/sip/4.19.18/sip-4.19.18.tar.gz', - sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e', - destination='.') + depends_on('py-sip') def python(self, *args, **kwargs): """The python ``Executable``.""" inspect.getmodule(self).python(*args, **kwargs) - @run_before('configure') - def install_sip(self): - args = [ - '--sip-module={0}'.format(self.sip_module), - '--bindir={0}'.format(self.prefix.bin), - '--destdir={0}'.format(inspect.getmodule(self).site_packages_dir), - '--incdir={0}'.format(inspect.getmodule(self).python_include_dir), - '--sipdir={0}'.format(self.prefix.share.sip), - '--stubsdir={0}'.format(inspect.getmodule(self).site_packages_dir), - ] - - with 
working_dir('sip-4.19.18'): - self.python('configure.py', *args) - - inspect.getmodule(self).make() - inspect.getmodule(self).make('install') - def configure_file(self): """Returns the name of the configure file to use.""" return 'configure.py' @@ -77,12 +57,15 @@ def configure(self, spec, prefix): args = self.configure_args() + python_include_dir = 'python' + str(spec['python'].version.up_to(2)) + args.extend([ '--verbose', '--confirm-license', '--qmake', spec['qt'].prefix.bin.qmake, - '--sip', prefix.bin.sip, - '--sip-incdir', inspect.getmodule(self).python_include_dir, + '--sip', spec['py-sip'].prefix.bin.sip, + '--sip-incdir', join_path(spec['py-sip'].prefix.include, + python_include_dir), '--bindir', prefix.bin, '--destdir', inspect.getmodule(self).site_packages_dir, ]) @@ -131,3 +114,14 @@ def import_module_test(self): # Check that self.prefix is there after installation run_after('install')(PackageBase.sanity_check_prefix) + + @run_after('install') + def extend_path_setup(self): + # See github issue #14121 and PR #15297 + module = self.spec['py-sip'].variants['module'].value + if module != 'sip': + module = module.split('.')[0] + with working_dir(inspect.getmodule(self).site_packages_dir): + with open(os.path.join(module, '__init__.py'), 'a') as f: + f.write('from pkgutil import extend_path\n') + f.write('__path__ = extend_path(__path__, __name__)\n') diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 2a75a87b549..83e12004a19 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -177,7 +177,7 @@ def elide_list(line_list, max_num=10): return line_list -def disambiguate_spec(spec, env, local=False, installed=True): +def disambiguate_spec(spec, env, local=False, installed=True, first=False): """Given a spec, figure out which installed package it refers to. Arguments: @@ -190,10 +190,11 @@ def disambiguate_spec(spec, env, local=False, installed=True): database query. See ``spack.database.Database._query`` for details. """ hashes = env.all_hashes() if env else None - return disambiguate_spec_from_hashes(spec, hashes, local, installed) + return disambiguate_spec_from_hashes(spec, hashes, local, installed, first) -def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True): +def disambiguate_spec_from_hashes(spec, hashes, local=False, + installed=True, first=False): """Given a spec and a list of hashes, get concrete spec the spec refers to. Arguments: @@ -213,6 +214,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True): if not matching_specs: tty.die("Spec '%s' matches no installed packages." % spec) + elif first: + return matching_specs[0] + elif len(matching_specs) > 1: format_string = '{name}{@version}{%compiler}{arch=architecture}' args = ["%s matches multiple packages." 
% spec, diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 343915868cd..eaeaf5337f9 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -56,7 +56,8 @@ def checksum(parser, args): tty.die("Could not find any versions for {0}".format(pkg.name)) version_lines = spack.stage.get_checksums_for_versions( - url_dict, pkg.name, keep_stage=args.keep_stage) + url_dict, pkg.name, keep_stage=args.keep_stage, + fetch_options=pkg.fetch_options) print() print(version_lines) diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py index 4fc39251aff..074e95209af 100644 --- a/lib/spack/spack/cmd/debug.py +++ b/lib/spack/spack/cmd/debug.py @@ -3,7 +3,10 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from __future__ import print_function + import os +import platform import re from datetime import datetime from glob import glob @@ -11,7 +14,9 @@ import llnl.util.tty as tty from llnl.util.filesystem import working_dir +import spack.architecture as architecture import spack.paths +from spack.main import get_version from spack.util.executable import which description = "debugging commands for troubleshooting Spack" @@ -23,6 +28,7 @@ def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command') sp.add_parser('create-db-tarball', help="create a tarball of Spack's installation metadata") + sp.add_parser('report', help='print information useful for bug reports') def _debug_tarball_suffix(): @@ -78,6 +84,16 @@ def create_db_tarball(args): tty.msg('Created %s' % tarball_name) +def report(args): + print('* **Spack:**', get_version()) + print('* **Python:**', platform.python_version()) + print('* **Platform:**', architecture.Arch( + architecture.platform(), 'frontend', 'frontend')) + + def debug(parser, args): - action = {'create-db-tarball': create_db_tarball} + action = { + 'create-db-tarball': create_db_tarball, + 'report': report, + } action[args.debug_command](args) diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py index e65e050bfac..7f390341ef4 100644 --- a/lib/spack/spack/cmd/dependencies.py +++ b/lib/spack/spack/cmd/dependencies.py @@ -9,6 +9,7 @@ import spack.cmd import spack.cmd.common.arguments as arguments import spack.environment as ev +import spack.package import spack.repo import spack.store @@ -52,22 +53,15 @@ def dependencies(parser, args): else: spec = specs[0] - - if not spec.virtual: - packages = [spec.package] - else: - packages = [ - spack.repo.get(s.name) - for s in spack.repo.path.providers_for(spec)] - - dependencies = set() - for pkg in packages: - possible = pkg.possible_dependencies( - args.transitive, args.expand_virtuals, deptype=args.deptype) - dependencies.update(possible) + dependencies = spack.package.possible_dependencies( + spec, + transitive=args.transitive, + expand_virtuals=args.expand_virtuals, + deptype=args.deptype + ) if spec.name in dependencies: - dependencies.remove(spec.name) + del dependencies[spec.name] if dependencies: colify(sorted(dependencies)) diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py index e60733f5890..89fd15ffdab 100644 --- a/lib/spack/spack/cmd/dependents.py +++ b/lib/spack/spack/cmd/dependents.py @@ -30,7 +30,7 @@ def setup_parser(subparser): def inverted_dependencies(): """Iterate through all packages and return a dictionary mapping package - names to possible dependnecies. + names to possible dependencies. 
Virtual packages are included as sources, so that you can query dependents of, e.g., `mpi`, but virtuals are not included as diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py index 9a00ad1c589..3ef485941fe 100644 --- a/lib/spack/spack/cmd/load.py +++ b/lib/spack/spack/cmd/load.py @@ -33,6 +33,14 @@ def setup_parser(subparser): '--csh', action='store_const', dest='shell', const='csh', help="print csh commands to load the package") + subparser.add_argument( + '--first', + action='store_true', + default=False, + dest='load_first', + help="load the first match if multiple packages match the spec" + ) + subparser.add_argument( '--only', default='package,dependencies', @@ -47,7 +55,7 @@ def setup_parser(subparser): def load(parser, args): env = ev.get_env(args, 'load') - specs = [spack.cmd.disambiguate_spec(spec, env) + specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first) for spec in spack.cmd.parse_specs(args.specs)] if not args.shell: diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py index 2f2290aad8a..a91f3663fbe 100644 --- a/lib/spack/spack/cmd/python.py +++ b/lib/spack/spack/cmd/python.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +from __future__ import print_function + import os import sys import code @@ -20,6 +22,9 @@ def setup_parser(subparser): + subparser.add_argument( + '-V', '--version', action='store_true', + help='print the Python version number and exit') subparser.add_argument( '-c', dest='python_command', help='command to execute') subparser.add_argument( @@ -31,6 +36,10 @@ def setup_parser(subparser): def python(parser, args, unknown_args): + if args.version: + print('Python', platform.python_version()) + return + if args.module: sys.argv = ['spack-python'] + unknown_args + args.python_args runpy.run_module(args.module, run_name="__main__", alter_sys=True) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 83acf796a24..f271790994e 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -8,10 +8,9 @@ import os import llnl.util.tty as tty - -import spack.spec import spack.config -from spack.repo import Repo, create_repo, canonicalize_path, RepoError +import spack.repo +import spack.util.path description = "manage package source repositories" section = "config" @@ -61,7 +60,9 @@ def setup_parser(subparser): def repo_create(args): """Create a new package repository.""" - full_path, namespace = create_repo(args.directory, args.namespace) + full_path, namespace = spack.repo.create_repo( + args.directory, args.namespace + ) tty.msg("Created repo with namespace '%s'." % namespace) tty.msg("To register it with spack, run this command:", 'spack repo add %s' % full_path) @@ -72,7 +73,7 @@ def repo_add(args): path = args.path # real_path is absolute and handles substitution. - canon_path = canonicalize_path(path) + canon_path = spack.util.path.canonicalize_path(path) # check if the path exists if not os.path.exists(canon_path): @@ -83,7 +84,7 @@ def repo_add(args): tty.die("Not a Spack repository: %s" % path) # Make sure it's actually a spack repository by constructing it. - repo = Repo(canon_path) + repo = spack.repo.Repo(canon_path) # If that succeeds, finally add it to the configuration. repos = spack.config.get('repos', scope=args.scope) @@ -104,9 +105,9 @@ def repo_remove(args): namespace_or_path = args.namespace_or_path # If the argument is a path, remove that repository from config. 
- canon_path = canonicalize_path(namespace_or_path) + canon_path = spack.util.path.canonicalize_path(namespace_or_path) for repo_path in repos: - repo_canon_path = canonicalize_path(repo_path) + repo_canon_path = spack.util.path.canonicalize_path(repo_path) if canon_path == repo_canon_path: repos.remove(repo_path) spack.config.set('repos', repos, args.scope) @@ -116,14 +117,14 @@ def repo_remove(args): # If it is a namespace, remove corresponding repo for path in repos: try: - repo = Repo(path) + repo = spack.repo.Repo(path) if repo.namespace == namespace_or_path: repos.remove(path) spack.config.set('repos', repos, args.scope) tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace)) return - except RepoError: + except spack.repo.RepoError: continue tty.die("No repository with path or namespace: %s" @@ -136,8 +137,8 @@ def repo_list(args): repos = [] for r in roots: try: - repos.append(Repo(r)) - except RepoError: + repos.append(spack.repo.Repo(r)) + except spack.repo.RepoError: continue msg = "%d package repositor" % len(repos) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 03e8a215561..dfea6bf85c2 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -26,6 +26,12 @@ import socket import sys import time +try: + import uuid + _use_uuid = True +except ImportError: + _use_uuid = False + pass import llnl.util.tty as tty import six @@ -294,6 +300,7 @@ def __init__(self, root, db_dir=None, upstream_dbs=None, # Set up layout of database files within the db dir self._index_path = os.path.join(self._db_dir, 'index.json') + self._verifier_path = os.path.join(self._db_dir, 'index_verifier') self._lock_path = os.path.join(self._db_dir, 'lock') # This is for other classes to use to lock prefix directories. @@ -311,10 +318,11 @@ def __init__(self, root, db_dir=None, upstream_dbs=None, if not os.path.exists(self._db_dir): mkdirp(self._db_dir) - if not os.path.exists(self._failure_dir): + if not os.path.exists(self._failure_dir) and not is_upstream: mkdirp(self._failure_dir) self.is_upstream = is_upstream + self.last_seen_verifier = '' # initialize rest of state. self.db_lock_timeout = ( @@ -932,6 +940,11 @@ def _write(self, type, value, traceback): with open(temp_file, 'w') as f: self._write_to_file(f) os.rename(temp_file, self._index_path) + if _use_uuid: + with open(self._verifier_path, 'w') as f: + new_verifier = str(uuid.uuid4()) + f.write(new_verifier) + self.last_seen_verifier = new_verifier except BaseException as e: tty.debug(e) # Clean up temp file if something goes wrong. @@ -947,8 +960,18 @@ def _read(self): write lock. """ if os.path.isfile(self._index_path): - # Read from file if a database exists - self._read_from_file(self._index_path) + current_verifier = '' + if _use_uuid: + try: + with open(self._verifier_path, 'r') as f: + current_verifier = f.read() + except BaseException: + pass + if ((current_verifier != self.last_seen_verifier) or + (current_verifier == '')): + self.last_seen_verifier = current_verifier + # Read from file if a database exists + self._read_from_file(self._index_path) return elif self.is_upstream: raise UpstreamDatabaseLockingError( @@ -1361,7 +1384,7 @@ def _query( # TODO: handling of hashes restriction is not particularly elegant. 
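# [Editor's note -- illustrative sketch, not part of the patch.] The new
# index_verifier file introduced above lets a Database instance skip
# re-parsing index.json when nothing has changed: _write() stores a fresh
# uuid4 token in the verifier file and caches it in self.last_seen_verifier,
# and _read() only re-reads the index when the on-disk token is missing or
# differs from the cached one. A minimal standalone model of that handshake
# (hypothetical class, for illustration only):

import uuid

class VerifierDemo(object):
    def __init__(self, verifier_path):
        self.verifier_path = verifier_path
        self.last_seen = ''

    def write(self):
        # mirrors Database._write(): a new token on every successful write
        token = str(uuid.uuid4())
        with open(self.verifier_path, 'w') as f:
            f.write(token)
        self.last_seen = token

    def needs_reread(self):
        # mirrors the check in Database._read(): re-read when the token is
        # missing or stale
        try:
            with open(self.verifier_path, 'r') as f:
                current = f.read()
        except IOError:
            current = ''
        return current == '' or current != self.last_seen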
hash_key = query_spec.dag_hash() if (hash_key in self._data and - (not hashes or hash_key in hashes)): + (not hashes or hash_key in hashes)): return [self._data[hash_key].spec] else: return [] diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index d923e2b58e7..d7613ae58af 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -29,7 +29,6 @@ import re import shutil import sys -import xml.etree.ElementTree import llnl.util.tty as tty import six @@ -257,7 +256,7 @@ def __init__(self, url=None, checksum=None, **kwargs): self.digest = kwargs[h] self.expand_archive = kwargs.get('expand', True) - self.extra_options = kwargs.get('fetch_options', []) + self.extra_options = kwargs.get('fetch_options', {}) self._curl = None self.extension = kwargs.get('extension', None) @@ -773,13 +772,6 @@ def mirror_id(self): result = os.path.sep.join(['git', repo_path, repo_ref]) return result - def get_source_id(self): - if not self.branch: - return - output = self.git('ls-remote', self.url, self.branch, output=str) - if output: - return output.split()[0] - def _repo_info(self): args = '' @@ -957,11 +949,6 @@ def cachable(self): def source_id(self): return self.revision - def get_source_id(self): - output = self.svn('info', '--xml', self.url, output=str) - info = xml.etree.ElementTree.fromstring(output) - return info.find('entry/commit').get('revision') - def mirror_id(self): if self.revision: repo_path = url_util.parse(self.url).path @@ -1077,11 +1064,6 @@ def mirror_id(self): result = os.path.sep.join(['hg', repo_path, self.revision]) return result - def get_source_id(self): - output = self.hg('id', self.url, output=str) - if output: - return output.strip() - @_needs_stage def fetch(self): if self.stage.expanded: @@ -1265,7 +1247,8 @@ def _check_version_attributes(fetcher, pkg, version): def _extrapolate(pkg, version): """Create a fetcher from an extrapolated URL for this version.""" try: - return URLFetchStrategy(pkg.url_for_version(version)) + return URLFetchStrategy(pkg.url_for_version(version), + fetch_options=pkg.fetch_options) except spack.package.NoURLError: msg = ("Can't extrapolate a URL for version %s " "because package %s defines no URLs") @@ -1279,12 +1262,13 @@ def _from_merged_attrs(fetcher, pkg, version): # TODO: refactor this logic into its own method or function # TODO: to avoid duplication mirrors = [spack.url.substitute_version(u, version) - for u in getattr(pkg, 'urls', [])] + for u in getattr(pkg, 'urls', [])[1:]] attrs = {fetcher.url_attr: url, 'mirrors': mirrors} else: url = getattr(pkg, fetcher.url_attr) attrs = {fetcher.url_attr: url} + attrs['fetch_options'] = pkg.fetch_options attrs.update(pkg.versions[version]) return fetcher(**attrs) @@ -1307,8 +1291,10 @@ def for_package_version(pkg, version): if version not in pkg.versions: return _extrapolate(pkg, version) + # Set package args first so version args can override them + args = {'fetch_options': pkg.fetch_options} # Grab a dict of args out of the package version dict - args = pkg.versions[version] + args.update(pkg.versions[version]) # If the version specifies a `url_attr` directly, use that. 
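# [Editor's note -- illustrative sketch, not part of the patch.] The new
# fetch_options plumbing seeds the fetcher kwargs with the package-level dict
# and then lets the per-version dict override it, exactly as in the args merge
# a few lines above. The 'timeout' key below is a hypothetical option used
# only to show the override order.

class ExamplePkg(object):
    fetch_options = {'timeout': 60}                 # package-level default
    versions = {
        '1.0': {'sha256': '0' * 64},                # inherits fetch_options
        '2.0': {'sha256': '1' * 64,
                'fetch_options': {'timeout': 10}},  # replaces it wholesale
    }

args = {'fetch_options': ExamplePkg.fetch_options}
args.update(ExamplePkg.versions['2.0'])             # version kwargs win
assert args['fetch_options'] == {'timeout': 10}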
for fetcher in all_strategies: @@ -1388,7 +1374,8 @@ def from_list_url(pkg): args.get('checksum')) # construct a fetcher - return URLFetchStrategy(url_from_list, checksum) + return URLFetchStrategy(url_from_list, checksum, + fetch_options=pkg.fetch_options) except KeyError as e: tty.debug(e) tty.msg("Cannot find version %s in url_list" % pkg.version) diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index bd38d11ea2e..213a85f0dc7 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -36,6 +36,7 @@ import sys import time +import llnl.util.filesystem as fs import llnl.util.lock as lk import llnl.util.tty as tty import spack.binary_distribution as binary_distribution @@ -43,14 +44,12 @@ import spack.error import spack.hooks import spack.package +import spack.package_prefs as prefs import spack.repo import spack.store -from llnl.util.filesystem import \ - chgrp, install, install_tree, mkdirp, touch, working_dir -from llnl.util.tty.color import colorize, cwrite +from llnl.util.tty.color import colorize from llnl.util.tty.log import log_output -from spack.package_prefs import get_package_dir_permissions, get_package_group from spack.util.environment import dump_environment from spack.util.executable import which @@ -133,21 +132,21 @@ def _do_fake_install(pkg): chmod = which('chmod') # Install fake command - mkdirp(pkg.prefix.bin) - touch(os.path.join(pkg.prefix.bin, command)) + fs.mkdirp(pkg.prefix.bin) + fs.touch(os.path.join(pkg.prefix.bin, command)) chmod('+x', os.path.join(pkg.prefix.bin, command)) # Install fake header file - mkdirp(pkg.prefix.include) - touch(os.path.join(pkg.prefix.include, header + '.h')) + fs.mkdirp(pkg.prefix.include) + fs.touch(os.path.join(pkg.prefix.include, header + '.h')) # Install fake shared and static libraries - mkdirp(pkg.prefix.lib) + fs.mkdirp(pkg.prefix.lib) for suffix in [dso_suffix, '.a']: - touch(os.path.join(pkg.prefix.lib, library + suffix)) + fs.touch(os.path.join(pkg.prefix.lib, library + suffix)) # Install fake man page - mkdirp(pkg.prefix.man.man1) + fs.mkdirp(pkg.prefix.man.man1) packages_dir = spack.store.layout.build_packages_path(pkg.spec) dump_packages(pkg.spec, packages_dir) @@ -182,6 +181,9 @@ def _packages_needed_to_bootstrap_compiler(pkg): # concrete CompilerSpec has less info than concrete Spec # concretize as Spec to add that information dep.concretize() + # mark compiler as depended-on by the package that uses it + dep._dependents[pkg.name] = spack.spec.DependencySpec( + pkg.spec, dep, ('build',)) packages = [(s.package, False) for s in dep.traverse(order='post', root=False)] packages.append((dep.package, True)) @@ -251,8 +253,7 @@ def _print_installed_pkg(message): Args: message (str): message to be output """ - cwrite('@*g{[+]} ') - print(message) + print(colorize('@*g{[+]} ') + message) def _process_external_package(pkg, explicit): @@ -377,14 +378,14 @@ def dump_packages(spec, path): Dump all package information for a spec and its dependencies. This creates a package repository within path for every namespace in the - spec DAG, and fills the repos wtih package files and patch files for every + spec DAG, and fills the repos with package files and patch files for every node in the DAG. Args: spec (Spec): the Spack spec whose package information is to be dumped path (str): the path to the build packages directory """ - mkdirp(path) + fs.mkdirp(path) # Copy in package.py files from any dependencies. 
# Note that we copy them in as they are in the *install* directory @@ -407,7 +408,10 @@ def dump_packages(spec, path): source_repo = spack.repo.Repo(source_repo_root) source_pkg_dir = source_repo.dirname_for_package_name( node.name) - except spack.repo.RepoError: + except spack.repo.RepoError as err: + tty.debug('Failed to create source repo for {0}: {1}' + .format(node.name, str(err))) + source_pkg_dir = None tty.warn("Warning: Couldn't copy in provenance for {0}" .format(node.name)) @@ -419,10 +423,10 @@ def dump_packages(spec, path): # Get the location of the package in the dest repo. dest_pkg_dir = repo.dirname_for_package_name(node.name) - if node is not spec: - install_tree(source_pkg_dir, dest_pkg_dir) - else: + if node is spec: spack.repo.path.dump_provenance(node, dest_pkg_dir) + elif source_pkg_dir: + fs.install_tree(source_pkg_dir, dest_pkg_dir) def install_msg(name, pid): @@ -458,17 +462,17 @@ def log(pkg): tty.debug(e) # Archive the whole stdout + stderr for the package - install(pkg.log_path, pkg.install_log_path) + fs.install(pkg.log_path, pkg.install_log_path) # Archive the environment used for the build - install(pkg.env_path, pkg.install_env_path) + fs.install(pkg.env_path, pkg.install_env_path) if os.path.exists(pkg.configure_args_path): # Archive the args used for the build - install(pkg.configure_args_path, pkg.install_configure_args_path) + fs.install(pkg.configure_args_path, pkg.install_configure_args_path) # Finally, archive files that are specific to each package - with working_dir(pkg.stage.path): + with fs.working_dir(pkg.stage.path): errors = six.StringIO() target_dir = os.path.join( spack.store.layout.metadata_path(pkg.spec), 'archived-files') @@ -490,8 +494,8 @@ def log(pkg): target = os.path.join(target_dir, f) # We must ensure that the directory exists before # copying a file in - mkdirp(os.path.dirname(target)) - install(f, target) + fs.mkdirp(os.path.dirname(target)) + fs.install(f, target) except Exception as e: tty.debug(e) @@ -502,7 +506,7 @@ def log(pkg): if errors.getvalue(): error_file = os.path.join(target_dir, 'errors.txt') - mkdirp(target_dir) + fs.mkdirp(target_dir) with open(error_file, 'w') as err: err.write(errors.getvalue()) tty.warn('Errors occurred when archiving files.\n\t' @@ -1068,10 +1072,10 @@ def build_process(): pkg.name, 'src') tty.msg('{0} Copying source to {1}' .format(pre, src_target)) - install_tree(pkg.stage.source_path, src_target) + fs.install_tree(pkg.stage.source_path, src_target) # Do the real install in the source directory. - with working_dir(pkg.stage.source_path): + with fs.working_dir(pkg.stage.source_path): # Save the build environment in a file before building. dump_environment(pkg.env_path) @@ -1283,20 +1287,20 @@ def _setup_install_dir(self, pkg): spack.store.layout.create_install_directory(pkg.spec) else: # Set the proper group for the prefix - group = get_package_group(pkg.spec) + group = prefs.get_package_group(pkg.spec) if group: - chgrp(pkg.spec.prefix, group) + fs.chgrp(pkg.spec.prefix, group) # Set the proper permissions. 
# This has to be done after group because changing groups blows # away the sticky group bit on the directory mode = os.stat(pkg.spec.prefix).st_mode - perms = get_package_dir_permissions(pkg.spec) + perms = prefs.get_package_dir_permissions(pkg.spec) if mode != perms: os.chmod(pkg.spec.prefix, perms) # Ensure the metadata path exists as well - mkdirp(spack.store.layout.metadata_path(pkg.spec), mode=perms) + fs.mkdirp(spack.store.layout.metadata_path(pkg.spec), mode=perms) def _update_failed(self, task, mark=False, exc=None): """ @@ -1608,6 +1612,21 @@ def __init__(self, pkg, compiler, start, attempts, status, installed): self.spec.dependencies() if package_id(d.package) != self.pkg_id) + # Handle bootstrapped compiler + # + # The bootstrapped compiler is not a dependency in the spec, but it is + # a dependency of the build task. Here we add it to self.dependencies + compiler_spec = self.spec.compiler + arch_spec = self.spec.architecture + if not spack.compilers.compilers_for_spec(compiler_spec, + arch_spec=arch_spec): + # The compiler is in the queue, identify it as dependency + dep = spack.compilers.pkg_spec_for_compiler(compiler_spec) + dep.architecture = arch_spec + dep.concretize() + dep_id = package_id(dep.package) + self.dependencies.add(dep_id) + # List of uninstalled dependencies, which is used to establish # the priority of the build task. # diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 56bdab22c17..b8ded0364b3 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -477,6 +477,9 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)): #: This is currently only used by package sanity tests. manual_download = False + #: Set of additional options used when fetching package versions. + fetch_options = {} + # # Set default licensing information # @@ -602,11 +605,10 @@ def possible_dependencies( """ deptype = spack.dependency.canonical_deptype(deptype) - if visited is None: - visited = {cls.name: set()} + visited = {} if visited is None else visited + missing = {} if missing is None else missing - if missing is None: - missing = {cls.name: set()} + visited.setdefault(cls.name, set()) for name, conditions in cls.dependencies.items(): # check whether this dependency could be of the type asked for @@ -621,6 +623,7 @@ def possible_dependencies( providers = spack.repo.path.providers_for(name) dep_names = [spec.name for spec in providers] else: + visited.setdefault(cls.name, set()).add(name) visited.setdefault(name, set()) continue else: @@ -763,7 +766,7 @@ def url_for_version(self, version): # If no specific URL, use the default, class-level URL url = getattr(self, 'url', None) urls = getattr(self, 'urls', [None]) - default_url = url or urls.pop(0) + default_url = url or urls[0] # if no exact match AND no class-level default, use the nearest URL if not default_url: @@ -1032,6 +1035,14 @@ def provides(self, vpkg_name): for s, constraints in self.provided.items() if s.name == vpkg_name ) + @property + def virtuals_provided(self): + """ + virtual packages provided by this package with its spec + """ + return [vspec for vspec, constraints in self.provided.items() + if any(self.spec.satisfies(c) for c in constraints)] + @property def installed(self): """Installation status of a package. @@ -2151,26 +2162,27 @@ def possible_dependencies(*pkg_or_spec, **kwargs): See ``PackageBase.possible_dependencies`` for details. 
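# [Editor's note -- illustrative usage sketch, not part of the patch.] After
# this refactor the module-level helper accepts package classes, Spec objects,
# or plain names (virtual names are expanded to their providers) and returns
# the accumulated ``visited`` dict mapping each package name to the set of
# names it may depend on. The package name and keyword values below are an
# assumed example only.

deps = spack.package.possible_dependencies(
    'mpileaks', transitive=True, expand_virtuals=True, deptype='all')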
""" - transitive = kwargs.get('transitive', True) - expand_virtuals = kwargs.get('expand_virtuals', True) - deptype = kwargs.get('deptype', 'all') - missing = kwargs.get('missing') - packages = [] for pos in pkg_or_spec: if isinstance(pos, PackageMeta): - pkg = pos - elif isinstance(pos, spack.spec.Spec): - pkg = pos.package - else: - pkg = spack.spec.Spec(pos).package + packages.append(pos) + continue - packages.append(pkg) + if not isinstance(pos, spack.spec.Spec): + pos = spack.spec.Spec(pos) + + if spack.repo.path.is_virtual(pos.name): + packages.extend( + p.package_class + for p in spack.repo.path.providers_for(pos.name) + ) + continue + else: + packages.append(pos.package_class) visited = {} for pkg in packages: - pkg.possible_dependencies( - transitive, expand_virtuals, deptype, visited, missing) + pkg.possible_dependencies(visited=visited, **kwargs) return visited diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py index 2801a6d1230..0158c7063a6 100644 --- a/lib/spack/spack/package_prefs.py +++ b/lib/spack/spack/package_prefs.py @@ -6,7 +6,6 @@ import stat from six import string_types -from six import iteritems import spack.repo import spack.error @@ -23,27 +22,6 @@ def _spec_type(component): return _lesser_spec_types.get(component, spack.spec.Spec) -def get_packages_config(): - """Wrapper around get_packages_config() to validate semantics.""" - config = spack.config.get('packages') - - # Get a list of virtuals from packages.yaml. Note that because we - # check spack.repo, this collects virtuals that are actually provided - # by sometihng, not just packages/names that don't exist. - # So, this won't include, e.g., 'all'. - virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config - if spack.repo.path.is_virtual(pkg_name)] - - # die if there are virtuals in `packages.py` - if virtuals: - errors = ["%s: %s" % (line_info, name) for name, line_info in virtuals] - raise VirtualInPackagesYAMLError( - "packages.yaml entries cannot be virtual packages:", - '\n'.join(errors)) - - return config - - class PackagePrefs(object): """Defines the sort order for a set of specs. @@ -116,7 +94,7 @@ def order_for_package(cls, pkgname, component, vpkg=None, all=True): pkglist.append('all') for pkg in pkglist: - pkg_entry = get_packages_config().get(pkg) + pkg_entry = spack.config.get('packages').get(pkg) if not pkg_entry: continue @@ -160,7 +138,8 @@ def has_preferred_targets(cls, pkg_name): def preferred_variants(cls, pkg_name): """Return a VariantMap of preferred variants/values for a spec.""" for pkg in (pkg_name, 'all'): - variants = get_packages_config().get(pkg, {}).get('variants', '') + variants = spack.config.get('packages').get(pkg, {}).get( + 'variants', '') if variants: break @@ -181,33 +160,29 @@ def spec_externals(spec): # break circular import. 
from spack.util.module_cmd import get_path_from_module # NOQA: ignore=F401 - allpkgs = get_packages_config() - name = spec.name + allpkgs = spack.config.get('packages') + names = set([spec.name]) + names |= set(vspec.name for vspec in spec.package.virtuals_provided) external_specs = [] - pkg_paths = allpkgs.get(name, {}).get('paths', None) - pkg_modules = allpkgs.get(name, {}).get('modules', None) - if (not pkg_paths) and (not pkg_modules): - return [] - - for external_spec, path in iteritems(pkg_paths): - if not path: - # skip entries without paths (avoid creating extra Specs) + for name in names: + pkg_config = allpkgs.get(name, {}) + pkg_paths = pkg_config.get('paths', {}) + pkg_modules = pkg_config.get('modules', {}) + if (not pkg_paths) and (not pkg_modules): continue - external_spec = spack.spec.Spec(external_spec, - external_path=canonicalize_path(path)) - if external_spec.satisfies(spec): - external_specs.append(external_spec) + for external_spec, path in pkg_paths.items(): + external_spec = spack.spec.Spec( + external_spec, external_path=canonicalize_path(path)) + if external_spec.satisfies(spec): + external_specs.append(external_spec) - for external_spec, module in iteritems(pkg_modules): - if not module: - continue - - external_spec = spack.spec.Spec( - external_spec, external_module=module) - if external_spec.satisfies(spec): - external_specs.append(external_spec) + for external_spec, module in pkg_modules.items(): + external_spec = spack.spec.Spec( + external_spec, external_module=module) + if external_spec.satisfies(spec): + external_specs.append(external_spec) # defensively copy returned specs return [s.copy() for s in external_specs] @@ -215,12 +190,11 @@ def spec_externals(spec): def is_spec_buildable(spec): """Return true if the spec pkgspec is configured as buildable""" - allpkgs = get_packages_config() - if spec.name not in allpkgs: - return True - if 'buildable' not in allpkgs[spec.name]: - return True - return allpkgs[spec.name]['buildable'] + allpkgs = spack.config.get('packages') + do_not_build = [name for name, entry in allpkgs.items() + if not entry.get('buildable', True)] + return not (spec.name in do_not_build or + any(spec.package.provides(name) for name in do_not_build)) def get_package_dir_permissions(spec): diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index 3a3c1507e15..3a839c5b0f5 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -346,7 +346,8 @@ def patch_for_package(self, sha256, pkg): sha_index = self.index.get(sha256) if not sha_index: raise NoSuchPatchError( - "Couldn't find patch with sha256: %s" % sha256) + "Couldn't find patch for package %s with sha256: %s" + % (pkg.fullname, sha256)) patch_dict = sha_index.get(pkg.fullname) if not patch_dict: diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index cbeba96461b..6e8c79ef0cd 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -7,7 +7,7 @@ import re import llnl.util.tty as tty from spack.paths import build_env_path -from spack.util.executable import which +from spack.util.executable import Executable from spack.architecture import Platform, Target, NoPlatformError from spack.operating_systems.cray_frontend import CrayFrontend from spack.operating_systems.cnl import Cnl @@ -117,11 +117,17 @@ def _default_target_from_env(self): ''' # env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null if getattr(self, 'default', None) is None: - env = which('env') - output = env("-i", "/bin/bash", "-lc", 
"echo $CRAY_CPU_TARGET", - output=str, error=os.devnull) - self.default = output.strip() - tty.debug("Found default module:%s" % self.default) + bash = Executable('/bin/bash') + output = bash( + '-lc', 'echo $CRAY_CPU_TARGET', + env={'TERM': os.environ.get('TERM', '')}, + output=str, + error=os.devnull + ) + output = ''.join(output.split()) # remove all whitespace + if output: + self.default = output + tty.debug("Found default module:%s" % self.default) return self.default def _avail_targets(self): diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 9bf4af8911a..326f6aa8f11 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -2,54 +2,147 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Classes and functions to manage providers of virtual dependencies""" +import itertools -""" -The ``virtual`` module contains utility classes for virtual dependencies. -""" - -from itertools import product as iproduct -from six import iteritems -from pprint import pformat - +import six import spack.error import spack.util.spack_json as sjson -class ProviderIndex(object): - """This is a dict of dicts used for finding providers of particular - virtual dependencies. The dict of dicts looks like: - - { vpkg name : - { full vpkg spec : set(packages providing spec) } } - - Callers can use this to first find which packages provide a vpkg, - then find a matching full spec. e.g., in this scenario: - - { 'mpi' : - { mpi@:1.1 : set([mpich]), - mpi@:2.3 : set([mpich2@1.9:]) } } - - Calling providers_for(spec) will find specs that provide a - matching implementation of MPI. +def _cross_provider_maps(lmap, rmap): + """Return a dictionary that combines constraint requests from both input. + Args: + lmap: main provider map + rmap: provider map with additional constraints """ + # TODO: this is pretty darned nasty, and inefficient, but there + # TODO: are not that many vdeps in most specs. + result = {} + for lspec, rspec in itertools.product(lmap, rmap): + try: + constrained = lspec.constrained(rspec) + except spack.error.UnsatisfiableSpecError: + continue + # lp and rp are left and right provider specs. + for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]): + if lp_spec.name == rp_spec.name: + try: + const = lp_spec.constrained(rp_spec, deps=False) + result.setdefault(constrained, set()).add(const) + except spack.error.UnsatisfiableSpecError: + continue + return result + + +class _IndexBase(object): + #: This is a dict of dicts used for finding providers of particular + #: virtual dependencies. The dict of dicts looks like: + #: + #: { vpkg name : + #: { full vpkg spec : set(packages providing spec) } } + #: + #: Callers can use this to first find which packages provide a vpkg, + #: then find a matching full spec. e.g., in this scenario: + #: + #: { 'mpi' : + #: { mpi@:1.1 : set([mpich]), + #: mpi@:2.3 : set([mpich2@1.9:]) } } + #: + #: Calling providers_for(spec) will find specs that provide a + #: matching implementation of MPI. Derived class need to construct + #: this attribute according to the semantics above. + providers = None + + def providers_for(self, virtual_spec): + """Return a list of specs of all packages that provide virtual + packages with the supplied spec. 
+ + Args: + virtual_spec: virtual spec to be provided + """ + result = set() + # Allow string names to be passed as input, as well as specs + if isinstance(virtual_spec, six.string_types): + virtual_spec = spack.spec.Spec(virtual_spec) + + # Add all the providers that satisfy the vpkg spec. + if virtual_spec.name in self.providers: + for p_spec, spec_set in self.providers[virtual_spec.name].items(): + if p_spec.satisfies(virtual_spec, deps=False): + result.update(spec_set) + + # Return providers in order. Defensively copy. + return sorted(s.copy() for s in result) + + def __contains__(self, name): + return name in self.providers + + def satisfies(self, other): + """Determine if the providers of virtual specs are compatible. + + Args: + other: another provider index + + Returns: + True if the providers are compatible, False otherwise. + """ + common = set(self.providers) & set(other.providers) + if not common: + return True + + # This ensures that some provider in other COULD satisfy the + # vpkg constraints on self. + result = {} + for name in common: + crossed = _cross_provider_maps( + self.providers[name], other.providers[name] + ) + if crossed: + result[name] = crossed + + return all(c in result for c in common) + + def __eq__(self, other): + return self.providers == other.providers + + def _transform(self, transform_fun, out_mapping_type=dict): + """Transform this provider index dictionary and return it. + + Args: + transform_fun: transform_fun takes a (vpkg, pset) mapping and runs + it on each pair in nested dicts. + out_mapping_type: type to be used internally on the + transformed (vpkg, pset) + + Returns: + Transformed mapping + """ + return _transform(self.providers, transform_fun, out_mapping_type) + + def __str__(self): + return str(self.providers) + + def __repr__(self): + return repr(self.providers) + + +class ProviderIndex(_IndexBase): def __init__(self, specs=None, restrict=False): - """Create a new ProviderIndex. + """Provider index based on a single mapping of providers. - Optional arguments: + Args: + specs (list of specs): if provided, will call update on each + single spec to initialize this provider index. - specs - List (or sequence) of specs. If provided, will call - `update` on this ProviderIndex with each spec in the list. + restrict: "restricts" values to the verbatim input specs; do not + pre-apply package's constraints. - restrict - "restricts" values to the verbatim input specs; do not - pre-apply package's constraints. - - TODO: rename this. It is intended to keep things as broad - as possible without overly restricting results, so it is - not the best name. + TODO: rename this. It is intended to keep things as broad + TODO: as possible without overly restricting results, so it is + TODO: not the best name. """ if specs is None: specs = [] @@ -67,6 +160,11 @@ def __init__(self, specs=None, restrict=False): self.update(spec) def update(self, spec): + """Update the provider index with additional virtual specs. 
+ + Args: + spec: spec potentially providing additional virtual specs + """ if not isinstance(spec, spack.spec.Spec): spec = spack.spec.Spec(spec) @@ -74,10 +172,10 @@ def update(self, spec): # Empty specs do not have a package return - assert(not spec.virtual) + assert not spec.virtual, "cannot update an index using a virtual spec" pkg_provided = spec.package_class.provided - for provided_spec, provider_specs in iteritems(pkg_provided): + for provided_spec, provider_specs in six.iteritems(pkg_provided): for provider_spec in provider_specs: # TODO: fix this comment. # We want satisfaction other than flags @@ -110,94 +208,24 @@ def update(self, spec): constrained.constrain(provider_spec) provider_map[provided_spec].add(constrained) - def providers_for(self, *vpkg_specs): - """Gives specs of all packages that provide virtual packages - with the supplied specs.""" - providers = set() - for vspec in vpkg_specs: - # Allow string names to be passed as input, as well as specs - if type(vspec) == str: - vspec = spack.spec.Spec(vspec) - - # Add all the providers that satisfy the vpkg spec. - if vspec.name in self.providers: - for p_spec, spec_set in self.providers[vspec.name].items(): - if p_spec.satisfies(vspec, deps=False): - providers.update(spec_set) - - # Return providers in order. Defensively copy. - return sorted(s.copy() for s in providers) - - # TODO: this is pretty darned nasty, and inefficient, but there - # are not that many vdeps in most specs. - def _cross_provider_maps(self, lmap, rmap): - result = {} - for lspec, rspec in iproduct(lmap, rmap): - try: - constrained = lspec.constrained(rspec) - except spack.error.UnsatisfiableSpecError: - continue - - # lp and rp are left and right provider specs. - for lp_spec, rp_spec in iproduct(lmap[lspec], rmap[rspec]): - if lp_spec.name == rp_spec.name: - try: - const = lp_spec.constrained(rp_spec, deps=False) - result.setdefault(constrained, set()).add(const) - except spack.error.UnsatisfiableSpecError: - continue - return result - - def __contains__(self, name): - """Whether a particular vpkg name is in the index.""" - return name in self.providers - - def satisfies(self, other): - """Check that providers of virtual specs are compatible.""" - common = set(self.providers) & set(other.providers) - if not common: - return True - - # This ensures that some provider in other COULD satisfy the - # vpkg constraints on self. - result = {} - for name in common: - crossed = self._cross_provider_maps(self.providers[name], - other.providers[name]) - if crossed: - result[name] = crossed - - return all(c in result for c in common) - def to_json(self, stream=None): + """Dump a JSON representation of this object. 
+ + Args: + stream: stream where to dump + """ provider_list = self._transform( lambda vpkg, pset: [ vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list) sjson.dump({'provider_index': {'providers': provider_list}}, stream) - @staticmethod - def from_json(stream): - data = sjson.load(stream) - - if not isinstance(data, dict): - raise ProviderIndexError("JSON ProviderIndex data was not a dict.") - - if 'provider_index' not in data: - raise ProviderIndexError( - "YAML ProviderIndex does not start with 'provider_index'") - - index = ProviderIndex() - providers = data['provider_index']['providers'] - index.providers = _transform( - providers, - lambda vpkg, plist: ( - spack.spec.Spec.from_node_dict(vpkg), - set(spack.spec.Spec.from_node_dict(p) for p in plist))) - return index - def merge(self, other): - """Merge `other` ProviderIndex into this one.""" + """Merge another provider index into this one. + + Args: + other (ProviderIndex): provider index to be merged + """ other = other.copy() # defensive copy. for pkg in other.providers: @@ -236,40 +264,61 @@ def remove_provider(self, pkg_name): del self.providers[pkg] def copy(self): - """Deep copy of this ProviderIndex.""" + """Return a deep copy of this index.""" clone = ProviderIndex() clone.providers = self._transform( lambda vpkg, pset: (vpkg, set((p.copy() for p in pset)))) return clone - def __eq__(self, other): - return self.providers == other.providers + @staticmethod + def from_json(stream): + """Construct a provider index from its JSON representation. - def _transform(self, transform_fun, out_mapping_type=dict): - return _transform(self.providers, transform_fun, out_mapping_type) + Args: + stream: stream where to read from the JSON data + """ + data = sjson.load(stream) - def __str__(self): - return pformat( - _transform(self.providers, - lambda k, v: (k, list(v)))) + if not isinstance(data, dict): + raise ProviderIndexError("JSON ProviderIndex data was not a dict.") + + if 'provider_index' not in data: + raise ProviderIndexError( + "YAML ProviderIndex does not start with 'provider_index'") + + index = ProviderIndex() + providers = data['provider_index']['providers'] + index.providers = _transform( + providers, + lambda vpkg, plist: ( + spack.spec.Spec.from_node_dict(vpkg), + set(spack.spec.Spec.from_node_dict(p) for p in plist))) + return index def _transform(providers, transform_fun, out_mapping_type=dict): """Syntactic sugar for transforming a providers dict. - transform_fun takes a (vpkg, pset) mapping and runs it on each - pair in nested dicts. + Args: + providers: provider dictionary + transform_fun: transform_fun takes a (vpkg, pset) mapping and runs + it on each pair in nested dicts. + out_mapping_type: type to be used internally on the + transformed (vpkg, pset) + Returns: + Transformed mapping """ def mapiter(mappings): if isinstance(mappings, dict): - return iteritems(mappings) + return six.iteritems(mappings) else: return iter(mappings) return dict( - (name, out_mapping_type([ - transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)])) + (name, out_mapping_type( + [transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)] + )) for name, mappings in providers.items()) diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index c8c7947f9c3..9f8669f3d45 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -2,84 +2,99 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - - import os +import platform import re import shutil -import platform -import spack.repo -import spack.cmd + import llnl.util.lang -from spack.util.executable import Executable, ProcessError import llnl.util.tty as tty +import macholib.MachO +import macholib.mach_o +import spack.cmd +import spack.repo +import spack.spec +import spack.util.executable as executable -class InstallRootStringException(spack.error.SpackError): - """ - Raised when the relocated binary still has the install root string. - """ - +class InstallRootStringError(spack.error.SpackError): def __init__(self, file_path, root_path): - super(InstallRootStringException, self).__init__( + """Signal that the relocated binary still has the original + Spack's store root string + + Args: + file_path (str): path of the binary + root_path (str): original Spack's store root string + """ + super(InstallRootStringError, self).__init__( "\n %s \ncontains string\n %s \n" "after replacing it in rpaths.\n" "Package should not be relocated.\n Use -a to override." % (file_path, root_path)) -class BinaryStringReplacementException(spack.error.SpackError): - """ - Raised when the size of the file changes after binary path substitution. - """ - +class BinaryStringReplacementError(spack.error.SpackError): def __init__(self, file_path, old_len, new_len): - super(BinaryStringReplacementException, self).__init__( + """The size of the file changed after binary path substitution + + Args: + file_path (str): file with changing size + old_len (str): original length of the file + new_len (str): length of the file after substitution + """ + super(BinaryStringReplacementError, self).__init__( "Doing a binary string replacement in %s failed.\n" "The size of the file changed from %s to %s\n" "when it should have remanined the same." % (file_path, old_len, new_len)) -class MissingMacholibException(spack.error.SpackError): - """ - Raised when the size of the file changes after binary path substitution. - """ +class BinaryTextReplaceError(spack.error.SpackError): + def __init__(self, old_path, new_path): + """Raised when the new install path is longer than the + old one, so binary text replacement cannot occur. - def __init__(self, error): - super(MissingMacholibException, self).__init__( - "%s\n" - "Python package macholib needs to be avaiable to list\n" - "and modify a mach-o binary's rpaths, deps and id.\n" - "Use virtualenv with pip install macholib or\n" - "use spack to install the py-macholib package\n" - "spack install py-macholib\n" - "spack activate py-macholib\n" - "spack load python\n" - % error) + Args: + old_path (str): original path to be substituted + new_path (str): candidate path for substitution + """ + + msg = "New path longer than old path: binary text" + msg += " replacement not possible." + err_msg = "The new path %s" % new_path + err_msg += " is longer than the old path %s.\n" % old_path + err_msg += "Text replacement in binaries will not work.\n" + err_msg += "Create buildcache from an install path " + err_msg += "longer than new path." + super(BinaryTextReplaceError, self).__init__(msg, err_msg) -def get_patchelf(): +def _patchelf(): + """Return the full path to the patchelf binary, if available, else None. + + Search first the current PATH for patchelf. If not found, try to look + if the default patchelf spec is installed and if not install it. + + Return None on Darwin or if patchelf cannot be found. 
""" - Builds and installs spack patchelf package on linux platforms - using the first concretized spec. - Returns the full patchelf binary path. - """ - # as we may need patchelf, find out where it is + # Check if patchelf is already in the PATH patchelf = spack.util.executable.which('patchelf') if patchelf is not None: return patchelf.path - else: - if str(spack.architecture.platform()) == 'test': - return None - if str(spack.architecture.platform()) == 'darwin': - return None - patchelf_spec = spack.cmd.parse_specs("patchelf", concretize=True)[0] - patchelf = spack.repo.get(patchelf_spec) - if not patchelf.installed: - patchelf.do_install(use_cache=False) - patchelf_executable = os.path.join(patchelf.prefix.bin, "patchelf") - return patchelf_executable + + # Check if patchelf spec is installed + spec = spack.spec.Spec('patchelf').concretized() + exe_path = os.path.join(spec.prefix.bin, "patchelf") + if spec.package.installed and os.path.exists(exe_path): + return exe_path + + # Skip darwin + if str(spack.architecture.platform()) == 'darwin': + return None + + # Install the spec and return its path + spec.package.do_install() + return exe_path if os.path.exists(exe_path) else None def get_existing_elf_rpaths(path_name): @@ -90,38 +105,58 @@ def get_existing_elf_rpaths(path_name): # if we're relocating patchelf itself, use it - if path_name[-13:] == "/bin/patchelf": - patchelf = Executable(path_name) + if path_name.endswith("/bin/patchelf"): + patchelf = executable.Executable(path_name) else: - patchelf = Executable(get_patchelf()) + patchelf = executable.Executable(_patchelf()) + rpaths = list() try: output = patchelf('--print-rpath', '%s' % path_name, output=str, error=str) - return output.rstrip('\n').split(':') - except ProcessError as e: - tty.debug('patchelf --print-rpath produced an error on %s' % - path_name, e) - return [] - return + rpaths = output.rstrip('\n').split(':') + except executable.ProcessError as e: + msg = 'patchelf --print-rpath %s produced an error %s' % (path_name, e) + tty.warn(msg) + return rpaths -def get_relative_rpaths(path_name, orig_dir, orig_rpaths): +def get_relative_elf_rpaths(path_name, orig_layout_root, orig_rpaths): """ - Replaces orig_dir with relative path from dirname(path_name) if an rpath - in orig_rpaths contains orig_path. Prefixes $ORIGIN + Replaces orig rpath with relative path from dirname(path_name) if an rpath + in orig_rpaths contains orig_layout_root. Prefixes $ORIGIN to relative paths and returns replacement rpaths. """ rel_rpaths = [] for rpath in orig_rpaths: - if re.match(orig_dir, rpath): + if re.match(orig_layout_root, rpath): rel = os.path.relpath(rpath, start=os.path.dirname(path_name)) - rel_rpaths.append('$ORIGIN/%s' % rel) + rel_rpaths.append(os.path.join('$ORIGIN', '%s' % rel)) else: rel_rpaths.append(rpath) return rel_rpaths +def get_normalized_elf_rpaths(orig_path_name, rel_rpaths): + """ + Normalize the relative rpaths with respect to the original path name + of the file. If the rpath starts with $ORIGIN replace $ORIGIN with the + dirname of the original path name and then normalize the rpath. + A dictionary mapping relativized rpaths to normalized rpaths is returned. 
+ """ + norm_rpaths = list() + for rpath in rel_rpaths: + if rpath.startswith('$ORIGIN'): + sub = re.sub(re.escape('$ORIGIN'), + os.path.dirname(orig_path_name), + rpath) + norm = os.path.normpath(sub) + norm_rpaths.append(norm) + else: + norm_rpaths.append(rpath) + return norm_rpaths + + def set_placeholder(dirname): """ return string of @'s with same length @@ -129,183 +164,157 @@ def set_placeholder(dirname): return '@' * len(dirname) -def get_placeholder_rpaths(path_name, orig_rpaths): +def macho_make_paths_relative(path_name, old_layout_root, + rpaths, deps, idpath): """ - Replaces original layout root dir with a placeholder string in all rpaths. + Return a dictionary mapping the original rpaths to the relativized rpaths. + This dictionary is used to replace paths in mach-o binaries. + Replace old_dir with relative path from dirname of path name + in rpaths and deps; idpath is replaced with @rpath/libname. """ - rel_rpaths = [] - orig_dir = spack.store.layout.root - for rpath in orig_rpaths: - if re.match(orig_dir, rpath): - placeholder = set_placeholder(orig_dir) - rel = re.sub(orig_dir, placeholder, rpath) - rel_rpaths.append('%s' % rel) - else: - rel_rpaths.append(rpath) - return rel_rpaths - - -def macho_get_paths(path_name): - """ - Examines the output of otool -l path_name for these three fields: - LC_ID_DYLIB, LC_LOAD_DYLIB, LC_RPATH and parses out the rpaths, - dependiencies and library id. - Returns these values. - """ - otool = Executable('otool') - output = otool("-l", path_name, output=str, err=str) - last_cmd = None - idpath = None - rpaths = [] - deps = [] - for line in output.split('\n'): - match = re.search('( *[a-zA-Z]+ )(.*)', line) - if match: - lhs = match.group(1).lstrip().rstrip() - rhs = match.group(2) - match2 = re.search(r'(.*) \(.*\)', rhs) - if match2: - rhs = match2.group(1) - if lhs == 'cmd': - last_cmd = rhs - if lhs == 'path' and last_cmd == 'LC_RPATH': - rpaths.append(rhs) - if lhs == 'name' and last_cmd == 'LC_ID_DYLIB': - idpath = rhs - if lhs == 'name' and last_cmd == 'LC_LOAD_DYLIB': - deps.append(rhs) - return rpaths, deps, idpath - - -def macho_make_paths_relative(path_name, old_dir, rpaths, deps, idpath): - """ - Replace old_dir with relative path from dirname(path_name) - in rpaths and deps; idpaths are replaced with @rpath/libname as needed; - replacement are returned. - """ - new_idpath = None + paths_to_paths = dict() if idpath: - new_idpath = '@rpath/%s' % os.path.basename(idpath) - new_rpaths = list() - new_deps = list() + paths_to_paths[idpath] = os.path.join( + '@rpath', '%s' % os.path.basename(idpath)) for rpath in rpaths: - if re.match(old_dir, rpath): + if re.match(old_layout_root, rpath): rel = os.path.relpath(rpath, start=os.path.dirname(path_name)) - new_rpaths.append('@loader_path/%s' % rel) + paths_to_paths[rpath] = os.path.join('@loader_path', '%s' % rel) else: - new_rpaths.append(rpath) + paths_to_paths[rpath] = rpath for dep in deps: - if re.match(old_dir, dep): + if re.match(old_layout_root, dep): rel = os.path.relpath(dep, start=os.path.dirname(path_name)) - new_deps.append('@loader_path/%s' % rel) + paths_to_paths[dep] = os.path.join('@loader_path', '%s' % rel) else: - new_deps.append(dep) - return (new_rpaths, new_deps, new_idpath) + paths_to_paths[dep] = dep + return paths_to_paths -def macho_make_paths_placeholder(rpaths, deps, idpath): +def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath): """ - Replace old_dir with a placeholder of the same length - in rpaths and deps and idpaths is needed. 
- replacement are returned. + Return a dictionary mapping the relativized rpaths to the original rpaths. + This dictionary is used to replace paths in mach-o binaries. + Replace '@loader_path' with the dirname of the origname path name + in rpaths and deps; idpath is replaced with the original path name """ - new_idpath = None - old_dir = spack.store.layout.root - placeholder = set_placeholder(old_dir) + rel_to_orig = dict() if idpath: - new_idpath = re.sub(old_dir, placeholder, idpath) - new_rpaths = list() - new_deps = list() + rel_to_orig[idpath] = orig_path_name + for rpath in rpaths: - if re.match(old_dir, rpath): - ph = re.sub(old_dir, placeholder, rpath) - new_rpaths.append('%s' % ph) + if re.match('@loader_path', rpath): + norm = os.path.normpath(re.sub(re.escape('@loader_path'), + os.path.dirname(orig_path_name), + rpath)) + rel_to_orig[rpath] = norm else: - new_rpaths.append(rpath) + rel_to_orig[rpath] = rpath for dep in deps: - if re.match(old_dir, dep): - ph = re.sub(old_dir, placeholder, dep) - new_deps.append('%s' % ph) + if re.match('@loader_path', dep): + norm = os.path.normpath(re.sub(re.escape('@loader_path'), + os.path.dirname(orig_path_name), + dep)) + rel_to_orig[dep] = norm else: - new_deps.append(dep) - return (new_rpaths, new_deps, new_idpath) + rel_to_orig[dep] = dep + return rel_to_orig -def macho_replace_paths(old_dir, new_dir, rpaths, deps, idpath): +def macho_find_paths(orig_rpaths, deps, idpath, + old_layout_root, prefix_to_prefix): """ - Replace old_dir with new_dir in rpaths, deps and idpath - and return replacements + Inputs + original rpaths from mach-o binaries + dependency libraries for mach-o binaries + id path of mach-o libraries + old install directory layout root + prefix_to_prefix dictionary which maps prefixes in the old directory layout + to directories in the new directory layout + Output + paths_to_paths dictionary which maps all of the old paths to new paths """ - new_idpath = None + paths_to_paths = dict() + for orig_rpath in orig_rpaths: + if orig_rpath.startswith(old_layout_root): + for old_prefix, new_prefix in prefix_to_prefix.items(): + if orig_rpath.startswith(old_prefix): + new_rpath = re.sub(re.escape(old_prefix), + new_prefix, orig_rpath) + paths_to_paths[orig_rpath] = new_rpath + else: + paths_to_paths[orig_rpath] = orig_rpath + if idpath: - new_idpath = idpath.replace(old_dir, new_dir) - new_rpaths = list() - new_deps = list() - for rpath in rpaths: - new_rpath = rpath.replace(old_dir, new_dir) - new_rpaths.append(new_rpath) + for old_prefix, new_prefix in prefix_to_prefix.items(): + if idpath.startswith(old_prefix): + paths_to_paths[idpath] = re.sub( + re.escape(old_prefix), new_prefix, idpath) for dep in deps: - new_dep = dep.replace(old_dir, new_dir) - new_deps.append(new_dep) - return new_rpaths, new_deps, new_idpath + for old_prefix, new_prefix in prefix_to_prefix.items(): + if dep.startswith(old_prefix): + paths_to_paths[dep] = re.sub( + re.escape(old_prefix), new_prefix, dep) + if dep.startswith('@'): + paths_to_paths[dep] = dep + + return paths_to_paths def modify_macho_object(cur_path, rpaths, deps, idpath, - new_rpaths, new_deps, new_idpath): + paths_to_paths): """ - Modify MachO binary path_name by replacing old_dir with new_dir - or the relative path to spack install root. 
- The old install dir in LC_ID_DYLIB is replaced with the new install dir - using install_name_tool -id newid binary - The old install dir in LC_LOAD_DYLIB is replaced with the new install dir - using install_name_tool -change old new binary - The old install dir in LC_RPATH is replaced with the new install dir using - install_name_tool -rpath old new binary + This function is used to make machO buildcaches on macOS by + replacing old paths with new paths using install_name_tool + Inputs: + mach-o binary to be modified + original rpaths + original dependency paths + original id path if a mach-o library + dictionary mapping paths in old install layout to new install layout """ # avoid error message for libgcc_s if 'libgcc_' in cur_path: return - install_name_tool = Executable('install_name_tool') - if new_idpath and not idpath == new_idpath: - install_name_tool('-id', new_idpath, str(cur_path)) + install_name_tool = executable.Executable('install_name_tool') - if len(deps) == len(new_deps): - for orig, new in zip(deps, new_deps): - if not orig == new: - install_name_tool('-change', orig, new, str(cur_path)) - - if len(rpaths) == len(new_rpaths): - for orig, new in zip(rpaths, new_rpaths): - if not orig == new: - install_name_tool('-rpath', orig, new, str(cur_path)) + if idpath: + new_idpath = paths_to_paths.get(idpath, None) + if new_idpath and not idpath == new_idpath: + install_name_tool('-id', new_idpath, str(cur_path)) + for dep in deps: + new_dep = paths_to_paths.get(dep) + if new_dep and dep != new_dep: + install_name_tool('-change', dep, new_dep, str(cur_path)) + for orig_rpath in rpaths: + new_rpath = paths_to_paths.get(orig_rpath) + if new_rpath and not orig_rpath == new_rpath: + install_name_tool('-rpath', orig_rpath, new_rpath, str(cur_path)) return -def modify_object_macholib(cur_path, old_dir, new_dir): +def modify_object_macholib(cur_path, paths_to_paths): """ - Modify MachO binary path_name by replacing old_dir with new_dir - or the relative path to spack install root. - The old install dir in LC_ID_DYLIB is replaced with the new install dir - using py-macholib - The old install dir in LC_LOAD_DYLIB is replaced with the new install dir - using py-macholib - The old install dir in LC_RPATH is replaced with the new install dir using - using py-macholib + This function is used when install machO buildcaches on linux by + rewriting mach-o loader commands for dependency library paths of + mach-o binaries and the id path for mach-o libraries. + Rewritting of rpaths is handled by replace_prefix_bin. + Inputs + mach-o binary to be modified + dictionary mapping paths in old install layout to new install layout """ - if cur_path.endswith('.o'): - return - try: - from macholib.MachO import MachO - except ImportError as e: - raise MissingMacholibException(e) - def match_func(cpath): - rpath = cpath.replace(old_dir, new_dir) - return rpath + dll = macholib.MachO.MachO(cur_path) + + changedict = paths_to_paths + + def changefunc(path): + npath = changedict.get(path, None) + return npath + + dll.rewriteLoadCommands(changefunc) - dll = MachO(cur_path) - dll.rewriteLoadCommands(match_func) try: f = open(dll.filename, 'rb+') for header in dll.headers: @@ -320,14 +329,32 @@ def match_func(cpath): return -def strings_contains_installroot(path_name, root_dir): +def macholib_get_paths(cur_path): """ - Check if the file contain the install root string. 
+ Get rpaths, dependencies and id of mach-o objects + using python macholib package """ - strings = Executable('strings') - output = strings('%s' % path_name, - output=str, err=str) - return (root_dir in output or spack.paths.prefix in output) + dll = macholib.MachO.MachO(cur_path) + + ident = None + rpaths = list() + deps = list() + for header in dll.headers: + rpaths = [data.rstrip(b'\0').decode('utf-8') + for load_command, dylib_command, data in header.commands if + load_command.cmd == macholib.mach_o.LC_RPATH] + deps = [data.rstrip(b'\0').decode('utf-8') + for load_command, dylib_command, data in header.commands if + load_command.cmd == macholib.mach_o.LC_LOAD_DYLIB] + idents = [data.rstrip(b'\0').decode('utf-8') + for load_command, dylib_command, data in header.commands if + load_command.cmd == macholib.mach_o.LC_ID_DYLIB] + if len(idents) == 1: + ident = idents[0] + tty.debug('ident: %s' % ident) + tty.debug('deps: %s' % deps) + tty.debug('rpaths: %s' % rpaths) + return (rpaths, deps, ident) def modify_elf_object(path_name, new_rpaths): @@ -338,21 +365,23 @@ def modify_elf_object(path_name, new_rpaths): new_joined = ':'.join(new_rpaths) # if we're relocating patchelf itself, use it + bak_path = path_name + ".bak" if path_name[-13:] == "/bin/patchelf": - bak_path = path_name + ".bak" shutil.copy(path_name, bak_path) - patchelf = Executable(bak_path) + patchelf = executable.Executable(bak_path) else: - patchelf = Executable(get_patchelf()) + patchelf = executable.Executable(_patchelf()) try: patchelf('--force-rpath', '--set-rpath', '%s' % new_joined, '%s' % path_name, output=str, error=str) - except ProcessError as e: - tty.die('patchelf --set-rpath %s failed' % - path_name, e) - pass + except executable.ProcessError as e: + msg = 'patchelf --force-rpath --set-rpath %s failed with error %s' % ( + path_name, e) + tty.warn(msg) + if os.path.exists(bak_path): + os.remove(bak_path) def needs_binary_relocation(m_type, m_subtype): @@ -419,12 +448,12 @@ def replace(match): data = f.read() f.seek(0) original_data_len = len(data) - pat = re.compile(old_dir.encode('utf-8') + b'([^\0]*?)\0') + pat = re.compile(old_dir.encode('utf-8')) if not pat.search(data): return ndata = pat.sub(replace, data) if not len(ndata) == original_data_len: - raise BinaryStringReplacementException( + raise BinaryStringReplacementError( path_name, original_data_len, len(ndata)) f.write(ndata) f.truncate() @@ -447,95 +476,148 @@ def replace(match): return data return match.group().replace(old_dir.encode('utf-8'), new_dir.encode('utf-8')) + b'\0' * padding + + if len(new_dir) > len(old_dir): + raise BinaryTextReplaceError(old_dir, new_dir) + with open(path_name, 'rb+') as f: data = f.read() f.seek(0) original_data_len = len(data) - pat = re.compile(old_dir.encode('utf-8') + b'([^\0]*?)\0') + pat = re.compile(re.escape(old_dir).encode('utf-8') + b'([^\0]*?)\0') if not pat.search(data): return ndata = pat.sub(replace, data) if not len(ndata) == original_data_len: - raise BinaryStringReplacementException( + raise BinaryStringReplacementError( path_name, original_data_len, len(ndata)) f.write(ndata) f.truncate() -def relocate_macho_binaries(path_names, old_dir, new_dir, allow_root): +def relocate_macho_binaries(path_names, old_layout_root, new_layout_root, + prefix_to_prefix, rel, old_prefix, new_prefix): """ - Change old_dir to new_dir in LC_RPATH of mach-o files (on macOS) - Change old_dir to new_dir in LC_ID and LC_DEP of mach-o files - Account for the case where old_dir is now a placeholder + Use macholib python package 
to get the rpaths, dependent libraries
+    and library identity for libraries from the MachO object. Modify them
+    with the replacement paths queried from the dictionary mapping old layout
+    prefixes to hashes and the dictionary mapping hashes to the new layout
+    prefixes.
     """
-    placeholder = set_placeholder(old_dir)
+
     for path_name in path_names:
+        # Corner case where macho object file ended up in the path name list
         if path_name.endswith('.o'):
             continue
-        if new_dir == old_dir:
-            continue
-        if platform.system().lower() == 'darwin':
-            rpaths, deps, idpath = macho_get_paths(path_name)
-            # one pass to replace placeholder
-            (n_rpaths,
-             n_deps,
-             n_idpath) = macho_replace_paths(placeholder,
-                                             new_dir,
-                                             rpaths,
-                                             deps,
-                                             idpath)
-            # another pass to replace old_dir
-            (new_rpaths,
-             new_deps,
-             new_idpath) = macho_replace_paths(old_dir,
-                                               new_dir,
-                                               n_rpaths,
-                                               n_deps,
-                                               n_idpath)
-            modify_macho_object(path_name,
-                                rpaths, deps, idpath,
-                                new_rpaths, new_deps, new_idpath)
+        if rel:
+            # get the relativized paths
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the file path name in the original prefix
+            orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
+                                    path_name)
+            # get the mapping of the relativized paths to the original
+            # normalized paths
+            rel_to_orig = macho_make_paths_normal(orig_path_name,
+                                                  rpaths, deps,
+                                                  idpath)
+            # replace the relativized paths with normalized paths
+            if platform.system().lower() == 'darwin':
+                modify_macho_object(path_name, rpaths, deps,
+                                    idpath, rel_to_orig)
+            else:
+                modify_object_macholib(path_name,
+                                       rel_to_orig)
+            # get the normalized paths in the mach-o binary
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths in old prefix to path in new prefix
+            paths_to_paths = macho_find_paths(rpaths, deps, idpath,
+                                              old_layout_root,
+                                              prefix_to_prefix)
+            # replace the old paths with new paths
+            if platform.system().lower() == 'darwin':
+                modify_macho_object(path_name, rpaths, deps,
+                                    idpath, paths_to_paths)
+            else:
+                modify_object_macholib(path_name,
+                                       paths_to_paths)
+            # get the new normalized path in the mach-o binary
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths to relative paths in the new prefix
+            paths_to_paths = macho_make_paths_relative(path_name,
+                                                       new_layout_root,
+                                                       rpaths, deps, idpath)
+            # replace the new paths with relativized paths in the new prefix
+            if platform.system().lower() == 'darwin':
+                modify_macho_object(path_name, rpaths, deps,
+                                    idpath, paths_to_paths)
+            else:
+                modify_object_macholib(path_name,
+                                       paths_to_paths)
         else:
-            modify_object_macholib(path_name, placeholder, new_dir)
-            modify_object_macholib(path_name, old_dir, new_dir)
-        if len(new_dir) <= len(old_dir):
-            replace_prefix_nullterm(path_name, old_dir, new_dir)
-        else:
-            tty.warn('Cannot do a binary string replacement'
-                     ' with padding for %s'
-                     ' because %s is longer than %s' %
-                     (path_name, new_dir, old_dir))
+            # get the paths in the old prefix
+            rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths in the old prefix to the new prefix
+            paths_to_paths = macho_find_paths(rpaths, deps, idpath,
+                                              old_layout_root,
+                                              prefix_to_prefix)
+            # replace the old paths with new paths
+            if platform.system().lower() == 'darwin':
+                modify_macho_object(path_name, rpaths, deps,
+                                    idpath, paths_to_paths)
+            else:
+                modify_object_macholib(path_name,
+                                       paths_to_paths)


-def relocate_elf_binaries(path_names, old_dir, new_dir, allow_root):
+def elf_find_paths(orig_rpaths, old_layout_root, 
prefix_to_prefix):
+    new_rpaths = list()
+    for orig_rpath in orig_rpaths:
+        if orig_rpath.startswith(old_layout_root):
+            for old_prefix, new_prefix in prefix_to_prefix.items():
+                if orig_rpath.startswith(old_prefix):
+                    new_rpaths.append(re.sub(re.escape(old_prefix),
+                                             new_prefix, orig_rpath))
+        else:
+            new_rpaths.append(orig_rpath)
+    return new_rpaths
+
+
+def relocate_elf_binaries(path_names, old_layout_root, new_layout_root,
+                          prefix_to_prefix, rel, old_prefix, new_prefix):
     """
-    Change old_dir to new_dir in RPATHs of elf binaries
-    Account for the case where old_dir is now a placeholder
+    Use patchelf to get the original rpaths and then replace them with
+    rpaths in the new directory layout.
+    New rpaths are determined from a dictionary mapping the prefixes in the
+    old directory layout to the prefixes in the new directory layout if the
+    rpath was in the old layout root, i.e. system paths are not replaced.
     """
-    placeholder = set_placeholder(old_dir)
     for path_name in path_names:
         orig_rpaths = get_existing_elf_rpaths(path_name)
-        if orig_rpaths:
-            # one pass to replace placeholder
-            n_rpaths = substitute_rpath(orig_rpaths,
-                                        placeholder, new_dir)
-            # one pass to replace old_dir
-            new_rpaths = substitute_rpath(n_rpaths,
-                                          old_dir, new_dir)
+        new_rpaths = list()
+        if rel:
+            # get the file path in the old_prefix
+            orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
+                                    path_name)
+            # get the normalized rpaths in the old prefix using the file path
+            # in the orig prefix
+            orig_norm_rpaths = get_normalized_elf_rpaths(orig_path_name,
+                                                         orig_rpaths)
+            # get the normalized rpaths in the new prefix
+            norm_rpaths = elf_find_paths(orig_norm_rpaths, old_layout_root,
+                                         prefix_to_prefix)
+            # get the relativized rpaths in the new prefix
+            new_rpaths = get_relative_elf_rpaths(path_name, new_layout_root,
+                                                 norm_rpaths)
+            modify_elf_object(path_name, new_rpaths)
+        else:
+            new_rpaths = elf_find_paths(orig_rpaths, old_layout_root,
+                                        prefix_to_prefix)
             modify_elf_object(path_name, new_rpaths)
-        if not new_dir == old_dir:
-            if len(new_dir) <= len(old_dir):
-                replace_prefix_bin(path_name, old_dir, new_dir)
-            else:
-                tty.warn('Cannot do a binary string replacement'
-                         ' with padding for %s'
-                         ' because %s is longer than %s.' %
-                         (path_name, new_dir, old_dir))


 def make_link_relative(cur_path_names, orig_path_names):
     """
-    Change absolute links to be relative.
+    Change absolute links to relative links.
""" for cur_path, orig_path in zip(cur_path_names, orig_path_names): target = os.readlink(orig_path) @@ -545,8 +627,8 @@ def make_link_relative(cur_path_names, orig_path_names): os.symlink(relative_target, cur_path) -def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir, - allow_root): +def make_macho_binaries_relative(cur_path_names, orig_path_names, + old_layout_root): """ Replace old RPATHs with paths relative to old_dir in binary files """ @@ -555,33 +637,26 @@ def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir, deps = set() idpath = None if platform.system().lower() == 'darwin': - (rpaths, deps, idpath) = macho_get_paths(cur_path) - (new_rpaths, - new_deps, - new_idpath) = macho_make_paths_relative(orig_path, old_dir, - rpaths, deps, idpath) + (rpaths, deps, idpath) = macholib_get_paths(cur_path) + paths_to_paths = macho_make_paths_relative(orig_path, + old_layout_root, + rpaths, deps, idpath) modify_macho_object(cur_path, rpaths, deps, idpath, - new_rpaths, new_deps, new_idpath) - if (not allow_root and - not file_is_relocatable(cur_path)): - raise InstallRootStringException(cur_path, old_dir) + paths_to_paths) -def make_elf_binaries_relative(cur_path_names, orig_path_names, old_dir, - allow_root): +def make_elf_binaries_relative(cur_path_names, orig_path_names, + old_layout_root): """ Replace old RPATHs with paths relative to old_dir in binary files """ for cur_path, orig_path in zip(cur_path_names, orig_path_names): orig_rpaths = get_existing_elf_rpaths(cur_path) if orig_rpaths: - new_rpaths = get_relative_rpaths(orig_path, old_dir, - orig_rpaths) + new_rpaths = get_relative_elf_rpaths(orig_path, old_layout_root, + orig_rpaths) modify_elf_object(cur_path, new_rpaths) - if (not allow_root and - not file_is_relocatable(cur_path)): - raise InstallRootStringException(cur_path, old_dir) def check_files_relocatable(cur_path_names, allow_root): @@ -591,67 +666,77 @@ def check_files_relocatable(cur_path_names, allow_root): for cur_path in cur_path_names: if (not allow_root and not file_is_relocatable(cur_path)): - raise InstallRootStringException( + raise InstallRootStringError( cur_path, spack.store.layout.root) -def make_link_placeholder(cur_path_names, cur_dir, old_dir): +def relocate_links(linknames, old_layout_root, new_layout_root, + old_install_prefix, new_install_prefix, prefix_to_prefix): """ - Replace old install path with placeholder in absolute links. - - Links in ``cur_path_names`` must link to absolute paths. + The symbolic links in filenames are absolute links or placeholder links. + The old link target is read and the placeholder is replaced by the old + layout root. If the old link target is in the old install prefix, the new + link target is create by replacing the old install prefix with the new + install prefix. 
""" - for cur_path in cur_path_names: - placeholder = set_placeholder(spack.store.layout.root) - placeholder_prefix = old_dir.replace(spack.store.layout.root, - placeholder) - cur_src = os.readlink(cur_path) - rel_src = os.path.relpath(cur_src, cur_dir) - new_src = os.path.join(placeholder_prefix, rel_src) - - os.unlink(cur_path) - os.symlink(new_src, cur_path) + placeholder = set_placeholder(old_layout_root) + link_names = [os.path.join(new_install_prefix, linkname) + for linkname in linknames] + for link_name in link_names: + link_target = os.readlink(link_name) + link_target = re.sub(placeholder, old_layout_root, link_target) + if link_target.startswith(old_install_prefix): + new_link_target = re.sub( + old_install_prefix, new_install_prefix, link_target) + os.unlink(link_name) + os.symlink(new_link_target, link_name) + if (os.path.isabs(link_target) and + not link_target.startswith(new_install_prefix)): + msg = 'Link target %s' % link_target + msg += ' for symbolic link %s is outside' % link_name + msg += ' of the newinstall prefix %s.\n' % new_install_prefix + tty.warn(msg) -def relocate_links(path_names, old_dir, new_dir): +def relocate_text(path_names, old_layout_root, new_layout_root, + old_install_prefix, new_install_prefix, + old_spack_prefix, new_spack_prefix, + prefix_to_prefix): """ - Replace old path with new path in link sources. - - Links in ``path_names`` must link to absolute paths or placeholders. + Replace old paths with new paths in text files + including the path the the spack sbang script """ - placeholder = set_placeholder(old_dir) + sbangre = '#!/bin/bash %s/bin/sbang' % old_spack_prefix + sbangnew = '#!/bin/bash %s/bin/sbang' % new_spack_prefix + for path_name in path_names: - old_src = os.readlink(path_name) - # replace either placeholder or old_dir - new_src = old_src.replace(placeholder, new_dir, 1) - new_src = new_src.replace(old_dir, new_dir, 1) - - os.unlink(path_name) - os.symlink(new_src, path_name) - - -def relocate_text(path_names, oldpath, newpath, oldprefix, newprefix): - """ - Replace old path with new path in text files - including the path the the spack sbang script. - """ - sbangre = '#!/bin/bash %s/bin/sbang' % oldprefix - sbangnew = '#!/bin/bash %s/bin/sbang' % newprefix - for path_name in path_names: - replace_prefix_text(path_name, oldpath, newpath) + replace_prefix_text(path_name, old_install_prefix, new_install_prefix) + for orig_dep_prefix, new_dep_prefix in prefix_to_prefix.items(): + replace_prefix_text(path_name, orig_dep_prefix, new_dep_prefix) + replace_prefix_text(path_name, old_layout_root, new_layout_root) replace_prefix_text(path_name, sbangre, sbangnew) - replace_prefix_text(path_name, oldprefix, newprefix) -def substitute_rpath(orig_rpath, topdir, new_root_path): +def relocate_text_bin(path_names, old_layout_root, new_layout_root, + old_install_prefix, new_install_prefix, + old_spack_prefix, new_spack_prefix, + prefix_to_prefix): """ - Replace topdir with new_root_path RPATH list orig_rpath - """ - new_rpaths = [] - for path in orig_rpath: - new_rpath = path.replace(topdir, new_root_path) - new_rpaths.append(new_rpath) - return new_rpaths + Replace null terminated path strings hard coded into binaries. + Raise an exception when the new path in longer than the old path + because this breaks the binary. 
+ """ + if len(new_install_prefix) <= len(old_install_prefix): + for path_name in path_names: + for old_dep_prefix, new_dep_prefix in prefix_to_prefix.items(): + if len(new_dep_prefix) <= len(old_dep_prefix): + replace_prefix_bin( + path_name, old_dep_prefix, new_dep_prefix) + replace_prefix_bin(path_name, old_spack_prefix, new_spack_prefix) + else: + if len(path_names) > 0: + raise BinaryTextReplaceError( + old_install_prefix, new_install_prefix) def is_relocatable(spec): @@ -714,7 +799,7 @@ def file_is_relocatable(file, paths_to_relocate=None): if not os.path.isabs(file): raise ValueError('{0} is not an absolute path'.format(file)) - strings = Executable('strings') + strings = executable.Executable('strings') # Remove the RPATHS from the strings in the executable set_of_strings = set(strings(file, output=str).split()) @@ -729,7 +814,7 @@ def file_is_relocatable(file, paths_to_relocate=None): set_of_strings.discard(rpaths) if platform.system().lower() == 'darwin': if m_subtype == 'x-mach-binary': - rpaths, deps, idpath = macho_get_paths(file) + rpaths, deps, idpath = macholib_get_paths(file) set_of_strings.discard(set(rpaths)) set_of_strings.discard(set(deps)) if idpath is not None: @@ -776,9 +861,11 @@ def mime_type(file): Returns: Tuple containing the MIME type and subtype """ - file_cmd = Executable('file') + file_cmd = executable.Executable('file') output = file_cmd('-b', '-h', '--mime-type', file, output=str, error=str) tty.debug('[MIME_TYPE] {0} -> {1}'.format(file, output.strip())) + # In corner cases the output does not contain a subtype prefixed with a / + # In those cases add the / so the tuple can be formed. if '/' not in output: output += '/' split_by_slash = output.strip().split('/') diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 8e3dae5d474..4a739189fc2 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -16,22 +16,20 @@ import stat import sys import traceback - -from six import string_types, add_metaclass +import types try: from collections.abc import Mapping # novm except ImportError: from collections import Mapping -from types import ModuleType +import six import ruamel.yaml as yaml import llnl.util.lang import llnl.util.tty as tty -from llnl.util.filesystem import mkdirp, install - +import llnl.util.filesystem as fs import spack.config import spack.caches import spack.error @@ -39,11 +37,9 @@ import spack.spec import spack.util.spack_json as sjson import spack.util.imp as simp -from spack.provider_index import ProviderIndex -from spack.util.path import canonicalize_path -from spack.util.naming import NamespaceTrie, valid_module_name -from spack.util.naming import mod_to_class, possible_spack_module_names - +import spack.provider_index +import spack.util.path +import spack.util.naming as nm #: Super-namespace for all packages. #: Package modules are imported as spack.pkg... @@ -95,7 +91,7 @@ def converter(self, spec_like, *args, **kwargs): return converter -class SpackNamespace(ModuleType): +class SpackNamespace(types.ModuleType): """ Allow lazy loading of modules.""" def __init__(self, namespace): @@ -151,7 +147,7 @@ def _create_new_cache(self): pkg_dir = os.path.join(self.packages_path, pkg_name) # Warn about invalid names that look like packages. - if not valid_module_name(pkg_name): + if not nm.valid_module_name(pkg_name): if not pkg_name.startswith('.'): tty.warn('Skipping package at {0}. 
"{1}" is not ' 'a valid Spack module name.'.format( @@ -247,7 +243,7 @@ def update_package(self, pkg_name): self._tag_dict[tag].append(package.name) -@add_metaclass(abc.ABCMeta) +@six.add_metaclass(abc.ABCMeta) class Indexer(object): """Adaptor for indexes that need to be generated when repos are updated.""" @@ -305,10 +301,10 @@ def write(self, stream): class ProviderIndexer(Indexer): """Lifecycle methods for virtual package providers.""" def _create(self): - return ProviderIndex() + return spack.provider_index.ProviderIndex() def read(self, stream): - self.index = ProviderIndex.from_json(stream) + self.index = spack.provider_index.ProviderIndex.from_json(stream) def update(self, pkg_fullname): self.index.remove_provider(pkg_fullname) @@ -447,7 +443,7 @@ class RepoPath(object): def __init__(self, *repos): self.repos = [] - self.by_namespace = NamespaceTrie() + self.by_namespace = nm.NamespaceTrie() self._all_package_names = None self._provider_index = None @@ -456,7 +452,7 @@ def __init__(self, *repos): # Add each repo to this path. for repo in repos: try: - if isinstance(repo, string_types): + if isinstance(repo, six.string_types): repo = Repo(repo) self.put_last(repo) except RepoError as e: @@ -544,7 +540,7 @@ def all_packages(self): def provider_index(self): """Merged ProviderIndex from all Repos in the RepoPath.""" if self._provider_index is None: - self._provider_index = ProviderIndex() + self._provider_index = spack.provider_index.ProviderIndex() for repo in reversed(self.repos): self._provider_index.merge(repo.provider_index) @@ -707,7 +703,7 @@ def __init__(self, root): """ # Root directory, containing _repo.yaml and package dirs # Allow roots to by spack-relative by starting with '$spack' - self.root = canonicalize_path(root) + self.root = spack.util.path.canonicalize_path(root) # check and raise BadRepoError on fail. def check(condition, msg): @@ -803,7 +799,7 @@ def real_name(self, import_name): if import_name in self: return import_name - options = possible_spack_module_names(import_name) + options = nm.possible_spack_module_names(import_name) options.remove(import_name) for name in options: if name in self: @@ -921,18 +917,18 @@ def dump_provenance(self, spec, path): % (self.namespace, spec.fullname)) # Install patch files needed by the package. - mkdirp(path) + fs.mkdirp(path) for patch in itertools.chain.from_iterable( spec.package.patches.values()): if patch.path: if os.path.exists(patch.path): - install(patch.path, path) + fs.install(patch.path, path) else: tty.warn("Patch file did not exist: %s" % patch.path) # Install the package.py file itself. - install(self.filename_for_package_name(spec.name), path) + fs.install(self.filename_for_package_name(spec.name), path) def purge(self): """Clear entire package instance cache.""" @@ -1082,7 +1078,7 @@ def get_pkg_class(self, pkg_name): raise InvalidNamespaceError('Invalid namespace for %s repo: %s' % (self.namespace, namespace)) - class_name = mod_to_class(pkg_name) + class_name = nm.mod_to_class(pkg_name) module = self._get_pkg_module(pkg_name) cls = getattr(module, class_name) @@ -1107,7 +1103,7 @@ def create_repo(root, namespace=None): If the namespace is not provided, use basename of root. Return the canonicalized path and namespace of the created repository. 
""" - root = canonicalize_path(root) + root = spack.util.path.canonicalize_path(root) if not namespace: namespace = os.path.basename(root) @@ -1141,7 +1137,7 @@ def create_repo(root, namespace=None): config_path = os.path.join(root, repo_config_name) packages_path = os.path.join(root, packages_dir_name) - mkdirp(packages_path) + fs.mkdirp(packages_path) with open(config_path, 'w') as config: config.write("repo:\n") config.write(" namespace: '%s'\n" % namespace) @@ -1163,7 +1159,7 @@ def create_repo(root, namespace=None): def create_or_construct(path, namespace=None): """Create a repository, or just return a Repo if it already exists.""" if not os.path.exists(path): - mkdirp(path) + fs.mkdirp(path) create_repo(path, namespace) return Repo(path) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 718b5ef14db..c6fe2da7627 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -3120,7 +3120,7 @@ def copy(self, deps=True, **kwargs): A copy of this spec. Examples: - Deep copy with dependnecies:: + Deep copy with dependencies:: spec.copy() spec.copy(deps=True) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 6f5520b54ee..04f0c1d18c0 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -753,7 +753,8 @@ def purge(): def get_checksums_for_versions( - url_dict, name, first_stage_function=None, keep_stage=False): + url_dict, name, first_stage_function=None, keep_stage=False, + fetch_options=None): """Fetches and checksums archives from URLs. This function is called by both ``spack checksum`` and ``spack @@ -767,6 +768,8 @@ def get_checksums_for_versions( first_stage_function (callable): function that takes a Stage and a URL; this is run on the stage of the first URL downloaded keep_stage (bool): whether to keep staging area when command completes + fetch_options (dict): Options used for the fetcher (such as timeout + or cookies) Returns: (str): A multi-line string containing versions and corresponding hashes @@ -800,7 +803,12 @@ def get_checksums_for_versions( i = 0 for url, version in zip(urls, versions): try: - with Stage(url, keep=keep_stage) as stage: + if fetch_options: + url_or_fs = fs.URLFetchStrategy( + url, fetch_options=fetch_options) + else: + url_or_fs = url + with Stage(url_or_fs, keep=keep_stage) as stage: # Fetch the archive stage.fetch() if i == 0 and first_stage_function: diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py index 744821a04e2..7ede78b7a51 100644 --- a/lib/spack/spack/test/build_systems.py +++ b/lib/spack/spack/test/build_systems.py @@ -181,3 +181,41 @@ def test_none_is_allowed(self): assert '--without-bar' in options assert '--without-baz' in options assert '--no-fee' in options + + +@pytest.mark.usefixtures('config', 'mock_packages') +class TestCMakePackage(object): + + def test_define(self): + s = Spec('cmake-client') + s.concretize() + pkg = spack.repo.get(s) + + for cls in (list, tuple): + arg = pkg.define('MULTI', cls(['right', 'up'])) + assert arg == '-DMULTI:STRING=right;up' + + arg = pkg.define('ENABLE_TRUTH', False) + assert arg == '-DENABLE_TRUTH:BOOL=OFF' + arg = pkg.define('ENABLE_TRUTH', True) + assert arg == '-DENABLE_TRUTH:BOOL=ON' + + arg = pkg.define('SINGLE', 'red') + assert arg == '-DSINGLE:STRING=red' + + def test_define_from_variant(self): + s = Spec('cmake-client multi=up,right ~truthy single=red') + s.concretize() + pkg = spack.repo.get(s) + + arg = pkg.define_from_variant('MULTI') + assert arg == '-DMULTI:STRING=right;up' + + arg = 
pkg.define_from_variant('ENABLE_TRUTH', 'truthy') + assert arg == '-DENABLE_TRUTH:BOOL=OFF' + + arg = pkg.define_from_variant('SINGLE') + assert arg == '-DSINGLE:STRING=red' + + with pytest.raises(KeyError, match="not a variant"): + pkg.define_from_variant('NONEXISTENT') diff --git a/lib/spack/spack/test/cmd/debug.py b/lib/spack/spack/test/cmd/debug.py index 4a06276abf0..2898ad670eb 100644 --- a/lib/spack/spack/test/cmd/debug.py +++ b/lib/spack/spack/test/cmd/debug.py @@ -3,12 +3,15 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import platform + import pytest import os import os.path -from spack.main import SpackCommand +import spack.architecture as architecture +from spack.main import SpackCommand, get_version from spack.util.executable import which debug = SpackCommand('debug') @@ -41,3 +44,12 @@ def test_create_db_tarball(tmpdir, database): spec_suffix = '%s/.spack/spec.yaml' % spec.dag_hash() assert spec_suffix in contents + + +def test_report(): + out = debug('report') + arch = architecture.Arch(architecture.platform(), 'frontend', 'frontend') + + assert get_version() in out + assert platform.python_version() in out + assert str(arch) in out diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py index fc470691814..05d05569369 100644 --- a/lib/spack/spack/test/cmd/dependencies.py +++ b/lib/spack/spack/test/cmd/dependencies.py @@ -17,7 +17,7 @@ mpi_deps = ['fake'] -def test_immediate_dependencies(mock_packages): +def test_direct_dependencies(mock_packages): out = dependencies('mpileaks') actual = set(re.split(r'\s+', out.strip())) expected = set(['callpath'] + mpis) @@ -47,7 +47,7 @@ def test_transitive_dependencies_with_deptypes(mock_packages): @pytest.mark.db -def test_immediate_installed_dependencies(mock_packages, database): +def test_direct_installed_dependencies(mock_packages, database): with color_when(False): out = dependencies('--installed', 'mpileaks^mpich') diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 841e6e20c8a..4f3abb4438d 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -370,6 +370,54 @@ def test_init_from_yaml(tmpdir): assert not e2.specs_by_hash +@pytest.mark.usefixtures('config') +def test_env_view_external_prefix(tmpdir_factory, mutable_database, + mock_packages): + fake_prefix = tmpdir_factory.mktemp('a-prefix') + fake_bin = fake_prefix.join('bin') + fake_bin.ensure(dir=True) + + initial_yaml = StringIO("""\ +env: + specs: + - a + view: true +""") + + external_config = StringIO("""\ +packages: + a: + paths: + a: {a_prefix} + buildable: false +""".format(a_prefix=str(fake_prefix))) + external_config_dict = spack.util.spack_yaml.load_config(external_config) + + test_scope = spack.config.InternalConfigScope( + 'env-external-test', data=external_config_dict) + with spack.config.override(test_scope): + + e = ev.create('test', initial_yaml) + e.concretize() + # Note: normally installing specs in a test environment requires doing + # a fake install, but not for external specs since no actions are + # taken to install them. The installation commands also include + # post-installation functions like DB-registration, so are important + # to do (otherwise the package is not considered installed). 
+ e.install_all() + e.write() + + env_modifications = e.add_default_view_to_shell('sh') + individual_modifications = env_modifications.split('\n') + + def path_includes_fake_prefix(cmd): + return 'export PATH' in cmd and str(fake_bin) in cmd + + assert any( + path_includes_fake_prefix(cmd) for cmd in individual_modifications + ) + + def test_init_with_file_and_remove(tmpdir): """Ensure a user can remove from any position in the spack.yaml file.""" path = tmpdir.join('spack.yaml') diff --git a/lib/spack/spack/test/cmd/load.py b/lib/spack/spack/test/cmd/load.py index a10b99d45b4..e6664a9d39b 100644 --- a/lib/spack/spack/test/cmd/load.py +++ b/lib/spack/spack/test/cmd/load.py @@ -3,7 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -from spack.main import SpackCommand +import pytest +from spack.main import SpackCommand, SpackCommandError import spack.spec import spack.user_environment as uenv @@ -83,6 +84,18 @@ def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, assert 'setenv FOOBAR mpileaks' in csh_out +def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages): + """Test with and without the --first option""" + install('libelf@0.8.12') + install('libelf@0.8.13') + # Now there are two versions of libelf + with pytest.raises(SpackCommandError): + # This should cause an error due to multiple versions + load('--sh', 'libelf') + # Using --first should avoid the error condition + load('--sh', '--first', 'libelf') + + def test_load_fails_no_shell(install_mockery, mock_fetch, mock_archive, mock_packages): """Test that spack load prints an error message without a shell.""" diff --git a/lib/spack/spack/test/cmd/python.py b/lib/spack/spack/test/cmd/python.py index 5bc05e0127e..b1c9d3db00e 100644 --- a/lib/spack/spack/test/cmd/python.py +++ b/lib/spack/spack/test/cmd/python.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import platform + import pytest import spack @@ -16,6 +18,11 @@ def test_python(): assert out.strip() == spack.spack_version +def test_python_version(): + out = python('-V') + assert platform.python_version() in out + + def test_python_with_module(): # pytest rewrites a lot of modules, which interferes with runpy, so # it's hard to test this. Trying to import a module like sys, that diff --git a/lib/spack/spack/test/cmd/repo.py b/lib/spack/spack/test/cmd/repo.py new file mode 100644 index 00000000000..82fe872710e --- /dev/null +++ b/lib/spack/spack/test/cmd/repo.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os.path + +import pytest +import spack.main + +repo = spack.main.SpackCommand('repo') + + +def test_help_option(): + # Test 'spack repo --help' to check basic import works + # and the command exits successfully + with pytest.raises(SystemExit): + repo('--help') + assert repo.returncode in (None, 0) + + +def test_create_add_list_remove(mutable_config, tmpdir): + # Create a new repository and check that the expected + # files are there + repo('create', str(tmpdir), 'mockrepo') + assert os.path.exists(os.path.join(str(tmpdir), 'repo.yaml')) + + # Add the new repository and check it appears in the list output + repo('add', '--scope=site', str(tmpdir)) + output = repo('list', '--scope=site', output=str) + assert 'mockrepo' in output + + # Then remove it and check it's not there + repo('remove', '--scope=site', str(tmpdir)) + output = repo('list', '--scope=site', output=str) + assert 'mockrepo' not in output diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 81b5360869c..922d5a11d87 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -185,34 +185,6 @@ def test_develop(self): spec.concretize() assert spec.version == Version('0.2.15.develop') - def test_no_virtuals_in_packages_yaml(self): - """Verify that virtuals are not allowed in packages.yaml.""" - - # set up a packages.yaml file with a vdep as a key. We use - # syaml.load_config here to make sure source lines in the config are - # attached to parsed strings, as the error message uses them. - conf = syaml.load_config("""\ -mpi: - paths: - mpi-with-lapack@2.1: /path/to/lapack -""") - spack.config.set('packages', conf, scope='concretize') - - # now when we get the packages.yaml config, there should be an error - with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError): - spack.package_prefs.get_packages_config() - - def test_all_is_not_a_virtual(self): - """Verify that `all` is allowed in packages.yaml.""" - conf = syaml.load_config("""\ -all: - variants: [+mpi] -""") - spack.config.set('packages', conf, scope='concretize') - - # should be no error for 'all': - spack.package_prefs.get_packages_config() - def test_external_mpi(self): # make sure this doesn't give us an external first. spec = Spec('mpi') @@ -236,6 +208,37 @@ def test_external_mpi(self): spec.concretize() assert spec['mpich'].external_path == '/dummy/path' + def test_external_module(self, monkeypatch): + """Test that packages can find externals specified by module + + The specific code for parsing the module is tested elsewhere. + This just tests that the preference is accounted for""" + # make sure this doesn't give us an external first. 
+ def mock_module(cmd, module): + return 'prepend-path PATH /dummy/path' + monkeypatch.setattr(spack.util.module_cmd, 'module', mock_module) + + spec = Spec('mpi') + spec.concretize() + assert not spec['mpi'].external + + # load config + conf = syaml.load_config("""\ +all: + providers: + mpi: [mpich] +mpi: + buildable: false + modules: + mpich@3.0.4: dummy +""") + spack.config.set('packages', conf, scope='concretize') + + # ensure that once config is in place, external is used + spec = Spec('mpi') + spec.concretize() + assert spec['mpich'].external_path == '/dummy/path' + def test_config_permissions_from_all(self, configure_permissions): # Although these aren't strictly about concretization, they are # configured in the same file and therefore convenient to test here. diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 04e870d336a..8912c0219b7 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -301,8 +301,17 @@ def use_configuration(config): """Context manager to swap out the global Spack configuration.""" saved = spack.config.config spack.config.config = config + + # Avoid using real spack configuration that has been cached by other + # tests, and avoid polluting the cache with spack test configuration + # (including modified configuration) + saved_compiler_cache = spack.compilers._cache_config_file + spack.compilers._cache_config_file = [] + yield + spack.config.config = saved + spack.compilers._cache_config_file = saved_compiler_cache @contextlib.contextmanager @@ -427,10 +436,6 @@ def mutable_config(tmpdir_factory, configuration_dir, monkeypatch): *[spack.config.ConfigScope(name, str(mutable_dir)) for name in ['site', 'system', 'user']]) - # This is essential, otherwise the cache will create weird side effects - # that will compromise subsequent tests if compilers.yaml is modified - monkeypatch.setattr(spack.compilers, '_cache_config_file', []) - with use_configuration(cfg): yield cfg @@ -525,6 +530,8 @@ def database(mock_store, mock_packages, config): """This activates the mock store, packages, AND config.""" with use_store(mock_store): yield mock_store.db + # Force reading the database again between tests + mock_store.db.last_seen_verifier = '' @pytest.fixture(scope='function') @@ -1033,6 +1040,13 @@ def __init__(self, name, dependencies, dependency_types, conditions=None, self.conflicts = {} self.patches = {} + def provides(self, vname): + return vname in self.provided + + @property + def virtuals_provided(self): + return [v.name for v, c in self.provided] + class MockPackageMultiRepo(object): def __init__(self, packages): diff --git a/lib/spack/spack/test/data/sourceme_lmod.sh b/lib/spack/spack/test/data/sourceme_lmod.sh new file mode 100644 index 00000000000..b71e338ec98 --- /dev/null +++ b/lib/spack/spack/test/data/sourceme_lmod.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +# +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +export LMOD_VARIABLE=foo +export LMOD_ANOTHER_VARIABLE=bar +export NEW_VAR=new diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index a25edd83fee..ebd4a960f31 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -14,6 +14,12 @@ import pytest import json import shutil +try: + import uuid + _use_uuid = True +except ImportError: + _use_uuid = False + pass import llnl.util.lock as lk from llnl.util.tty.colify import colify @@ -483,6 +489,21 @@ def test_015_write_and_read(mutable_database): assert new_rec.installed == rec.installed +def test_017_write_and_read_without_uuid(mutable_database, monkeypatch): + monkeypatch.setattr(spack.database, '_use_uuid', False) + # write and read DB + with spack.store.db.write_transaction(): + specs = spack.store.db.query() + recs = [spack.store.db.get_record(s) for s in specs] + + for spec, rec in zip(specs, recs): + new_rec = spack.store.db.get_record(spec) + assert new_rec.ref_count == rec.ref_count + assert new_rec.spec == rec.spec + assert new_rec.path == rec.path + assert new_rec.installed == rec.installed + + def test_020_db_sanity(database): """Make sure query() returns what's actually in the db.""" _check_db_sanity(database) @@ -717,6 +738,9 @@ def test_old_external_entries_prefix(mutable_database): with open(spack.store.db._index_path, 'w') as f: f.write(json.dumps(db_obj)) + if _use_uuid: + with open(spack.store.db._verifier_path, 'w') as f: + f.write(str(uuid.uuid4())) record = spack.store.db.get_record(s) diff --git a/lib/spack/spack/test/environment_modifications.py b/lib/spack/spack/test/environment_modifications.py index 9983594a843..d1f5a4b791a 100644 --- a/lib/spack/spack/test/environment_modifications.py +++ b/lib/spack/spack/test/environment_modifications.py @@ -437,3 +437,14 @@ def test_from_environment_diff(before, after, search_list): for item in search_list: assert item in mod + + +@pytest.mark.regression('15775') +def test_blacklist_lmod_variables(): + # Construct the list of environment modifications + file = os.path.join(datadir, 'sourceme_lmod.sh') + env = EnvironmentModifications.from_sourcing_file(file) + + # Check that variables related to lmod are not in there + modifications = env.group_by_name() + assert not any(x.startswith('LMOD_') for x in modifications) diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index ff7f5a33556..2f779c6a5f3 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -7,7 +7,7 @@ import pytest import shutil -from llnl.util.filesystem import mkdirp, touch, working_dir +import llnl.util.filesystem as fs from spack.package import InstallError, PackageBase, PackageStillNeededError import spack.error @@ -380,11 +380,11 @@ def test_pkg_build_paths(install_mockery): # Backward compatibility checks log_dir = os.path.dirname(log_path) - mkdirp(log_dir) - with working_dir(log_dir): + fs.mkdirp(log_dir) + with fs.working_dir(log_dir): # Start with the older of the previous log filenames older_log = 'spack-build.out' - touch(older_log) + fs.touch(older_log) assert spec.package.log_path.endswith(older_log) # Now check the newer log filename @@ -416,11 +416,11 @@ def test_pkg_install_paths(install_mockery): # Backward compatibility checks log_dir = os.path.dirname(log_path) - mkdirp(log_dir) - with working_dir(log_dir): + fs.mkdirp(log_dir) + with fs.working_dir(log_dir): # Start with the older of the previous install log filenames 
older_log = 'build.out' - touch(older_log) + fs.touch(older_log) assert spec.package.install_log_path.endswith(older_log) # Now check the newer install log filename @@ -437,7 +437,8 @@ def test_pkg_install_paths(install_mockery): shutil.rmtree(log_dir) -def test_pkg_install_log(install_mockery): +def test_log_install_without_build_files(install_mockery): + """Test the installer log function when no build files are present.""" # Get a basic concrete spec for the trivial install package. spec = Spec('trivial-install-test-package').concretized() @@ -445,17 +446,40 @@ def test_pkg_install_log(install_mockery): with pytest.raises(IOError, match="No such file or directory"): spack.installer.log(spec.package) - # Set up mock build files and try again + +def test_log_install_with_build_files(install_mockery, monkeypatch): + """Test the installer's log function when have build files.""" + config_log = 'config.log' + + # Retain the original function for use in the monkey patch that is used + # to raise an exception under the desired condition for test coverage. + orig_install_fn = fs.install + + def _install(src, dest): + orig_install_fn(src, dest) + if src.endswith(config_log): + raise Exception('Mock log install error') + + monkeypatch.setattr(fs, 'install', _install) + + spec = Spec('trivial-install-test-package').concretized() + + # Set up mock build files and try again to include archive failure log_path = spec.package.log_path log_dir = os.path.dirname(log_path) - mkdirp(log_dir) - with working_dir(log_dir): - touch(log_path) - touch(spec.package.env_path) - touch(spec.package.configure_args_path) + fs.mkdirp(log_dir) + with fs.working_dir(log_dir): + fs.touch(log_path) + fs.touch(spec.package.env_path) + fs.touch(spec.package.configure_args_path) install_path = os.path.dirname(spec.package.install_log_path) - mkdirp(install_path) + fs.mkdirp(install_path) + + source = spec.package.stage.source_path + config = os.path.join(source, 'config.log') + fs.touchp(config) + spec.package.archive_files = ['missing', '..', config] spack.installer.log(spec.package) @@ -463,6 +487,21 @@ def test_pkg_install_log(install_mockery): assert os.path.exists(spec.package.install_env_path) assert os.path.exists(spec.package.install_configure_args_path) + archive_dir = os.path.join(install_path, 'archived-files') + source_dir = os.path.dirname(source) + rel_config = os.path.relpath(config, source_dir) + + assert os.path.exists(os.path.join(archive_dir, rel_config)) + assert not os.path.exists(os.path.join(archive_dir, 'missing')) + + expected_errs = [ + 'OUTSIDE SOURCE PATH', # for '..' 
+ 'FAILED TO ARCHIVE' # for rel_config + ] + with open(os.path.join(archive_dir, 'errors.txt'), 'r') as fd: + for ln, expected in zip(fd, expected_errs): + assert expected in ln + # Cleanup shutil.rmtree(log_dir) diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py index 8c3a232f19f..0be4bc78c02 100644 --- a/lib/spack/spack/test/installer.py +++ b/lib/spack/spack/test/installer.py @@ -4,17 +4,38 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import py import pytest +import llnl.util.filesystem as fs import llnl.util.tty as tty +import llnl.util.lock as ulk import spack.binary_distribution import spack.compilers import spack.directory_layout as dl import spack.installer as inst -import spack.util.lock as lk +import spack.package_prefs as prefs import spack.repo import spack.spec +import spack.store +import spack.util.lock as lk + + +def _mock_repo(root, namespace): + """Create an empty repository at the specified root + + Args: + root (str): path to the mock repository root + namespace (str): mock repo's namespace + """ + repodir = py.path.local(root) if isinstance(root, str) else root + repodir.ensure(spack.repo.packages_dir_name, dir=True) + yaml = repodir.join('repo.yaml') + yaml.write(""" +repo: + namespace: {0} +""".format(namespace)) def _noop(*args, **kwargs): @@ -135,7 +156,7 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd): def test_process_binary_cache_tarball_none(install_mockery, monkeypatch, capfd): - """Tests to cover _process_binary_cache_tarball when no tarball.""" + """Tests of _process_binary_cache_tarball when no tarball.""" monkeypatch.setattr(spack.binary_distribution, 'download_tarball', _none) pkg = spack.repo.get('trivial-install-test-package') @@ -145,7 +166,7 @@ def test_process_binary_cache_tarball_none(install_mockery, monkeypatch, def test_process_binary_cache_tarball_tar(install_mockery, monkeypatch, capfd): - """Tests to cover _process_binary_cache_tarball with a tar file.""" + """Tests of _process_binary_cache_tarball with a tar file.""" def _spec(spec): return spec @@ -162,6 +183,25 @@ def _spec(spec): assert 'Installing a from binary cache' in capfd.readouterr()[0] +def test_try_install_from_binary_cache(install_mockery, mock_packages, + monkeypatch, capsys): + """Tests SystemExit path for_try_install_from_binary_cache.""" + def _spec(spec, force): + spec = spack.spec.Spec('mpi').concretized() + return {spec: None} + + spec = spack.spec.Spec('mpich') + spec.concretize() + + monkeypatch.setattr(spack.binary_distribution, 'get_spec', _spec) + + with pytest.raises(SystemExit): + inst._try_install_from_binary_cache(spec.package, False, False) + + captured = capsys.readouterr() + assert 'add a spack mirror to allow download' in str(captured) + + def test_installer_init_errors(install_mockery): """Test to ensure cover installer constructor errors.""" with pytest.raises(ValueError, match='must be a package'): @@ -172,17 +212,18 @@ def test_installer_init_errors(install_mockery): inst.PackageInstaller(pkg) -def test_installer_strings(install_mockery): - """Tests of installer repr and str for coverage purposes.""" +def test_installer_repr(install_mockery): spec, installer = create_installer('trivial-install-test-package') - # Cover __repr__ irep = installer.__repr__() assert irep.startswith(installer.__class__.__name__) assert "installed=" in irep assert "failed=" in irep - # Cover __str__ + +def test_installer_str(install_mockery): + spec, installer = 
create_installer('trivial-install-test-package') + istr = str(installer) assert "#tasks=0" in istr assert "installed (0)" in istr @@ -190,7 +231,6 @@ def test_installer_strings(install_mockery): def test_installer_last_phase_error(install_mockery, capsys): - """Test to cover last phase error.""" spec = spack.spec.Spec('trivial-install-test-package') spec.concretize() assert spec.concrete @@ -203,7 +243,6 @@ def test_installer_last_phase_error(install_mockery, capsys): def test_installer_ensure_ready_errors(install_mockery): - """Test to cover _ensure_ready errors.""" spec, installer = create_installer('trivial-install-test-package') fmt = r'cannot be installed locally.*{0}' @@ -230,24 +269,102 @@ def test_installer_ensure_ready_errors(install_mockery): installer._ensure_install_ready(spec.package) -def test_ensure_locked_have(install_mockery, tmpdir): - """Test to cover _ensure_locked when already have lock.""" +def test_ensure_locked_err(install_mockery, monkeypatch, tmpdir, capsys): + """Test _ensure_locked when a non-lock exception is raised.""" + mock_err_msg = 'Mock exception error' + + def _raise(lock, timeout): + raise RuntimeError(mock_err_msg) + + spec, installer = create_installer('trivial-install-test-package') + + monkeypatch.setattr(ulk.Lock, 'acquire_read', _raise) + with tmpdir.as_cwd(): + with pytest.raises(RuntimeError): + installer._ensure_locked('read', spec.package) + + out = str(capsys.readouterr()[1]) + assert 'Failed to acquire a read lock' in out + assert mock_err_msg in out + + +def test_ensure_locked_have(install_mockery, tmpdir, capsys): + """Test _ensure_locked when already have lock.""" spec, installer = create_installer('trivial-install-test-package') with tmpdir.as_cwd(): + # Test "downgrade" of a read lock (to a read lock) lock = lk.Lock('./test', default_timeout=1e-9, desc='test') lock_type = 'read' tpl = (lock_type, lock) installer.locks[installer.pkg_id] = tpl assert installer._ensure_locked(lock_type, spec.package) == tpl + # Test "upgrade" of a read lock without read count to a write + lock_type = 'write' + err = 'Cannot upgrade lock' + with pytest.raises(ulk.LockUpgradeError, match=err): + installer._ensure_locked(lock_type, spec.package) -def test_package_id(install_mockery): - """Test to cover package_id functionality.""" + out = str(capsys.readouterr()[1]) + assert 'Failed to upgrade to a write lock' in out + assert 'exception when releasing read lock' in out + + # Test "upgrade" of the read lock *with* read count to a write + lock._reads = 1 + tpl = (lock_type, lock) + assert installer._ensure_locked(lock_type, spec.package) == tpl + + # Test "downgrade" of the write lock to a read lock + lock_type = 'read' + tpl = (lock_type, lock) + assert installer._ensure_locked(lock_type, spec.package) == tpl + + +@pytest.mark.parametrize('lock_type,reads,writes', [ + ('read', 1, 0), + ('write', 0, 1)]) +def test_ensure_locked_new_lock( + install_mockery, tmpdir, lock_type, reads, writes): + pkg_id = 'a' + spec, installer = create_installer(pkg_id) + with tmpdir.as_cwd(): + ltype, lock = installer._ensure_locked(lock_type, spec.package) + assert ltype == lock_type + assert lock is not None + assert lock._reads == reads + assert lock._writes == writes + + +def test_ensure_locked_new_warn(install_mockery, monkeypatch, tmpdir, capsys): + orig_pl = spack.database.Database.prefix_lock + + def _pl(db, spec, timeout): + lock = orig_pl(db, spec, timeout) + lock.default_timeout = 1e-9 if timeout is None else None + return lock + + pkg_id = 'a' + spec, installer = 
create_installer(pkg_id) + + monkeypatch.setattr(spack.database.Database, 'prefix_lock', _pl) + + lock_type = 'read' + ltype, lock = installer._ensure_locked(lock_type, spec.package) + assert ltype == lock_type + assert lock is not None + + out = str(capsys.readouterr()[1]) + assert 'Expected prefix lock timeout' in out + + +def test_package_id_err(install_mockery): pkg = spack.repo.get('trivial-install-test-package') with pytest.raises(ValueError, match='spec is not concretized'): inst.package_id(pkg) + +def test_package_id_ok(install_mockery): spec = spack.spec.Spec('trivial-install-test-package') spec.concretize() assert spec.concrete @@ -256,94 +373,134 @@ def test_package_id(install_mockery): def test_fake_install(install_mockery): - """Test to cover fake install basics.""" spec = spack.spec.Spec('trivial-install-test-package') spec.concretize() assert spec.concrete + pkg = spec.package inst._do_fake_install(pkg) assert os.path.isdir(pkg.prefix.lib) -def test_packages_needed_to_bootstrap_compiler(install_mockery, monkeypatch): - """Test to cover most of _packages_needed_to_boostrap_compiler.""" - # TODO: More work is needed to go beyond the dependency check - def _no_compilers(pkg, arch_spec): - return [] - - # Test path where no compiler packages returned +def test_packages_needed_to_bootstrap_compiler_none(install_mockery): spec = spack.spec.Spec('trivial-install-test-package') spec.concretize() assert spec.concrete + packages = inst._packages_needed_to_bootstrap_compiler(spec.package) assert not packages - # Test up to the dependency check - monkeypatch.setattr(spack.compilers, 'compilers_for_spec', _no_compilers) - with pytest.raises(spack.repo.UnknownPackageError, match='not found'): - inst._packages_needed_to_bootstrap_compiler(spec.package) + +def test_packages_needed_to_bootstrap_compiler_packages(install_mockery, + monkeypatch): + spec = spack.spec.Spec('trivial-install-test-package') + spec.concretize() + + def _conc_spec(compiler): + return spack.spec.Spec('a').concretized() + + # Ensure we can get past functions that are precluding obtaining + # packages. 
+ monkeypatch.setattr(spack.compilers, 'compilers_for_spec', _none) + monkeypatch.setattr(spack.compilers, 'pkg_spec_for_compiler', _conc_spec) + monkeypatch.setattr(spack.spec.Spec, 'concretize', _noop) + + packages = inst._packages_needed_to_bootstrap_compiler(spec.package) + assert packages -def test_dump_packages_deps(install_mockery, tmpdir): - """Test to add coverage to dump_packages.""" +def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_repo_path): + """Test happy path for dump_packages with dependencies.""" + + spec_name = 'simple-inheritance' + spec = spack.spec.Spec(spec_name).concretized() + inst.dump_packages(spec, str(tmpdir)) + + repo = mock_repo_path.repos[0] + dest_pkg = repo.filename_for_package_name(spec_name) + assert os.path.isfile(dest_pkg) + + +def test_dump_packages_deps_errs(install_mockery, tmpdir, monkeypatch, capsys): + """Test error paths for dump_packages with dependencies.""" + orig_bpp = spack.store.layout.build_packages_path + orig_dirname = spack.repo.Repo.dirname_for_package_name + repo_err_msg = "Mock dirname_for_package_name" + + def bpp_path(spec): + # Perform the original function + source = orig_bpp(spec) + # Mock the required directory structure for the repository + _mock_repo(os.path.join(source, spec.namespace), spec.namespace) + return source + + def _repoerr(repo, name): + if name == 'cmake': + raise spack.repo.RepoError(repo_err_msg) + else: + return orig_dirname(repo, name) + + # Now mock the creation of the required directory structure to cover + # the try-except block + monkeypatch.setattr(spack.store.layout, 'build_packages_path', bpp_path) + spec = spack.spec.Spec('simple-inheritance').concretized() - with tmpdir.as_cwd(): - inst.dump_packages(spec, '.') + path = str(tmpdir) + + # The call to install_tree will raise the exception since not mocking + # creation of dependency package files within *install* directories. 
+ with pytest.raises(IOError, match=path): + inst.dump_packages(spec, path) + + # Now try the error path, which requires the mock directory structure + # above + monkeypatch.setattr(spack.repo.Repo, 'dirname_for_package_name', _repoerr) + with pytest.raises(spack.repo.RepoError, match=repo_err_msg): + inst.dump_packages(spec, path) + + out = str(capsys.readouterr()[1]) + assert "Couldn't copy in provenance for cmake" in out -@pytest.mark.tld -def test_check_deps_status_errs(install_mockery, monkeypatch): - """Test to cover _check_deps_status failures.""" +def test_check_deps_status_install_failure(install_mockery, monkeypatch): spec, installer = create_installer('a') # Make sure the package is identified as failed - orig_fn = spack.database.Database.prefix_failed monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true) with pytest.raises(inst.InstallError, match='install failure'): installer._check_deps_status() - monkeypatch.setattr(spack.database.Database, 'prefix_failed', orig_fn) - # Ensure do not acquire the lock +def test_check_deps_status_write_locked(install_mockery, monkeypatch): + spec, installer = create_installer('a') + + # Ensure the lock is not acquired monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _not_locked) with pytest.raises(inst.InstallError, match='write locked by another'): installer._check_deps_status() -@pytest.mark.tld def test_check_deps_status_external(install_mockery, monkeypatch): - """Test to cover _check_deps_status for external.""" spec, installer = create_installer('a') - deps = spec.dependencies() - assert len(deps) > 0 - dep_id = 'b' - - # Ensure the known dependent is installed if flagged as external + # Mock the known dependent, b, as external so assumed to be installed monkeypatch.setattr(spack.spec.Spec, 'external', True) installer._check_deps_status() - assert dep_id in installer.installed + assert 'b' in installer.installed -@pytest.mark.tld def test_check_deps_status_upstream(install_mockery, monkeypatch): - """Test to cover _check_deps_status for upstream.""" spec, installer = create_installer('a') - deps = spec.dependencies() - assert len(deps) > 0 - dep_id = 'b' - - # Ensure the known dependent, b, is installed if flagged as upstream + # Mock the known dependent, b, as installed upstream monkeypatch.setattr(spack.package.PackageBase, 'installed_upstream', True) installer._check_deps_status() - assert dep_id in installer.installed + assert 'b' in installer.installed def test_add_bootstrap_compilers(install_mockery, monkeypatch): - """Test to cover _add_bootstrap_compilers.""" def _pkgs(pkg): spec = spack.spec.Spec('mpi').concretized() return [(spec.package, True)] @@ -382,7 +539,6 @@ def test_installer_init_queue(install_mockery): def test_install_task_use_cache(install_mockery, monkeypatch): - """Test _install_task to cover use_cache path.""" spec, installer = create_installer('trivial-install-test-package') task = create_build_task(spec.package) @@ -391,25 +547,27 @@ def test_install_task_use_cache(install_mockery, monkeypatch): assert spec.package.name in installer.installed -def test_install_task_stop_iter(install_mockery, monkeypatch, capfd): - """Test _install_task to cover the StopIteration exception.""" - mock_err_msg = 'mock stop iteration' +def test_install_task_add_compiler(install_mockery, monkeypatch, capfd): + config_msg = 'mock add_compilers_to_config' - def _raise(installer, pkg): - raise StopIteration(mock_err_msg) + def _add(_compilers): + tty.msg(config_msg) spec, installer = create_installer('a') 
task = create_build_task(spec.package) + task.compiler = True + # Preclude any meaningful side-effects monkeypatch.setattr(spack.package.PackageBase, 'unit_test_check', _true) - monkeypatch.setattr(inst.PackageInstaller, '_setup_install_dir', _raise) + monkeypatch.setattr(inst.PackageInstaller, '_setup_install_dir', _noop) + monkeypatch.setattr(spack.build_environment, 'fork', _noop) + monkeypatch.setattr(spack.database.Database, 'add', _noop) + monkeypatch.setattr(spack.compilers, 'add_compilers_to_config', _add) installer._install_task(task) - out = capfd.readouterr()[0] - assert mock_err_msg in out - assert 'Package stage directory' in out - assert spec.package.stage.source_path in out + out = capfd.readouterr()[0] + assert config_msg in out def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys): @@ -466,8 +624,36 @@ def _rmtask(installer, pkg_id): assert len(installer.build_tasks) == 1 -def test_cleanup_failed(install_mockery, tmpdir, monkeypatch, capsys): - """Test to increase coverage of _cleanup_failed.""" +def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd): + """Test _setup_install_dir's group change.""" + mock_group = 'mockgroup' + mock_chgrp_msg = 'Changing group for {0} to {1}' + + def _get_group(spec): + return mock_group + + def _chgrp(path, group): + tty.msg(mock_chgrp_msg.format(path, group)) + + monkeypatch.setattr(prefs, 'get_package_group', _get_group) + monkeypatch.setattr(fs, 'chgrp', _chgrp) + + spec, installer = create_installer('trivial-install-test-package') + + fs.touchp(spec.prefix) + metadatadir = spack.store.layout.metadata_path(spec) + # Should fail with a "not a directory" error + with pytest.raises(OSError, match=metadatadir): + installer._setup_install_dir(spec.package) + + out = str(capfd.readouterr()[0]) + + expected_msg = mock_chgrp_msg.format(spec.prefix, mock_group) + assert expected_msg in out + + +def test_cleanup_failed_err(install_mockery, tmpdir, monkeypatch, capsys): + """Test _cleanup_failed exception path.""" msg = 'Fake release_write exception' def _raise_except(lock): @@ -487,13 +673,14 @@ def _raise_except(lock): assert msg in out -def test_update_failed_no_mark(install_mockery): - """Test of _update_failed sans mark and dependent build tasks.""" +def test_update_failed_no_dependent_task(install_mockery): + """Test _update_failed with missing dependent build tasks.""" spec, installer = create_installer('dependent-install') - task = create_build_task(spec.package) - installer._update_failed(task) - assert installer.failed['dependent-install'] is None + for dep in spec.traverse(root=False): + task = create_build_task(dep.package) + installer._update_failed(task, mark=False) + assert installer.failed[task.pkg_id] is None def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys): @@ -647,3 +834,12 @@ def _install(installer, task, **kwargs): installer.install() assert 'b' in installer.installed + + +def test_install_skip_patch(install_mockery, mock_fetch): + """Test the path skip_patch install path.""" + spec, installer = create_installer('b') + + installer.install(fake=False, skip_patch=True) + + assert 'b' in installer.installed diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 570c329b717..05cf46dc200 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -52,52 +52,52 @@ def check_mirror(): mirror_root = os.path.join(stage.path, 'test-mirror') # register mirror with spack config mirrors = {'spack-mirror-test': 'file://' + mirror_root} 
- spack.config.set('mirrors', mirrors) - with spack.config.override('config:checksum', False): - specs = [Spec(x).concretized() for x in repos] - spack.mirror.create(mirror_root, specs) - - # Stage directory exists - assert os.path.isdir(mirror_root) - - for spec in specs: - fetcher = spec.package.fetcher[0] - per_package_ref = os.path.join( - spec.name, '-'.join([spec.name, str(spec.version)])) - mirror_paths = spack.mirror.mirror_archive_paths( - fetcher, - per_package_ref) - expected_path = os.path.join( - mirror_root, mirror_paths.storage_path) - assert os.path.exists(expected_path) - - # Now try to fetch each package. - for name, mock_repo in repos.items(): - spec = Spec(name).concretized() - pkg = spec.package - + with spack.config.override('mirrors', mirrors): with spack.config.override('config:checksum', False): - with pkg.stage: - pkg.do_stage(mirror_only=True) + specs = [Spec(x).concretized() for x in repos] + spack.mirror.create(mirror_root, specs) - # Compare the original repo with the expanded archive - original_path = mock_repo.path - if 'svn' in name: - # have to check out the svn repo to compare. - original_path = os.path.join( - mock_repo.path, 'checked_out') + # Stage directory exists + assert os.path.isdir(mirror_root) - svn = which('svn', required=True) - svn('checkout', mock_repo.url, original_path) + for spec in specs: + fetcher = spec.package.fetcher[0] + per_package_ref = os.path.join( + spec.name, '-'.join([spec.name, str(spec.version)])) + mirror_paths = spack.mirror.mirror_archive_paths( + fetcher, + per_package_ref) + expected_path = os.path.join( + mirror_root, mirror_paths.storage_path) + assert os.path.exists(expected_path) - dcmp = filecmp.dircmp( - original_path, pkg.stage.source_path) + # Now try to fetch each package. + for name, mock_repo in repos.items(): + spec = Spec(name).concretized() + pkg = spec.package - # make sure there are no new files in the expanded - # tarball - assert not dcmp.right_only - # and that all original files are present. - assert all(l in exclude for l in dcmp.left_only) + with spack.config.override('config:checksum', False): + with pkg.stage: + pkg.do_stage(mirror_only=True) + + # Compare the original repo with the expanded archive + original_path = mock_repo.path + if 'svn' in name: + # have to check out the svn repo to compare. + original_path = os.path.join( + mock_repo.path, 'checked_out') + + svn = which('svn', required=True) + svn('checkout', mock_repo.url, original_path) + + dcmp = filecmp.dircmp( + original_path, pkg.stage.source_path) + + # make sure there are no new files in the expanded + # tarball + assert not dcmp.right_only + # and that all original files are present. + assert all(l in exclude for l in dcmp.left_only) def test_url_mirror(mock_archive): diff --git a/lib/spack/spack/test/module_parsing.py b/lib/spack/spack/test/module_parsing.py index bbe18b1ad05..0bf485913f5 100644 --- a/lib/spack/spack/test/module_parsing.py +++ b/lib/spack/spack/test/module_parsing.py @@ -20,28 +20,11 @@ 'setenv LDFLAGS -L/path/to/lib', 'prepend-path PATH /path/to/bin'] - -@pytest.fixture -def module_function_test_mode(): - old_mode = spack.util.module_cmd._test_mode - spack.util.module_cmd._test_mode = True - - yield - - spack.util.module_cmd._test_mode = old_mode +_test_template = "'. 
%s 2>&1' % args[1]" -@pytest.fixture -def save_module_func(): - old_func = spack.util.module_cmd.module - - yield - - spack.util.module_cmd.module = old_func - - -def test_module_function_change_env(tmpdir, working_env, - module_function_test_mode): +def test_module_function_change_env(tmpdir, working_env, monkeypatch): + monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template) src_file = str(tmpdir.join('src_me')) with open(src_file, 'w') as f: f.write('export TEST_MODULE_ENV_VAR=TEST_SUCCESS\n') @@ -53,7 +36,8 @@ def test_module_function_change_env(tmpdir, working_env, assert os.environ['NOT_AFFECTED'] == "NOT_AFFECTED" -def test_module_function_no_change(tmpdir, module_function_test_mode): +def test_module_function_no_change(tmpdir, monkeypatch): + monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template) src_file = str(tmpdir.join('src_me')) with open(src_file, 'w') as f: f.write('echo TEST_MODULE_FUNCTION_PRINT') @@ -65,11 +49,11 @@ def test_module_function_no_change(tmpdir, module_function_test_mode): assert os.environ == old_env -def test_get_path_from_module_faked(save_module_func): +def test_get_path_from_module_faked(monkeypatch): for line in test_module_lines: def fake_module(*args): return line - spack.util.module_cmd.module = fake_module + monkeypatch.setattr(spack.util.module_cmd, 'module', fake_module) path = get_path_from_module('mod') assert path == '/path/to' diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py index b3351ffb491..d540ac663e1 100644 --- a/lib/spack/spack/test/package_class.py +++ b/lib/spack/spack/test/package_class.py @@ -11,12 +11,17 @@ """ import pytest +import spack.package import spack.repo -@pytest.fixture -def mpileaks_possible_deps(mock_packages): - mpi_names = [spec.name for spec in spack.repo.path.providers_for('mpi')] +@pytest.fixture(scope="module") +def mpi_names(mock_repo_path): + return [spec.name for spec in mock_repo_path.providers_for('mpi')] + + +@pytest.fixture() +def mpileaks_possible_deps(mock_packages, mpi_names): possible = { 'callpath': set(['dyninst'] + mpi_names), 'dyninst': set(['libdwarf', 'libelf']), @@ -34,47 +39,72 @@ def mpileaks_possible_deps(mock_packages): def test_possible_dependencies(mock_packages, mpileaks_possible_deps): mpileaks = spack.repo.get('mpileaks') - assert (mpileaks.possible_dependencies(expand_virtuals=True) == - mpileaks_possible_deps) + assert mpileaks_possible_deps == ( + mpileaks.possible_dependencies(expand_virtuals=True)) - assert mpileaks.possible_dependencies(expand_virtuals=False) == { - 'callpath': set(['dyninst']), + assert { + 'callpath': set(['dyninst', 'mpi']), 'dyninst': set(['libdwarf', 'libelf']), 'libdwarf': set(['libelf']), 'libelf': set(), 'mpi': set(), - 'mpileaks': set(['callpath']), - } + 'mpileaks': set(['callpath', 'mpi']), + } == mpileaks.possible_dependencies(expand_virtuals=False) + + +def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps): + mpileaks = spack.repo.get('mpileaks') + deps = mpileaks.possible_dependencies(transitive=False, + expand_virtuals=False) + + assert { + 'callpath': set(), + 'mpi': set(), + 'mpileaks': set(['callpath', 'mpi']), + } == deps + + +def test_possible_dependencies_virtual(mock_packages, mpi_names): + expected = dict( + (name, set(spack.repo.get(name).dependencies)) + for name in mpi_names + ) + + # only one mock MPI has a dependency + expected['fake'] = set() + + assert expected == spack.package.possible_dependencies( + "mpi", transitive=False) def 
test_possible_dependencies_missing(mock_packages): md = spack.repo.get("missing-dependency") missing = {} md.possible_dependencies(transitive=True, missing=missing) - assert missing["missing-dependency"] == set([ + assert set([ "this-is-a-missing-dependency" - ]) + ]) == missing["missing-dependency"] def test_possible_dependencies_with_deptypes(mock_packages): dtbuild1 = spack.repo.get('dtbuild1') - assert dtbuild1.possible_dependencies(deptype=('link', 'run')) == { + assert { 'dtbuild1': set(['dtrun2', 'dtlink2']), 'dtlink2': set(), 'dtrun2': set(), - } + } == dtbuild1.possible_dependencies(deptype=('link', 'run')) - assert dtbuild1.possible_dependencies(deptype=('build')) == { + assert { 'dtbuild1': set(['dtbuild2', 'dtlink2']), 'dtbuild2': set(), 'dtlink2': set(), - } + } == dtbuild1.possible_dependencies(deptype=('build')) - assert dtbuild1.possible_dependencies(deptype=('link')) == { + assert { 'dtbuild1': set(['dtlink2']), 'dtlink2': set(), - } + } == dtbuild1.possible_dependencies(deptype=('link')) def test_possible_dependencies_with_multiple_classes( @@ -88,4 +118,4 @@ def test_possible_dependencies_with_multiple_classes( 'dt-diamond-bottom': set(), }) - assert spack.package.possible_dependencies(*pkgs) == expected + assert expected == spack.package.possible_dependencies(*pkgs) diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 299c56481e4..ffaad396c18 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -402,3 +402,24 @@ def test_bundle_patch_directive(mock_directive_bundle, match="Patches are not allowed"): patch = spack.directives.patch('mock/patch.txt') patch(mock_directive_bundle) + + +def test_fetch_options(mock_packages, config): + """Test fetch options inference.""" + + pkg = spack.repo.get('fetch-options') + + fetcher = spack.fetch_strategy.for_package_version(pkg, '1.0') + assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy) + assert fetcher.digest == 'abc10' + assert fetcher.extra_options == {'timeout': 42, 'cookie': 'foobar'} + + fetcher = spack.fetch_strategy.for_package_version(pkg, '1.1') + assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy) + assert fetcher.digest == 'abc11' + assert fetcher.extra_options == {'timeout': 65} + + fetcher = spack.fetch_strategy.for_package_version(pkg, '1.2') + assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy) + assert fetcher.digest == 'abc12' + assert fetcher.extra_options == {'cookie': 'baz'} diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py index 39d12df7b71..39da7c3ae5e 100644 --- a/lib/spack/spack/test/packaging.py +++ b/lib/spack/spack/test/packaging.py @@ -8,10 +8,11 @@ """ import os import stat -import sys import shutil import pytest import argparse +import re +import platform from llnl.util.filesystem import mkdirp @@ -19,16 +20,17 @@ import spack.store import spack.binary_distribution as bindist import spack.cmd.buildcache as buildcache -import spack.util.gpg from spack.spec import Spec from spack.paths import mock_gpg_keys_path from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite from spack.relocate import needs_binary_relocation, needs_text_relocation -from spack.relocate import strings_contains_installroot -from spack.relocate import get_patchelf, relocate_text, relocate_links -from spack.relocate import substitute_rpath, get_relative_rpaths -from spack.relocate import macho_replace_paths, macho_make_paths_relative -from spack.relocate import 
modify_macho_object, macho_get_paths +from spack.relocate import relocate_text, relocate_links +from spack.relocate import get_relative_elf_rpaths +from spack.relocate import get_normalized_elf_rpaths +from spack.relocate import macho_make_paths_relative +from spack.relocate import macho_make_paths_normal +from spack.relocate import set_placeholder, macho_find_paths +from spack.relocate import file_is_relocatable def has_gpg(): @@ -50,9 +52,9 @@ def fake_fetchify(url, pkg): @pytest.mark.usefixtures('install_mockery', 'mock_gnupghome') def test_buildcache(mock_archive, tmpdir): # tweak patchelf to only do a download - spec = Spec("patchelf") - spec.concretize() - pkg = spack.repo.get(spec) + pspec = Spec("patchelf") + pspec.concretize() + pkg = spack.repo.get(pspec) fake_fetchify(pkg.fetcher, pkg) mkdirp(os.path.join(pkg.prefix, "bin")) patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf") @@ -71,7 +73,7 @@ def test_buildcache(mock_archive, tmpdir): pkg = spec.package fake_fetchify(mock_archive.url, pkg) pkg.do_install() - pkghash = '/' + spec.dag_hash(7) + pkghash = '/' + str(spec.dag_hash(7)) # Put some non-relocatable file in there filename = os.path.join(spec.prefix, "dummy.txt") @@ -99,88 +101,69 @@ def test_buildcache(mock_archive, tmpdir): parser = argparse.ArgumentParser() buildcache.setup_parser(parser) + create_args = ['create', '-a', '-f', '-d', mirror_path, pkghash] # Create a private key to sign package with if gpg2 available if spack.util.gpg.Gpg.gpg(): spack.util.gpg.Gpg.create(name='test key 1', expires='0', email='spack@googlegroups.com', comment='Spack test key') - # Create build cache with signing - args = parser.parse_args(['create', '-d', mirror_path, str(spec)]) - buildcache.buildcache(parser, args) - - # Uninstall the package - pkg.do_uninstall(force=True) - - # test overwrite install - args = parser.parse_args(['install', '-f', str(pkghash)]) - buildcache.buildcache(parser, args) - - files = os.listdir(spec.prefix) - - # create build cache with relative path and signing - args = parser.parse_args( - ['create', '-d', mirror_path, '-f', '-r', str(spec)]) - buildcache.buildcache(parser, args) - - # Uninstall the package - pkg.do_uninstall(force=True) - - # install build cache with verification - args = parser.parse_args(['install', str(spec)]) - buildcache.install_tarball(spec, args) - - # test overwrite install - args = parser.parse_args(['install', '-f', str(pkghash)]) - buildcache.buildcache(parser, args) - else: - # create build cache without signing - args = parser.parse_args( - ['create', '-d', mirror_path, '-f', '-u', str(spec)]) - buildcache.buildcache(parser, args) + create_args.insert(create_args.index('-a'), '-u') - # Uninstall the package - pkg.do_uninstall(force=True) + args = parser.parse_args(create_args) + buildcache.buildcache(parser, args) + # trigger overwrite warning + buildcache.buildcache(parser, args) - # install build cache without verification - args = parser.parse_args(['install', '-u', str(spec)]) - buildcache.install_tarball(spec, args) + # Uninstall the package + pkg.do_uninstall(force=True) - files = os.listdir(spec.prefix) - assert 'link_to_dummy.txt' in files - assert 'dummy.txt' in files - # test overwrite install without verification - args = parser.parse_args(['install', '-f', '-u', str(pkghash)]) - buildcache.buildcache(parser, args) + install_args = ['install', '-a', '-f', pkghash] + if not spack.util.gpg.Gpg.gpg(): + install_args.insert(install_args.index('-a'), '-u') + args = parser.parse_args(install_args) + # Test install + 
buildcache.buildcache(parser, args) - # create build cache with relative path - args = parser.parse_args( - ['create', '-d', mirror_path, '-f', '-r', '-u', str(pkghash)]) - buildcache.buildcache(parser, args) + files = os.listdir(spec.prefix) - # Uninstall the package - pkg.do_uninstall(force=True) - - # install build cache - args = parser.parse_args(['install', '-u', str(spec)]) - buildcache.install_tarball(spec, args) - - # test overwrite install - args = parser.parse_args(['install', '-f', '-u', str(pkghash)]) - buildcache.buildcache(parser, args) - - files = os.listdir(spec.prefix) - assert 'link_to_dummy.txt' in files - assert 'dummy.txt' in files - assert os.path.realpath( - os.path.join(spec.prefix, 'link_to_dummy.txt') - ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt')) + assert 'link_to_dummy.txt' in files + assert 'dummy.txt' in files # Validate the relocation information buildinfo = bindist.read_buildinfo_file(spec.prefix) assert(buildinfo['relocate_textfiles'] == ['dummy.txt']) assert(buildinfo['relocate_links'] == ['link_to_dummy.txt']) + # create build cache with relative path + create_args.insert(create_args.index('-a'), '-f') + create_args.insert(create_args.index('-a'), '-r') + args = parser.parse_args(create_args) + buildcache.buildcache(parser, args) + + # Uninstall the package + pkg.do_uninstall(force=True) + + if not spack.util.gpg.Gpg.gpg(): + install_args.insert(install_args.index('-a'), '-u') + args = parser.parse_args(install_args) + buildcache.buildcache(parser, args) + + # test overwrite install + install_args.insert(install_args.index('-a'), '-f') + args = parser.parse_args(install_args) + buildcache.buildcache(parser, args) + + files = os.listdir(spec.prefix) + assert 'link_to_dummy.txt' in files + assert 'dummy.txt' in files +# assert os.path.realpath( +# os.path.join(spec.prefix, 'link_to_dummy.txt') +# ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt')) + + args = parser.parse_args(['keys']) + buildcache.buildcache(parser, args) + args = parser.parse_args(['list']) buildcache.buildcache(parser, args) @@ -200,6 +183,9 @@ def test_buildcache(mock_archive, tmpdir): args = parser.parse_args(['keys', '-f']) buildcache.buildcache(parser, args) + args = parser.parse_args(['keys', '-i', '-t']) + buildcache.buildcache(parser, args) + # unregister mirror with spack config mirrors = {} spack.config.set('mirrors', mirrors) @@ -210,7 +196,10 @@ def test_buildcache(mock_archive, tmpdir): bindist._cached_specs = set() +@pytest.mark.usefixtures('install_mockery') def test_relocate_text(tmpdir): + spec = Spec('trivial-install-test-package') + spec.concretize() with tmpdir.as_cwd(): # Validate the text path replacement old_dir = '/home/spack/opt/spack' @@ -220,24 +209,46 @@ def test_relocate_text(tmpdir): script.close() filenames = [filename] new_dir = '/opt/rh/devtoolset/' - relocate_text(filenames, oldpath=old_dir, newpath=new_dir, - oldprefix=old_dir, newprefix=new_dir) + relocate_text(filenames, old_dir, new_dir, + old_dir, new_dir, + old_dir, new_dir, + {old_dir: new_dir}) with open(filename, "r")as script: for line in script: assert(new_dir in line) - assert(strings_contains_installroot(filename, old_dir) is False) + assert(file_is_relocatable(os.path.realpath(filename))) + # Remove cached binary specs since we deleted the mirror + bindist._cached_specs = set() def test_relocate_links(tmpdir): with tmpdir.as_cwd(): - old_dir = '/home/spack/opt/spack' - filename = 'link.ln' - old_src = os.path.join(old_dir, filename) - os.symlink(old_src, filename) 
- filenames = [filename] - new_dir = '/opt/rh/devtoolset' - relocate_links(filenames, old_dir, new_dir) - assert os.path.realpath(filename) == os.path.join(new_dir, filename) + old_layout_root = os.path.join( + '%s' % tmpdir, 'home', 'spack', 'opt', 'spack') + old_install_prefix = os.path.join( + '%s' % old_layout_root, 'debian6', 'test') + old_binname = os.path.join(old_install_prefix, 'binfile') + placeholder = set_placeholder(old_layout_root) + re.sub(old_layout_root, placeholder, old_binname) + filenames = ['link.ln', 'outsideprefix.ln'] + new_layout_root = os.path.join( + '%s' % tmpdir, 'opt', 'rh', 'devtoolset') + new_install_prefix = os.path.join( + '%s' % new_layout_root, 'test', 'debian6') + new_linkname = os.path.join(new_install_prefix, 'link.ln') + new_linkname2 = os.path.join(new_install_prefix, 'outsideprefix.ln') + new_binname = os.path.join(new_install_prefix, 'binfile') + mkdirp(new_install_prefix) + with open(new_binname, 'w') as f: + f.write('\n') + os.utime(new_binname, None) + os.symlink(old_binname, new_linkname) + os.symlink('/usr/lib/libc.so', new_linkname2) + relocate_links(filenames, old_layout_root, new_layout_root, + old_install_prefix, new_install_prefix, + {old_install_prefix: new_install_prefix}) + assert os.readlink(new_linkname) == new_binname + assert os.readlink(new_linkname2) == '/usr/lib/libc.so' def test_needs_relocation(): @@ -246,16 +257,223 @@ def test_needs_relocation(): assert needs_binary_relocation('application', 'x-executable') assert not needs_binary_relocation('application', 'x-octet-stream') assert not needs_binary_relocation('text', 'x-') - assert needs_text_relocation('text', 'x-') assert not needs_text_relocation('symbolic link to', 'x-') assert needs_binary_relocation('application', 'x-mach-binary') -def test_macho_paths(): +def test_replace_paths(tmpdir): + with tmpdir.as_cwd(): + suffix = 'dylib' if platform.system().lower() == 'darwin' else 'so' + hash_a = '53moz6jwnw3xpiztxwhc4us26klribws' + hash_b = 'tk62dzu62kd4oh3h3heelyw23hw2sfee' + hash_c = 'hdkhduizmaddpog6ewdradpobnbjwsjl' + hash_d = 'hukkosc7ahff7o65h6cdhvcoxm57d4bw' + hash_loco = 'zy4oigsc4eovn5yhr2lk4aukwzoespob' - out = macho_make_paths_relative('/Users/Shares/spack/pkgC/lib/libC.dylib', + prefix2hash = dict() + + old_spack_dir = os.path.join('%s' % tmpdir, + 'Users', 'developer', 'spack') + mkdirp(old_spack_dir) + + oldprefix_a = os.path.join('%s' % old_spack_dir, 'pkgA-%s' % hash_a) + oldlibdir_a = os.path.join('%s' % oldprefix_a, 'lib') + mkdirp(oldlibdir_a) + prefix2hash[str(oldprefix_a)] = hash_a + + oldprefix_b = os.path.join('%s' % old_spack_dir, 'pkgB-%s' % hash_b) + oldlibdir_b = os.path.join('%s' % oldprefix_b, 'lib') + mkdirp(oldlibdir_b) + prefix2hash[str(oldprefix_b)] = hash_b + + oldprefix_c = os.path.join('%s' % old_spack_dir, 'pkgC-%s' % hash_c) + oldlibdir_c = os.path.join('%s' % oldprefix_c, 'lib') + oldlibdir_cc = os.path.join('%s' % oldlibdir_c, 'C') + mkdirp(oldlibdir_c) + prefix2hash[str(oldprefix_c)] = hash_c + + oldprefix_d = os.path.join('%s' % old_spack_dir, 'pkgD-%s' % hash_d) + oldlibdir_d = os.path.join('%s' % oldprefix_d, 'lib') + mkdirp(oldlibdir_d) + prefix2hash[str(oldprefix_d)] = hash_d + + oldprefix_local = os.path.join('%s' % tmpdir, 'usr', 'local') + oldlibdir_local = os.path.join('%s' % oldprefix_local, 'lib') + mkdirp(oldlibdir_local) + prefix2hash[str(oldprefix_local)] = hash_loco + libfile_a = 'libA.%s' % suffix + libfile_b = 'libB.%s' % suffix + libfile_c = 'libC.%s' % suffix + libfile_d = 'libD.%s' % suffix + libfile_loco = 
'libloco.%s' % suffix + old_libnames = [os.path.join(oldlibdir_a, libfile_a), + os.path.join(oldlibdir_b, libfile_b), + os.path.join(oldlibdir_c, libfile_c), + os.path.join(oldlibdir_d, libfile_d), + os.path.join(oldlibdir_local, libfile_loco)] + + for old_libname in old_libnames: + with open(old_libname, 'a'): + os.utime(old_libname, None) + + hash2prefix = dict() + + new_spack_dir = os.path.join('%s' % tmpdir, 'Users', 'Shared', + 'spack') + mkdirp(new_spack_dir) + + prefix_a = os.path.join(new_spack_dir, 'pkgA-%s' % hash_a) + libdir_a = os.path.join(prefix_a, 'lib') + mkdirp(libdir_a) + hash2prefix[hash_a] = str(prefix_a) + + prefix_b = os.path.join(new_spack_dir, 'pkgB-%s' % hash_b) + libdir_b = os.path.join(prefix_b, 'lib') + mkdirp(libdir_b) + hash2prefix[hash_b] = str(prefix_b) + + prefix_c = os.path.join(new_spack_dir, 'pkgC-%s' % hash_c) + libdir_c = os.path.join(prefix_c, 'lib') + libdir_cc = os.path.join(libdir_c, 'C') + mkdirp(libdir_cc) + hash2prefix[hash_c] = str(prefix_c) + + prefix_d = os.path.join(new_spack_dir, 'pkgD-%s' % hash_d) + libdir_d = os.path.join(prefix_d, 'lib') + mkdirp(libdir_d) + hash2prefix[hash_d] = str(prefix_d) + + prefix_local = os.path.join('%s' % tmpdir, 'usr', 'local') + libdir_local = os.path.join(prefix_local, 'lib') + mkdirp(libdir_local) + hash2prefix[hash_loco] = str(prefix_local) + + new_libnames = [os.path.join(libdir_a, libfile_a), + os.path.join(libdir_b, libfile_b), + os.path.join(libdir_cc, libfile_c), + os.path.join(libdir_d, libfile_d), + os.path.join(libdir_local, libfile_loco)] + + for new_libname in new_libnames: + with open(new_libname, 'a'): + os.utime(new_libname, None) + + prefix2prefix = dict() + for prefix, hash in prefix2hash.items(): + prefix2prefix[prefix] = hash2prefix[hash] + + out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b, + oldlibdir_c, + oldlibdir_cc, oldlibdir_local], + [os.path.join(oldlibdir_a, + libfile_a), + os.path.join(oldlibdir_b, + libfile_b), + os.path.join(oldlibdir_local, + libfile_loco)], + os.path.join(oldlibdir_cc, + libfile_c), + old_spack_dir, + prefix2prefix + ) + assert out_dict == {oldlibdir_a: libdir_a, + oldlibdir_b: libdir_b, + oldlibdir_c: libdir_c, + oldlibdir_cc: libdir_cc, + libdir_local: libdir_local, + os.path.join(oldlibdir_a, libfile_a): + os.path.join(libdir_a, libfile_a), + os.path.join(oldlibdir_b, libfile_b): + os.path.join(libdir_b, libfile_b), + os.path.join(oldlibdir_local, libfile_loco): + os.path.join(libdir_local, libfile_loco), + os.path.join(oldlibdir_cc, libfile_c): + os.path.join(libdir_cc, libfile_c)} + + out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b, + oldlibdir_c, + oldlibdir_cc, + oldlibdir_local], + [os.path.join(oldlibdir_a, + libfile_a), + os.path.join(oldlibdir_b, + libfile_b), + os.path.join(oldlibdir_cc, + libfile_c), + os.path.join(oldlibdir_local, + libfile_loco)], + None, + old_spack_dir, + prefix2prefix + ) + assert out_dict == {oldlibdir_a: libdir_a, + oldlibdir_b: libdir_b, + oldlibdir_c: libdir_c, + oldlibdir_cc: libdir_cc, + libdir_local: libdir_local, + os.path.join(oldlibdir_a, libfile_a): + os.path.join(libdir_a, libfile_a), + os.path.join(oldlibdir_b, libfile_b): + os.path.join(libdir_b, libfile_b), + os.path.join(oldlibdir_local, libfile_loco): + os.path.join(libdir_local, libfile_loco), + os.path.join(oldlibdir_cc, libfile_c): + os.path.join(libdir_cc, libfile_c)} + + out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b, + oldlibdir_c, oldlibdir_cc, + oldlibdir_local], + ['@rpath/%s' % libfile_a, + '@rpath/%s' % libfile_b, + '@rpath/%s' 
% libfile_c, + '@rpath/%s' % libfile_loco], + None, + old_spack_dir, + prefix2prefix + ) + + assert out_dict == {'@rpath/%s' % libfile_a: + '@rpath/%s' % libfile_a, + '@rpath/%s' % libfile_b: + '@rpath/%s' % libfile_b, + '@rpath/%s' % libfile_c: + '@rpath/%s' % libfile_c, + '@rpath/%s' % libfile_loco: + '@rpath/%s' % libfile_loco, + oldlibdir_a: libdir_a, + oldlibdir_b: libdir_b, + oldlibdir_c: libdir_c, + oldlibdir_cc: libdir_cc, + libdir_local: libdir_local, + } + + out_dict = macho_find_paths([oldlibdir_a, + oldlibdir_b, + oldlibdir_d, + oldlibdir_local], + ['@rpath/%s' % libfile_a, + '@rpath/%s' % libfile_b, + '@rpath/%s' % libfile_loco], + None, + old_spack_dir, + prefix2prefix) + assert out_dict == {'@rpath/%s' % libfile_a: + '@rpath/%s' % libfile_a, + '@rpath/%s' % libfile_b: + '@rpath/%s' % libfile_b, + '@rpath/%s' % libfile_loco: + '@rpath/%s' % libfile_loco, + oldlibdir_a: libdir_a, + oldlibdir_b: libdir_b, + oldlibdir_d: libdir_d, + libdir_local: libdir_local, + } + + +def test_macho_make_paths(): + out = macho_make_paths_relative('/Users/Shared/spack/pkgC/lib/libC.dylib', '/Users/Shared/spack', ('/Users/Shared/spack/pkgA/lib', '/Users/Shared/spack/pkgB/lib', @@ -264,13 +482,43 @@ def test_macho_paths(): '/Users/Shared/spack/pkgB/libB.dylib', '/usr/local/lib/libloco.dylib'), '/Users/Shared/spack/pkgC/lib/libC.dylib') - assert out == (['@loader_path/../../../../Shared/spack/pkgA/lib', - '@loader_path/../../../../Shared/spack/pkgB/lib', - '/usr/local/lib'], - ['@loader_path/../../../../Shared/spack/pkgA/libA.dylib', - '@loader_path/../../../../Shared/spack/pkgB/libB.dylib', - '/usr/local/lib/libloco.dylib'], - '@rpath/libC.dylib') + assert out == {'/Users/Shared/spack/pkgA/lib': + '@loader_path/../../pkgA/lib', + '/Users/Shared/spack/pkgB/lib': + '@loader_path/../../pkgB/lib', + '/usr/local/lib': '/usr/local/lib', + '/Users/Shared/spack/pkgA/libA.dylib': + '@loader_path/../../pkgA/libA.dylib', + '/Users/Shared/spack/pkgB/libB.dylib': + '@loader_path/../../pkgB/libB.dylib', + '/usr/local/lib/libloco.dylib': + '/usr/local/lib/libloco.dylib', + '/Users/Shared/spack/pkgC/lib/libC.dylib': + '@rpath/libC.dylib'} + + out = macho_make_paths_normal('/Users/Shared/spack/pkgC/lib/libC.dylib', + ('@loader_path/../../pkgA/lib', + '@loader_path/../../pkgB/lib', + '/usr/local/lib'), + ('@loader_path/../../pkgA/libA.dylib', + '@loader_path/../../pkgB/libB.dylib', + '/usr/local/lib/libloco.dylib'), + '@rpath/libC.dylib') + + assert out == {'@rpath/libC.dylib': + '/Users/Shared/spack/pkgC/lib/libC.dylib', + '@loader_path/../../pkgA/lib': + '/Users/Shared/spack/pkgA/lib', + '@loader_path/../../pkgB/lib': + '/Users/Shared/spack/pkgB/lib', + '/usr/local/lib': '/usr/local/lib', + '@loader_path/../../pkgA/libA.dylib': + '/Users/Shared/spack/pkgA/libA.dylib', + '@loader_path/../../pkgB/libB.dylib': + '/Users/Shared/spack/pkgB/libB.dylib', + '/usr/local/lib/libloco.dylib': + '/usr/local/lib/libloco.dylib' + } out = macho_make_paths_relative('/Users/Shared/spack/pkgC/bin/exeC', '/Users/Shared/spack', @@ -281,98 +529,47 @@ def test_macho_paths(): '/Users/Shared/spack/pkgB/libB.dylib', '/usr/local/lib/libloco.dylib'), None) - assert out == (['@loader_path/../../pkgA/lib', - '@loader_path/../../pkgB/lib', - '/usr/local/lib'], - ['@loader_path/../../pkgA/libA.dylib', - '@loader_path/../../pkgB/libB.dylib', - '/usr/local/lib/libloco.dylib'], None) + assert out == {'/Users/Shared/spack/pkgA/lib': + '@loader_path/../../pkgA/lib', + '/Users/Shared/spack/pkgB/lib': + '@loader_path/../../pkgB/lib', + 
'/usr/local/lib': '/usr/local/lib', + '/Users/Shared/spack/pkgA/libA.dylib': + '@loader_path/../../pkgA/libA.dylib', + '/Users/Shared/spack/pkgB/libB.dylib': + '@loader_path/../../pkgB/libB.dylib', + '/usr/local/lib/libloco.dylib': + '/usr/local/lib/libloco.dylib'} - out = macho_replace_paths('/Users/Shared/spack', - '/Applications/spack', - ('/Users/Shared/spack/pkgA/lib', - '/Users/Shared/spack/pkgB/lib', - '/usr/local/lib'), - ('/Users/Shared/spack/pkgA/libA.dylib', - '/Users/Shared/spack/pkgB/libB.dylib', - '/usr/local/lib/libloco.dylib'), - '/Users/Shared/spack/pkgC/lib/libC.dylib') - assert out == (['/Applications/spack/pkgA/lib', - '/Applications/spack/pkgB/lib', - '/usr/local/lib'], - ['/Applications/spack/pkgA/libA.dylib', - '/Applications/spack/pkgB/libB.dylib', - '/usr/local/lib/libloco.dylib'], - '/Applications/spack/pkgC/lib/libC.dylib') + out = macho_make_paths_normal('/Users/Shared/spack/pkgC/bin/exeC', + ('@loader_path/../../pkgA/lib', + '@loader_path/../../pkgB/lib', + '/usr/local/lib'), + ('@loader_path/../../pkgA/libA.dylib', + '@loader_path/../../pkgB/libB.dylib', + '/usr/local/lib/libloco.dylib'), + None) - out = macho_replace_paths('/Users/Shared/spack', - '/Applications/spack', - ('/Users/Shared/spack/pkgA/lib', - '/Users/Shared/spack/pkgB/lib', - '/usr/local/lib'), - ('/Users/Shared/spack/pkgA/libA.dylib', - '/Users/Shared/spack/pkgB/libB.dylib', - '/usr/local/lib/libloco.dylib'), - None) - assert out == (['/Applications/spack/pkgA/lib', - '/Applications/spack/pkgB/lib', - '/usr/local/lib'], - ['/Applications/spack/pkgA/libA.dylib', - '/Applications/spack/pkgB/libB.dylib', - '/usr/local/lib/libloco.dylib'], - None) + assert out == {'@loader_path/../../pkgA/lib': + '/Users/Shared/spack/pkgA/lib', + '@loader_path/../../pkgB/lib': + '/Users/Shared/spack/pkgB/lib', + '/usr/local/lib': '/usr/local/lib', + '@loader_path/../../pkgA/libA.dylib': + '/Users/Shared/spack/pkgA/libA.dylib', + '@loader_path/../../pkgB/libB.dylib': + '/Users/Shared/spack/pkgB/libB.dylib', + '/usr/local/lib/libloco.dylib': + '/usr/local/lib/libloco.dylib'} def test_elf_paths(): - out = get_relative_rpaths( + out = get_relative_elf_rpaths( '/usr/bin/test', '/usr', ('/usr/lib', '/usr/lib64', '/opt/local/lib')) assert out == ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib'] - out = substitute_rpath( - ('/usr/lib', '/usr/lib64', '/opt/local/lib'), '/usr', '/opt') - assert out == ['/opt/lib', '/opt/lib64', '/opt/local/lib'] - - -@pytest.mark.skipif(sys.platform != 'darwin', - reason="only works with Mach-o objects") -def test_relocate_macho(tmpdir): - with tmpdir.as_cwd(): - - get_patchelf() # this does nothing on Darwin - - rpaths, deps, idpath = macho_get_paths('/bin/bash') - nrpaths, ndeps, nid = macho_make_paths_relative('/bin/bash', '/usr', - rpaths, deps, idpath) - shutil.copyfile('/bin/bash', 'bash') - modify_macho_object('bash', - rpaths, deps, idpath, - nrpaths, ndeps, nid) - - rpaths, deps, idpath = macho_get_paths('/bin/bash') - nrpaths, ndeps, nid = macho_replace_paths('/usr', '/opt', - rpaths, deps, idpath) - shutil.copyfile('/bin/bash', 'bash') - modify_macho_object('bash', - rpaths, deps, idpath, - nrpaths, ndeps, nid) - - path = '/usr/lib/libncurses.5.4.dylib' - rpaths, deps, idpath = macho_get_paths(path) - nrpaths, ndeps, nid = macho_make_paths_relative(path, '/usr', - rpaths, deps, idpath) - shutil.copyfile( - '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib') - modify_macho_object('libncurses.5.4.dylib', - rpaths, deps, idpath, - nrpaths, ndeps, nid) - - rpaths, deps, 
idpath = macho_get_paths(path) - nrpaths, ndeps, nid = macho_replace_paths('/usr', '/opt', - rpaths, deps, idpath) - shutil.copyfile( - '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib') - modify_macho_object( - 'libncurses.5.4.dylib', - rpaths, deps, idpath, - nrpaths, ndeps, nid) + out = get_normalized_elf_rpaths( + '/usr/bin/test', + ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib']) + assert out == ['/usr/lib', '/usr/lib64', '/opt/local/lib'] diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py index 113bdcf66aa..0a9e9f7f0a3 100644 --- a/lib/spack/spack/test/relocate.py +++ b/lib/spack/spack/test/relocate.py @@ -3,15 +3,18 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import collections import os.path import platform import shutil -import pytest - import llnl.util.filesystem +import pytest +import spack.architecture +import spack.concretize import spack.paths import spack.relocate +import spack.spec import spack.store import spack.tengine import spack.util.executable @@ -45,6 +48,47 @@ def source_file(tmpdir, is_relocatable): return src +@pytest.fixture(params=['which_found', 'installed', 'to_be_installed']) +def expected_patchelf_path(request, mutable_database, monkeypatch): + """Prepare the stage to test different cases that can occur + when searching for patchelf. + """ + case = request.param + + # Mock the which function + which_fn = { + 'which_found': lambda x: collections.namedtuple( + '_', ['path'] + )('/usr/bin/patchelf') + } + monkeypatch.setattr( + spack.util.executable, 'which', + which_fn.setdefault(case, lambda x: None) + ) + if case == 'which_found': + return '/usr/bin/patchelf' + + # TODO: Mock a case for Darwin architecture + + spec = spack.spec.Spec('patchelf') + spec.concretize() + + patchelf_cls = type(spec.package) + do_install = patchelf_cls.do_install + expected_path = os.path.join(spec.prefix.bin, 'patchelf') + + def do_install_mock(self, **kwargs): + do_install(self, fake=True) + with open(expected_path): + pass + + monkeypatch.setattr(patchelf_cls, 'do_install', do_install_mock) + if case == 'installed': + spec.package.do_install() + + return expected_path + + @pytest.mark.requires_executables( '/usr/bin/gcc', 'patchelf', 'strings', 'file' ) @@ -64,7 +108,7 @@ def test_file_is_relocatable(source_file, is_relocatable): 'patchelf', 'strings', 'file' ) def test_patchelf_is_relocatable(): - patchelf = spack.relocate.get_patchelf() + patchelf = spack.relocate._patchelf() assert llnl.util.filesystem.is_exe(patchelf) assert spack.relocate.file_is_relocatable(patchelf) @@ -87,3 +131,12 @@ def test_file_is_relocatable_errors(tmpdir): with pytest.raises(ValueError) as exc_info: spack.relocate.file_is_relocatable('delete.me') assert 'is not an absolute path' in str(exc_info.value) + + +@pytest.mark.skipif( + platform.system().lower() != 'linux', + reason='implementation for MacOS still missing' +) +def test_search_patchelf(expected_patchelf_path): + current = spack.relocate._patchelf() + assert current == expected_patchelf_path diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 419a39968e9..e031f02c255 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -189,7 +189,7 @@ def test_conditional_dep_with_user_constraints(): assert ('y@3' in spec) -@pytest.mark.usefixtures('mutable_mock_repo') +@pytest.mark.usefixtures('mutable_mock_repo', 'config') class TestSpecDag(object): def test_conflicting_package_constraints(self, set_dependency): @@ -387,7 +387,6 @@ 
def test_unsatisfiable_architecture(self, set_dependency): with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError): spec.normalize() - @pytest.mark.usefixtures('config') def test_invalid_dep(self): spec = Spec('libelf ^mpich') with pytest.raises(spack.spec.InvalidDependencyError): @@ -602,7 +601,6 @@ def test_copy_normalized(self): copy_ids = set(id(s) for s in copy.traverse()) assert not orig_ids.intersection(copy_ids) - @pytest.mark.usefixtures('config') def test_copy_concretized(self): orig = Spec('mpileaks') orig.concretize() diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py index 71a122455f7..20648b47666 100644 --- a/lib/spack/spack/test/url_fetch.py +++ b/lib/spack/spack/test/url_fetch.py @@ -26,18 +26,19 @@ def checksum_type(request): @pytest.fixture def pkg_factory(): Pkg = collections.namedtuple( - 'Pkg', ['url_for_version', 'urls', 'url', 'versions'] + 'Pkg', ['url_for_version', 'urls', 'url', 'versions', 'fetch_options'] ) - def factory(url, urls): + def factory(url, urls, fetch_options={}): def fn(v): - main_url = url or urls.pop(0) + main_url = url or urls[0] return spack.url.substitute_version(main_url, v) return Pkg( url_for_version=fn, url=url, urls=urls, - versions=collections.defaultdict(dict) + versions=collections.defaultdict(dict), + fetch_options=fetch_options ) return factory @@ -130,6 +131,10 @@ def test_from_list_url(mock_packages, config, spec, url, digest): assert isinstance(fetch_strategy, fs.URLFetchStrategy) assert os.path.basename(fetch_strategy.url) == url assert fetch_strategy.digest == digest + assert fetch_strategy.extra_options == {} + pkg.fetch_options = {'timeout': 60} + fetch_strategy = fs.from_list_url(pkg) + assert fetch_strategy.extra_options == {'timeout': 60} def test_from_list_url_unspecified(mock_packages, config): @@ -142,6 +147,10 @@ def test_from_list_url_unspecified(mock_packages, config): assert isinstance(fetch_strategy, fs.URLFetchStrategy) assert os.path.basename(fetch_strategy.url) == 'foo-2.0.0.tar.gz' assert fetch_strategy.digest is None + assert fetch_strategy.extra_options == {} + pkg.fetch_options = {'timeout': 60} + fetch_strategy = fs.from_list_url(pkg) + assert fetch_strategy.extra_options == {'timeout': 60} def test_nosource_from_list_url(mock_packages, config): @@ -191,3 +200,7 @@ def test_candidate_urls(pkg_factory, url, urls, version, expected): pkg = pkg_factory(url, urls) f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version) assert f.candidate_urls == expected + assert f.extra_options == {} + pkg = pkg_factory(url, urls, fetch_options={'timeout': 60}) + f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version) + assert f.extra_options == {'timeout': 60} diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py index 4c9fdc3d67f..5f2c1c6f502 100644 --- a/lib/spack/spack/user_environment.py +++ b/lib/spack/spack/user_environment.py @@ -65,7 +65,7 @@ def environment_modifications_for_spec(spec, view=None): This list is specific to the location of the spec or its projection in the view.""" spec = spec.copy() - if view: + if view and not spec.external: spec.prefix = prefix.Prefix(view.view().get_projection_for_spec(spec)) # generic environment modifications determined by inspecting the spec diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py index d617954ab17..1688b49f1b1 100644 --- a/lib/spack/spack/util/compression.py +++ b/lib/spack/spack/util/compression.py @@ -32,6 +32,9 @@ def decompressor_for(path, 
extension=None): if extension and re.match(r'gz', extension): gunzip = which('gunzip', required=True) return gunzip + if extension and re.match(r'bz2', extension): + bunzip2 = which('bunzip2', required=True) + return bunzip2 tar = which('tar', required=True) tar.add_default_arg('-xf') return tar diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index 248a6d3c8c0..8069f514319 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -597,12 +597,15 @@ def from_sourcing_file(filename, *arguments, **kwargs): 'SHLVL', '_', 'PWD', 'OLDPWD', 'PS1', 'PS2', 'ENV', # Environment modules v4 'LOADEDMODULES', '_LMFILES_', 'BASH_FUNC_module()', 'MODULEPATH', - 'MODULES_(.*)', r'(\w*)_mod(quar|share)' + 'MODULES_(.*)', r'(\w*)_mod(quar|share)', + # Lmod configuration + r'LMOD_(.*)', 'MODULERCFILE' ]) # Compute the environments before and after sourcing before = sanitize( - dict(os.environ), blacklist=blacklist, whitelist=whitelist + environment_after_sourcing_files(os.devnull, **kwargs), + blacklist=blacklist, whitelist=whitelist ) file_and_args = (filename,) + arguments after = sanitize( diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index ce30e18f420..1f5fdfb7614 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -209,7 +209,7 @@ def streamify(arg, mode): istream.close() def __eq__(self, other): - return self.exe == other.exe + return hasattr(other, 'exe') and self.exe == other.exe def __neq__(self, other): return not (self == other) diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py index 0edf7e6102a..74790156ae0 100644 --- a/lib/spack/spack/util/module_cmd.py +++ b/lib/spack/spack/util/module_cmd.py @@ -19,16 +19,11 @@ # If we need another option that changes the environment, add it here. module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse'] py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'" - -# This is just to enable testing. I hate it but we can't find a better way -_test_mode = False +_cmd_template = "'module ' + ' '.join(args) + ' 2>&1'" def module(*args): - module_cmd = 'module ' + ' '.join(args) + ' 2>&1' - if _test_mode: - tty.warn('module function operating in test mode') - module_cmd = ". 
%s 2>&1" % args[1] + module_cmd = eval(_cmd_template) # So we can monkeypatch for testing if args[0] in module_change_commands: # Do the module manipulation, then output the environment in JSON # and read the JSON back in the parent process to update os.environ diff --git a/share/spack/csh/spack.csh b/share/spack/csh/spack.csh index b4d963ae101..7a510502812 100644 --- a/share/spack/csh/spack.csh +++ b/share/spack/csh/spack.csh @@ -66,7 +66,7 @@ case cd: [ $#_sp_args -gt 0 ] && set _sp_arg = ($_sp_args[1]) shift _sp_args - if ( "$_sp_arg" == "-h" ) then + if ( "$_sp_arg" == "-h" || "$_sp_args" == "--help" ) then \spack cd -h else cd `\spack location $_sp_arg $_sp_args` @@ -78,7 +78,7 @@ case env: set _sp_arg="" [ $#_sp_args -gt 0 ] && set _sp_arg = ($_sp_args[1]) - if ( "$_sp_arg" == "-h" ) then + if ( "$_sp_arg" == "-h" || "$_sp_arg" == "--help" ) then \spack env -h else switch ($_sp_arg) @@ -86,12 +86,18 @@ case env: set _sp_env_arg="" [ $#_sp_args -gt 1 ] && set _sp_env_arg = ($_sp_args[2]) - if ( "$_sp_env_arg" == "" || "$_sp_args" =~ "*--sh*" || "$_sp_args" =~ "*--csh*" || "$_sp_args" =~ "*-h*" ) then - # no args or args contain -h/--help, --sh, or --csh: just execute + # Space needed here to differentiate between `-h` + # argument and environments with "-h" in the name. + if ( "$_sp_env_arg" == "" || \ + "$_sp_args" =~ "* --sh*" || \ + "$_sp_args" =~ "* --csh*" || \ + "$_sp_args" =~ "* -h*" || \ + "$_sp_args" =~ "* --help*" ) then + # No args or args contain --sh, --csh, or -h/--help: just execute. \spack $_sp_flags env $_sp_args else shift _sp_args # consume 'activate' or 'deactivate' - # actual call to activate: source the output + # Actual call to activate: source the output. eval `\spack $_sp_flags env activate --csh $_sp_args` endif breaksw @@ -99,30 +105,40 @@ case env: set _sp_env_arg="" [ $#_sp_args -gt 1 ] && set _sp_env_arg = ($_sp_args[2]) - if ( "$_sp_env_arg" != "" ) then - # with args: execute the command + # Space needed here to differentiate between `--sh` + # argument and environments with "--sh" in the name. + if ( "$_sp_args" =~ "* --sh*" || \ + "$_sp_args" =~ "* --csh*" ) then + # Args contain --sh or --csh: just execute. \spack $_sp_flags env $_sp_args + else if ( "$_sp_env_arg" != "" ) then + # Any other arguments are an error or -h/--help: just run help. + \spack $_sp_flags env deactivate -h else - # no args: source the output + # No args: source the output of the command. eval `\spack $_sp_flags env deactivate --csh` endif breaksw default: - echo default \spack $_sp_flags env $_sp_args breaksw endsw endif + breaksw + case load: case unload: - # Space in `-h` portion is important for differentiating -h option - # from variants that begin with "h" or packages with "-h" in name - if ( "$_sp_spec" =~ "*--sh*" || "$_sp_spec" =~ "*--csh*" || \ - " $_sp_spec" =~ "* -h*" || "$_sp_spec" =~ "*--help*") then - # IF a shell is given, print shell output + # Get --sh, --csh, -h, or --help arguments. + # Space needed here to differentiate between `-h` + # argument and specs with "-h" in the name. + if ( " $_sp_spec" =~ "* --sh*" || \ + " $_sp_spec" =~ "* --csh*" || \ + " $_sp_spec" =~ "* -h*" || \ + " $_sp_spec" =~ "* --help*") then + # Args contain --sh, --csh, or -h/--help: just execute. \spack $_sp_flags $_sp_subcommand $_sp_spec else - # otherwise eval with csh + # Otherwise, eval with csh. 
eval `\spack $_sp_flags $_sp_subcommand --csh $_sp_spec || \ echo "exit 1"` endif diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index c3d9ef260cd..5968c4c0160 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -115,31 +115,44 @@ spack() { else case $_sp_arg in activate) - _a="$@" + # Get --sh, --csh, or -h/--help arguments. + # Space needed here because regexes start with a space + # and `-h` may be the only argument. + _a=" $@" + # Space needed here to differentiate between `-h` + # argument and environments with "-h" in the name. + # Also see: https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html#Shell-Parameter-Expansion if [ -z ${1+x} ] || \ - [ "${_a#*--sh}" != "$_a" ] || \ - [ "${_a#*--csh}" != "$_a" ] || \ - [ "${_a#*-h}" != "$_a" ]; + [ "${_a#* --sh}" != "$_a" ] || \ + [ "${_a#* --csh}" != "$_a" ] || \ + [ "${_a#* -h}" != "$_a" ] || \ + [ "${_a#* --help}" != "$_a" ]; then - # no args or args contain -h/--help, --sh, or --csh: just execute + # No args or args contain --sh, --csh, or -h/--help: just execute. command spack env activate "$@" else - # actual call to activate: source the output + # Actual call to activate: source the output. eval $(command spack $_sp_flags env activate --sh "$@") fi ;; deactivate) - _a="$@" - if [ "${_a#*--sh}" != "$_a" ] || \ - [ "${_a#*--csh}" != "$_a" ]; + # Get --sh, --csh, or -h/--help arguments. + # Space needed here because regexes start with a space + # and `-h` may be the only argument. + _a=" $@" + # Space needed here to differentiate between `--sh` + # argument and environments with "--sh" in the name. + # Also see: https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html#Shell-Parameter-Expansion + if [ "${_a#* --sh}" != "$_a" ] || \ + [ "${_a#* --csh}" != "$_a" ]; then - # just execute the command if --sh or --csh are provided + # Args contain --sh or --csh: just execute. command spack env deactivate "$@" elif [ -n "$*" ]; then - # any other arguments are an error or help, so just run help + # Any other arguments are an error or -h/--help: just run help. command spack env deactivate -h else - # no args: source the output of the command + # No args: source the output of the command. eval $(command spack $_sp_flags env deactivate --sh) fi ;; @@ -151,17 +164,19 @@ spack() { return ;; "load"|"unload") - # get --sh, --csh, --help, or -h arguments - # space is important for -h case to differentiate between `-h` - # argument and specs with "-h" in package name or variant settings + # Get --sh, --csh, -h, or --help arguments. + # Space needed here because regexes start with a space + # and `-h` may be the only argument. _a=" $@" + # Space needed here to differentiate between `-h` + # argument and specs with "-h" in the name. + # Also see: https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html#Shell-Parameter-Expansion if [ "${_a#* --sh}" != "$_a" ] || \ [ "${_a#* --csh}" != "$_a" ] || \ [ "${_a#* -h}" != "$_a" ] || \ [ "${_a#* --help}" != "$_a" ]; then - # just execute the command if --sh or --csh are provided - # or if the -h or --help arguments are provided + # Args contain --sh, --csh, or -h/--help: just execute. 
command spack $_sp_flags $_sp_subcommand "$@" else eval $(command spack $_sp_flags $_sp_subcommand --sh "$@" || \ diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index d3d149bec30..34ed2bd1484 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -655,7 +655,7 @@ _spack_debug() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="create-db-tarball" + SPACK_COMPREPLY="create-db-tarball report" fi } @@ -663,6 +663,10 @@ _spack_debug_create_db_tarball() { SPACK_COMPREPLY="-h --help" } +_spack_debug_report() { + SPACK_COMPREPLY="-h --help" +} + _spack_dependencies() { if $list_options then @@ -980,7 +984,7 @@ _spack_list() { _spack_load() { if $list_options then - SPACK_COMPREPLY="-h --help -r --dependencies --sh --csh --only" + SPACK_COMPREPLY="-h --help -r --dependencies --sh --csh --first --only" else _installed_packages fi @@ -1272,7 +1276,7 @@ _spack_pydoc() { _spack_python() { if $list_options then - SPACK_COMPREPLY="-h --help -c -m" + SPACK_COMPREPLY="-h --help -V --version -c -m" else SPACK_COMPREPLY="" fi diff --git a/var/spack/repos/builtin.mock/packages/cmake-client/package.py b/var/spack/repos/builtin.mock/packages/cmake-client/package.py index 286ee080863..2350259b228 100644 --- a/var/spack/repos/builtin.mock/packages/cmake-client/package.py +++ b/var/spack/repos/builtin.mock/packages/cmake-client/package.py @@ -21,6 +21,14 @@ class CmakeClient(CMakePackage): version('1.0', '4cb3ff35b2472aae70f542116d616e63') + variant( + 'multi', description='', + values=any_combination_of('up', 'right', 'back').with_default('up') + ) + variant('single', description='', default='blue', + values=('blue', 'red', 'green'), multi=False) + variant('truthy', description='', default=True) + callback_counter = 0 flipped = False diff --git a/var/spack/repos/builtin.mock/packages/fetch-options/package.py b/var/spack/repos/builtin.mock/packages/fetch-options/package.py new file mode 100644 index 00000000000..1a31a535e13 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/fetch-options/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class FetchOptions(Package): + """Mock package with fetch_options.""" + + homepage = "http://www.fetch-options-example.com" + + url = 'https://example.com/some/tarball-1.0.tar.gz' + + fetch_options = {'timeout': 42, 'cookie': 'foobar'} + timeout = {'timeout': 65} + cookie = {'cookie': 'baz'} + + version('1.2', 'abc12', fetch_options=cookie) + version('1.1', 'abc11', fetch_options=timeout) + version('1.0', 'abc10') diff --git a/var/spack/repos/builtin/packages/acl/package.py b/var/spack/repos/builtin/packages/acl/package.py new file mode 100644 index 00000000000..ea9bf4172f5 --- /dev/null +++ b/var/spack/repos/builtin/packages/acl/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Acl(AutotoolsPackage): + """Commands for Manipulating POSIX Access Control Lists.""" + + homepage = "https://savannah.nongnu.org/projects/acl" + url = "http://git.savannah.nongnu.org/cgit/acl.git/snapshot/acl-2.2.53.tar.gz" + + version('2.2.53', sha256='9e905397ac10d06768c63edd0579c34b8431555f2ea8e8f2cee337b31f856805') + version('2.2.52', sha256='f3f31d2229c903184ff877aa0ee658b87ec20fec8aebb51e65eaa68d7b24e629') + version('2.2.51', sha256='31a43d96a274a39bfcb805fb903d45840515344884d224cef166b482693a9f48') + version('2.2.50', sha256='39e21d623a9f0da8c042cde346c01871b498d51400e92c2ab1490d5ffd724401') + version('2.2.49', sha256='c6e01460cac4e47673dd60a7f57b970b49f6998bb564eff141cca129aa8940d1') + version('2.2.48', sha256='877eaeccc1500baec58391935b46ac7dfc5ffd8c54fbc0385ccd8b2b18ac3fa6') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('attr') + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./autogen.sh') diff --git a/var/spack/repos/builtin/packages/acts-core/package.py b/var/spack/repos/builtin/packages/acts-core/package.py index 1cd9fe63758..d0c9498d404 100644 --- a/var/spack/repos/builtin/packages/acts-core/package.py +++ b/var/spack/repos/builtin/packages/acts-core/package.py @@ -30,10 +30,14 @@ class ActsCore(CMakePackage): """ homepage = "http://acts.web.cern.ch/ACTS/" - git = "https://gitlab.cern.ch/acts/acts-core.git" + git = "https://github.com/acts-project/acts.git" maintainers = ['HadrienG2'] version('develop', branch='master') + version('0.20.0', commit='1d37a849a9c318e8ca4fa541ef8433c1f004637b') + version('0.19.0', commit='408335636486c421c6222a64372250ef12544df6') + version('0.18.0', commit='d58a68cf75b52a5e0f563bc237f09250aa9da80c') + version('0.17.0', commit='0789f654ff484b013fd27e5023cf342785ea8d97') version('0.16.0', commit='b3d965fe0b8ae335909d79114ef261c6b996773a') version('0.15.0', commit='267c28f69c561e64369661a6235b03b5a610d6da') version('0.14.0', commit='38d678fcb205b77d60326eae913fbb1b054acea1') @@ -75,7 +79,8 @@ class ActsCore(CMakePackage): depends_on('cmake @3.11:', type='build') depends_on('boost @1.62:1.69.99 +program_options +test', when='@:0.10.3') - depends_on('boost @1.62: +program_options +test', when='@0.10.4:') + depends_on('boost @1.62: +program_options +test', when='@0.10.4:0.18.0') + depends_on('boost @1.69: +program_options +test', when='@0.19.0:') depends_on('eigen @3.2.9:', type='build') depends_on('nlohmann-json @3.2.0:', when='@0.14.0: +json') depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0') diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py index 336f1833dba..def99d8a4ef 100644 --- a/var/spack/repos/builtin/packages/adol-c/package.py +++ b/var/spack/repos/builtin/packages/adol-c/package.py @@ -11,15 +11,20 @@ class AdolC(AutotoolsPackage): derivatives of vector functions in C and C++ programs by operator overloading.""" - homepage = "https://projects.coin-or.org/ADOL-C" - url = "http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz" - git = "https://gitlab.com/adol-c/adol-c.git" - - version('develop', branch='master') - version('2.6.3', sha256='6ed74580695a0d2c960581e5430ebfcd380eb5da9337daf488bf2e89039e9c21') - version('2.6.2', sha256='f6326e7ba994d02074816132d4461915221069267c31862b31fab7020965c658') - version('2.6.1', 
sha256='037089e0f64224e5e6255b61af4fe7faac080533fd778b76fe946e52491918b5') - version('2.5.2', sha256='2fa514d9799989d6379738c2bcf75070d9834e4d227eb32a5b278840893b2af9') + homepage = "https://github.com/coin-or/ADOL-C" + url = "https://github.com/coin-or/ADOL-C/archive/releases/2.7.2.tar.gz" + git = "https://github.com/coin-or/ADOL-C.git" + version('master', branch='master') + version('2.7.2', sha256='701e0856baae91b98397960d5e0a87a549988de9d4002d0e9a56fa08f5455f6e') + version('2.7.1', sha256='a05422cc7faff5700e134e113822d1934fb540ad247e63778524d5d6d75bb0ef') + version('2.7.0', sha256='a75cfa6240de8692b2a3e8e782319efefc316f1e595234fcee972ab0e7afa3cd') + version('2.6.3', sha256='9750a0a06dcab9a0dba2010f07872ea9057ed29781e9e7d571691c27aa559b04') + version('2.6.2', sha256='4ef6ff15b4691235c0ea6580917c7eb17d09ded485ac524a0a33ac7e99ab004b') + version('2.6.1', sha256='48b41c40d1c8437fb98eeed4b24deaf3e59da804f34ac9c848da1b049b3b071a') + version('2.6.0', sha256='26a1fcb8561f15781f645d245fc345c83497147ec7bb64d4bfc96e32c34c6c1c') + version('2.5.2', sha256='390edb1513f749b2dbf6fb90db12ce786f6532af80e589f161ff43646b3a78a6') + version('2.5.1', sha256='dedb93c3bb291366d799014b04b6d1ec63ca4e7216edf16167776c07961e3b4a') + version('2.5.0', sha256='9d51c426d831884aac8f418be410c001eb62f3a11cb8f30c66af0b842edffb96') variant('advanced_branching', default=False, description='Enable advanced branching to reduce retaping') diff --git a/var/spack/repos/builtin/packages/amdblis/package.py b/var/spack/repos/builtin/packages/amdblis/package.py index 5b216bebd7d..706e7f6bd25 100644 --- a/var/spack/repos/builtin/packages/amdblis/package.py +++ b/var/spack/repos/builtin/packages/amdblis/package.py @@ -15,6 +15,7 @@ class Amdblis(BlisBase): computationally intensive operations. """ + _name = 'amdblis' homepage = "https://developer.amd.com/amd-aocl/blas-library/" url = "https://github.com/amd/blis/archive/2.1.tar.gz" git = "https://github.com/amd/blis.git" diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index aaf28a9d602..5bd19e582ba 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -18,6 +18,7 @@ class Amrex(CMakePackage): maintainers = ['mic84', 'asalmgren'] version('develop', branch='development') + version('20.04', sha256='ce951105336d6fcc07abe3eadf9f71161f0ccbe3e45f4547be4d0ae99e15f3c6') version('20.03', sha256='a535dcc016f0d38b55d0ab8e9067c1c53e3686961f6a1fb471cb18a0ebc909e6') version('20.02', sha256='33529a23694283d12eb37d4682aa86c9cc1240bd50124efcf4464747a7554147') version('20.01', sha256='f7026d267ca5de79ec7e740264d54230f419776d40feae705e939be0b1d8e0d3') diff --git a/var/spack/repos/builtin/packages/anaconda2/package.py b/var/spack/repos/builtin/packages/anaconda2/package.py new file mode 100644 index 00000000000..e22ab88c8ba --- /dev/null +++ b/var/spack/repos/builtin/packages/anaconda2/package.py @@ -0,0 +1,47 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +from os.path import split + + +class Anaconda2(Package): + """Anaconda is a free and open-source distribution of the Python and + R programming languages for scientific computing, that aims to + simplify package management and deployment. Package versions are + managed by the package management system conda. 
+ """ + + homepage = "https://www.anaconda.com" + url = "https://repo.anaconda.com/archive/Anaconda2-2019.10-Linux-x86_64.sh" + + maintainers = ['ajkotobi'] + + version('2019.10', sha256='8b2e7dea2da7d8cc18e822e8ec1804052102f4eefb94c1b3d0e586e126e8cd2f', expand=False) + version('2019.07', sha256='189e16e7adf9ba4b7b7d06ecdc10ce4ad4153e5e3505b9331f3d142243e18e97', expand=False) + version('2019.03', sha256='cedfee5b5a3f62fcdac0a1d2d12396d0f232d2213d24d6dc893df5d8e64b8773', expand=False) + version('2018.12', sha256='1821d4b623ed449e0acb6df3ecbabd3944cffa98f96a5234b7a102a7c0853dc6', expand=False) + version('5.3.1', sha256='f0650ad2f9ca4ae3f3162d7204a32950bc794f37f322eb47b5ad9412454f998c', expand=False) + version('5.3.0', sha256='50eeaab24bfa2472bc6485fe8f0e612ed67e561eda1ff9fbf07b62c96443c1be', expand=False) + version('5.2.0', sha256='cb0d7a08b0e2cec4372033d3269979b4e72e2353ffd1444f57cb38bc9621219f', expand=False) + version('5.1.0', sha256='5f26ee92860d1dffdcd20910ff2cf75572c39d2892d365f4e867a611cca2af5b', expand=False) + version('5.0.1', sha256='23c676510bc87c95184ecaeb327c0b2c88007278e0d698622e2dd8fb14d9faa4', expand=False) + version('5.0.0.1', sha256='18730808d863a5c194ab3f59dd395c1a63cbd769c9bfb1df65efe61ee62fc6d6', expand=False) + version('5.0.0', sha256='58a7117f89c40275114bf7e824a613a963da2b0fe63f2ec3c1175fea785b468e', expand=False) + version('4.4.0', sha256='2d30b91ed4d215b6b4a15162a3389e9057b15445a0c02da71bd7bd272e7b824e', expand=False) + version('4.3.1', sha256='e9b8f2645df6b1527ba56d61343162e0794acc3ee8dde2a6bba353719e2d878d', expand=False) + version('4.3.0', sha256='7c52e6e99aabb24a49880130615a48e685da444c3c14eb48d6a65f3313bf745c', expand=False) + version('4.2.0', sha256='beee286d24fb37dd6555281bba39b3deb5804baec509a9dc5c69185098cf661a', expand=False) + version('4.1.1', sha256='9413b1d3ca9498ba6f53913df9c43d685dd973440ff10b7fe0c45b1cbdcb582e', expand=False) + version('4.1.0', sha256='3b7e504ca0132fb555d1f10e174cae07007f1bc6898cad0f7d416a68aca01f45', expand=False) + version('4.0.0', sha256='ae312143952ca00e061a656c2080e0e4fd3532721282ba8e2978177cad71a5f0', expand=False) + version('2.5.0', sha256='e10abf459cde4a838bd6fc5ca03023c3401b81ad470627acde5a298d56715321', expand=False) + version('2.4.1', sha256='2de682c96edf8cca2852071a84ff860025fbe8c502218e1995acd5ab47e8c9ac', expand=False) + version('2.4.0', sha256='49d19834da06b1b82b6fa85bc647d2e78fa5957d0cbae3ccd6c695a541befa6b', expand=False) + + def install(self, spec, prefix): + dir, anaconda_script = split(self.stage.archive_file) + bash = which('bash') + bash(anaconda_script, '-b', '-f', '-p', self.prefix) diff --git a/var/spack/repos/builtin/packages/anaconda3/package.py b/var/spack/repos/builtin/packages/anaconda3/package.py new file mode 100644 index 00000000000..2925b0a4583 --- /dev/null +++ b/var/spack/repos/builtin/packages/anaconda3/package.py @@ -0,0 +1,53 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +from os.path import split + + +class Anaconda3(Package): + """ + Anaconda is a free and open-source distribution of the Python and R + programming languages for scientific computing, that aims to simplify + package management and deployment. Package versions are managed by + the package management system conda. 
+ """ + homepage = "https://www.anaconda.com" + url = "https://repo.anaconda.com/archive/Anaconda3-2019.10-Linux-x86_64.sh" + + maintainers = ['ajkotobi'] + + version('2019.10', sha256='46d762284d252e51cd58a8ca6c8adc9da2eadc82c342927b2f66ed011d1d8b53', expand=False) + version('2019.07', sha256='69581cf739365ec7fb95608eef694ba959d7d33b36eb961953f2b82cb25bdf5a', expand=False) + version('2019.03', sha256='45c851b7497cc14d5ca060064394569f724b67d9b5f98a926ed49b834a6bb73a', expand=False) + version('2018.12', sha256='1019d0857e5865f8a6861eaf15bfe535b87e92b72ce4f531000dc672be7fce00', expand=False) + version('5.3.1', sha256='d4c4256a8f46173b675dd6a62d12f566ed3487f932bab6bb7058f06c124bcc27', expand=False) + version('5.3.0', sha256='cfbf5fe70dd1b797ec677e63c61f8efc92dad930fd1c94d60390bb07fdc09959', expand=False) + version('5.2.0', sha256='09f53738b0cd3bb96f5b1bac488e5528df9906be2480fe61df40e0e0d19e3d48', expand=False) + version('5.1.0', sha256='7e6785caad25e33930bc03fac4994a434a21bc8401817b7efa28f53619fa9c29', expand=False) + version('5.0.1', sha256='55e4db1919f49c92d5abbf27a4be5986ae157f074bf9f8238963cd4582a4068a', expand=False) + version('5.0.0.1', sha256='092c92427f44687d789a41922ce8426fbdc3c529cc9d6d4ee6de5b62954b93b2', expand=False) + version('5.0.0', sha256='67f5c20232a3e493ea3f19a8e273e0618ab678fa14b03b59b1783613062143e9', expand=False) + version('4.4.0', sha256='3301b37e402f3ff3df216fe0458f1e6a4ccbb7e67b4d626eae9651de5ea3ab63', expand=False) + version('4.3.1', sha256='4447b93d2c779201e5fb50cfc45de0ec96c3804e7ad0fe201ab6b99f73e90302', expand=False) + version('4.3.0', sha256='e9169c3a5029aa820393ac92704eb9ee0701778a085ca7bdc3c57b388ac1beb6', expand=False) + version('4.2.0', sha256='73b51715a12b6382dd4df3dd1905b531bd6792d4aa7273b2377a0436d45f0e78', expand=False) + version('4.1.1', sha256='4f5c95feb0e7efeadd3d348dcef117d7787c799f24b0429e45017008f3534e55', expand=False) + version('4.1.0', sha256='11d32cf4026603d3b327dc4299863be6b815905ff51a80329085e1bb9f96c8bd', expand=False) + version('4.0.0', sha256='36a558a1109868661a5735f5f32607643f6dc05cf581fefb1c10fb8abbe22f39', expand=False) + version('2.5.0', sha256='addadcb927f15cb0b5b6e36890563d3352a8ff6a901ea753d389047d274a29a9', expand=False) + version('2.4.1', sha256='0735e69199fc37135930ea2fd4fb6ad0adef215a2a7ba9fd6b0a0a4daaadb1cf', expand=False) + version('2.4.0', sha256='fb4e480059e991f2fa632b5a9bcdd284c7f0677814cd719c11d524453f96a40d', expand=False) + version('2.3.0', sha256='3be5410b2d9db45882c7de07c554cf4f1034becc274ec9074b23fd37a5c87a6f', expand=False) + version('2.2.0', sha256='4aac68743e7706adb93f042f970373a6e7e087dbf4b02ac467c94ca4ce33d2d1', expand=False) + version('2.1.0', sha256='af3225ccbe8df0ffb918939e009aa57740e35058ebf9dfcf5fec794a77556c3c', expand=False) + version('2.0.1', sha256='3c3b834793e461f3316ad1d9a9178c67859a9d74aaf7bcade076f04134dd1e26', expand=False) + version('2.0.0', sha256='57ce4f97e300cf94c5724f72d992e9eecef708fdaa13bc672ae9779773056540', expand=False) + + def install(self, spec, prefix): + + dir, anaconda_script = split(self.stage.archive_file) + bash = which('bash') + bash(anaconda_script, '-b', '-f', '-p', self.prefix) diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py index 2d9354e817a..0bb2fecfa66 100644 --- a/var/spack/repos/builtin/packages/apr-util/package.py +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -24,7 +24,7 @@ class AprUtil(AutotoolsPackage): depends_on('apr') depends_on('expat') - depends_on('libiconv') + 
depends_on('iconv') depends_on('openssl', when='+crypto') depends_on('gdbm', when='+gdbm') @@ -38,7 +38,7 @@ def configure_args(self): args = [ '--with-apr={0}'.format(spec['apr'].prefix), '--with-expat={0}'.format(spec['expat'].prefix), - '--with-iconv={0}'.format(spec['libiconv'].prefix), + '--with-iconv={0}'.format(spec['iconv'].prefix), # TODO: Add support for the following database managers '--without-ndbm', '--without-berkeley-db', diff --git a/var/spack/repos/builtin/packages/argobots/package.py b/var/spack/repos/builtin/packages/argobots/package.py index 11c8febea6a..8f967d020b1 100644 --- a/var/spack/repos/builtin/packages/argobots/package.py +++ b/var/spack/repos/builtin/packages/argobots/package.py @@ -21,6 +21,7 @@ class Argobots(AutotoolsPackage): maintainers = ['shintaro-iwasaki'] version("master", branch="master") + version("1.0", sha256="36a0815f7bf99900a9c9c1eef61ef9b3b76aa2cfc4594a304f6c8c3296da8def") version("1.0rc2", sha256="7496b8bd39930a548b01aa3b1fe8f8b582c272600ef6a05ddc4398cf21dc12a2") version("1.0rc1", sha256="2dc4487556dce602655a6535f501136f0edc3575708029c80b1af6dccd069ce7") version("1.0b1", sha256="480b85b0e8db288400088a57c2dc5639f556843b06b0492841920c38348a2a3e") diff --git a/var/spack/repos/builtin/packages/attr/package.py b/var/spack/repos/builtin/packages/attr/package.py index a58dc75f4d0..549909d2097 100644 --- a/var/spack/repos/builtin/packages/attr/package.py +++ b/var/spack/repos/builtin/packages/attr/package.py @@ -12,9 +12,17 @@ class Attr(AutotoolsPackage): homepage = "https://savannah.nongnu.org/projects/attr" url = "http://download.savannah.gnu.org/releases/attr/attr-2.4.47.src.tar.gz" + version('2.4.48', sha256='5ead72b358ec709ed00bbf7a9eaef1654baad937c001c044fe8b74c57f5324e7') version('2.4.47', sha256='25772f653ac5b2e3ceeb89df50e4688891e21f723c460636548971652af0a859') version('2.4.46', sha256='dcd69bdca7ff166bc45141eddbcf21967999a6b66b0544be12a1cc2fd6340e1f') + def url_for_version(self, version): + if version >= Version('2.4.48'): + url = 'http://download.savannah.gnu.org/releases/attr/attr-{0}.tar.gz' + else: + url = 'http://download.savannah.gnu.org/releases/attr/attr-{0}.src.tar.gz' + return url.format(version) + def configure_args(self): args = [] args.append('--disable-static') @@ -22,5 +30,7 @@ def configure_args(self): # Ref. http://www.linuxfromscratch.org/blfs/view/7.5/postlfs/attr.html def install(self, spec, prefix): - make() - make('install', 'install-dev', 'install-lib') + if self.version >= Version('2.4.48'): + make('install') + else: + make('install', 'install-dev', 'install-lib') diff --git a/var/spack/repos/builtin/packages/autoconf-archive/package.py b/var/spack/repos/builtin/packages/autoconf-archive/package.py new file mode 100644 index 00000000000..91e89d46e9b --- /dev/null +++ b/var/spack/repos/builtin/packages/autoconf-archive/package.py @@ -0,0 +1,14 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
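# Illustrative sketch, not part of this changeset: the url_for_version()
# hook used for attr above is the standard way to cope with a tarball
# naming scheme that changed between releases. Spack calls the hook with
# each declared version and fetches whatever URL it returns. The package
# name and URLs below are hypothetical.
from spack import *


class UrlLayoutExample(AutotoolsPackage):
    """Hypothetical package whose tarballs dropped the '.src' infix in 2.0."""

    homepage = "https://example.org"
    url = "https://example.org/releases/example-1.9.src.tar.gz"

    def url_for_version(self, version):
        if version >= Version('2.0'):
            url = "https://example.org/releases/example-{0}.tar.gz"
        else:
            url = "https://example.org/releases/example-{0}.src.tar.gz"
        return url.format(version)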
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class AutoconfArchive(AutotoolsPackage, GNUMirrorPackage): + """The GNU Autoconf Archive is a collection of more than 500 macros for + GNU Autoconf.""" + + homepage = "https://www.gnu.org/software/autoconf-archive/" + gnu_mirror_path = "autoconf-archive/autoconf-archive-2019.01.06.tar.xz" + + version('2019.01.06', sha256='17195c833098da79de5778ee90948f4c5d90ed1a0cf8391b4ab348e2ec511e3f') diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py index aa5d16de5d3..5327c90daf5 100644 --- a/var/spack/repos/builtin/packages/automake/package.py +++ b/var/spack/repos/builtin/packages/automake/package.py @@ -12,6 +12,7 @@ class Automake(AutotoolsPackage, GNUMirrorPackage): homepage = 'http://www.gnu.org/software/automake/' gnu_mirror_path = 'automake/automake-1.15.tar.gz' + version('1.16.2', sha256='b2f361094b410b4acbf4efba7337bdb786335ca09eb2518635a09fb7319ca5c1') version('1.16.1', sha256='608a97523f97db32f1f5d5615c98ca69326ced2054c9f82e65bade7fc4c9dea8') version('1.15.1', sha256='988e32527abe052307d21c8ca000aa238b914df363a617e38f4fb89f5abf6260') version('1.15', sha256='7946e945a96e28152ba5a6beb0625ca715c6e32ac55f2e353ef54def0c8ed924') diff --git a/var/spack/repos/builtin/packages/axl/package.py b/var/spack/repos/builtin/packages/axl/package.py index 1725a2751cf..73eb5acdcd7 100644 --- a/var/spack/repos/builtin/packages/axl/package.py +++ b/var/spack/repos/builtin/packages/axl/package.py @@ -19,13 +19,15 @@ def async_api_validator(pkg_name, variant_name, values): class Axl(CMakePackage): """Asynchronous transfer library""" - homepage = "https://github.com/ECP-VeloC/AXL" - url = "https://github.com/ECP-VeloC/AXL/archive/v0.1.1.zip" + homepage = "https://github.com/ecp-veloc/AXL" + url = "https://github.com/ecp-veloc/AXL/archive/v0.1.1.zip" git = "https://github.com/ecp-veloc/axl.git" tags = ['ecp'] version('master', branch='master') + version('0.3.0', sha256='3f5efff87be700a5792a0ee9a7aeae45c640e2936623b024e8bc1056f7952a46') + version('0.2.0', sha256='a0babe3576da30919f89df2f83c76bd01d06345919f2e54d4dddcd6f73faedcc') version('0.1.1', sha256='ebbf231bb542a6c91efb79fce05d4c8a346d5506d88ae1899fb670be52e81933') variant('async_api', default='daemon', diff --git a/var/spack/repos/builtin/packages/bash/package.py b/var/spack/repos/builtin/packages/bash/package.py index 2723b667907..f6901294242 100644 --- a/var/spack/repos/builtin/packages/bash/package.py +++ b/var/spack/repos/builtin/packages/bash/package.py @@ -19,7 +19,7 @@ class Bash(AutotoolsPackage, GNUMirrorPackage): depends_on('ncurses') depends_on('readline@5.0:') - depends_on('libiconv') + depends_on('iconv') patches = [ ('5.0', '001', 'f2fe9e1f0faddf14ab9bfa88d450a75e5d028fedafad23b88716bd657c737289'), @@ -33,6 +33,11 @@ class Bash(AutotoolsPackage, GNUMirrorPackage): ('5.0', '009', 'ed3ca21767303fc3de93934aa524c2e920787c506b601cc40a4897d4b094d903'), ('5.0', '010', 'd6fbc325f0b5dc54ddbe8ee43020bced8bd589ddffea59d128db14b2e52a8a11'), ('5.0', '011', '2c4de332b91eaf797abbbd6c79709690b5cbd48b12e8dfe748096dbd7bf474ea'), + ('5.0', '012', '2943ee19688018296f2a04dbfe30b7138b889700efa8ff1c0524af271e0ee233'), + ('5.0', '013', 'f5d7178d8da30799e01b83a0802018d913d6aa972dd2ddad3b927f3f3eb7099a'), + ('5.0', '014', '5d6eee6514ee6e22a87bba8d22be0a8621a0ae119246f1c5a9a35db1f72af589'), + ('5.0', '015', 'a517df2dda93b26d5cbf00effefea93e3a4ccd6652f152f4109170544ebfa05e'), + ('5.0', '016', 
'ffd1d7a54a99fa7f5b1825e4f7e95d8c8876bc2ca151f150e751d429c650b06d'), ] # TODO: patches below are not managed by the GNUMirrorPackage base class @@ -49,7 +54,7 @@ def configure_args(self): '--with-curses', '--enable-readline', '--with-installed-readline', - '--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix), + '--with-libiconv-prefix={0}'.format(spec['iconv'].prefix), ] def check(self): diff --git a/var/spack/repos/builtin/packages/bazel/package.py b/var/spack/repos/builtin/packages/bazel/package.py index 655c9151781..26d79616dd9 100644 --- a/var/spack/repos/builtin/packages/bazel/package.py +++ b/var/spack/repos/builtin/packages/bazel/package.py @@ -92,6 +92,7 @@ class Bazel(Package): # https://docs.bazel.build/versions/master/install-compile-source.html#bootstrap-unix-prereq depends_on('jdk@1.8.0:1.8.999', type=('build', 'run')) depends_on('python', type=('build', 'run')) + depends_on('zip', type=('build', 'run')) # Pass Spack environment variables to the build patch('bazelruleclassprovider-0.25.patch', when='@0.25:') diff --git a/var/spack/repos/builtin/packages/bcache/func_crc64.patch b/var/spack/repos/builtin/packages/bcache/func_crc64.patch new file mode 100644 index 00000000000..7acc312139c --- /dev/null +++ b/var/spack/repos/builtin/packages/bcache/func_crc64.patch @@ -0,0 +1,13 @@ +diff --git a/bcache.c b/bcache.c +index 8f37445..8b4b986 100644 +--- a/bcache.c ++++ b/bcache.c +@@ -115,7 +115,7 @@ static const uint64_t crc_table[256] = { + 0x9AFCE626CE85B507ULL + }; + +-inline uint64_t crc64(const void *_data, size_t len) ++uint64_t crc64(const void *_data, size_t len) + { + uint64_t crc = 0xFFFFFFFFFFFFFFFFULL; + const unsigned char *data = _data; diff --git a/var/spack/repos/builtin/packages/bcache/package.py b/var/spack/repos/builtin/packages/bcache/package.py new file mode 100644 index 00000000000..63c42de3334 --- /dev/null +++ b/var/spack/repos/builtin/packages/bcache/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
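# Illustrative sketch, not part of this changeset: the bcache recipe that
# follows uses the usual MakefilePackage shape -- let the default build()
# run `make`, inject extra link flags via setup_build_environment(), and
# copy the resulting binaries by hand in install(). Names below are
# hypothetical and the checksum is a placeholder.
from spack import *


class MakefileToolExample(MakefilePackage):
    """Hypothetical Makefile-only tool with no `make install` target."""

    homepage = "https://example.org/tool"
    url = "https://example.org/tool-1.0.tar.gz"

    version('1.0', sha256='0' * 64)  # placeholder checksum

    depends_on('gettext')

    def setup_build_environment(self, env):
        # Link against libintl provided by the gettext dependency.
        env.append_flags('LDFLAGS', '-lintl')

    def install(self, spec, prefix):
        mkdirp(prefix.bin)
        install('tool', prefix.bin)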
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Bcache(MakefilePackage): + """Bcache is a patch for the Linux kernel to use SSDs to cache other block + devices.""" + + homepage = "http://bcache.evilpiepirate.org" + url = "https://github.com/g2p/bcache-tools/archive/v1.0.8.tar.gz" + + version('1.0.8', sha256='d56923936f37287efc57a46315679102ef2c86cd0be5874590320acd48c1201c') + version('1.0.7', sha256='64d76d1085afba8c3d5037beb67bf9d69ee163f357016e267bf328c0b1807abd') + version('1.0.6', sha256='9677c6da3ceac4e1799d560617c4d00ea7e9d26031928f8f94b8ab327496d4e0') + version('1.0.5', sha256='1449294ef545b3dc6f715f7b063bc2c8656984ad73bcd81a0dc048cbba416ea9') + version('1.0.4', sha256='102ffc3a8389180f4b491188c3520f8a4b1a84e5a7ca26d2bd6de1821f4d913d') + + depends_on('libuuid') + depends_on('util-linux') + depends_on('gettext') + + def setup_build_environment(self, env): + env.append_flags('LDFLAGS', '-lintl') + + patch('func_crc64.patch', sha256='558b35cadab4f410ce8f87f0766424a429ca0611aa2fd247326ad10da115737d') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('bcache-register', prefix.bin) + install('bcache-super-show', prefix.bin) + install('make-bcache', prefix.bin) + install('probe-bcache', prefix.bin) diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index d84dd32bc58..60cd9563fa1 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -14,6 +14,7 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): homepage = "http://www.gnu.org/software/binutils/" gnu_mirror_path = "binutils/binutils-2.28.tar.bz2" + version('2.34', sha256='89f010078b6cf69c23c27897d686055ab89b198dddf819efb0a4f2c38a0b36e6') version('2.33.1', sha256='0cb4843da15a65a953907c96bad658283f3c4419d6bcc56bf2789db16306adb2') version('2.32', sha256='de38b15c902eb2725eac6af21183a5f34ea4634cb0bcef19612b50e5ed31072d') version('2.31.1', sha256='ffcc382695bf947da6135e7436b8ed52d991cf270db897190f19d6f9838564d0') @@ -34,6 +35,9 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): variant('libiberty', default=False, description='Also install libiberty.') variant('nls', default=True, description='Enable Native Language Support') variant('headers', default=False, description='Install extra headers (e.g. 
ELF)') + variant('lto', default=False, description='Enable lto.') + variant('ld', default=False, description='Enable ld.') + variant('interwork', default=False, description='Enable interwork.') patch('cr16.patch', when='@:2.29.1') patch('update_symbol-2.26.patch', when='@2.26') @@ -46,6 +50,10 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage): depends_on('m4', type='build', when='@:2.29.99 +gold') depends_on('bison', type='build', when='@:2.29.99 +gold') + # 2.34 needs makeinfo due to a bug, see: + # https://sourceware.org/bugzilla/show_bug.cgi?id=25491 + depends_on('texinfo', type='build', when='@2.34') + conflicts('+gold', when='platform=darwin', msg="Binutils cannot build linkers on macOS") @@ -63,6 +71,15 @@ def configure_args(self): '--with-sysroot=/', ] + if '+lto' in spec: + configure_args.append('--enable-lto') + + if '+ld' in spec: + configure_args.append('--enable-ld') + + if '+interwork' in spec: + configure_args.append('--enable-interwork') + if '+gold' in spec: configure_args.append('--enable-gold') diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py index a0b04062408..0a51dd583e0 100644 --- a/var/spack/repos/builtin/packages/bison/package.py +++ b/var/spack/repos/builtin/packages/bison/package.py @@ -18,6 +18,7 @@ class Bison(AutotoolsPackage, GNUMirrorPackage): version('3.4.2', sha256='ff3922af377d514eca302a6662d470e857bd1a591e96a2050500df5a9d59facf') version('3.4.1', sha256='7007fc89c216fbfaff5525359b02a7e5b612694df5168c74673f67055f015095') + version('3.3.2', sha256='0fda1d034185397430eb7b0c9e140fb37e02fbfc53b90252fa5575e382b6dbd1') version('3.0.5', sha256='cd399d2bee33afa712bac4b1f4434e20379e9b4099bce47189e09a7675a2d566') version('3.0.4', sha256='b67fd2daae7a64b5ba862c66c07c1addb9e6b1b05c5f2049392cfd8a2172952e') version('2.7', sha256='19bbe7374fd602f7a6654c131c21a15aebdc06cc89493e8ff250cb7f9ed0a831') diff --git a/var/spack/repos/builtin/packages/blis/package.py b/var/spack/repos/builtin/packages/blis/package.py index ea7ca8ff428..c2fc1545a3e 100644 --- a/var/spack/repos/builtin/packages/blis/package.py +++ b/var/spack/repos/builtin/packages/blis/package.py @@ -102,6 +102,10 @@ def darwin_fix(self): if self.spec.satisfies('platform=darwin'): fix_darwin_install_name(self.prefix.lib) + @property + def libs(self): + return find_libraries(['libblis'], root=self.prefix, recursive=True) + class Blis(BlisBase): """BLIS is a portable software framework for instantiating high-performance diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 41be592a528..c0998e23d0a 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -178,6 +178,9 @@ def libs(self): conflicts('+taggedlayout', when='+versionedlayout') conflicts('+numpy', when='~python') + # boost-python in 1.72.0 broken with cxxstd=98 + conflicts('cxxstd=98', when='+mpi+python @1.72.0:') + # Container's Extended Allocators were not added until 1.56.0 conflicts('+container', when='@:1.55.99') diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py index 87e616d76cd..08d5978a63d 100644 --- a/var/spack/repos/builtin/packages/bzip2/package.py +++ b/var/spack/repos/builtin/packages/bzip2/package.py @@ -19,9 +19,9 @@ class Bzip2(Package): # The server is sometimes a bit slow to respond fetch_options = {'timeout': 60} - version('1.0.8', 
sha256='ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269', fetch_options=fetch_options) - version('1.0.7', sha256='e768a87c5b1a79511499beb41500bcc4caf203726fff46a6f5f9ad27fe08ab2b', fetch_options=fetch_options) - version('1.0.6', sha256='a2848f34fcd5d6cf47def00461fcb528a0484d8edef8208d6d2e2909dc61d9cd', fetch_options=fetch_options) + version('1.0.8', sha256='ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269') + version('1.0.7', sha256='e768a87c5b1a79511499beb41500bcc4caf203726fff46a6f5f9ad27fe08ab2b') + version('1.0.6', sha256='a2848f34fcd5d6cf47def00461fcb528a0484d8edef8208d6d2e2909dc61d9cd') variant('shared', default=True, description='Enables the build of shared libraries.') diff --git a/var/spack/repos/builtin/packages/c-blosc/package.py b/var/spack/repos/builtin/packages/c-blosc/package.py index 8e11cb0956a..71280590566 100644 --- a/var/spack/repos/builtin/packages/c-blosc/package.py +++ b/var/spack/repos/builtin/packages/c-blosc/package.py @@ -50,6 +50,13 @@ def cmake_args(self): args.append('-DPREFER_EXTERNAL_ZSTD=ON') args.append('-DPREFER_EXTERNAL_LZ4=ON') + if self.run_tests: + args.append('-DBUILD_TESTS=ON') + args.append('-DBUILD_BENCHMARKS=ON') + else: + args.append('-DBUILD_TESTS=OFF') + args.append('-DBUILD_BENCHMARKS=OFF') + return args @run_after('install') diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py index 53b34dd04dd..fb12a787045 100644 --- a/var/spack/repos/builtin/packages/caliper/package.py +++ b/var/spack/repos/builtin/packages/caliper/package.py @@ -19,6 +19,7 @@ class Caliper(CMakePackage): git = "https://github.com/LLNL/Caliper.git" version('master') + version('2.3.0', tag='v2.3.0') version('2.2.0', tag='v2.2.0') version('2.1.1', tag='v2.1.1') version('2.0.1', tag='v2.0.1') @@ -72,7 +73,7 @@ class Caliper(CMakePackage): depends_on('python@3:', type='build') # sosflow support not yet in 2.0 - conflicts('+sosflow', '@2.0.0:2.2.99') + conflicts('+sosflow', '@2.0.0:2.3.99') conflicts('+adiak', '@:2.1.99') def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/catch2/package.py b/var/spack/repos/builtin/packages/catch2/package.py index 989c96f7fb6..6961afad299 100644 --- a/var/spack/repos/builtin/packages/catch2/package.py +++ b/var/spack/repos/builtin/packages/catch2/package.py @@ -14,6 +14,14 @@ class Catch2(CMakePackage): url = "https://github.com/catchorg/Catch2/archive/v2.9.1.tar.gz" maintainers = ['ax3l'] + version('2.11.3', sha256='9a6967138062688f04374698fce4ce65908f907d8c0fe5dfe8dc33126bd46543') + version('2.11.2', sha256='a96203fa531092375678ad2d81c43317ee58c684787f24b2a55748f6c6839799') + version('2.11.1', sha256='9af06ca5b10362620c6c9c729821367e1aeb0f76adfc7bc3a468da83db3c50c6') + version('2.11.0', sha256='b9957af46a04327d80833960ae51cf5e67765fd264389bd1e275294907f1a3e0') + version('2.10.2', sha256='79aa46ee6c5a87bc5306bfffc6ecde6a1ad6327715b208ee2e846873f282a494') + version('2.10.1', sha256='dcbbe0a5f4d2a4330bdf5bcb9ef6a02303d679d46596e4ed06ca462f2372d4de') + version('2.10.0', sha256='a3beaa8ba6238c189e1f81238ab38e585836af13204a7099e22eff6c25b98558') + version('2.9.2', sha256='54bea6d80a388a80f895cd0e2343fca72b0d9093a776af40904aefce49c13bda') version('2.9.1', sha256='0b36488aca6265e7be14da2c2d0c748b4ddb9c70a1ea4da75736699c629f14ac') version('2.9.0', sha256='00040cad9b6d6bb817ebd5853ff6dda23f9957153d8c4eedf85def0c9e787c42') version('2.8.0', sha256='b567c37446cd22c8550bfeb7e2fe3f981b8f3ab8b2148499a522e7f61b8a481d') diff --git 
a/var/spack/repos/builtin/packages/cctools/package.py b/var/spack/repos/builtin/packages/cctools/package.py index 481e4a47f17..973a6c3d114 100644 --- a/var/spack/repos/builtin/packages/cctools/package.py +++ b/var/spack/repos/builtin/packages/cctools/package.py @@ -13,16 +13,17 @@ class Cctools(AutotoolsPackage): """ homepage = "https://github.com/cooperative-computing-lab/cctools" - url = "https://github.com/cooperative-computing-lab/cctools/archive/release/6.1.1.tar.gz" + url = "https://github.com/cooperative-computing-lab/cctools/archive/release/7.1.0.tar.gz" + version('7.1.0', sha256='84748245db10ff26c0c0a7b9fd3ec20fbbb849dd4aadc5e8531fd1671abe7a81') version('7.0.18', sha256='5b6f3c87ae68dd247534a5c073eb68cb1a60176a7f04d82699fbc05e649a91c2') version('6.1.1', sha256='97f073350c970d6157f80891b3bf6d4f3eedb5f031fea386dc33e22f22b8af9d') depends_on('openssl') depends_on('perl+shared', type=('build', 'run')) - depends_on('python@:2.9', when='@6.1.1', type=('build', 'run')) depends_on('python', type=('build', 'run')) depends_on('readline') + depends_on('gettext') # Corrects python linking of -lintl flag. depends_on('swig') # depends_on('xrootd') depends_on('zlib') @@ -44,21 +45,49 @@ def patch(self): def configure_args(self): args = [] - # For python - if self.spec.satisfies('^python@3:'): - args.append('--with-python-path=no') - args.append( - '--with-python3-path={0}'.format(self.spec['python'].prefix) - ) + + # make sure we do not pick a python outside spack: + if self.spec.satisfies('@6.1.1'): + if self.spec.satisfies('^python@3:'): + args.extend([ + '--with-python3-path', self.spec['python'].prefix, + '--with-python-path', 'no' + ]) + elif self.spec.satisfies('^python@:2.9'): + args.extend([ + '--with-python-path', self.spec['python'].prefix, + '--with-python3-path', 'no' + ]) + else: + args.extend([ + '--with-python-path', 'no', + '--with-python3-path', 'no' + ]) else: - args.append('--with-python3-path=no') - args.append( - '--with-python-path={0}'.format(self.spec['python'].prefix) - ) + # versions 7 and above, where --with-python-path recognized the + # python version: + if self.spec.satisfies('^python@3:'): + args.extend([ + '--with-python-path', self.spec['python'].prefix, + '--with-python2-path', 'no' + ]) + elif self.spec.satisfies('^python@:2.9'): + args.extend([ + '--with-python-path', self.spec['python'].prefix, + '--with-python3-path', 'no' + ]) + else: + args.extend([ + '--with-python2-path', 'no', + '--with-python3-path', 'no' + ]) + # disable these bits for p in ['mysql', 'xrootd']: args.append('--with-{0}-path=no'.format(p)) + # point these bits at the Spack installations for p in ['openssl', 'perl', 'readline', 'swig', 'zlib']: args.append('--with-{0}-path={1}'.format(p, self.spec[p].prefix)) + return args diff --git a/var/spack/repos/builtin/packages/ceres-solver/package.py b/var/spack/repos/builtin/packages/ceres-solver/package.py index 599c1801213..ab84b4f9b7c 100644 --- a/var/spack/repos/builtin/packages/ceres-solver/package.py +++ b/var/spack/repos/builtin/packages/ceres-solver/package.py @@ -17,19 +17,38 @@ class CeresSolver(CMakePackage): homepage = "http://ceres-solver.org" url = "http://ceres-solver.org/ceres-solver-1.12.0.tar.gz" + version('1.14.0', sha256='4744005fc3b902fed886ea418df70690caa8e2ff6b5a90f3dd88a3d291ef8e8e') version('1.12.0', sha256='745bfed55111e086954126b748eb9efe20e30be5b825c6dec3c525cf20afc895') + variant('suitesparse', default=False, description='Build with SuiteSparse') + variant('shared', default=True, description='Build shared libraries') 
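# Illustrative sketch, not part of this changeset: the ceres-solver and
# cgns hunks around this point both map boolean variants onto CMake -D
# options by hand. The same pattern written with a small helper (class and
# variant names here are hypothetical):
from spack import *


def bool_option(name, enabled):
    """Return a '-D<name>:BOOL=ON/OFF' CMake argument."""
    return '-D{0}:BOOL={1}'.format(name, 'ON' if enabled else 'OFF')


class CmakeVariantExample(CMakePackage):
    """Hypothetical CMake package with two feature variants."""

    variant('shared', default=True, description='Build shared libraries')
    variant('examples', default=False, description='Build examples')

    def cmake_args(self):
        return [
            bool_option('BUILD_SHARED_LIBS', '+shared' in self.spec),
            bool_option('BUILD_EXAMPLES', '+examples' in self.spec),
        ]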
+ variant('examples', default=False, description='Build examples') + depends_on('eigen@3:') depends_on('lapack') depends_on('glog') def cmake_args(self): args = [ - '-DSUITESPARSE=OFF', '-DCXSPARSE=OFF', '-DEIGENSPARSE=ON', '-DLAPACK=ON', - '-DBUILD_SHARED_LIBS=ON', '-DSCHUR_SPECIALIZATIONS=OFF' ] + + if '+suitesparse' in self.spec: + args.append('-DSUITESPARSE=ON') + else: + args.append('-DSUITESPARSE=OFF') + + if '+shared' in self.spec: + args.append('-DBUILD_SHARED_LIBS=ON') + else: + args.append('-DBUILD_SHARED_LIBS=OFF') + + if '+examples' in self.spec: + args.append('-DBUILD_EXAMPLES=ON') + else: + args.append('-DBUILD_EXAMPLES=OFF') + return args diff --git a/var/spack/repos/builtin/packages/cgns/package.py b/var/spack/repos/builtin/packages/cgns/package.py index 4e8a1126346..5e05c805af9 100644 --- a/var/spack/repos/builtin/packages/cgns/package.py +++ b/var/spack/repos/builtin/packages/cgns/package.py @@ -18,15 +18,26 @@ class Cgns(CMakePackage): version('develop', branch='develop') version('master', branch='master') + version('4.1.1', sha256='055d345c3569df3ae832fb2611cd7e0bc61d56da41b2be1533407e949581e226') + version('4.1.0', sha256='4674de1fac3c47998248725fd670377be497f568312c5903d1bb8090a3cf4da0') + version('4.0.0', sha256='748585a8e52dff4d250d6b603e6b847d05498e4566aba2dc3d7a7d85c4d55849') + version('3.4.1', sha256='d32595e7737b9332243bd3de1eb8c018a272f620f09b289dea8292eba1365994') version('3.4.0', sha256='6372196caf25b27d38cf6f056258cb0bdd45757f49d9c59372b6dbbddb1e05da') version('3.3.1', sha256='81093693b2e21a99c5640b82b267a495625b663d7b8125d5f1e9e7aaa1f8d469') version('3.3.0', sha256='8422c67994f8dc6a2f201523a14f6c7d7e16313bdd404c460c16079dbeafc662') - variant('hdf5', default=True, description='Enable HDF5 interface') - variant('fortran', default=False, description='Enable Fortran interface') - variant('scoping', default=True, description='Enable scoping') - variant('mpi', default=True, description='Enable parallel cgns') - variant('int64', default=False, description='Build with 64-bit integers') + variant('hdf5', default=True, description='Enable HDF5 interface') + variant('fortran', default=False, description='Enable Fortran interface') + variant('base_scope', default=False, description='Enable base scope') + variant('scoping', default=True, description='Enable scoping') + variant('mpi', default=True, description='Enable parallel cgns') + variant('int64', default=False, description='Build with 64-bit integers') + variant('shared', default=True, description='Enable shared library') + variant('static', default=False, description='Build static libraries') + variant('testing', default=False, description='Build CGNS testing') + variant('legacy', default=False, description='Enable legacy options') + variant('parallel', default=False, description='Enable parallel features') + variant('mem_debug', default=False, description='Enable memory debugging option') depends_on('hdf5~mpi', when='+hdf5~mpi') depends_on('hdf5+mpi', when='+hdf5+mpi') @@ -44,7 +55,21 @@ def cmake_args(self): '-DCGNS_ENABLE_PARALLEL:BOOL=%s' % ( 'ON' if '+mpi' in spec else 'OFF'), '-DCGNS_ENABLE_TESTS:BOOL=OFF', - '-DCGNS_BUILD_CGNSTOOLS:BOOL=OFF' + '-DCGNS_BUILD_TESTING:BOOL=%s' % ( + 'ON' if '+testing' in spec else 'OFF'), + '-DCGNS_BUILD_CGNSTOOLS:BOOL=OFF', + '-DCGNS_BUILD_SHARED:BOOL=%s' % ( + 'ON' if '+shared' in spec else 'OFF'), + '-DCGNS_BUILD_STATIC:BOOL=%s' % ( + 'ON' if '+static' in spec else 'OFF'), + '-DCGNS_ENABLE_BASE_SCOPE:BOOL=%s' % ( + 'ON' if '+base_scope' in spec else 'OFF'), + 
'-DCGNS_ENABLE_LEGACY:BOOL=%s' % ( + 'ON' if '+legacy' in spec else 'OFF'), + '-DCGNS_ENABLE_PARALLEL:BOOL=%s' % ( + 'ON' if '+parallel' in spec else 'OFF'), + '-DCGNS_ENABLE_MEM_DEBUG:BOOL=%s' % ( + 'ON' if '+mem_debug' in spec else 'OFF') ]) if '+mpi' in spec: diff --git a/var/spack/repos/builtin/packages/charliecloud/package.py b/var/spack/repos/builtin/packages/charliecloud/package.py index 9329455d842..e72c71cf860 100644 --- a/var/spack/repos/builtin/packages/charliecloud/package.py +++ b/var/spack/repos/builtin/packages/charliecloud/package.py @@ -9,22 +9,24 @@ class Charliecloud(AutotoolsPackage): """Lightweight user-defined software stacks for HPC.""" + maintainers = ['j-ogas'] homepage = "https://hpc.github.io/charliecloud" - url = "https://github.com/hpc/charliecloud/releases/download/v0.9.10/charliecloud-0.9.10.tar.gz" + url = "https://github.com/hpc/charliecloud/releases/download/v0.14/charliecloud-0.9.10.tar.gz" git = "https://github.com/hpc/charliecloud.git" version('master', branch='master') - version('0.13', sha256='5740bff6e410ca99484c1bdf3dbe834c0f753c846d55c19d6162967a3e2718e0') + version('0.14', sha256='4ae23c2d6442949e16902f9d5604dbd1d6059aeb5dd461b11fc5c74d49dcb194') - depends_on('python@3.4:', type=('build', 'run')) + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') - # experimental builder (ch-grow) - variant('builder', default=False, description='Bundle dependencies for unprivileged builder (ch-grow)') - depends_on('py-lark-parser', type='run', when='+builder') - depends_on('skopeo', type='run', when='+builder') - depends_on('umoci', type='run', when='+builder') + depends_on('python@3.5:', type='run') + depends_on('py-lark-parser', type='run') + depends_on('py-requests', type='run') - # man pages and html docs + # man pages and html docs variant variant('docs', default=False, description='Build man pages and html docs') depends_on('rsync', type='build', when='+docs') depends_on('py-sphinx', type='build', when='+docs') @@ -37,10 +39,9 @@ def configure_args(self): args = [] - if '+docs' not in self.spec: + if '+docs' in self.spec: + args.append('--enable-html') + else: args.append('--disable-html') - if '+builder' not in self.spec: - args.append('--disable-ch-grow') - return args diff --git a/var/spack/repos/builtin/packages/charmpp/package.py b/var/spack/repos/builtin/packages/charmpp/package.py index 80213216141..158dd599378 100644 --- a/var/spack/repos/builtin/packages/charmpp/package.py +++ b/var/spack/repos/builtin/packages/charmpp/package.py @@ -18,10 +18,12 @@ class Charmpp(Package): (your laptop) to the largest supercomputers.""" homepage = "http://charmplusplus.org" - url = "https://charm.cs.illinois.edu/distrib/charm-6.8.2.tar.gz" + url = "http://charm.cs.illinois.edu/distrib/charm-6.8.2.tar.gz" git = "https://github.com/UIUC-PPL/charm.git" - version("develop", branch="charm") + version("develop", branch="master") + + version('6.10.1', sha256='ab96198105daabbb8c8bdf370f87b0523521ce502c656cb6cd5b89f69a2c70a8') version('6.10.0', sha256='7c526a78aa0c202b7f0418b345138e7dc40496f0bb7b9e301e0381980450b25c') version("6.9.0", sha256="85ed660e46eeb7a6fc6b32deab08226f647c244241948f6b592ebcd2b6050cbd") version("6.8.2", sha256="08e6001b0e9cd00ebde179f76098767149bf7e454f29028fb9a8bfb55778698e") diff --git a/var/spack/repos/builtin/packages/cinch/package.py b/var/spack/repos/builtin/packages/cinch/package.py index 65281b8455c..f46a801cd4a 100644 --- 
a/var/spack/repos/builtin/packages/cinch/package.py +++ b/var/spack/repos/builtin/packages/cinch/package.py @@ -16,7 +16,7 @@ class Cinch(Package): url = "https://github.com/laristra/cinch/archive/1.0.zip" git = "https://github.com/laristra/cinch.git" - version('develop', branch='master', submodules=False) + version('master', branch='master', submodules=False) version('1.0', sha256='98b73473829b478191481621d84c3d63c662da6e951321f858a032eae3ca07b7') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/citcoms/package.py b/var/spack/repos/builtin/packages/citcoms/package.py index 2529e544f64..35fe86b2b4d 100644 --- a/var/spack/repos/builtin/packages/citcoms/package.py +++ b/var/spack/repos/builtin/packages/citcoms/package.py @@ -12,12 +12,14 @@ class Citcoms(AutotoolsPackage): homepage = "https://geodynamics.org/cig/software/citcoms/" url = "https://github.com/geodynamics/citcoms/releases/download/v3.3.1/CitcomS-3.3.1.tar.gz" + git = "https://github.com/geodynamics/citcoms.git" + maintainers = ['adamjstewart'] + + version('master', branch='master', submodules=True) version('3.3.1', sha256='e3520e0a933e4699d31e86fe309b8c154ea6ecb0f42a1cf6f25e8d13d825a4b3') version('3.2.0', sha256='773a14d91ecbb4a4d1e04317635fab79819d83c57b47f19380ff30b9b19cb07a') - variant('pyre', default=False, description='build Pyre modules') - variant('exchanger', default=False, description='use Exchanger') variant('ggrd', default=False, description='use GGRD file support') variant('cuda', default=False, description='use CUDA') variant('hdf5', default=False, description='add HDF5 support') @@ -25,17 +27,17 @@ class Citcoms(AutotoolsPackage): # Required dependencies depends_on('mpi') depends_on('zlib') + depends_on('python@:2', type='run') # needed for post-processing scripts + depends_on('automake', when='@master', type='build') + depends_on('autoconf', when='@master', type='build') + depends_on('libtool', when='@master', type='build') + depends_on('m4', when='@master', type='build') # Optional dependencies - depends_on('exchanger', when='+exchanger') - depends_on('py-pythia', type=('build', 'run'), when='+pyre') depends_on('hc', when='+ggrd') depends_on('cuda', when='+cuda') depends_on('hdf5+mpi', when='+hdf5') - conflicts('+pyre', when='@3.3:', msg='Pyre support was removed from 3.3+') - conflicts('+exchanger', when='@3.3:', msg='Exchanger support was removed from 3.3+') - def setup_build_environment(self, env): if '+ggrd' in self.spec: env.set('HC_HOME', self.spec['hc'].prefix) @@ -45,15 +47,8 @@ def configure_args(self): # Flags only valid in 3.2 if self.spec.satisfies('@:3.2'): - if '+pyre' in self.spec: - args.append('--with-pyre') - else: - args.append('--without-pyre') - - if '+exchanger' in self.spec: - args.append('--with-exchanger') - else: - args.append('--without-exchanger') + args.append('--without-pyre') + args.append('--without-exchanger') if '+ggrd' in self.spec: args.append('--with-ggrd') diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index fdfa46af426..ce436d2ea9f 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -13,6 +13,8 @@ class Cmake(Package): url = 'https://github.com/Kitware/CMake/releases/download/v3.15.5/cmake-3.15.5.tar.gz' maintainers = ['chuckatkins'] + version('3.17.0', sha256='b74c05b55115eacc4fa2b77a814981dbda05cdc95a53e279fe16b7b272f00847') + version('3.16.5', 
sha256='5f760b50b8ecc9c0c37135fae5fbf00a2fef617059aa9d61c1bb91653e5a8bfc') version('3.16.2', sha256='8c09786ec60ca2be354c29829072c38113de9184f29928eb9da8446a5f2ce6a9') version('3.16.1', sha256='a275b3168fa8626eca4465da7bb159ff07c8c6cb0fb7179be59e12cbdfa725fd') version('3.16.0', sha256='6da56556c63cab6e9a3e1656e8763ed4a841ac9859fefb63cbe79472e67e8c5f') @@ -49,6 +51,7 @@ class Cmake(Package): version('3.10.0', sha256='b3345c17609ea0f039960ef470aa099de9942135990930a57c14575aae884987') version('3.9.6', sha256='7410851a783a41b521214ad987bb534a7e4a65e059651a2514e6ebfc8f46b218') version('3.9.4', sha256='b5d86f12ae0072db520fdbdad67405f799eb728b610ed66043c20a92b4906ca1') + version('3.9.2', sha256='954a5801a456ee48e76f01107c9a4961677dd0f3e115275bbd18410dc22ba3c1') version('3.9.0', sha256='167701525183dbb722b9ffe69fb525aa2b81798cf12f5ce1c020c93394dfae0f') version('3.8.2', sha256='da3072794eb4c09f2d782fcee043847b99bb4cf8d4573978d9b2024214d6e92d') version('3.8.1', sha256='ce5d9161396e06501b00e52933783150a87c33080d4bdcef461b5b7fd24ac228') diff --git a/var/spack/repos/builtin/packages/cminpack/link_with_blas_pr_21.patch b/var/spack/repos/builtin/packages/cminpack/link_with_blas_pr_21.patch new file mode 100644 index 00000000000..a28d67ea0f9 --- /dev/null +++ b/var/spack/repos/builtin/packages/cminpack/link_with_blas_pr_21.patch @@ -0,0 +1,267 @@ +From 3b386a0ed507a9923f942098a4dcf2df2bcde7d7 Mon Sep 17 00:00:00 2001 +From: Michel Zou +Date: Thu, 6 Dec 2018 23:38:07 +0100 +Subject: [PATCH] cmake to link to cblas + +--- + CMakeLists.txt | 30 ++++--- + cmake/CMakeLists.txt | 2 +- + cmake/FindCBLAS.cmake | 180 ++++++++++++++++++++++++++++++++++++++++++ + 3 files changed, 201 insertions(+), 11 deletions(-) + create mode 100644 cmake/FindCBLAS.cmake + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 6c8d16b..f22039f 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -5,6 +5,8 @@ cmake_minimum_required (VERSION 2.8.9) + project (CMINPACK) + string(TOLOWER ${PROJECT_NAME} PROJECT_NAME_LOWER) + ++set (CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) ++ + include(${PROJECT_SOURCE_DIR}/cmake/cminpack_utils.cmake) + # Set version and OS-specific settings + set(CMINPACK_VERSION 1.3.6 CACHE STRING "CMinpack version") +@@ -34,7 +36,7 @@ else () + endif(WIN32) + endif () + +-option(USE_BLAS "Compile cminpack using a blas library if possible" ON) ++option(USE_BLAS "Compile cminpack using cblas library if possible" ON) + + #set(CMAKE_INSTALL_PREFIX ${PROJECT_SOURCE_DIR}/../build) + +@@ -61,16 +63,24 @@ if (${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD") + TARGET_LINK_LIBRARIES(cminpack m) + endif() + +-# Link with a BLAS library if requested +-if (USE_BLAS) +- if (NOT BUILD_SHARED_LIBS) +- set(BLA_STATIC True) ++ ++include (CheckLibraryExists) ++include (CheckFunctionExists) ++check_function_exists (sqrt HAVE_SQRT_NO_LIBM) ++if (NOT HAVE_SQRT_NO_LIBM) ++ check_library_exists ("m" sqrt "" HAVE_LIBM) ++ if (HAVE_LIBM) ++ target_link_libraries(cminpack PUBLIC m) + endif() +- find_package(BLAS) +- if (BLAS_FOUND) +- target_link_libraries(cminpack PUBLIC ${BLAS_LIBRARIES}) +- set_target_properties(cminpack PROPERTIES LINK_FLAGS "${BLAS_LINKER_FLAGS}") +- target_compile_definitions(cminpack PUBLIC -DUSE_CBLAS) ++endif () ++ ++# Link with CBLAS library if requested ++if (USE_BLAS) ++ find_package (CBLAS) ++ if (CBLAS_FOUND) ++ target_link_libraries(cminpack PUBLIC ${CBLAS_LIBRARIES}) ++ set_target_properties(cminpack PROPERTIES LINK_FLAGS "${CBLAS_LINKER_FLAGS}") ++ target_compile_definitions(cminpack PUBLIC USE_CBLAS) + endif() 
+ endif() + +diff --git a/cmake/CMakeLists.txt b/cmake/CMakeLists.txt +index 058a89b..d8277bc 100644 +--- a/cmake/CMakeLists.txt ++++ b/cmake/CMakeLists.txt +@@ -1,7 +1,7 @@ + set(PKG_DESC "CMinPack") + set(PKG_EXTERNAL_DEPS "") + set(pkg_conf_file ${CMAKE_CURRENT_BINARY_DIR}/cminpack.pc) +-if (USE_BLAS AND BLAS_FOUND) ++if (USE_BLAS AND CBLAS_FOUND) + set(PC_CMINPACK_CFLAGS "-DUSE_CBLAS") + if (NOT "${BLAS_LIBRARIES}" STREQUAL "") + string(REPLACE ";" " -l" PC_CMINPACK_LIBRARIES "${BLAS_LIBRARIES}") +diff --git a/cmake/FindCBLAS.cmake b/cmake/FindCBLAS.cmake +new file mode 100644 +index 0000000..e4fb422 +--- /dev/null ++++ b/cmake/FindCBLAS.cmake +@@ -0,0 +1,180 @@ ++# - Find CBLAS library ++# ++# This module finds an installed fortran library that implements the CBLAS ++# linear-algebra interface (see http://www.netlib.org/blas/), with CBLAS ++# interface. ++# ++# This module sets the following variables: ++# CBLAS_FOUND - set to true if a library implementing the CBLAS interface is found ++# CBLAS_LIBRARIES - list of libraries (using full path name) to link against to use CBLAS ++# CBLAS_INCLUDE_DIR - path to includes ++# CBLAS_INCLUDE_FILE - the file to be included to use CBLAS ++# ++ ++SET(CBLAS_LIBRARIES) ++SET(CBLAS_INCLUDE_DIR) ++SET(CBLAS_INCLUDE_FILE) ++ ++# CBLAS in Intel mkl ++FIND_PACKAGE(MKL) ++IF (MKL_FOUND AND NOT CBLAS_LIBRARIES) ++ SET(CBLAS_LIBRARIES ${MKL_LIBRARIES}) ++ SET(CBLAS_INCLUDE_DIR ${MKL_INCLUDE_DIR}) ++ SET(CBLAS_INCLUDE_FILE "mkl_cblas.h") ++ENDIF (MKL_FOUND AND NOT CBLAS_LIBRARIES) ++ ++# Old CBLAS search ++SET(_verbose TRUE) ++INCLUDE(CheckFunctionExists) ++INCLUDE(CheckIncludeFile) ++ ++MACRO(CHECK_ALL_LIBRARIES LIBRARIES _prefix _name _flags _list _include _search_include) ++ # This macro checks for the existence of the combination of fortran libraries ++ # given by _list. If the combination is found, this macro checks (using the ++ # Check_Fortran_Function_Exists macro) whether can link against that library ++ # combination using the name of a routine given by _name using the linker ++ # flags given by _flags. If the combination of libraries is found and passes ++ # the link test, LIBRARIES is set to the list of complete library paths that ++ # have been found. Otherwise, LIBRARIES is set to FALSE. ++ # N.B. _prefix is the prefix applied to the names of all cached variables that ++ # are generated internally and marked advanced by this macro. ++ SET(__list) ++ FOREACH(_elem ${_list}) ++ IF(__list) ++ SET(__list "${__list} - ${_elem}") ++ ELSE(__list) ++ SET(__list "${_elem}") ++ ENDIF(__list) ++ ENDFOREACH(_elem) ++ IF(_verbose) ++ MESSAGE(STATUS "Checking for [${__list}]") ++ ENDIF(_verbose) ++ SET(_libraries_work TRUE) ++ SET(${LIBRARIES}) ++ SET(_combined_name) ++ SET(_paths) ++ FOREACH(_library ${_list}) ++ SET(_combined_name ${_combined_name}_${_library}) ++ # did we find all the libraries in the _list until now? 
++ # (we stop at the first unfound one) ++ IF(_libraries_work) ++ IF(APPLE) ++ FIND_LIBRARY(${_prefix}_${_library}_LIBRARY ++ NAMES ${_library} ++ PATHS /usr/local/lib /usr/lib /usr/local/lib64 /usr/lib64 ENV ++ DYLD_LIBRARY_PATH ++ ) ++ ELSE(APPLE) ++ FIND_LIBRARY(${_prefix}_${_library}_LIBRARY ++ NAMES ${_library} ++ PATHS /usr/local/lib /usr/lib /usr/local/lib64 /usr/lib64 ENV ++ LD_LIBRARY_PATH ++ ) ++ ENDIF(APPLE) ++ MARK_AS_ADVANCED(${_prefix}_${_library}_LIBRARY) ++ IF(${_prefix}_${_library}_LIBRARY) ++ GET_FILENAME_COMPONENT(_path ${${_prefix}_${_library}_LIBRARY} PATH) ++ LIST(APPEND _paths ${_path}/../include ${_path}/../../include) ++ ENDIF(${_prefix}_${_library}_LIBRARY) ++ SET(${LIBRARIES} ${${LIBRARIES}} ${${_prefix}_${_library}_LIBRARY}) ++ SET(_libraries_work ${${_prefix}_${_library}_LIBRARY}) ++ ENDIF(_libraries_work) ++ ENDFOREACH(_library ${_list}) ++ # Test include ++ SET(_bug_search_include ${_search_include}) #CMAKE BUG!!! SHOULD NOT BE THAT ++ IF(_bug_search_include) ++ FIND_PATH(${_prefix}${_combined_name}_INCLUDE ${_include} ${_paths}) ++ MARK_AS_ADVANCED(${_prefix}${_combined_name}_INCLUDE) ++ IF(${_prefix}${_combined_name}_INCLUDE) ++ IF (_verbose) ++ MESSAGE(STATUS "Includes found") ++ ENDIF (_verbose) ++ SET(${_prefix}_INCLUDE_DIR ${${_prefix}${_combined_name}_INCLUDE}) ++ SET(${_prefix}_INCLUDE_FILE ${_include}) ++ ELSE(${_prefix}${_combined_name}_INCLUDE) ++ SET(_libraries_work FALSE) ++ ENDIF(${_prefix}${_combined_name}_INCLUDE) ++ ELSE(_bug_search_include) ++ SET(${_prefix}_INCLUDE_DIR) ++ SET(${_prefix}_INCLUDE_FILE ${_include}) ++ ENDIF(_bug_search_include) ++ # Test this combination of libraries. ++ IF(_libraries_work) ++ SET(CMAKE_REQUIRED_LIBRARIES ${_flags} ${${LIBRARIES}}) ++ CHECK_FUNCTION_EXISTS(${_name} ${_prefix}${_combined_name}_WORKS) ++ SET(CMAKE_REQUIRED_LIBRARIES) ++ MARK_AS_ADVANCED(${_prefix}${_combined_name}_WORKS) ++ SET(_libraries_work ${${_prefix}${_combined_name}_WORKS}) ++ IF(_verbose AND _libraries_work) ++ MESSAGE(STATUS "Libraries found") ++ ENDIF(_verbose AND _libraries_work) ++ ENDIF(_libraries_work) ++ # Fin ++ IF(NOT _libraries_work) ++ SET(${LIBRARIES} NOTFOUND) ++ ENDIF(NOT _libraries_work) ++ENDMACRO(CHECK_ALL_LIBRARIES) ++ ++# Generic CBLAS library ++IF(NOT CBLAS_LIBRARIES) ++ CHECK_ALL_LIBRARIES( ++ CBLAS_LIBRARIES ++ CBLAS ++ cblas_dgemm ++ "" ++ "cblas" ++ "cblas.h" ++ TRUE ) ++ENDIF() ++ ++# CBLAS in ATLAS library? (http://math-atlas.sourceforge.net/) ++IF(NOT CBLAS_LIBRARIES) ++ CHECK_ALL_LIBRARIES( ++ CBLAS_LIBRARIES ++ CBLAS ++ cblas_dgemm ++ "" ++ "cblas;atlas" ++ "cblas.h" ++ TRUE ) ++ENDIF() ++ ++# CBLAS in BLAS library ++IF(NOT CBLAS_LIBRARIES) ++ CHECK_ALL_LIBRARIES( ++ CBLAS_LIBRARIES ++ CBLAS ++ cblas_dgemm ++ "" ++ "blas" ++ "cblas.h" ++ TRUE ) ++ENDIF() ++ ++# Apple CBLAS library? 
++IF(NOT CBLAS_LIBRARIES) ++ CHECK_ALL_LIBRARIES( ++ CBLAS_LIBRARIES ++ CBLAS ++ cblas_dgemm ++ "" ++ "Accelerate" ++ "Accelerate/Accelerate.h" ++ FALSE ) ++ENDIF() ++ ++IF( NOT CBLAS_LIBRARIES ) ++ CHECK_ALL_LIBRARIES( ++ CBLAS_LIBRARIES ++ CBLAS ++ cblas_dgemm ++ "" ++ "vecLib" ++ "vecLib/vecLib.h" ++ FALSE ) ++ENDIF() ++ ++include ( FindPackageHandleStandardArgs ) ++find_package_handle_standard_args ( CBLAS DEFAULT_MSG CBLAS_LIBRARIES ++) ++ diff --git a/var/spack/repos/builtin/packages/cminpack/package.py b/var/spack/repos/builtin/packages/cminpack/package.py new file mode 100644 index 00000000000..e269accabf7 --- /dev/null +++ b/var/spack/repos/builtin/packages/cminpack/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Cminpack(CMakePackage): + """This is a C version of the minpack minimization package. + Minpack includes software for solving nonlinear equations + and nonlinear least squares problems. + """ + + homepage = "http://devernay.free.fr/hacks/cminpack" + url = "https://github.com/devernay/cminpack/archive/v1.3.6.tar.gz" + git = 'https://github.com/devernay/cminpack.git' + + version('master', branch='master') + version('1.3.6', sha256='3c07fd21308c96477a2c900032e21d937739c233ee273b4347a0d4a84a32d09f') + + variant('shared', default=False, description='Build shared libraries') + variant('blas', default=True, description='Compile with BLAS') + + depends_on('blas', when='+blas') + + # Backport a pull request for correctly linking blas. + # See https://github.com/devernay/cminpack/pull/21 + patch('link_with_blas_pr_21.patch', when='@:1.3.6') + + def cmake_args(self): + args = [ + '-DBUILD_SHARED_LIBS=%s' % ( + 'ON' if '+shared' in self.spec else 'OFF'), + '-DUSE_BLAS=%s' % ( + 'ON' if 'blas' in self.spec else 'OFF') + ] + + return args diff --git a/var/spack/repos/builtin/packages/cosma/package.py b/var/spack/repos/builtin/packages/cosma/package.py new file mode 100644 index 00000000000..efb81035da9 --- /dev/null +++ b/var/spack/repos/builtin/packages/cosma/package.py @@ -0,0 +1,69 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Cosma(CMakePackage): + """ + Distributed Communication-Optimal Matrix-Matrix Multiplication Library + """ + + maintainers = ['teonnik', 'kabicm'] + homepage = 'https://github.com/eth-cscs/COSMA' + url = 'https://github.com/eth-cscs/COSMA/releases/download/v2.0.2/cosma.tar.gz' + git = 'https://github.com/eth-cscs/COSMA.git' + + # note: The default archives produced with github do not have the archives + # of the submodules. 
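# Illustrative sketch, not part of this changeset: when release archives
# are incomplete (e.g. GitHub tarballs that omit git submodules, as the
# note above points out), a recipe can pin versions to the git repository
# and ask Spack to check out submodules as well. Project name, URL and tag
# below are hypothetical.
from spack import *


class SubmoduleExample(CMakePackage):
    """Hypothetical package whose sources pull in git submodules."""

    homepage = 'https://example.org/project'
    git = 'https://github.com/example/project.git'

    version('master', branch='master', submodules=True)
    version('2.0.0', tag='v2.0.0', submodules=True)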
+ version('master', branch='master', submodules=True) + version('2.0.2', sha256='4f3354828bc718f3eef2f0098c3bdca3499297497a220da32db1acd57920c68d') + # note: this version fails to build at the moment + # version('1.0.0', + # url='https://github.com/eth-cscs/COSMA/releases/download/1.0/cosma.tar.gz', + # sha256='c142104258dcca4c17fa7faffc2990a08d2777235c7980006e93c5dca51061f6') + + variant('cuda', default=False, + description='Build with the CUBLAS back end.') + variant('scalapack', default=False, + description='Build with ScaLAPACK support.') + + depends_on('cmake@3.12:', type='build') + depends_on('mpi@3:') + depends_on('blas', when='~cuda') + depends_on('scalapack', when='+scalapack') + # COSMA is written entirely in C++, it may use cublasXt but a CUDA capable + # compiler is not needed. There is no need for CudaPackage in this recipe. + depends_on('cuda', when='+cuda') + + def setup_build_environment(self, env): + if '+cuda' in self.spec: + env.set('CUDA_PATH', self.spec['cuda'].prefix) + + def cmake_args(self): + spec = self.spec + args = ['-DCOSMA_WITH_TESTS=OFF', + '-DCOSMA_WITH_APPS=OFF', + '-DCOSMA_WITH_PROFILING=OFF', + '-DCOSMA_WITH_BENCHMARKS=OFF'] + + if '^mkl' in spec: + args += ['-DCOSMA_BLAS=MKL'] + elif '^netlib-lapack' in spec: + args += ['-DCOSMA_BLAS=CUSTOM'] + elif '^openblas' in spec: + args += ['-DCOSMA_BLAS=OPENBLAS'] + elif '+cuda' in spec: + args += ['-DCOSMA_BLAS=CUDA'] + else: # TODO '^rocm' in spec: + args += ['-DCOSMA_BLAS=ROCM'] + + if '+scalapack' and '^mkl' in spec: + args += ['-DCOSMA_SCALAPACK=MKL'] + elif '+scalapack' and '^netlib-scalapack' in spec: + args += ['-DCOSMA_SCALAPACK=CUSTOM'] + + return args diff --git a/var/spack/repos/builtin/packages/cpio/package.py b/var/spack/repos/builtin/packages/cpio/package.py index ae6b429ccc6..2381a824997 100644 --- a/var/spack/repos/builtin/packages/cpio/package.py +++ b/var/spack/repos/builtin/packages/cpio/package.py @@ -16,3 +16,8 @@ class Cpio(AutotoolsPackage, GNUMirrorPackage): version('2.13', sha256='e87470d9c984317f658567c03bfefb6b0c829ff17dbf6b0de48d71a4c8f3db88') build_directory = 'spack-build' + + def flag_handler(self, name, flags): + if self.spec.satisfies('%intel') and name == 'cflags': + flags.append('-no-gcc') + return (flags, None, None) diff --git a/var/spack/repos/builtin/packages/cppgsl/package.py b/var/spack/repos/builtin/packages/cppgsl/package.py index 4f4c68841fd..ce045a7c687 100644 --- a/var/spack/repos/builtin/packages/cppgsl/package.py +++ b/var/spack/repos/builtin/packages/cppgsl/package.py @@ -13,7 +13,8 @@ class Cppgsl(CMakePackage): url = "https://github.com/Microsoft/GSL/archive/v2.0.0.tar.gz" git = "https://github.com/Microsoft/GSL.git" - version('develop', branch='master') + version('master', branch='master') + version('2.1.0', sha256='ef73814657b073e1be86c8f7353718771bf4149b482b6cb54f99e79b23ff899d') version('2.0.0', sha256='6cce6fb16b651e62711a4f58e484931013c33979b795d1b1f7646f640cfa9c8e') version('1.0.0', sha256='9694b04cd78e5b1a769868f19fdd9eea2002de3d4c3a81a1b769209364543c36') diff --git a/var/spack/repos/builtin/packages/cpprestsdk/Release.patch b/var/spack/repos/builtin/packages/cpprestsdk/Release.patch new file mode 100644 index 00000000000..e860b85f18c --- /dev/null +++ b/var/spack/repos/builtin/packages/cpprestsdk/Release.patch @@ -0,0 +1,45 @@ +diff -ur spack-src/Release.org/include/pplx/pplxlinux.h spack-src/Release/include/pplx/pplxlinux.h +--- spack-src/Release.org/include/pplx/pplxlinux.h 2020-03-12 13:52:40.161917503 +0900 ++++ 
spack-src/Release/include/pplx/pplxlinux.h 2020-03-12 14:05:50.834896829 +0900 +@@ -240,6 +240,11 @@ + { + public: + _PPLXIMP virtual void schedule( TaskProc_t proc, _In_ void* param); ++#if defined(__APPLE__) ++ virtual ~apple_scheduler() {} ++#else ++ virtual ~linux_scheduler() {} ++#endif + }; + + } // namespace details +diff -ur spack-src/Release.org/libs/websocketpp/websocketpp/transport/asio/connection.hpp spack-src/Release/libs/websocketpp/websocketpp/transport/asio/connection.hpp +--- spack-src/Release.org/libs/websocketpp/websocketpp/transport/asio/connection.hpp 2020-03-12 13:52:40.201921703 +0900 ++++ spack-src/Release/libs/websocketpp/websocketpp/transport/asio/connection.hpp 2020-03-12 14:09:13.586186467 +0900 +@@ -422,7 +422,7 @@ + m_io_service = io_service; + + if (config::enable_multithreading) { +- m_strand = lib::make_shared( ++ m_strand = lib::make_shared( + lib::ref(*io_service)); + + m_async_read_handler = m_strand->wrap(lib::bind( +diff -ur spack-src/Release.org/src/uri/uri.cpp spack-src/Release/src/uri/uri.cpp +--- spack-src/Release.org/src/uri/uri.cpp 2020-03-12 13:52:40.241925902 +0900 ++++ spack-src/Release/src/uri/uri.cpp 2020-03-12 13:53:58.610152210 +0900 +@@ -22,12 +22,12 @@ + // canonicalize components first + + // convert scheme to lowercase +- std::transform(m_scheme.begin(), m_scheme.end(), m_scheme.begin(), [this](utility::char_t c) { ++ std::transform(m_scheme.begin(), m_scheme.end(), m_scheme.begin(), [](utility::char_t c) { + return (utility::char_t)tolower(c); + }); + + // convert host to lowercase +- std::transform(m_host.begin(), m_host.end(), m_host.begin(), [this](utility::char_t c) { ++ std::transform(m_host.begin(), m_host.end(), m_host.begin(), [](utility::char_t c) { + return (utility::char_t)tolower(c); + }); + diff --git a/var/spack/repos/builtin/packages/cpprestsdk/package.py b/var/spack/repos/builtin/packages/cpprestsdk/package.py index 51acc882b6d..3bfbf52c292 100644 --- a/var/spack/repos/builtin/packages/cpprestsdk/package.py +++ b/var/spack/repos/builtin/packages/cpprestsdk/package.py @@ -17,6 +17,11 @@ class Cpprestsdk(CMakePackage): version('2.9.1', sha256='537358760acd782f4d2ed3a85d92247b4fc423aff9c85347dc31dbb0ab9bab16') - depends_on('boost') + depends_on('boost@:1.69.0') + + # Ref: https://github.com/microsoft/cpprestsdk/commit/f9f518e4ad84577eb684ad8235181e4495299af4 + # Ref: https://github.com/Microsoft/cpprestsdk/commit/6b2e0480018530b616f61d5cdc786c92ba148bb7 + # Ref: https://github.com/microsoft/cpprestsdk/commit/70c1b14f39f5d47984fdd8a31fc357ebb5a37851 + patch('Release.patch') root_cmakelists_dir = 'Release' diff --git a/var/spack/repos/builtin/packages/cryptsetup/package.py b/var/spack/repos/builtin/packages/cryptsetup/package.py index 10519100ee8..99c14de004b 100644 --- a/var/spack/repos/builtin/packages/cryptsetup/package.py +++ b/var/spack/repos/builtin/packages/cryptsetup/package.py @@ -16,6 +16,9 @@ class Cryptsetup(AutotoolsPackage): # If you're adding newer versions, check whether the patch below # still needs to be applied. 
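# Illustrative sketch, not part of this changeset: scoping a patch to the
# releases that actually need it is done with a `when=` spec on patch(),
# exactly as the cryptsetup hunk below does once 2.2.2+ no longer needs
# the libintl workaround. Filenames, version ranges and the checksum below
# are hypothetical placeholders.
from spack import *


class PatchScopeExample(AutotoolsPackage):
    """Hypothetical package carrying a fix that upstream shipped in 2.2.2."""

    # Local patch applied only to versions that predate the upstream fix.
    patch('linker-flags.patch', when='@:2.2.1')

    # Remote patches should also pin a checksum.
    patch('https://example.org/fix-build.patch',
          sha256='0' * 64,  # placeholder, not a real checksum
          when='@2.3.0:2.3.1')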
+ version('2.3.1', sha256='92aba4d559a2cf7043faed92e0f22c5addea36bd63f8c039ba5a8f3a159fe7d2') + version('2.2.3', sha256='2af0ec9551ab9c870074cae9d3f68d82cab004f4095fa89db0e4413713424a46') + version('2.2.2', sha256='2af0ec9551ab9c870074cae9d3f68d82cab004f4095fa89db0e4413713424a46') version('2.2.1', sha256='94e79a31ed38bdb0acd9af7ccca1605a2ac62ca850ed640202876b1ee11c1c61') depends_on('libuuid', type=('build', 'link')) @@ -25,6 +28,7 @@ class Cryptsetup(AutotoolsPackage): depends_on('util-linux~libuuid', type=('build', 'link')) depends_on('gettext', type=('build', 'link')) + depends_on('pkgconfig', type='build') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') @@ -35,7 +39,7 @@ class Cryptsetup(AutotoolsPackage): # the LDFLAGS. See https://gitlab.com/cryptsetup/cryptsetup/issues/479 # This *should* be unnecessary starting with release 2.2.2, see # https://gitlab.com/cryptsetup/cryptsetup/issues/479#note_227617031 - patch('autotools-libintl.patch') + patch('autotools-libintl.patch', when='@:2.2.1') def url_for_version(self, version): url = "https://www.kernel.org/pub/linux/utils/cryptsetup/v{0}/cryptsetup-{1}.tar.xz" diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py index f6487d498ee..3789612f129 100644 --- a/var/spack/repos/builtin/packages/darshan-runtime/package.py +++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py @@ -21,6 +21,7 @@ class DarshanRuntime(Package): maintainers = ['shanedsnyder', 'carns'] version('develop', branch='master') + version('3.1.8', sha256='3ed51c8d5d93b4a8cbb7d53d13052140a9dffe0bc1a3e1ebfc44a36a184b5c82') version('3.1.7', sha256='9ba535df292727ac1e8025bdf2dc42942715205cad8319d925723fd88709e8d6') version('3.1.6', sha256='21cb24e2a971c45e04476e00441b7fbea63d2afa727a5cf8b7a4a9d9004dd856') version('3.1.0', sha256='b847047c76759054577823fbe21075cfabb478cdafad341d480274fb1cef861c') diff --git a/var/spack/repos/builtin/packages/darshan-util/package.py b/var/spack/repos/builtin/packages/darshan-util/package.py index 9fe3012b2c2..8d25fa99650 100644 --- a/var/spack/repos/builtin/packages/darshan-util/package.py +++ b/var/spack/repos/builtin/packages/darshan-util/package.py @@ -19,6 +19,7 @@ class DarshanUtil(Package): maintainers = ['shanedsnyder', 'carns'] version('develop', branch='master') + version('3.1.8', sha256='3ed51c8d5d93b4a8cbb7d53d13052140a9dffe0bc1a3e1ebfc44a36a184b5c82') version('3.1.7', sha256='9ba535df292727ac1e8025bdf2dc42942715205cad8319d925723fd88709e8d6') version('3.1.6', sha256='21cb24e2a971c45e04476e00441b7fbea63d2afa727a5cf8b7a4a9d9004dd856') version('3.1.0', sha256='b847047c76759054577823fbe21075cfabb478cdafad341d480274fb1cef861c') diff --git a/var/spack/repos/builtin/packages/davix/package.py b/var/spack/repos/builtin/packages/davix/package.py index faed2f16fe0..4062fb4daec 100644 --- a/var/spack/repos/builtin/packages/davix/package.py +++ b/var/spack/repos/builtin/packages/davix/package.py @@ -19,7 +19,18 @@ class Davix(CMakePackage): version('0.6.9', sha256='fbd97eb5fdf82ca48770d06bf8e2805b35f23255478aa381a9d25a49eb98e348') version('0.6.8', sha256='e1820f4cc3fc44858ae97197a3922cce2a1130ff553b080ba19e06eb8383ddf7') + variant('cxxstd', + default='11', + values=('11', '14', '17'), + multi=False, + description='Use the specified C++ standard when building.') + depends_on('pkgconfig', type='build') depends_on('libxml2') depends_on('libuuid') depends_on('openssl') + + def cmake_args(self): + 
cmake_args = ['-DCMAKE_CXX_STANDARD={0}'.format( + self.spec.variants['cxxstd'].value)] + return cmake_args diff --git a/var/spack/repos/builtin/packages/dcmtk/package.py b/var/spack/repos/builtin/packages/dcmtk/package.py index de7a4c6ef26..2bf3db66d14 100644 --- a/var/spack/repos/builtin/packages/dcmtk/package.py +++ b/var/spack/repos/builtin/packages/dcmtk/package.py @@ -32,7 +32,7 @@ class Dcmtk(CMakePackage): depends_on('libxml2', type=('build', 'link'), when='+xml') variant('iconv', default=True, description="Charset conversion support (iconv)") - depends_on('libiconv', type=('build', 'link')) + depends_on('iconv', type=('build', 'link')) variant('cxx11', default=False, description="Enable c++11 features") variant('stl', default=True, description="Use native STL implementation") diff --git a/var/spack/repos/builtin/packages/ddd/package.py b/var/spack/repos/builtin/packages/ddd/package.py new file mode 100644 index 00000000000..d0edde042d8 --- /dev/null +++ b/var/spack/repos/builtin/packages/ddd/package.py @@ -0,0 +1,75 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import os + + +class Ddd(AutotoolsPackage, GNUMirrorPackage): + """A graphical front-end for command-line debuggers such as GDB, DBX, + WDB, Ladebug, JDB, XDB, the Perl debugger, the bash debugger bashdb, + the GNU Make debugger remake, or the Python debugger pydb.""" + + homepage = "https://www.gnu.org/software/ddd" + gnu_mirror_path = "ddd/ddd-3.3.12.tar.gz" + + version('3.3.12', sha256='3ad6cd67d7f4b1d6b2d38537261564a0d26aaed077bf25c51efc1474d0e8b65c') + + variant('shared', default=True, description='Build shared libraries') + variant('static', default=False, description='Build static libraries') + + depends_on('gdb@4.16:') + depends_on('lesstif@0.89:') + + # Needed for OSX 10.9 DP6 build failure: + # https://savannah.gnu.org/patch/?8178 + patch('https://savannah.gnu.org/patch/download.php?file_id=29114', + sha256='aaacae79ce27446ead3483123abef0f8222ebc13fd61627bfadad96016248af6', + working_dir='ddd') + + # https://savannah.gnu.org/bugs/?41997 + patch('https://savannah.gnu.org/patch/download.php?file_id=31132', + sha256='f3683f23c4b4ff89ba701660031d4b5ef27594076f6ef68814903ff3141f6714') + + # Patch to fix compilation with Xcode 9 + # https://savannah.gnu.org/bugs/?52175 + patch('https://raw.githubusercontent.com/macports/macports-ports/a71fa9f4/devel/ddd/files/patch-unknown-type-name-a_class.diff', + sha256='c187a024825144f186f0cf9cd175f3e972bb84590e62079793d0182cb15ca183', + working_dir='ddd') + + def configure_args(self): + spec = self.spec + + args = [ + '--disable-debug', + '--disable-dependency-tracking', + '--enable-builtin-app-defaults', + '--enable-builtin-manual', + '--enable-shared' if '+shared' in spec else '--disable-shared', + '--enable-static' if '+static' in spec else '--disable-static', + ] + + return args + + # From MacPorts: make will build the executable "ddd" and the X + # resource file "Ddd" in the same directory. As HFS+ is case- + # insensitive by default this will loosely FAIL. Mitigate this by + # building/installing 'dddexe' on Darwin and fixing up post install. + def build(self, spec, prefix): + make('EXEEXT={0}'. + format('exe' if spec.satisfies('platform=darwin') else '')) + + # DDD won't install in parallel + def install(self, spec, prefix): + make('install', + 'EXEEXT={0}'. 
+ format('exe' if spec.satisfies('platform=darwin') else ''), + parallel=False) + + @run_after('install') + def _rename_exe_on_darwin(self): + if self.spec.satisfies('platform=darwin'): + with working_dir(self.prefix.bin): + os.rename('dddexe', 'ddd') diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 1b9e02c0440..d3a58cb835f 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -5,6 +5,8 @@ from spack import * +import os + class Dealii(CMakePackage, CudaPackage): """C++ software library providing well-documented tools to build finite @@ -459,6 +461,13 @@ def cmake_args(self): ' '.join(cxx_flags)) ]) + # Add flags for machine vectorization, used when tutorials + # and user code is built. + # See https://github.com/dealii/dealii/issues/9164 + options.extend([ + '-DDEAL_II_CXX_FLAGS=%s' % os.environ['SPACK_TARGET_ARGS'] + ]) + return options def setup_run_environment(self, env): diff --git a/var/spack/repos/builtin/packages/denovogear/newmat6.cpp.patch b/var/spack/repos/builtin/packages/denovogear/newmat6.cpp.patch new file mode 100644 index 00000000000..36d2e240cd8 --- /dev/null +++ b/var/spack/repos/builtin/packages/denovogear/newmat6.cpp.patch @@ -0,0 +1,11 @@ +--- spack-src/src/contrib/newmat/newmat6.cpp.org 2020-03-19 14:06:13.679032667 +0900 ++++ spack-src/src/contrib/newmat/newmat6.cpp 2020-03-19 14:07:34.267492838 +0900 +@@ -428,7 +428,7 @@ + { + if (&gm == this) { REPORT tag_val = -1; return; } + REPORT +- if (indx > 0) { delete [] indx; indx = 0; } ++ if (indx != NULL) { delete [] indx; indx = 0; } + ((CroutMatrix&)gm).get_aux(*this); + Eq(gm); + } diff --git a/var/spack/repos/builtin/packages/denovogear/package.py b/var/spack/repos/builtin/packages/denovogear/package.py index 8f475492c15..bc683febeff 100644 --- a/var/spack/repos/builtin/packages/denovogear/package.py +++ b/var/spack/repos/builtin/packages/denovogear/package.py @@ -25,3 +25,5 @@ class Denovogear(CMakePackage): depends_on('zlib', type=('link')) patch('stream-open.patch', when='@:1.1.1') + # fix: ordered comparison between pointer and zero. + patch('newmat6.cpp.patch') diff --git a/var/spack/repos/builtin/packages/dhpmm-f/package.py b/var/spack/repos/builtin/packages/dhpmm-f/package.py new file mode 100644 index 00000000000..5e2bfa31548 --- /dev/null +++ b/var/spack/repos/builtin/packages/dhpmm-f/package.py @@ -0,0 +1,51 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class DhpmmF(MakefilePackage): + """DHPMM_P:High-precision Matrix Multiplication with Faithful Rounding""" + + homepage = "http://www.math.twcu.ac.jp/ogita/post-k/" + url = "http://www.math.twcu.ac.jp/ogita/post-k/software/DHPMM_F/DHPMM_F_alpha.tar.gz" + + version('alpha', sha256='35321ecbc749f2682775ffcd27833afc8c8eb4fa7753ce769727c9d1fe097848') + + depends_on('blas', type='link') + depends_on('lapack', type='link') + + def patch(self): + math_libs = self.spec['lapack'].libs + self.spec['blas'].libs + makefile = FileFilter('Makefile') + if self.spec.satisfies('%gcc'): + makefile.filter(r'^MKL\s+=\s1', 'MKL=0') + makefile.filter(r'^CC\s+=\sgcc', + 'CC={0}'.format(spack_cc)) + makefile.filter(r'^CXX\s+=\sg\+\+', + 'CXX={0}'.format(spack_cxx)) + makefile.filter(r'^BLASLIBS\s+=\s-llapack\s-lblas', + 'BLASLIBS={0}'.format(math_libs.ld_flags)) + elif self.spec.satisfies('%fj'): + makefile.filter(r'^#ENV\s+=\sFX100', 'ENV=FX100') + makefile.filter(r'^ENV\s+=\sGCC', '#ENV=GCC') + makefile.filter(r'^MKL\s+=\s1', 'MKL=0') + makefile.filter(r'^CC\s+=\sfccpx', + 'CC={0}'.format(spack_cc)) + makefile.filter(r'^CXX\s+=\sFCCpx', + 'CXX={0}'.format(spack_cxx)) + makefile.filter(r'^BLASLIBS\s+=\s-llapack\s-lblas', + 'BLASLIBS={0}'.format(math_libs.ld_flags)) + elif self.spec.satisfies('%intel'): + makefile.filter(r'^ENV\s+=\sGCC', '#ENV=GCC') + makefile.filter(r'^ENV\s+=\sICC', 'ENV=ICC') + makefile.filter(r'^CC\s+=\sicc', + 'CC={0}'.format(spack_cc)) + makefile.filter(r'^CXX\s+=\sicc', + 'CXX={0}'.format(spack_cxx)) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('test/source4_SpMV', prefix.bin) diff --git a/var/spack/repos/builtin/packages/diffutils/package.py b/var/spack/repos/builtin/packages/diffutils/package.py index c8f2dca83f3..d41086113a5 100644 --- a/var/spack/repos/builtin/packages/diffutils/package.py +++ b/var/spack/repos/builtin/packages/diffutils/package.py @@ -18,7 +18,7 @@ class Diffutils(AutotoolsPackage, GNUMirrorPackage): build_directory = 'spack-build' - depends_on('libiconv') + depends_on('iconv') def setup_build_environment(self, env): if self.spec.satisfies('%fj'): diff --git a/var/spack/repos/builtin/packages/dihydrogen/package.py b/var/spack/repos/builtin/packages/dihydrogen/package.py new file mode 100644 index 00000000000..6de0cfb5164 --- /dev/null +++ b/var/spack/repos/builtin/packages/dihydrogen/package.py @@ -0,0 +1,111 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Dihydrogen(CMakePackage, CudaPackage): + """DiHydrogen is the second version of the Hydrogen fork of the + well-known distributed linear algebra library, + Elemental. 
DiHydrogen aims to be a basic distributed
+    multilinear algebra interface with a particular emphasis on the
+    needs of the distributed machine learning effort, LBANN."""
+
+    homepage = "https://github.com/LLNL/DiHydrogen.git"
+    git = "https://github.com/LLNL/DiHydrogen.git"
+
+    maintainers = ['bvanessen']
+
+    version('master', branch='master')
+
+    variant('al', default=True,
+            description='Builds with Aluminum communication library')
+    variant('developer', default=False,
+            description='Enable extra warnings and force tests to be enabled.')
+    variant('half', default=False,
+            description='Enable FP16 support on the CPU.')
+    variant('legacy', default=False,
+            description='Enable the legacy DistConv code branch.')
+    variant('nvshmem', default=False,
+            description='Builds with support for NVSHMEM')
+    variant('openmp', default=False,
+            description='Enable CPU acceleration with OpenMP threads.')
+    variant('rocm', default=False,
+            description='Enable ROCm/HIP language features.')
+    variant('shared', default=True,
+            description='Enables the build of shared libraries')
+
+    # Override the default set of CUDA architectures with the relevant
+    # subset from lib/spack/spack/build_systems/cuda.py
+    cuda_arch_values = [
+        '60', '61', '62',
+        '70', '72', '75',
+    ]
+    variant('cuda_arch',
+            description='CUDA architecture',
+            values=spack.variant.auto_or_any_combination_of(*cuda_arch_values))
+
+    depends_on('mpi')
+    depends_on('catch2', type='test')
+
+    depends_on('aluminum', when='+al ~cuda')
+    depends_on('aluminum +gpu +nccl +mpi_cuda', when='+al +cuda')
+
+    depends_on('cuda', when='+cuda')
+    depends_on('cudnn', when='+cuda')
+    depends_on('cub', when='+cuda')
+
+    # Legacy builds require cuda
+    conflicts('~cuda', when='+legacy')
+
+    depends_on('half', when='+half')
+
+    generator = 'Ninja'
+    depends_on('ninja', type='build')
+    depends_on('cmake@3.14.0:', type='build')
+
+    illegal_cuda_arch_values = [
+        '10', '11', '12', '13',
+        '20', '21',
+        '30', '32', '35', '37',
+        '50', '52', '53',
+    ]
+    for value in illegal_cuda_arch_values:
+        conflicts('cuda_arch=' + value)
+
+    @property
+    def libs(self):
+        shared = True if '+shared' in self.spec else False
+        return find_libraries(
+            'libH2Core', root=self.prefix, shared=shared, recursive=True
+        )
+
+    def cmake_args(self):
+        spec = self.spec
+
+        args = [
+            '-DCMAKE_INSTALL_MESSAGE:STRING=LAZY',
+            '-DBUILD_SHARED_LIBS:BOOL=%s' % ('+shared' in spec),
+            '-DH2_ENABLE_CUDA=%s' % ('+cuda' in spec),
+            '-DH2_ENABLE_DISTCONV_LEGACY=%s' % ('+legacy' in spec),
+            '-DH2_ENABLE_OPENMP=%s' % ('+openmp' in spec),
+            '-DH2_ENABLE_FP16=%s' % ('+half' in spec),
+            '-DH2_ENABLE_HIP_ROCM=%s' % ('+rocm' in spec),
+            '-DH2_DEVELOPER_BUILD=%s' % ('+developer' in spec),
+        ]
+
+        if '+cuda' in spec:
+            cuda_arch = spec.variants['cuda_arch'].value
+            if len(cuda_arch) == 1 and cuda_arch[0] == 'auto':
+                args.append('-DCMAKE_CUDA_FLAGS=-arch=sm_60')
+            else:
+                cuda_arch = [x for x in spec.variants['cuda_arch'].value
+                             if x != 'auto']
+                if cuda_arch:
+                    args.append('-DCMAKE_CUDA_FLAGS={0}'.format(
+                        ' '.join(self.cuda_flags(cuda_arch))
+                    ))
+
+        return args
diff --git a/var/spack/repos/builtin/packages/intel-mkl-dnn/package.py b/var/spack/repos/builtin/packages/dnnl/package.py
similarity index 63%
rename from var/spack/repos/builtin/packages/intel-mkl-dnn/package.py
rename to var/spack/repos/builtin/packages/dnnl/package.py
index ab3a7cde461..9b339a5a015 100644
--- a/var/spack/repos/builtin/packages/intel-mkl-dnn/package.py
+++ 
b/var/spack/repos/builtin/packages/dnnl/package.py @@ -3,18 +3,22 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +import sys -class IntelMklDnn(CMakePackage): - """Intel(R) Math Kernel Library for Deep Neural Networks - (Intel(R) MKL-DNN).""" +class Dnnl(CMakePackage): + """Deep Neural Network Library (DNNL). - homepage = "https://intel.github.io/mkl-dnn/" - url = "https://github.com/intel/mkl-dnn/archive/v1.1.1.tar.gz" + Formerly known as Intel MKL-DNN.""" + + homepage = "https://01.org/dnnl" + url = "https://github.com/intel/mkl-dnn/archive/v1.3.tar.gz" maintainers = ['adamjstewart'] + version('1.3', sha256='7396c20bd0c2dcf71cec84422bd6f9b91778938c10a7578424a7681fb822b077') + version('1.2.2', sha256='a71ec1f27c30b8a176605e8a78444f1f12301a3c313b70ff93290926c140509c') + version('1.2.1', sha256='c69544783c453ab3fbf14c7a5b9a512561267690c9fc3e7fc3470f04756e0ab3') version('1.2', sha256='30979a09753e8e35d942446c3778c9f0eba543acf2fb0282af8b9c89355d0ddf') version('1.1.3', sha256='0e9bcbc86cc215a84a5455a395ce540c68e255eaa586e37222fff622b9b17df7') version('1.1.2', sha256='284b20e0cab67025bb7d21317f805d6217ad77fb3a47ad84b3bacf37bde62da9') @@ -25,6 +29,7 @@ class IntelMklDnn(CMakePackage): version('1.0.2', sha256='3164eb2914e2160ac6ffd345781cf7554ce410830398cc6b2761e8668faf5ca8') version('1.0.1', sha256='91fb84601c18f8a5a87eccd7b63d61f03495f36c5c533bd7f59443e4f8bb2595') version('1.0', sha256='27fd9da9720c452852f1226581e7914efcf74e1ff898468fdcbe1813528831ba') + version('0.21.4', sha256='1e774138203b773b5af2eed9cc6f1973f13a7263a3b80127682246c5a6c5bc45') version('0.21.3', sha256='31e78581e59d7e60d4becaba3834fc6a5bf2dccdae3e16b7f70d89ceab38423f') version('0.21.2', sha256='ed56652dd237deb86ee9bf102c18de5f2625c059e5ab1d7512c8dc01e316b694') version('0.21.1', sha256='766ecfa5ac68be8cf9eacd4c712935c0ed945e5e6fe51640f05ee735cff62a38') @@ -42,15 +47,38 @@ class IntelMklDnn(CMakePackage): version('0.10', sha256='59828764ae43f1151f77b8997012c52e0e757bc50af1196b86fce8934178c570') version('0.9', sha256='8606a80851c45b0076f7d4047fbf774ce13d6b6d857cb2edf95c7e1fd4bca1c7') + default_cpu_runtime = 'omp' + if sys.platform == 'darwin': + default_cpu_runtime = 'tbb' + + variant('cpu_runtime', default=default_cpu_runtime, + description='CPU threading runtime to use', + values=('omp', 'tbb', 'seq'), multi=False) + variant('gpu_runtime', default='none', + description='Runtime to use for GPU engines', + values=('ocl', 'none'), multi=False) + + # https://github.com/intel/mkl-dnn#requirements-for-building-from-source depends_on('cmake@2.8.11:', type='build') - depends_on('intel-mkl') - depends_on('llvm-openmp', when='%clang platform=darwin') + depends_on('tbb@2017:', when='cpu_runtime=tbb') + depends_on('llvm-openmp', when='%clang platform=darwin cpu_runtime=omp') + depends_on('opencl@1.2:', when='gpu_runtime=ocl') def cmake_args(self): - args = [] + args = [ + '-DDNNL_CPU_RUNTIME={0}'.format( + self.spec.variants['cpu_runtime'].value.upper()), + '-DDNNL_GPU_RUNTIME={0}'.format( + self.spec.variants['gpu_runtime'].value.upper()), + ] + + if self.run_tests: + args.append('-DDNNL_BUILD_TESTS=ON') + else: + args.append('-DDNNL_BUILD_TESTS=OFF') # https://github.com/intel/mkl-dnn/issues/591 - if self.spec.satisfies('%clang platform=darwin'): + if self.spec.satisfies('%clang platform=darwin cpu_runtime=omp'): args.extend([ '-DOpenMP_CXX_FLAGS={0}'.format(self.compiler.openmp_flag), '-DOpenMP_C_FLAGS={0}'.format(self.compiler.openmp_flag), @@ -63,5 +91,10 @@ def cmake_args(self): 
self.spec['llvm-openmp'].libs.ld_flags ), ]) + elif self.spec.satisfies('cpu_runtime=tbb'): + args.append('-DTBBROOT=' + self.spec['tbb'].prefix) + + if self.spec.satisfies('gpu_runtime=ocl'): + args.append('-DOPENCLROOT=' + self.spec['opencl'].prefix) return args diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py index aa99ca73ee2..c3c80614d5d 100644 --- a/var/spack/repos/builtin/packages/doxygen/package.py +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -29,7 +29,7 @@ class Doxygen(CMakePackage): depends_on("cmake@2.8.12:", type='build') depends_on("python", type='build') # 2 or 3 OK; used in CMake build - depends_on("libiconv") + depends_on("iconv") depends_on("flex", type='build') # code.l just checks subminor version <=2.5.4 or >=2.5.33 # but does not recognize 2.6.x as newer...could be patched if needed @@ -45,6 +45,8 @@ class Doxygen(CMakePackage): patch('shared_ptr.patch', when='@1.8.14') def patch(self): + if self.spec['iconv'].name == 'libc': + return # On Linux systems, iconv is provided by libc. Since CMake finds the # symbol in libc, it does not look for libiconv, which leads to linker # errors. This makes sure that CMake always looks for the external diff --git a/var/spack/repos/builtin/packages/dpdk/package.py b/var/spack/repos/builtin/packages/dpdk/package.py new file mode 100644 index 00000000000..c25b32c8d7b --- /dev/null +++ b/var/spack/repos/builtin/packages/dpdk/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Dpdk(MakefilePackage): + """DPDK is a set of libraries and drivers for fast packet processing. + It supports many processor architectures and both FreeBSD and Linux.""" + + homepage = "https://github.com/DPDK/dpdk" + url = "https://github.com/DPDK/dpdk/archive/v19.11.tar.gz" + + version('20.02', sha256='29e56ea8e47e30110ecb881fa5a37125a865dd2d45b61f68e93e334caaab16b7') + version('19.11', sha256='ce1befb20a5e5c5399b326a39cfa23314a5229c0ced2553f53b09b1ae630706b') + version('19.08', sha256='1ceff1a6f4f8d5f6f62c1682097249227ac5225ccd9638e0af09f5411c681038') + version('19.05', sha256='5fea95cb726e6adaa506dab330e79563ccd4dacf03f126c826aabdced605d32b') + version('19.02', sha256='04885d32c86fff5aefcfffdb8257fed405233602dbcd22f8298be13c2e285a50') + + depends_on('numactl') + + def build(self, spec, prefix): + make('defconfig') + make() + + def install(self, spec, prefix): + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/eagle/package.py b/var/spack/repos/builtin/packages/eagle/package.py new file mode 100644 index 00000000000..81e0be6f903 --- /dev/null +++ b/var/spack/repos/builtin/packages/eagle/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Eagle(MakefilePackage): + """EAGLE: Explicit Alternative Genome Likelihood Evaluator""" + + homepage = "https://github.com/tony-kuo/eagle" + url = "https://github.com/tony-kuo/eagle/archive/v1.1.2.tar.gz" + + version('1.1.2', sha256='afe967560d1f8fdbd0caf4b93b5f2a86830e9e4d399fee4a526140431343045e') + + depends_on('curl') + depends_on('zlib') + depends_on('lzma') + depends_on('htslib') + + def edit(self, spec, prefix): + # remove unused gcc flags + filter_file('$(LFLAGS) $(INCLUDES)', '', 'Makefile', string=True) + + # drop static link to htslib + filter_file('$(LIBS)', '', 'Makefile', string=True) + + # don't try to build htslib. + filter_file('all: UTIL HTSLIB', 'all: UTIL', + 'Makefile', string=True) + + # add htslib link to ldflags + filter_file('-lcurl', '-lcurl -lhts', 'Makefile', string=True) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + + bins = [ + 'eagle', + 'eagle-rc', + 'eagle-nm', + ] + + for b in bins: + install(b, prefix.bin) diff --git a/var/spack/repos/builtin/packages/eclipse-gcj-parser/package.py b/var/spack/repos/builtin/packages/eclipse-gcj-parser/package.py index 11054b409c1..13100d89a86 100644 --- a/var/spack/repos/builtin/packages/eclipse-gcj-parser/package.py +++ b/var/spack/repos/builtin/packages/eclipse-gcj-parser/package.py @@ -19,6 +19,9 @@ class EclipseGcjParser(Package): maintainers = ['citibeth'] + # The server is sometimes a bit slow to respond + fetch_options = {'timeout': 60} + version('4.8', sha256='98fd128f1d374d9e42fd9d4836bdd249c6d511ebc6c0df17fbc1b9df96c3d781', expand=False) phases = ('build', 'install') diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index 8dd2980c3d3..e1ffdc0e3b6 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -22,17 +22,17 @@ class Elfutils(AutotoolsPackage): list_depth = 1 # Sourceware is often slow to respond. 
- timeout = {'timeout': 60} + fetch_options = {'timeout': 60} - version('0.178', sha256='31e7a00e96d4e9c4bda452e1f2cdac4daf8abd24f5e154dee232131899f3a0f2', fetch_options=timeout) - version('0.177', sha256='fa489deccbcae7d8c920f60d85906124c1989c591196d90e0fd668e3dc05042e', fetch_options=timeout) - version('0.176', sha256='eb5747c371b0af0f71e86215a5ebb88728533c3a104a43d4231963f308cd1023', fetch_options=timeout) - version('0.175', sha256='f7ef925541ee32c6d15ae5cb27da5f119e01a5ccdbe9fe57bf836730d7b7a65b', fetch_options=timeout) - version('0.174', sha256='cdf27e70076e10a29539d89e367101d516bc4aa11b0d7777fe52139e3fcad08a', fetch_options=timeout) - version('0.173', sha256='b76d8c133f68dad46250f5c223482c8299d454a69430d9aa5c19123345a000ff', fetch_options=timeout) - version('0.170', sha256='1f844775576b79bdc9f9c717a50058d08620323c1e935458223a12f249c9e066', fetch_options=timeout) - version('0.168', sha256='b88d07893ba1373c7dd69a7855974706d05377766568a7d9002706d5de72c276', fetch_options=timeout) - version('0.163', sha256='7c774f1eef329309f3b05e730bdac50013155d437518a2ec0e24871d312f2e23', fetch_options=timeout) + version('0.178', sha256='31e7a00e96d4e9c4bda452e1f2cdac4daf8abd24f5e154dee232131899f3a0f2') + version('0.177', sha256='fa489deccbcae7d8c920f60d85906124c1989c591196d90e0fd668e3dc05042e') + version('0.176', sha256='eb5747c371b0af0f71e86215a5ebb88728533c3a104a43d4231963f308cd1023') + version('0.175', sha256='f7ef925541ee32c6d15ae5cb27da5f119e01a5ccdbe9fe57bf836730d7b7a65b') + version('0.174', sha256='cdf27e70076e10a29539d89e367101d516bc4aa11b0d7777fe52139e3fcad08a') + version('0.173', sha256='b76d8c133f68dad46250f5c223482c8299d454a69430d9aa5c19123345a000ff') + version('0.170', sha256='1f844775576b79bdc9f9c717a50058d08620323c1e935458223a12f249c9e066') + version('0.168', sha256='b88d07893ba1373c7dd69a7855974706d05377766568a7d9002706d5de72c276') + version('0.163', sha256='7c774f1eef329309f3b05e730bdac50013155d437518a2ec0e24871d312f2e23') # Libraries for reading compressed DWARF sections. variant('bzip2', default=False, diff --git a/var/spack/repos/builtin/packages/er/package.py b/var/spack/repos/builtin/packages/er/package.py index 55e5067f129..343a08ff919 100644 --- a/var/spack/repos/builtin/packages/er/package.py +++ b/var/spack/repos/builtin/packages/er/package.py @@ -9,8 +9,8 @@ class Er(CMakePackage): """Encoding and redundancy on a file set""" - homepage = "https://github.com/ECP-VeloC/er" - url = "https://github.com/ECP-VeloC/er/archive/v0.0.3.zip" + homepage = "https://github.com/ecp-veloc/er" + url = "https://github.com/ecp-veloc/er/archive/v0.0.3.zip" git = "https://github.com/ecp-veloc/er.git" tags = ['ecp'] diff --git a/var/spack/repos/builtin/packages/exchanger/package.py b/var/spack/repos/builtin/packages/exchanger/package.py deleted file mode 100644 index 018459d8c32..00000000000 --- a/var/spack/repos/builtin/packages/exchanger/package.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - - -class Exchanger(AutotoolsPackage): - """Exchanger is a package containing several C++ base classes. These - classes, when customized for a solver, can provide communication channels - between solvers. 
This packaged is used by CitcomS for solver coupling.""" - - homepage = "https://geodynamics.org/cig/software/exchanger/" - url = "https://geodynamics.org/cig/software/exchanger/Exchanger-1.0.1.tar.gz" - - version('1.0.1', sha256='1e6c8311db96582bcf2c9aee16a863a5730c1aa54cb3aa7d0249239c6e0b68ee') - - depends_on('python', type=('build', 'run')) - depends_on('py-merlin', type='build') - depends_on('py-pythia@0.8.1.0:0.8.1.999', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py index 4ea5928b023..bab7b365d8e 100644 --- a/var/spack/repos/builtin/packages/extrae/package.py +++ b/var/spack/repos/builtin/packages/extrae/package.py @@ -35,7 +35,7 @@ class Extrae(AutotoolsPackage): OpenMP, CUDA, OpenCL, pthread, OmpSs""" homepage = "https://tools.bsc.es/extrae" url = "https://ftp.tools.bsc.es/extrae/extrae-3.4.1-src.tar.bz2" - version('3.7.1', sha256='95810b057f95e91bfc89813eb8bd320dfe40614fc8e98c63d95c5101c56dd213') + version('3.7.1', sha256='c83ddd18a380c9414d64ee5de263efc6f7bac5fe362d5b8374170c7f18360378') version('3.4.1', sha256='77bfec16d6b5eee061fbaa879949dcef4cad28395d6a546b1ae1b9246f142725') depends_on("autoconf", type='build') @@ -51,7 +51,7 @@ class Extrae(AutotoolsPackage): depends_on("elf", type="link") depends_on("libxml2") depends_on("numactl") - depends_on("binutils+libiberty") + depends_on("binutils+libiberty@:2.33") depends_on("gettext") # gettext dependency added to find -lintl # https://www.gnu.org/software/gettext/FAQ.html#integrating_undefined diff --git a/var/spack/repos/builtin/packages/eztrace/package.py b/var/spack/repos/builtin/packages/eztrace/package.py index 9468ce82147..ddf9808f7b7 100644 --- a/var/spack/repos/builtin/packages/eztrace/package.py +++ b/var/spack/repos/builtin/packages/eztrace/package.py @@ -11,9 +11,10 @@ class Eztrace(AutotoolsPackage): of HPC applications.""" homepage = "http://eztrace.gforge.inria.fr" - url = "https://gforge.inria.fr/frs/download.php/file/37703/eztrace-1.1-8.tar.gz" + url = "https://gitlab.com/eztrace/eztrace/-/archive/eztrace-1.1-10/eztrace-eztrace-1.1-10.tar.gz" + maintainers = ['trahay'] - version('1.1-8', sha256='d80d78a25f1eb0e6e60a3e535e3972cd178c6a8663a3d6109105dfa6a880b8ec') + version('1.1-10', sha256='97aba8f3b3b71e8e2f7ef47e00c262234e27b9cb4a870c85c525317a83a3f0d4') depends_on('mpi') diff --git a/var/spack/repos/builtin/packages/fabtests/package.py b/var/spack/repos/builtin/packages/fabtests/package.py index a71a69ab37b..a9f76b5d072 100644 --- a/var/spack/repos/builtin/packages/fabtests/package.py +++ b/var/spack/repos/builtin/packages/fabtests/package.py @@ -7,15 +7,30 @@ class Fabtests(AutotoolsPackage): - """Fabtests provides a set of examples that uses libfabric. - - DEPRECATED. 
Fabtests has merged with the libfabric git repo.""" + """Fabtests provides a set of runtime analysis tools and examples that use + libfabric.""" homepage = "http://libfabric.org" - url = "https://github.com/ofiwg/fabtests/releases/download/v1.5.3/fabtests-1.5.3.tar.gz" + url = "https://github.com/ofiwg/libfabric/releases/download/v1.9.1/fabtests-1.9.1.tar.bz2" + version('1.9.1', sha256='6f8ced2c6b3514759a0e177c8b2a19125e4ef0714d4cc0fe0386b33bd6cd5585') + version('1.9.0', sha256='60cc21db7092334904cbdafd142b2403572976018a22218e7c453195caef366e') + version('1.8.1', sha256='e9005d8fe73ca3849c872649c29811846bd72a62f897ecab73a08c7a9514f37b') + # old releases, published in a separate repository + version('1.6.2', sha256='37405c6202f5b1aa81f8ea211237a2d87937f06254fa3ed44a9b69ac73b234e8') + version('1.6.1', sha256='d357466b868fdaf1560d89ffac4c4e93a679486f1b4221315644d8d3e21174bf') version('1.6.0', sha256='dc3eeccccb005205017f5af60681ede15782ce202a0103450a6d56a7ff515a67') version('1.5.3', sha256='3835b3bf86cd00d23df0ddba8bf317e4a195e8d5c3c2baa918b373d548f77f29') + version('1.5.0', sha256='1dddd446c3f1df346899f9a8636f1b4265de5b863103ae24876e9f0c1e40a69d') + version('1.4.2', sha256='3b78d0ca1b223ff21b7f5b3627e67e358e3c18b700f86b017e2233fee7e88c2e') - depends_on('libfabric@1.6.0', when='@1.6.0') - depends_on('libfabric@1.5.3', when='@1.5.3') + for v in ['1.4.2', '1.5.0', '1.5.3', '1.6.0', '1.6.1', '1.6.2', + '1.8.1', '1.9.0', '1.9.1']: + depends_on('libfabric@{0}'.format(v), when='@{0}'.format(v)) + + def url_for_version(self, version): + if version >= Version('1.8.1'): + url = "https://github.com/ofiwg/libfabric/releases/download/v{0}/fabtests-{0}.tar.bz2" + else: + url = "https://github.com/ofiwg/fabtests/releases/download/v{0}/fabtests-{0}.tar.gz" + return url.format(version.dotted) diff --git a/var/spack/repos/builtin/packages/filo/package.py b/var/spack/repos/builtin/packages/filo/package.py new file mode 100644 index 00000000000..aeba476c041 --- /dev/null +++ b/var/spack/repos/builtin/packages/filo/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Filo(CMakePackage): + """File flush and fetch, with MPI""" + + homepage = "https://github.com/ecp-veloc/filo" + git = "https://github.com/ecp-veloc/filo.git" + + tags = ['ecp'] + + version('master', branch='master') + + depends_on('mpi') + depends_on('axl') + depends_on('kvtree') + depends_on('spath') + + def cmake_args(self): + args = [] + args.append("-DMPI_C_COMPILER=%s" % self.spec['mpi'].mpicc) + if self.spec.satisfies('platform=cray'): + args.append("-DFILO_LINK_STATIC=ON") + args.append("-DWITH_AXL_PREFIX=%s" % self.spec['axl'].prefix) + args.append("-DWITH_KVTREE_PREFIX=%s" % self.spec['kvtree'].prefix) + args.append("-DWITH_SPATH_PREFIX=%s" % self.spec['spath'].prefix) + return args diff --git a/var/spack/repos/builtin/packages/fl/package.py b/var/spack/repos/builtin/packages/fl/package.py index f314ca6cdca..98809a21f10 100644 --- a/var/spack/repos/builtin/packages/fl/package.py +++ b/var/spack/repos/builtin/packages/fl/package.py @@ -17,7 +17,7 @@ class Fl(Package): def install(self, spec, prefix): if (self.spec.satisfies('platform=linux') and - self.spec.satisfies('target=x86_64')): + self.spec.target.family == 'x86_64'): with working_dir('fl_{0}'.format(spec.version)): install_tree('.', prefix) else: diff --git a/var/spack/repos/builtin/packages/flecsi/package.py b/var/spack/repos/builtin/packages/flecsi/package.py index 59c62e6352e..fccdd686b07 100644 --- a/var/spack/repos/builtin/packages/flecsi/package.py +++ b/var/spack/repos/builtin/packages/flecsi/package.py @@ -20,12 +20,14 @@ class Flecsi(CMakePackage): homepage = 'http://flecsi.org/' git = 'https://github.com/laristra/flecsi.git' - version('master', branch='master', submodules=False, preferred=True) + version('devel', branch='devel', submodules=False, preferred=False) + version('1', branch='1', submodules=False, preferred=True) + version('1.4', branch='1.4', submodules=False, preferred=False) variant('build_type', default='Release', values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'), description='The build type to build', multi=False) - variant('backend', default='mpi', values=('serial', 'mpi', 'legion', 'hpx'), + variant('backend', default='mpi', values=('serial', 'mpi', 'legion', 'hpx', 'charmpp'), description='Backend to use for distributed memory', multi=False) variant('debug_backend', default=False, description='Build Backend with Debug Mode') @@ -64,8 +66,8 @@ class Flecsi(CMakePackage): depends_on('legion@ctrl-rep+shared+mpi build_type=Debug', when='backend=legion +debug_backend ~hdf5') depends_on('legion@ctrl-rep+shared+mpi+hdf5 build_type=Release', when='backend=legion ~debug_backend +hdf5') depends_on('legion@ctrl-rep+shared+mpi build_type=Release', when='backend=legion ~debug_backend ~hdf5') - depends_on('hpx@1.3.0 cxxstd=14 build_type=Debug', when='backend=hpx +debug_backend') - depends_on('hpx@1.3.0 cxxstd=14 build_type=Release', when='backend=hpx ~debug_backend') + depends_on('hpx@1.3.0 cxxstd=14 malloc=system build_type=Debug', when='backend=hpx +debug_backend') + depends_on('hpx@1.3.0 cxxstd=14 malloc=system build_type=Release', when='backend=hpx ~debug_backend') depends_on('boost@1.70.0: cxxstd=14 +program_options') depends_on('metis@5.1.0:') depends_on('parmetis@4.0.3:') diff --git a/var/spack/repos/builtin/packages/flit/package.py b/var/spack/repos/builtin/packages/flit/package.py index 66f4c43452c..fbe1d2f3423 100644 --- a/var/spack/repos/builtin/packages/flit/package.py +++ 
b/var/spack/repos/builtin/packages/flit/package.py @@ -14,16 +14,18 @@ class Flit(MakefilePackage): homepage = "https://pruners.github.io/flit" url = "https://github.com/PRUNERS/FLiT" - url = "https://github.com/PRUNERS/FLiT/archive/v2.0-alpha.1.tar.gz" + url = "https://github.com/PRUNERS/FLiT/archive/v2.1.0.tar.gz" - version('2.0-alpha.1', sha256='8de2bd400acf0f513d69f3dbf588e8984dfb18b8ccaaf684391811a0582f694b') + version('2.1.0', sha256='b31ffa02fda1ab0f5555acdc6edc353d93d53ae8ef85e099f83bcf1c83e70885') + + maintainers = ['mikebentley15'] # Add dependencies - depends_on('python@3:', type='run') - depends_on('py-numpy', type='run') - depends_on('py-matplotlib tk=False', type='run') - depends_on('py-toml', type='run') + depends_on('python@3:', type='run') + depends_on('py-toml', type='run') + depends_on('py-pyelftools', type='run') + depends_on('gmake', type=('run', 'build')) + depends_on('sqlite@3:', type='run') - @property - def install_targets(self): - return ['install', 'PREFIX=%s' % self.prefix] + def edit(self, spec, prefix): + env['PREFIX'] = prefix diff --git a/var/spack/repos/builtin/packages/fmt/package.py b/var/spack/repos/builtin/packages/fmt/package.py index 8cba74d000a..be39f6cc838 100644 --- a/var/spack/repos/builtin/packages/fmt/package.py +++ b/var/spack/repos/builtin/packages/fmt/package.py @@ -14,6 +14,7 @@ class Fmt(CMakePackage): homepage = "http://fmtlib.net/latest/index.html" url = "https://github.com/fmtlib/fmt/releases/download/5.2.1/fmt-5.2.1.zip" + version('6.1.2', sha256='63650f3c39a96371f5810c4e41d6f9b0bb10305064e6faf201cbafe297ea30e8') version('5.3.0', sha256='4c0741e10183f75d7d6f730b8708a99b329b2f942dad5a9da3385ab92bb4a15c') version('5.2.1', sha256='43894ab8fe561fc9e523a8024efc23018431fa86b95d45b06dbe6ddb29ffb6cd') version('5.2.0', sha256='c016db7f825bce487a7929e1edb747b9902a2935057af6512cad3df3a080a027') diff --git a/var/spack/repos/builtin/packages/freeipmi/package.py b/var/spack/repos/builtin/packages/freeipmi/package.py new file mode 100644 index 00000000000..bcb3e502cd2 --- /dev/null +++ b/var/spack/repos/builtin/packages/freeipmi/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import llnl.util.tty as tty + + +class Freeipmi(AutotoolsPackage): + """FreeIPMI provides in-band and out-of-band IPMI software based on the IPMI + v1.5/2.0 specification. The IPMI specification defines a set of interfaces + for platform management and is implemented by a number vendors for system + management. The features of IPMI that most users will be interested in are + sensor monitoring, system event monitoring, power control, and + serial-over-LAN (SOL). The FreeIPMI tools and libraries listed below should + provide users with the ability to access and utilize these and many other + features. A number of useful features for large HPC or cluster environments + have also been implemented into FreeIPMI. 
See the README or FAQ for more + info.""" + + homepage = "https://www.gnu.org/software/freeipmi/" + url = "https://ftp.gnu.org/gnu/freeipmi/freeipmi-1.6.4.tar.gz" + + version('1.6.4', + sha256='65dfbb95a30438ba247f01a58498862a37d2e71c8c950bcfcee459d079241a3c') + + depends_on('libgcrypt') + + parallel = False + + def configure_args(self): + # FIXME: If root checking of root installation is added fix this: + # Discussed in issue #4432 + tty.warn("Requires 'root' for bmc-watchdog.service installation to" + " /lib/systemd/system/ !") + + args = ['--prefix={0}'.format(prefix)] + + return args diff --git a/var/spack/repos/builtin/packages/g4abla/package.py b/var/spack/repos/builtin/packages/g4abla/package.py index b14f88d4d91..bed114d825b 100644 --- a/var/spack/repos/builtin/packages/g4abla/package.py +++ b/var/spack/repos/builtin/packages/g4abla/package.py @@ -11,12 +11,12 @@ class G4abla(Package): """Geant4 data for nuclear shell effects in INCL/ABLA hadronic mode""" homepage = "http://geant4.web.cern.ch" url = "http://geant4-data.web.cern.ch/geant4-data/datasets/G4ABLA.3.0.tar.gz" + maintainers = ['drbenmorgan'] - version( - '3.0', sha256='99fd4dcc9b4949778f14ed8364088e45fa4ff3148b3ea36f9f3103241d277014') - version( - '3.1', sha256='7698b052b58bf1b9886beacdbd6af607adc1e099fc730ab6b21cf7f090c027ed') + # Only versions relevant to Geant4 releases built by spack are added + version('3.1', sha256='7698b052b58bf1b9886beacdbd6af607adc1e099fc730ab6b21cf7f090c027ed') + version('3.0', sha256='99fd4dcc9b4949778f14ed8364088e45fa4ff3148b3ea36f9f3103241d277014') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) @@ -24,6 +24,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4ABLA{0}' + .format(self.version)) + env.set('G4ABLADATA', install_path) + def url_for_version(self, version): """Handle version string.""" return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4ABLA.%s.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/g4emlow/package.py b/var/spack/repos/builtin/packages/g4emlow/package.py index df6af19e262..713b9da6311 100644 --- a/var/spack/repos/builtin/packages/g4emlow/package.py +++ b/var/spack/repos/builtin/packages/g4emlow/package.py @@ -11,12 +11,14 @@ class G4emlow(Package): """Geant4 data files for low energy electromagnetic processes.""" homepage = "http://geant4.web.cern.ch" url = "http://geant4-data.web.cern.ch/geant4-data/datasets/G4EMLOW.6.50.tar.gz" + maintainers = ['drbenmorgan'] - version( - '6.50', sha256='c97be73fece5fb4f73c43e11c146b43f651c6991edd0edf8619c9452f8ab1236') - version( - '7.3', sha256='583aa7f34f67b09db7d566f904c54b21e95a9ac05b60e2bfb794efb569dba14e') + # Only versions relevant to Geant4 releases built by spack are added + version('7.9', sha256='4abf9aa6cda91e4612676ce4d2d8a73b91184533aa66f9aad19a53a8c4dc3aff') + version('7.7', sha256='16dec6adda6477a97424d749688d73e9bd7d0b84d0137a67cf341f1960984663') + version('7.3', sha256='583aa7f34f67b09db7d566f904c54b21e95a9ac05b60e2bfb794efb569dba14e') + version('6.50', sha256='c97be73fece5fb4f73c43e11c146b43f651c6991edd0edf8619c9452f8ab1236') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) @@ -24,6 +26,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + 
install_path = join_path(self.prefix.share, 'data', 'G4EMLOW{0}' + .format(self.version)) + env.set('G4LEDATA', install_path) + def url_for_version(self, version): """Handle version string.""" - return ("http://geant4.web.cern.ch/support/source/G4EMLOW.%s.tar.gz" % version) + return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4EMLOW.%s.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/g4ensdfstate/package.py b/var/spack/repos/builtin/packages/g4ensdfstate/package.py index 06b50b37545..36bc897f4b1 100644 --- a/var/spack/repos/builtin/packages/g4ensdfstate/package.py +++ b/var/spack/repos/builtin/packages/g4ensdfstate/package.py @@ -14,8 +14,9 @@ class G4ensdfstate(Package): maintainers = ['drbenmorgan'] - version('2.1', sha256='933e7f99b1c70f24694d12d517dfca36d82f4e95b084c15d86756ace2a2790d9') + # Only versions relevant to Geant4 releases built by spack are added version('2.2', sha256='dd7e27ef62070734a4a709601f5b3bada6641b111eb7069344e4f99a01d6e0a6') + version('2.1', sha256='933e7f99b1c70f24694d12d517dfca36d82f4e95b084c15d86756ace2a2790d9') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) @@ -24,7 +25,9 @@ def install(self, spec, prefix): install_tree(self.stage.source_path, install_path) def setup_dependent_run_environment(self, env, dependent_spec): - env.set('G4ENSDFSTATEDATA', self.prefix.share.data) + install_path = join_path(self.prefix.share, 'data', 'G4ENSDFSTATE{0}' + .format(self.version)) + env.set('G4ENSDFSTATEDATA', install_path) def url_for_version(self, version): """Handle version string.""" diff --git a/var/spack/repos/builtin/packages/g4incl/package.py b/var/spack/repos/builtin/packages/g4incl/package.py new file mode 100644 index 00000000000..e21010f411a --- /dev/null +++ b/var/spack/repos/builtin/packages/g4incl/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class G4incl(Package): + """Geant4 data for evaluated particle cross-sections on natural + composition of elements""" + homepage = "http://geant4.web.cern.ch" + url = "http://geant4-data.web.cern.ch/geant4-data/datasets/G4INCL.1.0.tar.gz" + + maintainers = ['drbenmorgan'] + + # Only versions relevant to Geant4 releases built by spack are added + version('1.0', sha256='716161821ae9f3d0565fbf3c2cf34f4e02e3e519eb419a82236eef22c2c4367d') + + def install(self, spec, prefix): + mkdirp(join_path(prefix.share, 'data')) + install_path = join_path(prefix.share, 'data', "G4INCL{0}" + .format(self.version)) + install_tree(self.stage.source_path, install_path) + + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4INCL{0}' + .format(self.version)) + env.set('G4INCLDATA', install_path) + + def url_for_version(self, version): + """Handle version string.""" + return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4INCL.%s.tar.gz" % version) \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/g4ndl/package.py b/var/spack/repos/builtin/packages/g4ndl/package.py index 7ab6c9f26dc..3eaad0f101f 100644 --- a/var/spack/repos/builtin/packages/g4ndl/package.py +++ b/var/spack/repos/builtin/packages/g4ndl/package.py @@ -14,6 +14,7 @@ class G4ndl(Package): maintainers = ['drbenmorgan'] + version('4.6', sha256='9d287cf2ae0fb887a2adce801ee74fb9be21b0d166dab49bcbee9408a5145408') version('4.5', sha256='cba928a520a788f2bc8229c7ef57f83d0934bb0c6a18c31ef05ef4865edcdf8e') def install(self, spec, prefix): @@ -22,6 +23,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4NDL{0}' + .format(self.version)) + env.set('G4NEUTRONHPDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4NDL.%s.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/g4neutronxs/package.py b/var/spack/repos/builtin/packages/g4neutronxs/package.py index df3de0696cf..41acc423a6f 100644 --- a/var/spack/repos/builtin/packages/g4neutronxs/package.py +++ b/var/spack/repos/builtin/packages/g4neutronxs/package.py @@ -15,6 +15,8 @@ class G4neutronxs(Package): maintainers = ['drbenmorgan'] + # Only versions relevant to Geant4 releases built by spack are added + # Dataset not used after Geant4 10.4.x version('1.4', sha256='57b38868d7eb060ddd65b26283402d4f161db76ed2169437c266105cca73a8fd') def install(self, spec, prefix): @@ -23,6 +25,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4NEUTRONXS{0}' + .format(self.version)) + env.set('G4NEUTRONXSDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4NEUTRONXS.%s.tar.gz" % version diff --git a/var/spack/repos/builtin/packages/g4particlexs/package.py b/var/spack/repos/builtin/packages/g4particlexs/package.py new file mode 100644 index 00000000000..e573025b64e --- /dev/null +++ b/var/spack/repos/builtin/packages/g4particlexs/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence 
Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class G4particlexs(Package): + """Geant4 data for evaluated particle cross-sections on + natural composition of elements""" + homepage = "http://geant4.web.cern.ch" + url = "http://geant4-data.web.cern.ch/geant4-data/datasets/G4PARTICLEXS.2.1.tar.gz" + + maintainers = ['drbenmorgan'] + + # Only versions relevant to Geant4 releases built by spack are added + version('2.1', sha256='094d103372bbf8780d63a11632397e72d1191dc5027f9adabaf6a43025520b41') + version('1.1', sha256='100a11c9ed961152acfadcc9b583a9f649dda4e48ab314fcd4f333412ade9d62') + + def install(self, spec, prefix): + mkdirp(join_path(prefix.share, 'data')) + install_path = join_path(prefix.share, 'data', "G4PARTICLEXS{0}" + .format(self.version)) + install_tree(self.stage.source_path, install_path) + + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4PARTICLEXS{0}' + .format(self.version)) + env.set('G4PARTICLEXSDATA', install_path) + + def url_for_version(self, version): + """Handle version string.""" + return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4PARTICLEXS.%s.tar.gz" % version) \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/g4photonevaporation/package.py b/var/spack/repos/builtin/packages/g4photonevaporation/package.py index 4e5c62b9992..93d7f82de42 100644 --- a/var/spack/repos/builtin/packages/g4photonevaporation/package.py +++ b/var/spack/repos/builtin/packages/g4photonevaporation/package.py @@ -14,8 +14,11 @@ class G4photonevaporation(Package): maintainers = ['drbenmorgan'] - version('4.3.2', sha256='d4641a6fe1c645ab2a7ecee09c34e5ea584fb10d63d2838248bfc487d34207c7') + # Only versions relevant to Geant4 releases built by spack are added + version('5.5', sha256='5995dda126c18bd7f68861efde87b4af438c329ecbe849572031ceed8f5e76d7') + version('5.3', sha256='d47ababc8cbe548065ef644e9bd88266869e75e2f9e577ebc36bc55bf7a92ec8') version('5.2', sha256='83607f8d36827b2a7fca19c9c336caffbebf61a359d0ef7cee44a8bcf3fc2d1f') + version('4.3.2', sha256='d4641a6fe1c645ab2a7ecee09c34e5ea584fb10d63d2838248bfc487d34207c7') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) @@ -24,6 +27,12 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', + 'PhotonEvaporation{0}' + .format(self.version)) + env.set('G4LEVELGAMMADATA', install_path) + def url_for_version(self, version): """Handle version string.""" return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4PhotonEvaporation.%s.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/g4pii/package.py b/var/spack/repos/builtin/packages/g4pii/package.py index 2e238a64180..ea91b5b9c92 100644 --- a/var/spack/repos/builtin/packages/g4pii/package.py +++ b/var/spack/repos/builtin/packages/g4pii/package.py @@ -14,6 +14,7 @@ class G4pii(Package): maintainers = ['drbenmorgan'] + # Only versions relevant to Geant4 releases built by spack are added version('1.3', sha256='6225ad902675f4381c98c6ba25fc5a06ce87549aa979634d3d03491d6616e926') def install(self, spec, prefix): @@ -22,6 +23,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def 
setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4PII{0}' + .format(self.version)) + env.set('G4PIIDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4PII.1.3.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/g4radioactivedecay/package.py b/var/spack/repos/builtin/packages/g4radioactivedecay/package.py index c281ff63cdc..6f34e89cea4 100644 --- a/var/spack/repos/builtin/packages/g4radioactivedecay/package.py +++ b/var/spack/repos/builtin/packages/g4radioactivedecay/package.py @@ -14,15 +14,25 @@ class G4radioactivedecay(Package): maintainers = ['drbenmorgan'] - version('5.1.1', sha256='f7a9a0cc998f0d946359f2cb18d30dff1eabb7f3c578891111fc3641833870ae') + # Only versions relevant to Geant4 releases built by spack are added + version('5.4', sha256='240779da7d13f5bf0db250f472298c3804513e8aca6cae301db97f5ccdcc4a61') + version('5.3', sha256='5c8992ac57ae56e66b064d3f5cdfe7c2fee76567520ad34a625bfb187119f8c1') version('5.2', sha256='99c038d89d70281316be15c3c98a66c5d0ca01ef575127b6a094063003e2af5d') + version('5.1.1', sha256='f7a9a0cc998f0d946359f2cb18d30dff1eabb7f3c578891111fc3641833870ae') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) - install_path = join_path(prefix.share, 'data', 'RadioactiveDecay{0}' + install_path = join_path(prefix.share, 'data', + 'RadioactiveDecay{0}' .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', + 'RadioactiveDecay{0}' + .format(self.version)) + env.set('G4RADIOACTIVEDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4RadioactiveDecay.%s.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/g4realsurface/package.py b/var/spack/repos/builtin/packages/g4realsurface/package.py index b741b61ef88..dee32da8b93 100644 --- a/var/spack/repos/builtin/packages/g4realsurface/package.py +++ b/var/spack/repos/builtin/packages/g4realsurface/package.py @@ -14,9 +14,10 @@ class G4realsurface(Package): maintainers = ['drbenmorgan'] - version('1.0', sha256='3e2d2506600d2780ed903f1f2681962e208039329347c58ba1916740679020b1') - version('2.1', sha256='2a287adbda1c0292571edeae2082a65b7f7bd6cf2bf088432d1d6f889426dcf3') + # Only versions relevant to Geant4 releases built by spack are added version('2.1.1', sha256='90481ff97a7c3fa792b7a2a21c9ed80a40e6be386e581a39950c844b2dd06f50') + version('2.1', sha256='2a287adbda1c0292571edeae2082a65b7f7bd6cf2bf088432d1d6f889426dcf3') + version('1.0', sha256='3e2d2506600d2780ed903f1f2681962e208039329347c58ba1916740679020b1') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) @@ -24,6 +25,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'RealSurface{0}' + .format(self.version)) + env.set('G4REALSURFACEDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/{0}RealSurface.{1}.tar.gz".format( diff --git a/var/spack/repos/builtin/packages/g4saiddata/package.py b/var/spack/repos/builtin/packages/g4saiddata/package.py 
index 7b03177f735..0091d09744d 100644 --- a/var/spack/repos/builtin/packages/g4saiddata/package.py +++ b/var/spack/repos/builtin/packages/g4saiddata/package.py @@ -14,6 +14,8 @@ class G4saiddata(Package): maintainers = ['drbenmorgan'] + # Only versions relevant to Geant4 releases built by spack are added + version('2.0', sha256='1d26a8e79baa71e44d5759b9f55a67e8b7ede31751316a9e9037d80090c72e91') version('1.1', sha256='a38cd9a83db62311922850fe609ecd250d36adf264a88e88c82ba82b7da0ed7f') def install(self, spec, prefix): @@ -22,6 +24,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4SAIDDATA{0}' + .format(self.version)) + env.set('G4SAIDXSDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return "http://geant4-data.web.cern.ch/geant4-data/datasets/G4SAIDDATA.%s.tar.gz" % version diff --git a/var/spack/repos/builtin/packages/g4tendl/package.py b/var/spack/repos/builtin/packages/g4tendl/package.py index 2a3ecbc0749..aac083f539b 100644 --- a/var/spack/repos/builtin/packages/g4tendl/package.py +++ b/var/spack/repos/builtin/packages/g4tendl/package.py @@ -14,8 +14,9 @@ class G4tendl(Package): maintainers = ['drbenmorgan'] - version('1.3', sha256='52ad77515033a5d6f995c699809b464725a0e62099b5e55bf07c8bdd02cd3bce') + # Only versions relevant to Geant4 releases built by spack are added version('1.3.2', sha256='3b2987c6e3bee74197e3bd39e25e1cc756bb866c26d21a70f647959fc7afb849') + version('1.3', sha256='52ad77515033a5d6f995c699809b464725a0e62099b5e55bf07c8bdd02cd3bce') def install(self, spec, prefix): mkdirp(join_path(prefix.share, 'data')) @@ -23,6 +24,11 @@ def install(self, spec, prefix): .format(self.version)) install_tree(self.stage.source_path, install_path) + def setup_dependent_run_environment(self, env, dependent_spec): + install_path = join_path(self.prefix.share, 'data', 'G4TENDL{0}' + .format(self.version)) + env.set('G4PARTICLEHPDATA', install_path) + def url_for_version(self, version): """Handle version string.""" return ("http://geant4-data.web.cern.ch/geant4-data/datasets/G4TENDL.%s.tar.gz" % version) diff --git a/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-1.patch b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-1.patch new file mode 100644 index 00000000000..96037707d3c --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-1.patch @@ -0,0 +1,37 @@ +From ce9568e9e9cf6094be30e748821421e703754ffc Mon Sep 17 00:00:00 2001 +From: Jakub Jelinek +Date: Fri, 8 Nov 2019 19:53:18 +0100 +Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm + bootstrap due to libsanitizer) + + Backported from mainline + 2019-10-22 Tamar Christina + + PR sanitizer/92154 + * sanitizer_common/sanitizer_platform_limits_posix.cc: + Cherry-pick compiler-rt revision r375220. 
+ +From-SVN: r277981 +--- + libsanitizer/ChangeLog | 9 +++++++++ + .../sanitizer_common/sanitizer_platform_limits_posix.cc | 6 +++++- + 2 files changed, 14 insertions(+), 1 deletion(-) + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +index 6cd4a5bac8b0..06a605ff4670 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +@@ -1156,8 +1156,12 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); + CHECK_SIZE_AND_OFFSET(ipc_perm, gid); + CHECK_SIZE_AND_OFFSET(ipc_perm, cuid); + CHECK_SIZE_AND_OFFSET(ipc_perm, cgid); +-#if !defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21) ++#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ ++ !defined(__arm__) + /* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ ++/* On Arm glibc 2.31 and later provide a different mode field, this field is ++ never used by libsanitizer so we can simply ignore this assert for all glibc ++ versions. */ + CHECK_SIZE_AND_OFFSET(ipc_perm, mode); + #endif + diff --git a/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2.patch b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2.patch new file mode 100644 index 00000000000..75234436e8e --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2.patch @@ -0,0 +1,73 @@ +From 75003cdd23c310ec385344e8040d490e8dd6d2be Mon Sep 17 00:00:00 2001 +From: Jakub Jelinek +Date: Fri, 20 Dec 2019 17:58:35 +0100 +Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm + bootstrap due to libsanitizer) + + Backported from mainline + 2019-11-26 Jakub Jelinek + + PR sanitizer/92154 + * sanitizer_common/sanitizer_platform_limits_posix.h: Cherry-pick + llvm-project revision 947f9692440836dcb8d88b74b69dd379d85974ce. + * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. + +From-SVN: r279653 +--- + libsanitizer/ChangeLog | 10 ++++++++++ + .../sanitizer_platform_limits_posix.cc | 9 +++------ + .../sanitizer_platform_limits_posix.h | 15 +-------------- + 3 files changed, 14 insertions(+), 20 deletions(-) + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +index 06a605ff4670..d823a12190c0 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +@@ -1156,12 +1156,9 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); + CHECK_SIZE_AND_OFFSET(ipc_perm, gid); + CHECK_SIZE_AND_OFFSET(ipc_perm, cuid); + CHECK_SIZE_AND_OFFSET(ipc_perm, cgid); +-#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ +- !defined(__arm__) +-/* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ +-/* On Arm glibc 2.31 and later provide a different mode field, this field is +- never used by libsanitizer so we can simply ignore this assert for all glibc +- versions. */ ++#if !SANITIZER_LINUX || __GLIBC_PREREQ (2, 31) ++/* glibc 2.30 and earlier provided 16-bit mode field instead of 32-bit ++ on many architectures. 
*/ + CHECK_SIZE_AND_OFFSET(ipc_perm, mode); + #endif + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h +index 73af92af1e8f..6a673a7c9959 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h +@@ -211,26 +211,13 @@ namespace __sanitizer { + u64 __unused1; + u64 __unused2; + #elif defined(__sparc__) +-#if defined(__arch64__) + unsigned mode; +- unsigned short __pad1; +-#else +- unsigned short __pad1; +- unsigned short mode; + unsigned short __pad2; +-#endif + unsigned short __seq; + unsigned long long __unused1; + unsigned long long __unused2; +-#elif defined(__mips__) || defined(__aarch64__) || defined(__s390x__) +- unsigned int mode; +- unsigned short __seq; +- unsigned short __pad1; +- unsigned long __unused1; +- unsigned long __unused2; + #else +- unsigned short mode; +- unsigned short __pad1; ++ unsigned int mode; + unsigned short __seq; + unsigned short __pad2; + #if defined(__x86_64__) && !defined(_LP64) diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index 4b47217c9a6..cf16f1bc8f7 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -25,6 +25,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): version('develop', svn=svn + 'trunk') + version('9.3.0', sha256='71e197867611f6054aa1119b13a0c0abac12834765fe2d81f35ac57f84f742d1') version('9.2.0', sha256='ea6ef08f121239da5695f76c9b33637a118dcf63e24164422231917fa61fb206') version('9.1.0', sha256='79a66834e96a6050d8fe78db2c3b32fb285b230b855d0a66288235bc04b327a0') @@ -102,18 +103,22 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): depends_on('isl@0.15:0.18', when='@6:8.9') depends_on('isl@0.15:0.20', when='@9:') depends_on('zlib', when='@6:') - depends_on('libiconv', when='platform=darwin') + depends_on('iconv', when='platform=darwin') depends_on('gnat', when='languages=ada') depends_on('binutils~libiberty', when='+binutils') depends_on('zip', type='build', when='languages=java') depends_on('cuda', when='+nvptx') + # The server is sometimes a bit slow to respond + timeout = {'timeout': 60} + resource( name='newlib', url='ftp://sourceware.org/pub/newlib/newlib-3.0.0.20180831.tar.gz', sha256='3ad3664f227357df15ff34e954bfd9f501009a647667cd307bf0658aefd6eb5b', destination='newlibsource', - when='+nvptx' + when='+nvptx', + fetch_options=timeout ) # nvptx-tools does not seem to work as a dependency, @@ -218,7 +223,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): # Fix system headers for Catalina SDK # (otherwise __OSX_AVAILABLE_STARTING ends up undefined) patch('https://raw.githubusercontent.com/Homebrew/formula-patches/b8b8e65e/gcc/9.2.0-catalina.patch', - sha256='0b8d14a7f3c6a2f0d2498526e86e088926671b5da50a554ffa6b7f73ac4f132b', when='@9.2.0:') + sha256='0b8d14a7f3c6a2f0d2498526e86e088926671b5da50a554ffa6b7f73ac4f132b', when='@9.2.0') # Use -headerpad_max_install_names in the build, # otherwise updated load commands won't fit in the Mach-O header. # This is needed because `gcc` avoids the superenv shim. 
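A pattern worth calling out: this hunk (and the gdal, git, glib, gnupg, gnuplot, grass and gtksourceview hunks further down) swaps a hard `libiconv` dependency for the `iconv` virtual and then resolves the provider through the virtual name. A minimal sketch of the pattern, assuming a hypothetical `Mytool` package that is not part of this diff:

from spack import *


class Mytool(AutotoolsPackage):
    """Hypothetical package showing the libiconv -> iconv virtual swap."""

    # Depend on the virtual; the concretizer picks the concrete provider
    # (libiconv in the default provider configuration).
    depends_on('iconv')

    def configure_args(self):
        # Look the chosen provider up through the virtual name instead of
        # hard-coding spec['libiconv'].
        return ['--with-libiconv-prefix={0}'.format(self.spec['iconv'].prefix)]

Each real hunk below follows the same shape: the `depends_on` switches to the virtual, and any `spec['libiconv']` lookup becomes `spec['iconv']`.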
@@ -230,6 +235,10 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): patch('piclibs.patch', when='+piclibs') patch('gcc-backport.patch', when='@4.7:4.9.2,5:5.3') + # Backport libsanitizer patch for glibc >= 2.31 and 8.1.0 <= gcc <= 9.2.0 + # https://bugs.gentoo.org/708346 + patch('glibc-2.31-libsanitizer-1.patch', when='@8.1.0:8.3.99,9.0.0:9.2.0') + patch('glibc-2.31-libsanitizer-2.patch', when='@8.1.0:8.3.99,9.0.0:9.2.0') # Older versions do not compile with newer versions of glibc # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81712 patch('ucontext_t.patch', when='@4.9,5.1:5.4,6.1:6.4,7.1') @@ -347,7 +356,7 @@ def configure_args(self): options.extend([ '--with-native-system-header-dir=/usr/include', '--with-sysroot={0}'.format(macos_sdk_path()), - '--with-libiconv-prefix={0}'.format(spec['libiconv'].prefix) + '--with-libiconv-prefix={0}'.format(spec['iconv'].prefix) ]) return options diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 21828112335..b6b78f80200 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -107,7 +107,7 @@ class Gdal(AutotoolsPackage): # Optional dependencies depends_on('libtool', type='build', when='+libtool') depends_on('zlib', when='+libz') - depends_on('libiconv', when='+libiconv') + depends_on('iconv', when='+libiconv') depends_on('xz', when='+liblzma') depends_on('zstd', when='+zstd @2.3:') depends_on('postgresql', when='+pg') @@ -250,7 +250,7 @@ def configure_args(self): if '+libiconv' in spec: args.append('--with-libiconv-prefix={0}'.format( - spec['libiconv'].prefix)) + spec['iconv'].prefix)) else: args.append('--with-libiconv-prefix=no') diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py index 825291feb09..e6aef7d6719 100644 --- a/var/spack/repos/builtin/packages/gdb/package.py +++ b/var/spack/repos/builtin/packages/gdb/package.py @@ -33,6 +33,11 @@ class Gdb(AutotoolsPackage, GNUMirrorPackage): variant('python', default=True, description='Compile with Python support') variant('xz', default=True, description='Compile with lzma support') + variant('source-highlight', default=False, description='Compile with source-highlight support') + variant('lto', default=False, description='Enable lto') + variant('quad', default=False, description='Enable quad') + variant('gold', default=False, description='Enable gold linker') + variant('ld', default=False, description='Enable ld') # Required dependency depends_on('texinfo', type='build') @@ -40,6 +45,7 @@ class Gdb(AutotoolsPackage, GNUMirrorPackage): # Optional dependencies depends_on('python', when='+python') depends_on('xz', when='+xz') + depends_on('source-highlight', when='+source-highlight') build_directory = 'spack-build' @@ -49,4 +55,17 @@ def configure_args(self): args.append('--with-python') args.append('LDFLAGS={0}'.format( self.spec['python'].libs.ld_flags)) + + if '+lto' in self.spec: + args.append('--enable-lto') + + if '+quad' in self.spec: + args.append('--with-quad') + + if '+gold' in self.spec: + args.append('--enable-gold') + + if '+ld' in self.spec: + args.append('--enable-ld') + return args diff --git a/var/spack/repos/builtin/packages/gdrcopy/package.py b/var/spack/repos/builtin/packages/gdrcopy/package.py new file mode 100644 index 00000000000..e85c8fb0c00 --- /dev/null +++ b/var/spack/repos/builtin/packages/gdrcopy/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, 
LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Gdrcopy(MakefilePackage): + """A fast GPU memory copy library based on NVIDIA GPUDirect + RDMA technology.""" + + homepage = "https://github.com/NVIDIA/gdrcopy" + url = "https://github.com/NVIDIA/gdrcopy/archive/v2.0-beta.3.tar.gz" + git = "https://github.com/NVIDIA/gdrcopy" + + version('master', branch='master') + version('2.0', sha256='98320e6e980a7134ebc4eedd6cf23647104f2b3c557f2eaf0d31a02609f5f2b0') + version('1.3', sha256='f11cdfe389b685f6636b80b4a3312dc014a385ad7220179c1318c60e2e28af3a') + + def build(self, spec, prefix): + make('lib') + + def install(self, spec, prefix): + mkdir(prefix.include) + mkdir(prefix.lib64) + make('lib_install', 'PREFIX={0}'.format(self.prefix)) diff --git a/var/spack/repos/builtin/packages/geant4-data/package.py b/var/spack/repos/builtin/packages/geant4-data/package.py index f5c4e72fac0..4157fb76c6e 100644 --- a/var/spack/repos/builtin/packages/geant4-data/package.py +++ b/var/spack/repos/builtin/packages/geant4-data/package.py @@ -8,90 +8,89 @@ import glob -class Geant4Data(Package): - """An umbrella package to hold Geant4 data packages""" +class Geant4Data(BundlePackage): + """A bundle package to hold Geant4 data packages""" homepage = "http://geant4.cern.ch" - url = "http://geant4-data.web.cern.ch/geant4-data/ReleaseNotes/ReleaseNotes4.10.3.html" maintainers = ['drbenmorgan'] - version('10.03.p03', sha256='3e0d4d4e6854c8667d930fd5beaec09b7e6ec41f4847935e5d6a2720d0094b30', expand=False) - version('10.04', sha256='f67fb899b99348a1a7e471a05f249f972e7e303c78238fc5f693b99968642255', expand=False) + version('10.6.0') + version('10.5.1') + version('10.4.3') + version('10.4.0') + version('10.3.3') - # geant4@10.03.p03 - depends_on("g4abla@3.0", when='@10.03.p03 ') - depends_on("g4emlow@6.50", when='@10.03.p03 ') - depends_on("g4ndl@4.5", when='@10.03.p03 ') - depends_on("g4neutronxs@1.4", when='@10.03.p03 ') - depends_on("g4saiddata@1.1", when='@10.03.p03 ') - depends_on("g4ensdfstate@2.1", when='@10.03.p03 ') - depends_on("g4photonevaporation@4.3.2", when='@10.03.p03 ') - depends_on("g4pii@1.3", when='@10.03.p03 ') - depends_on("g4radioactivedecay@5.1.1", when='@10.03.p03 ') - depends_on("g4realsurface@1.0", when='@10.03.p03 ') - depends_on("g4tendl@1.3", when='@10.03.p03 ') - # geant4@10.04 - depends_on("g4abla@3.1", when='@10.04 ') - depends_on("g4emlow@7.3", when='@10.04 ') - depends_on("g4ndl@4.5", when='@10.04 ') - depends_on("g4neutronxs@1.4", when='@10.04 ') - depends_on("g4saiddata@1.1", when='@10.04 ') - depends_on("g4ensdfstate@2.2", when='@10.04 ') - depends_on("g4photonevaporation@5.2", when='@10.04 ') - depends_on("g4pii@1.3", when='@10.04 ') - depends_on("g4radioactivedecay@5.2", when='@10.04 ') - depends_on("g4realsurface@2.1", when='@10.04 ') - depends_on("g4tendl@1.3.2", when='@10.04 ') + # Add install phase so we can create the data "view" + phases = ['install'] + + # For clarity, declare deps on a Major-Minor version basis as + # they generally don't change on the patch level + # Can move to declaring on a dataset basis if needed + # geant4@10.6.X + depends_on("g4ndl@4.6", when='@10.6.0') + depends_on("g4emlow@7.9", when='@10.6.0') + depends_on("g4photonevaporation@5.5", when='@10.6.0') + depends_on("g4radioactivedecay@5.4", when='@10.6.0') + depends_on("g4particlexs@2.1", when='@10.6.0') + depends_on("g4pii@1.3", when='@10.6.0') + depends_on("g4realsurface@2.1.1", 
when='@10.6.0') + depends_on("g4saiddata@2.0", when='@10.6.0') + depends_on("g4abla@3.1", when='@10.6.0') + depends_on("g4incl@1.0", when='@10.6.0') + depends_on("g4ensdfstate@2.2", when='@10.6.0') + + # geant4@10.5.X + depends_on("g4ndl@4.5", when='@10.5.0:10.5.9999') + depends_on("g4emlow@7.7", when='@10.5.0:10.5.9999') + depends_on("g4photonevaporation@5.3", when='@10.5.0:10.5.9999') + depends_on("g4radioactivedecay@5.3", when='@10.5.0:10.5.9999') + depends_on("g4particlexs@1.1", when='@10.5.0:10.5.9999') + depends_on("g4pii@1.3", when='@10.5.0:10.5.9999') + depends_on("g4realsurface@2.1.1", when='@10.5.0:10.5.9999') + depends_on("g4saiddata@2.0", when='@10.5.0:10.5.9999') + depends_on("g4abla@3.1", when='@10.5.0:10.5.9999') + depends_on("g4incl@1.0", when='@10.5.0:10.5.9999') + depends_on("g4ensdfstate@2.2", when='@10.5.0:10.5.9999') + + # geant4@10.4.X + depends_on("g4ndl@4.5", when='@10.4.0:10.4.9999') + depends_on("g4emlow@7.3", when='@10.4.0:10.4.9999') + depends_on("g4photonevaporation@5.2", when='@10.4.0:10.4.9999') + depends_on("g4radioactivedecay@5.2", when='@10.4.0:10.4.9999') + depends_on("g4neutronxs@1.4", when='@10.4.0:10.4.9999') + depends_on("g4pii@1.3", when='@10.4.0:10.4.9999') + + depends_on("g4realsurface@2.1.1", when='@10.4.2:10.4.9999') + depends_on("g4realsurface@2.1", when='@10.4.0:10.4.1') + + depends_on("g4saiddata@1.1", when='@10.4.0:10.4.9999') + depends_on("g4abla@3.1", when='@10.4.0:10.4.9999') + depends_on("g4ensdfstate@2.2", when='@10.4.0:10.4.9999') + + # geant4@10.3.X + depends_on("g4ndl@4.5", when='@10.3.0:10.3.9999') + depends_on("g4emlow@6.50", when='@10.3.0:10.3.9999') + + depends_on("g4photonevaporation@4.3.2", when='@10.3.1:10.3.9999') + depends_on("g4photonevaporation@4.3", when='@10.3.0') + + depends_on("g4radioactivedecay@5.1.1", when='@10.3.1:10.3.9999') + depends_on("g4radioactivedecay@5.1", when='@10.3.0') + + depends_on("g4neutronxs@1.4", when='@10.3.0:10.3.9999') + depends_on("g4pii@1.3", when='@10.3.0:10.3.9999') + depends_on("g4realsurface@1.0", when='@10.3.0:10.3.9999') + depends_on("g4saiddata@1.1", when='@10.3.0:10.3.9999') + depends_on("g4abla@3.0", when='@10.3.0:10.3.9999') + depends_on("g4ensdfstate@2.1", when='@10.3.0:10.3.9999') def install(self, spec, prefix): spec = self.spec - version = self.version - major = version[0] - minor = version[1] - if len(version) > 2: - patch = version[-1] - else: - patch = 0 - data = 'Geant4-%s.%s.%s/data' % (major, minor, patch) + data = '{0}-{1}'.format(self.name, self.version.dotted) datadir = join_path(spec.prefix.share, data) - with working_dir(datadir, create=True): - for d in glob.glob('%s/share/data/*' % - spec['g4abla'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4emlow'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4ndl'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4saiddata'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4neutronxs'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4ensdfstate'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4photonevaporation'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4pii'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4radioactivedecay'].prefix): - 
os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4realsurface'].prefix): - os.symlink(d, os.path.basename(d)) - for d in glob.glob('%s/share/data/*' % - spec['g4tendl'].prefix): - os.symlink(d, os.path.basename(d)) - def url_for_version(self, version): - """Handle version string.""" - url = 'http://geant4-data.web.cern.ch/geant4-data/ReleaseNotes/' - url = url + 'ReleaseNotes4.{0}.{1}.html'.format(version[0], version[1]) - return url + with working_dir(datadir, create=True): + for s in spec.dependencies(): + for d in glob.glob('{0}/data/*'.format(s.prefix.share)): + os.symlink(d, os.path.basename(d)) diff --git a/var/spack/repos/builtin/packages/geant4/geant4-10.4.3-cxx17-removed-features.patch b/var/spack/repos/builtin/packages/geant4/geant4-10.4.3-cxx17-removed-features.patch new file mode 100644 index 00000000000..a4938876794 --- /dev/null +++ b/var/spack/repos/builtin/packages/geant4/geant4-10.4.3-cxx17-removed-features.patch @@ -0,0 +1,18 @@ +diff --git a/cmake/Modules/G4BuildSettings.cmake b/cmake/Modules/G4BuildSettings.cmake +index f68cb0a44..6bf4b6948 100644 +--- a/cmake/Modules/G4BuildSettings.cmake ++++ b/cmake/Modules/G4BuildSettings.cmake +@@ -205,6 +205,13 @@ endif() + # Add Definition to flags for temporary back compatibility + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DG4USE_STD11") + ++# Spack patch to support use of C++ features deprecated/removed in C++17 ++# Only checked on AppleClang for now ++if(GEANT4_BUILD_CXXSTD GREATER 14) ++ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_LIBCPP_ENABLE_CXX17_REMOVED_FEATURES=1") ++endif() ++#---- ++ + # Hold any appropriate compile flag(s) in variable for later export to + # config files. Needed to support clients using late CMake 2.8 where compile features + # are not available. 
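The reworked `geant4-data` recipe above now assembles a single symlink view of all its dataset dependencies under `share/geant4-data-<version>`. As a rough sketch of how a dependent can point its build at that view (a hypothetical `MyG4App` consumer, assuming the layout created by the `install()` step above; the real `geant4` recipe later in this diff does the same in its `cmake_args`):

from spack import *


class MyG4App(CMakePackage):
    """Hypothetical consumer of the geant4-data bundle view."""

    depends_on('geant4-data')

    def cmake_args(self):
        bundle = self.spec['geant4-data']
        # The bundle lays its dataset symlinks out under
        # <prefix>/share/<name>-<dotted version>.
        datadir = join_path(bundle.prefix.share,
                            '{0}-{1}'.format(bundle.name,
                                             bundle.version.dotted))
        return ['-DGEANT4_INSTALL_DATADIR={0}'.format(datadir)]

Because the view consists only of symlinks, the individual g4* dataset installs can be shared by any Geant4 version that needs the same dataset release.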
diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 3c87ccdff61..4bf35792aac 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * -import os -import glob class Geant4(CMakePackage): @@ -15,85 +13,113 @@ class Geant4(CMakePackage): science.""" homepage = "http://geant4.cern.ch/" - url = "http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz" + url = "https://gitlab.cern.ch/geant4/geant4/-/archive/v10.6.0/geant4-v10.6.0.tar.gz" maintainers = ['drbenmorgan'] - version('10.05.p01', sha256='f4a292220500fad17e0167ce3153e96e3410ecbe96284e572dc707f63523bdff') - version('10.04', sha256='f6d883132f110eb036c69da2b21df51f13c585dc7b99d4211ddd32f4ccee1670') - version('10.03.p03', sha256='a164f49c038859ab675eec474d08c9d02be8c4be9c0c2d3aa8e69adf89e1e138') + version('10.6.0', sha256='eebe6a170546064ff81ab3b00f513ccd1d4122a026514982368d503ac55a4ee4') + version('10.5.1', sha256='2397eb859dc4de095ff66059d8bda9f060fdc42e10469dd7890946293eeb0e39') + version('10.4.3', sha256='67f3bb6405a2c77e573936c2b933f5a4a33915aa379626a2eb3012009b91e1da') + version('10.4.0', sha256='e919b9b0a88476e00c0b18ab65d40e6a714b55ee4778f66bac32a5396c22aa74') + version('10.3.3', sha256='bcd36a453da44de9368d1d61b0144031a58e4b43a6d2d875e19085f2700a89d8') - variant('qt', default=False, description='Enable Qt support') + _cxxstd_values = ('11', '14', '17') + variant('cxxstd', + default=_cxxstd_values[0], + values=_cxxstd_values, + multi=False, + description='Use the specified C++ standard when building.') + + variant('threads', default=True, description='Build with multithreading') variant('vecgeom', default=False, description='Enable vecgeom support') variant('opengl', default=False, description='Optional OpenGL support') variant('x11', default=False, description='Optional X11 support') variant('motif', default=False, description='Optional motif support') - variant('threads', default=True, description='Build with multithreading') - variant('data', default=True, description='Install geant4 data') - - variant('cxxstd', - default='11', - values=('11', '14', '17'), - multi=False, - description='Use the specified C++ standard when building.') + variant('qt', default=False, description='Enable Qt support') depends_on('cmake@3.5:', type='build') + depends_on('cmake@3.8:', type='build', when='@10.6.0:') - # C++11 support - depends_on("xerces-c cxxstd=11", when="cxxstd=11") - depends_on("clhep@2.3.3.0: cxxstd=11", when="@10.03.p03: cxxstd=11") - depends_on("vecgeom cxxstd=11", when="+vecgeom cxxstd=11") - - # C++14 support - depends_on("xerces-c cxxstd=14", when="cxxstd=14") - depends_on("clhep@2.3.3.0: cxxstd=14", when="@10.03.p03: cxxstd=14") - depends_on("vecgeom cxxstd=14", when="+vecgeom cxxstd=14") - - # C++17 support - depends_on("xerces-c cxxstd=17", when="cxxstd=17") - depends_on("clhep@2.3.3.0: cxxstd=17", when="@10.03.p03: cxxstd=17") - patch('cxx17.patch', when='@:10.03.p99 cxxstd=17') - patch('cxx17_geant4_10_0.patch', level=1, when='@10.04.00: cxxstd=17') - depends_on("vecgeom cxxstd=17", when="+vecgeom cxxstd=17") + depends_on('geant4-data@10.6.0', when='@10.6.0') + depends_on('geant4-data@10.5.1', when='@10.5.1') + depends_on('geant4-data@10.4.3', when='@10.4.3') + depends_on('geant4-data@10.4.0', when='@10.4.0') + depends_on('geant4-data@10.3.3', when='@10.3.3') depends_on("expat") depends_on("zlib") - 
depends_on("xerces-c") + + for std in _cxxstd_values: + # CLHEP version requirements to be reviewed + depends_on('clhep@2.3.3.0: cxxstd=' + std, + when='@10.3.3: cxxstd=' + std) + + # Spack only supports Xerces-c 3 and above, so no version req + depends_on('xerces-c cxxstd=' + std, when='cxxstd=' + std) + + # Vecgeom specific versions for each Geant4 version + depends_on('vecgeom@1.1.5 cxxstd=' + std, + when='@10.6.0:10.6.99 +vecgeom cxxstd=' + std) + depends_on('vecgeom@1.1.0 cxxstd=' + std, + when='@10.5.0:10.5.99 +vecgeom cxxstd=' + std) + depends_on('vecgeom@0.5.2 cxxstd=' + std, + when='@10.4.0:10.4.99 +vecgeom cxxstd=' + std) + depends_on('vecgeom@0.3rc cxxstd=' + std, + when='@10.3.0:10.3.99 +vecgeom cxxstd=' + std) + + # Visualization driver dependencies depends_on("gl", when='+opengl') depends_on("glx", when='+opengl+x11') depends_on("libx11", when='+x11') depends_on("libxmu", when='+x11') depends_on("motif", when='+motif') - depends_on("qt@4.8:", when="+qt") - - # if G4 data not installed with geant4 - # depend on G4 data packages - # this allows external data installations - # to avoid duplication - - depends_on('geant4-data@10.03.p03', when='@10.03.p03 ~data') - depends_on('geant4-data@10.04', when='@10.04 ~data') - depends_on('geant4-data@10.05.p01', when='@10.05.p01 ~data') + depends_on("qt@5:", when="+qt") # As released, 10.03.03 has issues with respect to using external # CLHEP. - patch('CLHEP-10.03.03.patch', level=1, when='@10.03.p03') + patch('CLHEP-10.03.03.patch', level=1, when='@10.3.3') + # These patches can be applied independent of the cxxstd value? + patch('cxx17.patch', when='@:10.3.99 cxxstd=17') + patch('cxx17_geant4_10_0.patch', level=1, when='@10.4.0 cxxstd=17') + patch('geant4-10.4.3-cxx17-removed-features.patch', + level=1, when='@10.4.3 cxxstd=17') def cmake_args(self): spec = self.spec + # Core options options = [ - '-DGEANT4_USE_GDML=ON', + '-DGEANT4_BUILD_CXXSTD=c++{0}'.format( + self.spec.variants['cxxstd'].value), '-DGEANT4_USE_SYSTEM_CLHEP=ON', - '-DGEANT4_USE_SYSTEM_CLHEP_GRANULAR=ON', - '-DGEANT4_USE_G3TOG4=ON', - '-DGEANT4_INSTALL_DATA=ON', - '-DGEANT4_BUILD_TLS_MODEL=global-dynamic', '-DGEANT4_USE_SYSTEM_EXPAT=ON', '-DGEANT4_USE_SYSTEM_ZLIB=ON', - '-DXERCESC_ROOT_DIR:STRING=%s' % - spec['xerces-c'].prefix, ] + '-DGEANT4_USE_G3TOG4=ON', + '-DGEANT4_USE_GDML=ON', + '-DXERCESC_ROOT_DIR={0}'.format(spec['xerces-c'].prefix) + ] + # Multithreading + options.append(self.define_from_variant('GEANT4_BUILD_MULTITHREADED', + 'threads')) + if '+threads' in spec: + # This should be a variant + options.append('-DGEANT4_BUILD_TLS_MODEL=global-dynamic') + + # install the data with geant4 + datadir = spec['geant4-data'].prefix.share + dataver = '{0}-{1}'.format(spec['geant4-data'].name, + spec['geant4-data'].version.dotted) + datapath = join_path(datadir, dataver) + options.append('-DGEANT4_INSTALL_DATADIR={0}'.format(datapath)) + + # Vecgeom + if '+vecgeom' in spec: + options.append('-DGEANT4_USE_USOLIDS=ON') + options.append('-DUSolids_DIR=%s' % spec[ + 'vecgeom'].prefix.lib.CMake.USolids) + + # Visualization options if 'platform=darwin' not in spec: if "+x11" in spec and "+opengl" in spec: options.append('-DGEANT4_USE_OPENGL_X11=ON') @@ -102,61 +128,10 @@ def cmake_args(self): if "+x11" in spec: options.append('-DGEANT4_USE_RAYTRACER_X11=ON') - options.append('-DGEANT4_BUILD_CXXSTD=c++{0}'.format( - self.spec.variants['cxxstd'].value)) - if '+qt' in spec: options.append('-DGEANT4_USE_QT=ON') options.append( '-DQT_QMAKE_EXECUTABLE=%s' % 
spec['qt'].prefix.bin.qmake) - if '+vecgeom' in spec: - options.append('-DGEANT4_USE_USOLIDS=ON') - options.append('-DUSolids_DIR=%s' % spec[ - 'vecgeom'].prefix.lib.CMake.USolids) - - on_or_off = lambda opt: 'ON' if '+' + opt in spec else 'OFF' - options.append('-DGEANT4_BUILD_MULTITHREADED=' + on_or_off('threads')) - - # install the data with geant4 - options.append('-DGEANT4_INSTALL_DATA=' + on_or_off('data')) - return options - - def url_for_version(self, version): - """Handle Geant4's unusual version string.""" - return ("http://geant4.cern.ch/support/source/geant4.%s.tar.gz" % version) - - @run_before('cmake') - def make_data_links(self): - if '+data' in self.spec: - return - spec = self.spec - version = self.version - major = version[0] - minor = version[1] - if len(version) > 2: - patch = version[-1] - else: - patch = 0 - datadir = 'Geant4-%s.%s.%s/data' % (major, minor, patch) - with working_dir(join_path(spec.prefix.share, datadir), - create=True): - dirs = glob.glob('%s/%s/*' % - (spec['geant4-data'].prefix.share, datadir)) - for d in dirs: - target = os.readlink(d) - os.symlink(target, os.path.basename(target)) - - def setup_dependent_build_environment(self, env, dependent_spec): - version = self.version - major = version[0] - minor = version[1] - if len(version) > 2: - patch = version[-1] - else: - patch = 0 - datadir = 'Geant4-%s.%s.%s' % (major, minor, patch) - env.append_path('CMAKE_MODULE_PATH', join_path( - self.prefix.lib64, datadir, 'Modules')) diff --git a/var/spack/repos/builtin/packages/genometools/package.py b/var/spack/repos/builtin/packages/genometools/package.py index 082fc777411..ac7ef818053 100644 --- a/var/spack/repos/builtin/packages/genometools/package.py +++ b/var/spack/repos/builtin/packages/genometools/package.py @@ -11,16 +11,17 @@ class Genometools(MakefilePackage): of genome informatics) combined into a single binary named gt.""" homepage = "http://genometools.org/" - url = "http://genometools.org/pub/genometools-1.5.9.tar.gz" + url = "https://github.com/genometools/genometools/archive/v1.6.1.tar.gz" - version('1.5.9', sha256='36923198a4214422886fd1425ef986bd7e558c73b94194982431cfd3dc7eb387') + version('1.6.1', sha256='528ca143a7f1d42af8614d60ea1e5518012913a23526d82e434f0dad2e2d863f') + version('1.5.9', sha256='bba8e043f097e7c72e823f73cb0efbd20bbd60f1ce797a0e4c0ab632b170c909') depends_on('perl', type=('build', 'run')) depends_on('cairo') depends_on('pango') # build fails with gcc 7" - conflicts('%gcc@7.1.0:') + conflicts('%gcc@7.1.0:', when='@:1.5.9') def install(self, spec, prefix): make('install', 'prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 1bd236051a7..7b92fb65454 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -24,6 +24,11 @@ class Git(AutotoolsPackage): # You can find the source here: https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc releases = [ + { + 'version': '2.26.0', + 'sha256': 'aa168c2318e7187cd295a645f7370cc6d71a324aafc932f80f00c780b6a26bed', + 'sha256_manpages': 'c1ffaf0b4cd1e80a0eb0d4039e208c9d411ef94d5da44e38363804e1a7961218' + }, { 'version': '2.25.0', 'sha256': 'a98c9b96d91544b130f13bf846ff080dda2867e77fe08700b793ab14ba5346f6', @@ -177,7 +182,7 @@ class Git(AutotoolsPackage): depends_on('curl') depends_on('expat') depends_on('gettext') - depends_on('libiconv') + depends_on('iconv') depends_on('libidn2') depends_on('openssl') depends_on('pcre', 
when='@:2.13') @@ -221,7 +226,7 @@ def configure_args(self): configure_args = [ '--with-curl={0}'.format(spec['curl'].prefix), '--with-expat={0}'.format(spec['expat'].prefix), - '--with-iconv={0}'.format(spec['libiconv'].prefix), + '--with-iconv={0}'.format(spec['iconv'].prefix), '--with-openssl={0}'.format(spec['openssl'].prefix), '--with-perl={0}'.format(spec['perl'].command.path), '--with-zlib={0}'.format(spec['zlib'].prefix), diff --git a/var/spack/repos/builtin/packages/gitconddb/package.py b/var/spack/repos/builtin/packages/gitconddb/package.py new file mode 100644 index 00000000000..9400ea549ce --- /dev/null +++ b/var/spack/repos/builtin/packages/gitconddb/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Gitconddb(CMakePackage): + """Conditions Database library using a Git repository as the + storage backend""" + + homepage = "https://gitlab.cern.ch/lhcb/GitCondDB" + url = "https://gitlab.cern.ch/lhcb/GitCondDB/-/archive/0.1.1/GitCondDB-0.1.1.tar.gz" + git = "https://gitlab.cern.ch/lhcb/GitCondDB.git" + + maintainers = ['drbenmorgan'] + + version('master', branch='master') + version('0.1.1', sha256='024a6867722a3a622ed4327ea7d15641dd48e4e8411bdcc21915e406b3c479a2') + + # Add the cxxstd variant for forward compatibility, though we require 17 + _cxxstd_values = ('17',) + variant('cxxstd', + default='17', + values=_cxxstd_values, + multi=False, + description='Use the specified C++ standard when building.') + + depends_on('cmake@3.10:', type='build') + depends_on('pkgconfig', type='build') + depends_on('nlohmann-json@3.2.0:', type='build') + depends_on('googletest@1.8.1:', type='build') + + for s in _cxxstd_values: + depends_on('fmt@5.2.0: cxxstd=' + s, when='cxxstd=' + s) + # Maybe also a boost dependency for macOS older than catalina + + depends_on('libgit2') + + # Known conflicts on C++17 compatibility (aggressive for now) + conflicts('%gcc@:7.9.999', msg="GitCondDB requires GCC 8 or newer for C++17 support") + conflicts('%clang platform=darwin', when="@:0.1.99", msg="No Darwin support for clang in older versions") diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index e43a9d17284..43195680086 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -46,7 +46,7 @@ class Glib(AutotoolsPackage): depends_on('python', type=('build', 'run'), when='@2.53.4:') depends_on('pcre+utf', when='@2.48:') depends_on('util-linux', when='+libmount') - depends_on('libiconv') + depends_on('iconv') # The following patch is needed for gcc-6.1 patch('g_date_strftime.patch', when='@2.42.1') @@ -70,7 +70,10 @@ def configure_args(self): args.append('--with-python={0}'.format( os.path.basename(self.spec['python'].command.path)) ) - args.append('--with-libiconv=gnu') + if 'libc' in self.spec: + args.append('--with-libiconv=maybe') + else: + args.append('--with-libiconv=gnu') args.extend(self.enable_or_disable('tracing')) # SELinux is not available in Spack, so glib should not use it. 
args.append('--disable-selinux') diff --git a/var/spack/repos/builtin/packages/glusterfs/package.py b/var/spack/repos/builtin/packages/glusterfs/package.py new file mode 100644 index 00000000000..89024cf9774 --- /dev/null +++ b/var/spack/repos/builtin/packages/glusterfs/package.py @@ -0,0 +1,44 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Glusterfs(AutotoolsPackage): + """Gluster is a software defined distributed storage that can scale to + several petabytes. It provides interfaces for object, block and file + storage.""" + + homepage = "https://www.gluster.org/" + url = "https://download.gluster.org/pub/gluster/glusterfs/7/7.3/glusterfs-7.3.tar.gz" + list_url = "https://download.gluster.org/pub/gluster/glusterfs/" + list_depth = 2 + + version('7.3', sha256='2401cc7c3f5488f6fc5ea09ce2ab30c918612f592571fb3de6124f8482ad4954') + version('7.2', sha256='8e43614967b90d64495fbe2c52230dd72572ce219507fb48bc317b1c228a06e1') + version('7.1', sha256='ffc5bd78b079009382bd01391865646bc9b2e8e72366afc96d62ba891dd9dbce') + version('7.0', sha256='8a872518bf9bd4dc1568f45c716bcde09e3bf7abf5b156ea90405e0fc2e9f07b') + version('6.8', sha256='41e855bdc456759c8c15ef494c636a25cc7b62c55ad132ecd55bec05df64793f') + version('6.7', sha256='e237dd59a2d5b73e156b0b71df49ff64a143b3aaf8f0a65daaf369bb40f5e923') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('flex', type='build') + depends_on('bison', type='build') + depends_on('rpcsvc-proto') + depends_on('acl') + depends_on('libuuid') + depends_on('libtirpc') + depends_on('userspace-rcu') + + def url_for_version(self, version): + url = 'https://download.gluster.org/pub/gluster/glusterfs/{0}/{1}/glusterfs-{1}.tar.gz' + return url.format(version.up_to(1), version) + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./autogen.sh') diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py index b5d677bd992..522a315f290 100644 --- a/var/spack/repos/builtin/packages/gmsh/package.py +++ b/var/spack/repos/builtin/packages/gmsh/package.py @@ -19,6 +19,7 @@ class Gmsh(CMakePackage): homepage = 'http://gmsh.info' url = 'http://gmsh.info/src/gmsh-4.4.1-source.tgz' + version('4.5.4', sha256='ccf8c74f43cbe3c371abe79862025d41642b3538a0148f018949494e3b3e2ecd') version('4.4.1', sha256='853c6438fc4e4b765206e66a514b09182c56377bb4b73f1d0d26eda7eb8af0dc') version('4.2.2', sha256='e9ee9f5c606bbec5f2adbb8c3d6023c4e2577f487fa4e4ecfcfc94a241cc8dcc') version('4.0.0', sha256='fb0c8afa37425c6f4315ab3b3124e9e102fcf270a35198423a4002796f04155f') @@ -31,16 +32,18 @@ class Gmsh(CMakePackage): variant('shared', default=True, description='Enables the build of shared libraries') variant('mpi', default=True, description='Builds MPI support for parser and solver') - variant('openmp', default=False, description='Enable OpenMP support') + variant('openmp', default=False, description='Enable OpenMP support') variant('fltk', default=False, description='Enables the build of the FLTK GUI') variant('hdf5', default=False, description='Enables HDF5 support') variant('compression', default=True, description='Enables IO compression through zlib') variant('netgen', default=False, description='Build with Netgen') + variant('opencascade', 
default=False, description='Build with OpenCASCADE') variant('oce', default=False, description='Build with OCE') variant('petsc', default=False, description='Build with PETSc') variant('slepc', default=False, description='Build with SLEPc (only when PETSc is enabled)') variant('tetgen', default=False, description='Build with Tetgen') - variant('metis', default=False, description='Build with Metis') + variant('metis', default=False, description='Build with Metis') + variant('privateapi', default=False, description='Enable the private API') depends_on('blas') depends_on('lapack') @@ -51,6 +54,7 @@ class Gmsh(CMakePackage): depends_on('fltk', when='+fltk') depends_on('hdf5', when='+hdf5') depends_on('netgen', when='+netgen') + depends_on('opencascade', when='+opencascade') depends_on('oce', when='+oce') depends_on('petsc+mpi', when='+petsc+mpi') depends_on('petsc', when='+petsc~mpi') @@ -60,6 +64,7 @@ class Gmsh(CMakePackage): depends_on('metis', when='+metis') conflicts('+slepc', when='~petsc') + conflicts('+oce', when='+opencascade') def cmake_args(self): spec = self.spec @@ -96,6 +101,9 @@ def cmake_args(self): if '+oce' in spec: env['CASROOT'] = self.spec['oce'].prefix options.append('-DENABLE_OCC=ON') + elif '+opencascade' in spec: + env['CASROOT'] = self.spec['opencascade'].prefix + options.append('-DENABLE_OCC=ON') else: options.append('-DENABLE_OCC=OFF') @@ -142,4 +150,9 @@ def cmake_args(self): if '+compression' in spec: options.append('-DENABLE_COMPRESSED_IO:BOOL=ON') + if '+privateapi' in spec: + options.append('-DENABLE_PRIVATE_API=ON') + else: + options.append('-DENABLE_PRIVATE_API=OFF') + return options diff --git a/var/spack/repos/builtin/packages/gmt/package.py b/var/spack/repos/builtin/packages/gmt/package.py index b9bb7b71d98..003c0d24be1 100644 --- a/var/spack/repos/builtin/packages/gmt/package.py +++ b/var/spack/repos/builtin/packages/gmt/package.py @@ -14,34 +14,49 @@ class Gmt(Package): contour maps to artificially illuminated surfaces and 3D perspective views. 
""" - homepage = "http://gmt.soest.hawaii.edu/" - url = "https://github.com/GenericMappingTools/gmt/archive/5.4.4.tar.gz" + homepage = "https://www.generic-mapping-tools.org/" + url = "https://github.com/GenericMappingTools/gmt/archive/6.0.0.tar.gz" + git = "https://github.com/GenericMappingTools/gmt.git" + maintainers = ['adamjstewart'] + + version('master', branch='master') + version('6.0.0', sha256='7a733e670f01d99f8fc0da51a4337320d764c06a68746621f83ccf2e3453bcb7') version('5.4.4', sha256='b593dfb101e6507c467619f3d2190a9f78b09d49fe2c27799750b8c4c0cd2da0') version('4.5.9', sha256='9b13be96ccf4bbd38c14359c05dfa7eeeb4b5f06d6f4be9c33d6c3ea276afc86', url='ftp://ftp.soest.hawaii.edu/gmt/legacy/gmt-4.5.9.tar.bz2') - variant('pcre', default=False, description='Enable the PCRE interface') - variant('gdal', default=False, description='Enable the GDAL interface') - variant('fftw', default=True, description='Fast FFTs') - variant('lapack', default=True, description='Fast matrix inversion') - variant('blas', default=True, description='Fast matrix multiplications') + variant('ghostscript', default=False, description='Ability to convert PostScript plots to PDF and rasters') + variant('gdal', default=False, description='Ability to read and write numerous grid and image formats') + variant('pcre', default=False, description='Regular expression support') + variant('fftw', default=False, description='Fast FFTs') + variant('glib', default=False, description='GTHREAD support') + variant('lapack', default=False, description='Fast matrix inversion') + variant('blas', default=False, description='Fast matrix multiplications') + variant('graphicsmagick', default=False, description='Convert images to animated GIFs') + variant('ffmpeg', default=False, description='Convert images to videos') + variant('docs', default=False, description='Build manpage and HTML documentation') - # http://gmt.soest.hawaii.edu/projects/gmt/wiki/BuildingGMT + # https://github.com/GenericMappingTools/gmt/blob/master/BUILDING.md + # https://github.com/GenericMappingTools/gmt/blob/master/MAINTENANCE.md # Required dependencies - depends_on('ghostscript') - depends_on('subversion') - depends_on('cmake@2.8.5:', type='build', when='@5:') + depends_on('cmake@2.8.7:', type='build', when='@5:') depends_on('netcdf-c@4:') depends_on('curl', when='@5.4:') # Optional dependencies - depends_on('pcre', when='+pcre') + depends_on('ghostscript', when='+ghostscript') depends_on('gdal', when='+gdal') + depends_on('pcre', when='+pcre') depends_on('fftw', when='+fftw') + depends_on('glib', when='+glib') depends_on('lapack', when='+lapack') depends_on('blas', when='+blas') + depends_on('graphicsmagick', when='+graphicsmagick') + depends_on('ffmpeg', when='+ffmpeg') + depends_on('py-sphinx@1.4:', when='+docs', type='build') + depends_on('graphicsmagick', type='test') patch('type.patch', when='@4.5.9') @@ -49,9 +64,56 @@ class Gmt(Package): @when('@5:') def install(self, spec, prefix): with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) + args = std_cmake_args + + args.extend([ + '-DNETCDF_CONFIG={0}'.format( + spec['netcdf-c'].prefix.bin.join('nc-config')), + '-DNETCDF_INCLUDE_DIR={0}'.format( + spec['netcdf-c'].headers.directories[0]), + '-DNETCDF_LIBRARY={0}'.format( + spec['netcdf-c'].libs[0]) + ]) + + # If these options aren't explicitly disabled, + # CMake will search OS for dependencies + if '+ghostscript' in spec: + args.append('-DGS={0}'.format( + spec['ghostscript'].prefix.bin.gs)) + else: + args.append('-DGS=') + + if '+gdal' 
in spec: + args.extend([ + '-DGDAL_TRANSLATE={0}'.format( + spec['gdal'].prefix.bin.gdal_translate), + '-DOGR2OGR={0}'.format( + spec['gdal'].prefix.bin.ogr2ogr), + ]) + else: + args.extend(['-DGDAL_TRANSLATE=', '-DOGR2OGR=']) + + if 'graphicsmagick' in spec: + args.extend([ + '-DGM={0}'.format( + spec['graphicsmagick'].prefix.bin.gm), + '-DGRAPHICSMAGICK={0}'.format( + spec['graphicsmagick'].prefix.bin.gm), + ]) + else: + args.extend(['-DGM=', '-DGRAPHICSMAGICK=']) + + if '+ffmpeg' in spec: + args.append('-DFFMPEG={0}'.format( + spec['ffmpeg'].prefix.bin.ffmpeg)) + else: + args.append('-DFFMPEG=') + + cmake('..', *args) make() + if self.run_tests: + make('check') make('install') @when('@:4') diff --git a/var/spack/repos/builtin/packages/gnupg/package.py b/var/spack/repos/builtin/packages/gnupg/package.py index 943f14d31f8..d97aa105695 100644 --- a/var/spack/repos/builtin/packages/gnupg/package.py +++ b/var/spack/repos/builtin/packages/gnupg/package.py @@ -25,7 +25,7 @@ class Gnupg(AutotoolsPackage): depends_on('libassuan@2.4:', when='@:2.2.3') depends_on('libassuan@2.5:', when='@2.2.15:') depends_on('pinentry', type='run') - depends_on('libiconv') + depends_on('iconv') depends_on('zlib') def configure_args(self): @@ -42,7 +42,7 @@ def configure_args(self): '--with-libassuan-prefix=' + self.spec['libassuan'].prefix, '--with-ksba-prefix=' + self.spec['libksba'].prefix, '--with-npth-prefix=' + self.spec['npth'].prefix, - '--with-libiconv-prefix=' + self.spec['libiconv'].prefix, + '--with-libiconv-prefix=' + self.spec['iconv'].prefix, '--with-zlib=' + self.spec['zlib'].prefix, '--without-tar', '--without-libiconv-prefix', diff --git a/var/spack/repos/builtin/packages/gnuplot/package.py b/var/spack/repos/builtin/packages/gnuplot/package.py index 17ee973eee9..9fb79e67466 100644 --- a/var/spack/repos/builtin/packages/gnuplot/package.py +++ b/var/spack/repos/builtin/packages/gnuplot/package.py @@ -55,7 +55,7 @@ class Gnuplot(AutotoolsPackage): depends_on('readline') depends_on('pkgconfig', type='build') depends_on('libxpm') - depends_on('libiconv') + depends_on('iconv') # optional dependencies: depends_on('libcerf', when='+libcerf') diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py index 09d5796c09d..50ade3e2c30 100644 --- a/var/spack/repos/builtin/packages/go/package.py +++ b/var/spack/repos/builtin/packages/go/package.py @@ -35,16 +35,18 @@ class Go(Package): extendable = True - version('1.14', sha256='6d643e46ad565058c7a39dac01144172ef9bd476521f42148be59249e4b74389') + version('1.14.1', sha256='2ad2572115b0d1b4cb4c138e6b3a31cee6294cb48af75ee86bec3dca04507676') + version('1.14', sha256='6d643e46ad565058c7a39dac01144172ef9bd476521f42148be59249e4b74389') + version('1.13.9', sha256='34bb19d806e0bc4ad8f508ae24bade5e9fedfa53d09be63b488a9314d2d4f31d') version('1.13.8', sha256='b13bf04633d4d8cf53226ebeaace8d4d2fd07ae6fa676d0844a688339debec34') version('1.13.7', sha256='e4ad42cc5f5c19521fbbbde3680995f2546110b5c6aa2b48c3754ff7af9b41f4') version('1.13.6', sha256='aae5be954bdc40bcf8006eb77e8d8a5dde412722bc8effcdaf9772620d06420c') version('1.13.5', sha256='27d356e2a0b30d9983b60a788cf225da5f914066b37a6b4f69d457ba55a626ff') - version('1.13.4', sha256='95dbeab442ee2746b9acf0934c8e2fc26414a0565c008631b04addb8c02e7624') + version('1.13.4', sha256='95dbeab442ee2746b9acf0934c8e2fc26414a0565c008631b04addb8c02e7624') version('1.13.3', sha256='4f7123044375d5c404280737fbd2d0b17064b66182a65919ffe20ffe8620e3df') version('1.13.2', 
sha256='1ea68e01472e4276526902b8817abd65cf84ed921977266f0c11968d5e915f44') version('1.13.1', sha256='81f154e69544b9fa92b1475ff5f11e64270260d46e7e36c34aafc8bc96209358') - version('1.13', sha256='3fc0b8b6101d42efd7da1da3029c0a13f22079c0c37ef9730209d8ec665bf122') + version('1.13', sha256='3fc0b8b6101d42efd7da1da3029c0a13f22079c0c37ef9730209d8ec665bf122') version('1.12.17', sha256='de878218c43aa3c3bad54c1c52d95e3b0e5d336e1285c647383e775541a28b25') version('1.12.15', sha256='8aba74417e527524ad5724e6e6c21016795d1017692db76d1b0851c6bdec84c3') version('1.12.14', sha256='39dbf05f7e2ffcb19b08f07d53dcc96feadeb1987fef9e279e7ff0c598213064') diff --git a/var/spack/repos/builtin/packages/grass/package.py b/var/spack/repos/builtin/packages/grass/package.py index c80255fb436..cb83a5cc2ab 100644 --- a/var/spack/repos/builtin/packages/grass/package.py +++ b/var/spack/repos/builtin/packages/grass/package.py @@ -58,7 +58,7 @@ class Grass(AutotoolsPackage): # http://htmlpreview.github.io/?https://github.com/OSGeo/grass/blob/master/REQUIREMENTS.html # General requirements depends_on('gmake@3.81:', type='build') - depends_on('libiconv') + depends_on('iconv') depends_on('zlib') depends_on('flex', type='build') depends_on('bison', type='build') @@ -254,5 +254,8 @@ def configure_args(self): # hence invoke the following function afterwards @run_after('configure') def fix_iconv_linking(self): + if self.spec['iconv'].name != 'libiconv': + return + makefile = FileFilter('include/Make/Platform.make') makefile.filter(r'^ICONVLIB\s*=.*', 'ICONVLIB = -liconv') diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 72aca3e5f03..037b25721b1 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -2,8 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * +import llnl.util.cpu class Gromacs(CMakePackage): @@ -62,12 +61,6 @@ class Gromacs(CMakePackage): description='The build type to build', values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel', 'Reference', 'RelWithAssert', 'Profile')) - variant('simd', default='auto', - description='The SIMD instruction set to use', - values=('auto', 'none', 'SSE2', 'SSE4.1', 'AVX_128_FMA', 'AVX_256', - 'AVX2_128', 'AVX2_256', 'AVX_512', 'AVX_512_KNL', - 'IBM_QPX', 'Sparc64_HPC_ACE', 'IBM_VMX', 'IBM_VSX', - 'ARM_NEON', 'ARM_NEON_ASIMD')) variant('rdtscp', default=True, description='Enable RDTSCP instruction usage') variant('mdrun_only', default=False, description='Enables the build of a cut-down version' @@ -119,13 +112,40 @@ def cmake_args(self): else: options.append('-DGMX_GPU:BOOL=OFF') - simd_value = self.spec.variants['simd'].value - if simd_value == 'auto': - pass - elif simd_value == 'none': - options.append('-DGMX_SIMD:STRING=None') + # Activate SIMD based on properties of the target + target = self.spec.target + if target >= llnl.util.cpu.targets['bulldozer']: + # AMD Family 15h + options.append('-DGMX_SIMD=AVX_128_FMA') + elif target >= llnl.util.cpu.targets['zen']: + # AMD Family 17h + options.append('-DGMX_SIMD=AVX2_128') + elif target >= llnl.util.cpu.targets['power7']: + # IBM Power 7 and beyond + options.append('-DGMX_SIMD=IBM_VSX') + elif target.family == llnl.util.cpu.targets['aarch64']: + # ARMv8 + options.append('-DGMX_SIMD=ARM_NEON_ASIMD') + elif target == llnl.util.cpu.targets['mic_knl']: + # Intel KNL + options.append('-DGMX_SIMD=AVX_512_KNL') + elif target.vendor == 'GenuineIntel': + # Other Intel architectures + simd_features = [ + ('sse2', 'SSE2'), + ('sse4_1', 'SSE4.1'), + ('avx', 'AVX_256'), + ('axv128', 'AVX2_128'), + ('avx2', 'AVX2_256'), + ('avx512', 'AVX_512'), + ] + for feature, flag in reversed(simd_features): + if feature in target: + options.append('-DGMX_SIMD:STRING={0}'.format(flag)) + break else: - options.append('-DGMX_SIMD:STRING=' + simd_value) + # Fall back to this for unknown microarchitectures + options.append('-DGMX_SIMD:STRING=None') if '-rdtscp' in self.spec: options.append('-DGMX_USE_RDTSCP:BOOL=OFF') diff --git a/var/spack/repos/builtin/packages/gtkorvo-dill/2.1-fix-clear_cache.patch b/var/spack/repos/builtin/packages/gtkorvo-dill/2.1-fix-clear_cache.patch new file mode 100644 index 00000000000..cfab7234149 --- /dev/null +++ b/var/spack/repos/builtin/packages/gtkorvo-dill/2.1-fix-clear_cache.patch @@ -0,0 +1,148 @@ +diff -ur spack-src.org/CMakeLists.txt spack-src/CMakeLists.txt +--- spack-src.org/CMakeLists.txt 2020-03-26 16:38:53.358339744 +0900 ++++ spack-src/CMakeLists.txt 2020-03-26 16:44:04.581014766 +0900 +@@ -184,6 +184,9 @@ + CHECK_INCLUDE_FILES(malloc.h HAVE_MALLOC_H) + CHECK_INCLUDE_FILES(memory.h HAVE_MEMORY_H) + INCLUDE_DIRECTORIES(${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR}) ++include(CheckSymbolExists) ++check_symbol_exists(__clear_cache "" CLEAR_CACHE_DEFINED) ++message(STATUS "Clear cache defined is ${CLEAR_CACHE_DEFINED}") + + set (NO_DISASSEMBLER TRUE) + if (ENABLE_DISASSEMBLY) +diff -ur spack-src.org/arm6.c spack-src/arm6.c +--- spack-src.org/arm6.c 2020-03-26 16:38:53.358339744 +0900 ++++ spack-src/arm6.c 2020-03-26 16:45:20.428978615 +0900 +@@ -1524,22 +1524,9 @@ + } + + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. 
*/ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm6_flush(void *base, void *limit) +diff -ur spack-src.org/arm6_rt.c spack-src/arm6_rt.c +--- spack-src.org/arm6_rt.c 2020-03-26 16:38:53.358339744 +0900 ++++ spack-src/arm6_rt.c 2020-03-26 16:48:18.927720543 +0900 +@@ -109,22 +109,9 @@ + } + } + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. */ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm6_flush(void *base, void *limit) +diff -ur spack-src.org/arm8.c spack-src/arm8.c +--- spack-src.org/arm8.c 2020-03-26 16:38:53.358339744 +0900 ++++ spack-src/arm8.c 2020-03-26 16:49:38.386063473 +0900 +@@ -1524,22 +1524,9 @@ + } + + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. */ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm8_flush(void *base, void *limit) +diff -ur spack-src.org/arm8_rt.c spack-src/arm8_rt.c +--- spack-src.org/arm8_rt.c 2020-03-26 16:38:53.358339744 +0900 ++++ spack-src/arm8_rt.c 2020-03-26 16:50:37.902312532 +0900 +@@ -109,22 +109,9 @@ + } + } + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. 
*/ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm8_flush(void *base, void *limit) +diff -ur spack-src.org/config.h.cmake spack-src/config.h.cmake +--- spack-src.org/config.h.cmake 2020-03-26 16:38:53.358339744 +0900 ++++ spack-src/config.h.cmake 2020-03-26 16:52:52.256419382 +0900 +@@ -13,10 +13,13 @@ + #cmakedefine HAVE_DIS_ASM_H + + /* Define to 1 if you have the header file. */ +-#undef HAVE_DLFCN_H ++#cmakedefine HAVE_DLFCN_H + + /* Define to 1 if you have the header file. */ +-#undef HAVE_INTTYPES_H ++#cmakedefine HAVE_INTTYPES_H ++ ++/* Define to 1 if you have __clear_cache is defined */ ++#cmakedefine CLEAR_CACHE_DEFINED + + /* Define to 1 if you have the header file. */ + #cmakedefine HAVE_MALLOC_H diff --git a/var/spack/repos/builtin/packages/gtkorvo-dill/2.4-fix-clear_cache.patch b/var/spack/repos/builtin/packages/gtkorvo-dill/2.4-fix-clear_cache.patch new file mode 100644 index 00000000000..7f47fba9529 --- /dev/null +++ b/var/spack/repos/builtin/packages/gtkorvo-dill/2.4-fix-clear_cache.patch @@ -0,0 +1,148 @@ +diff -ur spack-src.org/CMakeLists.txt spack-src/CMakeLists.txt +--- spack-src.org/CMakeLists.txt 2020-03-26 09:35:43.403836842 +0900 ++++ spack-src/CMakeLists.txt 2020-03-26 09:37:57.397837929 +0900 +@@ -267,6 +267,9 @@ + CHECK_INCLUDE_FILES(stdarg.h STDC_HEADERS) + CHECK_INCLUDE_FILES(malloc.h HAVE_MALLOC_H) + CHECK_INCLUDE_FILES(memory.h HAVE_MEMORY_H) ++include(CheckSymbolExists) ++check_symbol_exists(__clear_cache "" CLEAR_CACHE_DEFINED) ++message(STATUS "Clear cache defined is ${CLEAR_CACHE_DEFINED}") + + set (NO_DISASSEMBLER TRUE) + if (DILL_ENABLE_DISASSEMBLY) +diff -ur spack-src.org/arm6.c spack-src/arm6.c +--- spack-src.org/arm6.c 2020-03-26 09:35:43.403836842 +0900 ++++ spack-src/arm6.c 2020-03-26 09:40:06.021306329 +0900 +@@ -1526,22 +1526,9 @@ + } + + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. */ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm6_flush(void *base, void *limit) +diff -ur spack-src.org/arm6_rt.c spack-src/arm6_rt.c +--- spack-src.org/arm6_rt.c 2020-03-26 09:35:43.403836842 +0900 ++++ spack-src/arm6_rt.c 2020-03-26 09:41:59.823222738 +0900 +@@ -109,22 +109,9 @@ + } + } + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. 
*/ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm6_flush(void *base, void *limit) +diff -ur spack-src.org/arm8.c spack-src/arm8.c +--- spack-src.org/arm8.c 2020-03-26 09:35:43.403836842 +0900 ++++ spack-src/arm8.c 2020-03-26 09:43:04.630008776 +0900 +@@ -1524,22 +1524,9 @@ + } + + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. */ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm8_flush(void *base, void *limit) +diff -ur spack-src.org/arm8_rt.c spack-src/arm8_rt.c +--- spack-src.org/arm8_rt.c 2020-03-26 09:35:43.403836842 +0900 ++++ spack-src/arm8_rt.c 2020-03-26 09:44:19.027799105 +0900 +@@ -109,22 +109,9 @@ + } + } + +-/* Clear the instruction cache from `beg' to `end'. This makes an +- inline system call to SYS_cacheflush. */ +-#define CLEAR_INSN_CACHE(BEG, END) \ +-{ \ +- register unsigned long _beg __asm ("a1") = (unsigned long) (BEG); \ +- register unsigned long _end __asm ("a2") = (unsigned long) (END); \ +- register unsigned long _flg __asm ("a3") = 0; \ +- __asm __volatile ("swi 0x9f0002 @ sys_cacheflush" \ +- : "=r" (_beg) \ +- : "0" (_beg), "r" (_end), "r" (_flg)); \ +-} +-/* +- * Cache flush code grabbed from a Dec 1999 posting on libc-hacker +- * mailing list +- */ +-extern void __clear_cache(char*, char *); ++#ifndef CLEAR_CACHE_DEFINED ++extern void __clear_cache(void *, void *); ++#endif + + static void + arm8_flush(void *base, void *limit) +diff -ur spack-src.org/config.h.cmake spack-src/config.h.cmake +--- spack-src.org/config.h.cmake 2020-03-26 09:35:43.403836842 +0900 ++++ spack-src/config.h.cmake 2020-03-26 09:46:56.124248964 +0900 +@@ -16,10 +16,13 @@ + #cmakedefine HAVE_DIS_ASM_H + + /* Define to 1 if you have the header file. */ +-#undef HAVE_DLFCN_H ++#cmakedefine HAVE_DLFCN_H + + /* Define to 1 if you have the header file. */ +-#undef HAVE_INTTYPES_H ++#cmakedefine HAVE_INTTYPES_H ++ ++/* Define to 1 if you have __clear_cache is defined */ ++#cmakedefine CLEAR_CACHE_DEFINED + + /* Define to 1 if you have the header file. 
*/ + #cmakedefine HAVE_MALLOC_H diff --git a/var/spack/repos/builtin/packages/gtkorvo-dill/package.py b/var/spack/repos/builtin/packages/gtkorvo-dill/package.py index cb5920ae5b5..b708666d4fe 100644 --- a/var/spack/repos/builtin/packages/gtkorvo-dill/package.py +++ b/var/spack/repos/builtin/packages/gtkorvo-dill/package.py @@ -20,6 +20,10 @@ class GtkorvoDill(CMakePackage): version('2.4', sha256='ed7745d13e8c6a556f324dcc0e48a807fc993bdd5bb1daa94c1df116cb7e81fa') version('2.1', sha256='7671e1f3c25ac6a4ec2320cec2c342a2f668efb170e3dba186718ed17d2cf084') + # Ref: https://github.com/GTkorvo/dill/commit/dac6dfcc7fdaceeb4c157f9ecdf5ecc28f20477f + patch('2.4-fix-clear_cache.patch', when='@2.4') + patch('2.1-fix-clear_cache.patch', when='@2.1') + def cmake_args(self): args = [] if self.spec.satisfies('@2.4:'): diff --git a/var/spack/repos/builtin/packages/gtksourceview/package.py b/var/spack/repos/builtin/packages/gtksourceview/package.py index 01876610b2b..ed6737133a4 100644 --- a/var/spack/repos/builtin/packages/gtksourceview/package.py +++ b/var/spack/repos/builtin/packages/gtksourceview/package.py @@ -35,7 +35,7 @@ class Gtksourceview(AutotoolsPackage): depends_on('pango') depends_on('gdk-pixbuf') depends_on('atk') - depends_on('libiconv') + depends_on('iconv') def url_for_version(self, version): url = 'https://download.gnome.org/sources/gtksourceview/' diff --git a/var/spack/repos/builtin/packages/hcol/package.py b/var/spack/repos/builtin/packages/hcol/package.py new file mode 100644 index 00000000000..70278eba930 --- /dev/null +++ b/var/spack/repos/builtin/packages/hcol/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Hcol(Package): + """This is the SPIRAL package for the Hybrid Control Operator Language + (HCOL).""" + + homepage = "https://https://spiral.net" + url = "https://github.com/spiral-software/spiral-package-hcol/archive/1.0.0.zip" + + maintainers = ['spiralgen'] + extends('spiral') + + version('1.0.0', sha256='9a95574e2b061d03d264c32dbf733e893017d1644b6486c7a8a55a3b24783f58') + + # HCOL package is an extension for Spiral. Install the files in their own + # prefix, in "namespaces/packages/hcol". This allows 'spack activate' to + # symlink hcol at the right location for spiral packages. + def install(self, spec, prefix): + install_tree('.', prefix.namespaces.packages.hcol) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index 3c62d46f0d6..9d5505b5f78 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -23,7 +23,12 @@ class Hdf5(AutotoolsPackage): version('develop', branch='develop') - version('1.10.6', sha256='5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa') + version('1.12.0', sha256='a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a') + + # HDF5 1.12 broke API compatibility, so we currently prefer the latest + # 1.10 release. packages that want later versions of HDF5 should specify, + # e.g., depends_on("hdf5@1.12:") to get 1.12 or higher. 
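The hdf5 comment above spells out the upgrade path for dependents: 1.10.6 stays preferred, and packages that need the 1.12 API must ask for it explicitly. A minimal, hypothetical consumer recipe is sketched below; the package name, URLs, and checksum are placeholders and are not part of this changeset.

    from spack import *


    class ExampleH5Consumer(AutotoolsPackage):
        """Hypothetical package that requires the HDF5 1.12 API."""

        homepage = "https://example.org/h5consumer"        # placeholder
        url = "https://example.org/h5consumer-1.0.tar.gz"  # placeholder

        version('1.0', sha256='0' * 64)  # placeholder checksum

        # Overrides the preferred=True default: the concretizer will pick
        # hdf5@1.12.0 or newer for this spec instead of 1.10.6.
        depends_on('hdf5@1.12:')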
+ version('1.10.6', sha256='5f9a3ee85db4ea1d3b1fa9159352aebc2af72732fc2f58c96a3f0768dba0e9aa', preferred=True) version('1.10.5', sha256='6d4ce8bf902a97b050f6f491f4268634e252a63dadd6656a1a9be5b7b7726fa8') version('1.10.4', sha256='8f60dc4dd6ab5fcd23c750d1dc5bca3d0453bdce5c8cdaf0a4a61a9d1122adb2') version('1.10.3', sha256='b600d7c914cfa80ae127cd1a1539981213fee9994ac22ebec9e3845e951d9b39') diff --git a/var/spack/repos/builtin/packages/helics/package.py b/var/spack/repos/builtin/packages/helics/package.py new file mode 100644 index 00000000000..7aa4a06c466 --- /dev/null +++ b/var/spack/repos/builtin/packages/helics/package.py @@ -0,0 +1,105 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Helics(CMakePackage): + """HELICS is a general-purpose, modular, highly-scalable co-simulation + framework that runs cross-platform (Linux, Windows, and Mac OS X) and + supports both event driven and time series simulation.""" + + homepage = "https://github.com/GMLC-TDC/HELICS" + url = "https://github.com/GMLC-TDC/HELICS/releases/download/v2.4.1/Helics-v2.4.1-source.tar.gz" + git = "https://github.com/GMLC-TDC/HELICS.git" + + maintainers = ['nightlark'] + + version('develop', branch='develop', submodules=True) + version('master', branch='master', submodules=True) + version('2.4.2', sha256='957856f06ed6d622f05dfe53df7768bba8fe2336d841252f5fac8345070fa5cb') + version('2.4.1', sha256='ac077e9efe466881ea366721cb31fb37ea0e72a881a717323ba4f3cdda338be4') + + variant('build_type', default='Release', + description='CMake build type', + values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel')) + variant('apps', default=True, description="Install the HELICS apps") + variant('c_shared', default=True, description="Install the C shared library") + variant('cxx_shared', default=True, description="Install the CXX shared library") + variant('zmq', default=True, description="Enable ZeroMQ core types") + variant('tcp', default=True, description="Enable TCP core types") + variant('udp', default=True, description="Enable UDP core type") + variant('ipc', default=True, description="Enable IPC core type") + variant('inproc', default=True, description="Enable in-process core type") + variant('mpi', default=False, description="Enable MPI core type") + variant('boost', default=True, description="Compile with Boost libraries") + variant('asio', default=True, description="Compile with ASIO libraries") + variant('swig', default=False, description="Build language bindings with SWIG") + variant('webserver', default=True, description="Enable the integrated webserver in the HELICS broker server") + + # Build dependency + depends_on('git', type='build', when='@master:') + depends_on('cmake@3.4:', type='build') + depends_on('boost@1.70: ~atomic ~chrono ~date_time ~exception ~filesystem ~graph ~iostreams ~locale ~log ~math ~program_options ~random ~regex ~serialization ~signals ~system ~test ~thread ~timer ~wave', type='build', when='+boost') + depends_on('swig@3.0:', type='build', when='+swig') + + depends_on('libzmq@4.3:', when='+zmq') + depends_on('mpi@2', when='+mpi') + + # OpenMPI doesn't work with HELICS <=2.4.1 + conflicts('^openmpi', when='@:2.4.1 +mpi') + + # Boost is required for ipc and webserver options + conflicts('+ipc', when='~boost') + conflicts('+webserver', when='~boost') + + # ASIO (vendored in HELICS repo) is required for tcp and udp 
options + conflicts('+tcp', when='~asio') + conflicts('+udp', when='~asio') + + def cmake_args(self): + spec = self.spec + args = [ + '-DHELICS_BUILD_EXAMPLES=OFF', + '-DHELICS_BUILD_TESTS=OFF', + ] + + # HELICS core type CMake options + args.append('-DENABLE_ZMQ_CORE={0}'.format( + 'ON' if '+zmq' in spec else 'OFF')) + args.append('-DENABLE_TCP_CORE={0}'.format( + 'ON' if '+tcp' in spec else 'OFF')) + args.append('-DENABLE_UDP_CORE={0}'.format( + 'ON' if '+udp' in spec else 'OFF')) + args.append('-DENABLE_IPC_CORE={0}'.format( + 'ON' if '+ipc' in spec else 'OFF')) + args.append('-DENABLE_INPROC_CORE={0}'.format( + 'ON' if '+inproc' in spec else 'OFF')) + args.append('-DENABLE_MPI_CORE={0}'.format( + 'ON' if '+mpi' in spec else 'OFF')) + + # HELICS shared library options + args.append('-DHELICS_DISABLE_C_SHARED_LIB={0}'.format( + 'OFF' if '+c_shared' in spec else 'ON')) + args.append('-DHELICS_BUILD_CXX_SHARED_LIB={0}'.format( + 'ON' if '+cxx_shared' in spec else 'OFF')) + + # HELICS executable app options + args.append('-DHELICS_BUILD_APP_EXECUTABLES={0}'.format( + 'ON' if '+apps' in spec else 'OFF')) + args.append('-DHELICS_DISABLE_WEBSERVER={0}'.format( + 'OFF' if '+webserver' in spec else 'ON')) + + # Extra HELICS library dependencies + args.append('-DHELICS_DISABLE_BOOST={0}'.format( + 'OFF' if '+boost' in spec else 'ON')) + args.append('-DHELICS_DISABLE_ASIO={0}'.format( + 'OFF' if '+asio' in spec else 'ON')) + + # SWIG + args.append('-DHELICS_ENABLE_SWIG={0}'.format( + 'ON' if '+swig' in spec else 'OFF')) + + return args diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index 9a965502de2..d64ce78fadc 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -60,15 +60,16 @@ class Hpctoolkit(AutotoolsPackage): ' +graph +regex +shared +multithreaded visibility=global' ) - depends_on('binutils+libiberty~nls', type='link') + depends_on('binutils+libiberty~nls', type='link', when='@master') + depends_on('binutils@:2.33.1+libiberty~nls', type='link', when='@:2020.03.99') depends_on('boost' + boost_libs) depends_on('bzip2+shared', type='link') depends_on('dyninst@9.3.2:') depends_on('elfutils+bzip2+xz~nls', type='link') depends_on('intel-tbb+shared') depends_on('libdwarf') - depends_on('libmonitor+hpctoolkit') - depends_on('libmonitor+bgq', when='+bgq') + depends_on('libmonitor+hpctoolkit+bgq', when='+bgq') + depends_on('libmonitor+hpctoolkit~bgq', when='~bgq') depends_on('libunwind@1.4: +xz') depends_on('mbedtls+pic') depends_on('xerces-c transcoder=iconv') diff --git a/var/spack/repos/builtin/packages/htop/package.py b/var/spack/repos/builtin/packages/htop/package.py index 9de69ae0bfc..e0d99604678 100644 --- a/var/spack/repos/builtin/packages/htop/package.py +++ b/var/spack/repos/builtin/packages/htop/package.py @@ -18,6 +18,7 @@ class Htop(AutotoolsPackage): version('2.0.2', sha256='179be9dccb80cee0c5e1a1f58c8f72ce7b2328ede30fb71dcdf336539be2f487') depends_on('ncurses') + depends_on('python+pythoncmd', type='build') def configure_args(self): return ['--enable-shared'] diff --git a/var/spack/repos/builtin/packages/intel-daal/package.py b/var/spack/repos/builtin/packages/intel-daal/package.py index 9d5ab0f2eff..8ac099e65d9 100644 --- a/var/spack/repos/builtin/packages/intel-daal/package.py +++ b/var/spack/repos/builtin/packages/intel-daal/package.py @@ -11,6 +11,8 @@ class IntelDaal(IntelPackage): homepage = 
"https://software.intel.com/en-us/daal" + version('2020.1.217', sha256='3f84dea0ce1038ac1b9c25b3e2c02e9fac440fa36cc8adfce69edfc06fe0edda', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16536/l_daal_2020.1.217.tgz') version('2020.0.166', sha256='695166c9ab32ac5d3006d6d35162db3c98734210507144e315ed7c3b7dbca9c1', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16234/l_daal_2020.0.166.tgz') version('2019.5.281', sha256='e92aaedbe35c9daf1c9483260cb2363da8a85fa1aa5566eb38cf4b1f410bc368', diff --git a/var/spack/repos/builtin/packages/intel-ipp/package.py b/var/spack/repos/builtin/packages/intel-ipp/package.py index 1727e488be9..04df142bf5f 100644 --- a/var/spack/repos/builtin/packages/intel-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-ipp/package.py @@ -11,6 +11,8 @@ class IntelIpp(IntelPackage): homepage = "https://software.intel.com/en-us/intel-ipp" + version('2020.1.217', sha256='0bf8ac7e635e7e602cf201063a1a7dea3779b093104563fdb15e6b7ecf2f00a7', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16534/l_ipp_2020.1.217.tgz') version('2020.0.166', sha256='6844007892ba524e828f245355cee44e8149f4c233abbbea16f7bb55a7d6ecff', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16233/l_ipp_2020.0.166.tgz') version('2019.5.281', sha256='61d1e1da1a4a50f1cf02a3ed44e87eed05e94d58b64ef1e67a3bdec363bee713', diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py index 423223589af..4247f9bb8ed 100644 --- a/var/spack/repos/builtin/packages/intel-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl/package.py @@ -13,6 +13,8 @@ class IntelMkl(IntelPackage): homepage = "https://software.intel.com/en-us/intel-mkl" + version('2020.1.217', sha256='082a4be30bf4f6998e4d6e3da815a77560a5e66a68e254d161ab96f07086066d', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16533/l_mkl_2020.1.217.tgz') version('2020.0.166', sha256='f6d92deb3ff10b11ba3df26b2c62bb4f0f7ae43e21905a91d553e58f0f5a8ae0', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16232/l_mkl_2020.0.166.tgz') version('2019.5.281', sha256='9995ea4469b05360d509c9705e9309dc983c0a10edc2ae3a5384bc837326737e', diff --git a/var/spack/repos/builtin/packages/intel-mpi-benchmarks/package.py b/var/spack/repos/builtin/packages/intel-mpi-benchmarks/package.py new file mode 100644 index 00000000000..a619d0d8cbf --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-mpi-benchmarks/package.py @@ -0,0 +1,81 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class IntelMpiBenchmarks(MakefilePackage): + """Intel(R) MPI Benchmarks provides a set of elementary benchmarks + that conform to MPI-1, MPI-2, and MPI-3 standard. + You can run all of the supported benchmarks, or a subset specified + in the command line using one executable file. + Use command-line parameters to specify various settings, such as + time measurement, message lengths, and selection of communicators. 
""" + + homepage = "https://software.intel.com/en-us/articles/intel-mpi-benchmarks" + url = "https://github.com/intel/mpi-benchmarks/archive/IMB-v2019.5.tar.gz" + + maintainers = ['carsonwoods'] + + version('2019.5', sha256='61f8e872a3c3076af53007a68e4da3a8d66be2ba7a051dc21e626a4e2d26e651') + version('2019.4', sha256='aeb336be10275c1a2f579b491b6631122876b461ac7148b1d0764f13b7552690') + version('2019.3', sha256='4f256d11bfed9ca6166548486d61a062e67be61f13dd9f30690232720e185f31') + version('2019.2', sha256='0bc2224a913073aaa5958f6ae08341e5fcd39cedc6722a09bfd4a3d7591a340b') + + depends_on('mpi') + + variant( + 'benchmark', default='all', + values=('mpi1', 'ext', 'io', 'nbc', + 'p2p', 'rma', 'mt', 'all'), + multi=False, + description='Specify which benchmark to build') + + def build(self, spec, prefix): + env['CC'] = spec['mpi'].mpicc + env['CXX'] = spec['mpi'].mpicxx + + if 'benchmark=mpi1' in spec: + make('IMB-MPI1') + elif 'benchmark=ext' in spec: + make('IMB-EXT') + elif 'benchmark=io' in spec: + make('IMB-IO') + elif 'benchmark=nbc' in spec: + make('IMB-NBC') + elif 'benchmark=p2p' in spec: + make('IMB-P2P') + elif 'benchmark=rma' in spec: + make('IMB-RMA') + elif 'benchmark=mt' in spec: + make('IMB-MT') + else: + make("all") + + def install(self, spec, prefix): + mkdir(prefix.bin) + + if 'benchmark=mpi1' in spec: + install('IMB-MPI1', prefix.bin) + elif 'benchmark=ext' in spec: + install('IMB-EXT', prefix.bin) + elif 'benchmark=io' in spec: + install('IMB-IO', prefix.bin) + elif 'benchmark=nbc' in spec: + install('IMB-NBC', prefix.bin) + elif 'benchmark=p2p' in spec: + install('IMB-P2P', prefix.bin) + elif 'benchmark=rma' in spec: + install('IMB-RMA', prefix.bin) + elif 'benchmark=mt' in spec: + install('IMB-MT', prefix.bin) + else: + install('IMB-EXT', prefix.bin) + install('IMB-IO', prefix.bin) + install('IMB-MPI1', prefix.bin) + install('IMB-MT', prefix.bin) + install('IMB-NBC', prefix.bin) + install('IMB-P2P', prefix.bin) + install('IMB-RMA', prefix.bin) diff --git a/var/spack/repos/builtin/packages/intel-mpi/package.py b/var/spack/repos/builtin/packages/intel-mpi/package.py index 27885ed1f21..15cb84150ef 100644 --- a/var/spack/repos/builtin/packages/intel-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-mpi/package.py @@ -11,6 +11,8 @@ class IntelMpi(IntelPackage): homepage = "https://software.intel.com/en-us/intel-mpi-library" + version('2019.7.217', sha256='90383b0023f84ac003a55d8bb29dbcf0c639f43a25a2d8d8698a16e770ac9c07', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16546/l_mpi_2019.7.217.tgz') version('2019.6.166', sha256='119be69f1117c93a9e5e9b8b4643918e55d2a55a78ad9567f77d16cdaf18cd6e', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16120/l_mpi_2019.6.166.tgz') version('2019.5.281', sha256='9c59da051f1325b221e5bc4d8b689152e85d019f143069fa39e17989306811f4', diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index f1b453c257c..410cd4045e4 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -20,6 +20,7 @@ class IntelParallelStudio(IntelPackage): # in the 'intel' package. 
# Cluster Edition (top tier; all components included) + version('cluster.2020.1', sha256='fd11d8de72b2bd60474f8bce7b463e4cbb2255969b9eaf24f689575aa2a2abab', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16526/parallel_studio_xe_2020_update1_cluster_edition.tgz') version('cluster.2020.0', sha256='573b1d20707d68ce85b70934cfad15b5ad9cc14124a261c17ddd7717ba842c64', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16225/parallel_studio_xe_2020_cluster_edition.tgz') # version('cluster.2019.5', sha256='c03421de616bd4e640ed25ce4103ec9c5c85768a940a5cb5bd1e97b45be33904', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15809/parallel_studio_xe_2019_update5_cluster_edition.tgz') @@ -58,6 +59,7 @@ class IntelParallelStudio(IntelPackage): # NB: Pre-2018 download packages for Professional are the same as for # Cluster; differences manifest only in the tokens present in the license # file delivered as part of the purchase. + version('professional.2020.1', sha256='5b547be92ecf50cb338b3038a565f5609135b27aa98a8b7964879eb2331eb29a', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16527/parallel_studio_xe_2020_update1_professional_edition.tgz') version('professional.2020.0', sha256='e88cad18d28da50ed9cb87b12adccf13efd91bf94731dc33290481306c6f15ac', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16226/parallel_studio_xe_2020_professional_edition.tgz') # version('professional.2019.5', sha256='0ec638330214539361f8632e20759f385a5a78013dcc980ee93743d86d354452', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15810/parallel_studio_xe_2019_update5_professional_edition.tgz') @@ -92,6 +94,7 @@ class IntelParallelStudio(IntelPackage): version('professional.2015.1', sha256='84fdf48d1de20e1d580ba5d419a5bc1c55d217a4f5dc1807190ecffe0229a62b', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/4992/parallel_studio_xe_2015_update1.tgz') # Composer Edition (basic tier; excluded: MPI/..., Advisor/Inspector/Vtune) + version('composer.2020.1', sha256='26c7e7da87b8a83adfd408b2a354d872be97736abed837364c1bf10f4469b01e', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16530/parallel_studio_xe_2020_update1_composer_edition.tgz') version('composer.2020.0', sha256='9168045466139b8e280f50f0606b9930ffc720bbc60bc76f5576829ac15757ae', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16229/parallel_studio_xe_2020_composer_edition.tgz') # version('composer.2019.5', sha256='e8c8e4b9b46826a02c49325c370c79f896858611bf33ddb7fb204614838ad56c', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15813/parallel_studio_xe_2019_update5_composer_edition.tgz') diff --git a/var/spack/repos/builtin/packages/intel-pin/package.py b/var/spack/repos/builtin/packages/intel-pin/package.py index ae0473734c3..e886ba0784c 100644 --- a/var/spack/repos/builtin/packages/intel-pin/package.py +++ b/var/spack/repos/builtin/packages/intel-pin/package.py @@ -15,6 +15,7 @@ class IntelPin(Package): homepage = "http://www.pintool.org" maintainers = ['matthiasdiener'] + version('3.13', sha256='04a36e91f3f85119c3496f364a8806c82bb675f7536a8ab45344c9890b5e2714', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.13-98189-g60a6ef199-gcc-linux.tar.gz') version('3.11', sha256='aa5abca475a6e106a75e6ed4ba518fb75a57549a59f00681e6bd6e3f221bd23a', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.11-97998-g7ecce2dac-gcc-linux.tar.gz') version('3.10', 
sha256='7c8f14c3a0654bab662b58aba460403138fa44517bd40052501e8e0075b2702a', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.10-97971-gc5e41af74-gcc-linux.tar.gz') version('3.7', sha256='4730328795be61f1addb0e505a3792a4b4ca80b1b9405acf217beec6b5b90fb8', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.7-97619-g0d0c92f4f-gcc-linux.tar.gz') diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 72ed301bd85..94eec8a06e6 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -17,45 +17,48 @@ class IntelTbb(Package): portable and composable, and that have future-proof scalability. """ homepage = "http://www.threadingbuildingblocks.org/" + url_prefix = 'https://github.com/oneapi-src/oneTBB/' + url = url_prefix + 'archive/v2020.1.tar.gz' # Note: when adding new versions, please check and update the # patches, filters and url_for_version() below as needed. - version('2020.1', sha256='48d51c63b16787af54e1ee4aaf30042087f20564b4eecf9a032d5568bc2f0bf8') - version('2020.0', sha256='8eed2377ac62e6ac10af5a8303ce861e4525ffe491a061b48e8fe094fc741ce9') - version('2019.9', sha256='15652f5328cf00c576f065e5cd3eaf3317422fe82afb67a9bcec0dc065bd2abe') - version('2019.8', sha256='7b1fd8caea14be72ae4175896510bf99c809cd7031306a1917565e6de7382fba') - version('2019.7', sha256='4204a93f4c0fd989fb6f79acae74feb02ee39725c93968773d9b6efeb75c7a6a') - version('2019.6', sha256='2ba197b3964fce8a84429dd15b75eba7434cb89afc54f86d5ee6f726fdbe97fd') - version('2019.5', sha256='2ea82d74dec50e18075b4982b8d360f8bd2bf2950f38e2db483aef82e0047444') - version('2019.4', sha256='342a0a2cd583879850658284b86e9351ea019b4f3fcd731f4c18456f0ce9f900') - version('2019.3', sha256='b2244147bc8159cdd8f06a38afeb42f3237d3fc822555499d7ccfbd4b86f8ece') - version('2019.2', sha256='1245aa394a92099e23ce2f60cdd50c90eb3ddcd61d86cae010ef2f1de61f32d9') - version('2019.1', sha256='a4875c6b6853213083e52ecd303546bdf424568ec67cfc7e51d132a7c037c66a') - version('2019', sha256='4d149895826cea785cd3b9a14f4aa743b6ef0df520eca7ee27d438fdc3d73399') - version('2018.6', sha256='d3e5fbca3cc643d03bf332d576ff85e19aa774b483f148f95cd7d09958372109') - version('2018.5', sha256='c4c2896af527392496c5e01ef8579058a71b6eebbd695924cd138841c13f07be') - version('2018.4', sha256='d5604a04787c8a037d4944eeb89792e7c45f6a83c141b20df7ee89c2fb012ed1') - version('2018.3', sha256='23793c8645480148e9559df96b386b780f92194c80120acce79fcdaae0d81f45') - version('2018.2', sha256='78bb9bae474736d213342f01fe1a6d00c6939d5c75b367e2e43e7bf29a6d8eca') - version('2018.1', sha256='c6462217d4ecef2b44fce63cfdf31f9db4f6ff493869899d497a5aef68b05fc5') - version('2018', sha256='94f643f1edfaccb57d64b503c7c96f00dec64e8635c054bbaa33855d72c5822d') - version('2017.8', sha256='1b1357f280e750d42506212adad02f85ac07c9d3e3c0813104f9122ef350497f') - version('2017.7', sha256='78ad6ec9dd492b9dcc4753938b06716f7458506084adc138ced09aa0aa609b6b') - version('2017.6', sha256='40d5409a6fd7e214a21fd1949df422ba113fa78fde42a6aac40a2fba36e9bcdb') - version('2017.5', sha256='3122c87a35fde759567c8579ba36a36037c6df84c3f9c4df6c9e171f866f352f') - version('2017.4', sha256='ed4f0cfc4acec8a0cf253037e8c555dd32ebe1b80b34fb0e3b2bf54087932562') - version('2017.3', sha256='00a8b2798c498507572e24c2db7bf4896f05b760a38ed9ba566ffd348a7c6cef') - version('2017.2', sha256='85e44041d967ce8c70077dbb57941cfa1d351688855aec47eb14c74eb2075f28') - version('2017.1', 
sha256='a68bb7926fb9bee2a5f17b293c6d6aa33ccb089c3b321569bd4fe281cf65fa77') - version('2017', sha256='c49139279067df1260dae4f0fe7e4d485898ce45e5f7e314c37eb5da8a0c303a') - version('4.4.6', sha256='1d6b7e7db9141238c70984103f04280605dbcaa7fbcd049d332d2e73deed4f6d') - version('4.4.5', sha256='984308f9dd8a36ff274c124b6f7f7d0ff74d4b7ebdf06511af78e098b5b6e70f') - version('4.4.4', sha256='40e94c1adfd13308d207971629316ae9f76639b24f080bae8757c42d35778f10') - version('4.4.3', sha256='9acb1c4e71edc3d5004ab9f0ed2bbd697ecec28a4315bbd2be8c5365e8214b90') - version('4.4.2', sha256='3f6d7a32ac8b58469de7df3a2fcfe318793241ea39ce73aae1e637dbed833375') - version('4.4.1', sha256='d67c5860ba1116b320b0d60a0ce403b088dc19355ab32c28cdaa3e352609713a') - version('4.4', sha256='88e37f08ffcfaa24a2caf6c1a9084000cce689cc4b11edea7e89b20ab74ceceb') + version('2020.2', sha256='4804320e1e6cbe3a5421997b52199e3c1a3829b2ecb6489641da4b8e32faf500') + version('2020.1', sha256='7c96a150ed22bc3c6628bc3fef9ed475c00887b26d37bca61518d76a56510971') + version('2020.0', sha256='57714f2d2cf33935db33cee93af57eb3ecd5a7bef40c1fb7ca4a41d79684b118') + version('2019.9', sha256='3f5ea81b9caa195f1967a599036b473b2e7c347117330cda99b79cfcf5b77c84') + version('2019.8', sha256='6b540118cbc79f9cbc06a35033c18156c21b84ab7b6cf56d773b168ad2b68566') + version('2019.7', sha256='94847fc627ed081c63ea253e31f23645ed3671548106b095ce303d1da5d76275') + version('2019.6', sha256='21cd496ac768560e70d35e5423878aa3bcf0285f7194be77935d8febf0b18f47') + version('2019.5', sha256='abf9236e6ec9a3675fa59ab56c2192c7ab4f7096a82af118e8efa514b2541578') + version('2019.4', sha256='673e540aba6e526b220cbeacc3e4ff7b19a8689a00d7a09a9dc94396d73b24df') + version('2019.3', sha256='4cb6bde796ae056e7c29f31bfdc6cfd0cfe848925219e9c82a20f09158e81542') + version('2019.2', sha256='3bbe21054bd5b593ef99d4dfe451432cbf1f6f9429cd0dd543e879ef7e4e3327') + version('2019.1', sha256='e6fb8dd1a1ae834b4e5f4ae6c4c87a3362f81a3aaeddfa6325168c6cfee59391') + version('2019', sha256='91f00308a4e431bd9632b439d516134d7084f1eb35f52b7c9b111b46bdfcf093') + version('2018.6', sha256='0ebb5fc877871ef15f7395d6e3c86de4ffedb820dc336383a3ab71fc39426aa7') + version('2018.5', sha256='b8dbab5aea2b70cf07844f86fa413e549e099aa3205b6a04059ca92ead93a372') + version('2018.4', sha256='c973b41b6da3db10efa7e14ce64a850e3fbfbcc16374494a005bf994d53a770a') + version('2018.3', sha256='e5f19d747f6adabfc7daf2cc0a1ddcfab0f26bc083d70ea0a63def4a9f3919c5') + version('2018.2', sha256='733c4dba646573b8285b1923dc106f0d771725bea620baa3659c86ab9312a1f4') + version('2018.1', sha256='a9f51e0d081fbdda441d0150e759c7562318d6d7bc5a0c9a9d8064217d4d8d8d') + version('2018', sha256='d427c58a59863c5f9510fffb3d05cd1bcc7abb94cdde1613407559e88b1263ab') + version('2017.8', sha256='227cc1a8329da67f9957285f0020ad4d73d9ce26cbf88614349b8b74bb189ae1') + version('2017.7', sha256='f487243e5931e967479189ef75946f02e3bb666ea73dcc19ac2828edd5550746') + version('2017.6', sha256='b0f40edd010b90ce2519c1cebfa6f33216a1828d4fba19291b5cd23bd7fe809b') + version('2017.5', sha256='b785e7181317350f0bb20f7bffda20bdecde7e82b824d2e5eb6d408a3b9cbeaf') + version('2017.4', sha256='9a70ae3068767bf8c530bf98b9bbc655e36e82a301b347f7de76f99f401df1dd') + version('2017.3', sha256='230ed3ff32bb3e91df1f59e4a3a567bde02639d9970b7e87cee0421b4c0b0f23') + version('2017.2', sha256='dd37c896f95ca2357e828c24c9c4a169c6a6b5c905b3862a6cab09474d164497') + version('2017.1', sha256='9b5b36b6d0ed97a3a1711b9095e78aed79bc998957a4a6b3d8a7af063523f037') + version('2017', 
sha256='470544b0f374987273cc12e7706353edba8f9547578291d45b5b29358d4e5e81') + version('4.4.6', sha256='65101b3a0eda38320ec3e3603daa79c54e6a60fb59ed2959738eaf4ce6d17f0a') + version('4.4.5', sha256='2e372703fe444442c77760229897f00bb4babff62f7d0861b3f2783883cb257e') + version('4.4.4', sha256='3ed03838c4e368b78305b0561cac48d369919bb4d9d68edd4d8a3becd6f62f5c') + version('4.4.3', sha256='f0ff2e3735c8057b792f29c96f4f7623c1e4c76abfeda88be48645b8338c0f00') + version('4.4.2', sha256='1ab10e70354685cee3ddf614f3e291434cea86c8eb62031e025f4052278152ad') + version('4.4.1', sha256='05737bf6dd220b31aad63d77ca59c742271f81b4cc6643aa6f93d37450ae32b5') + version('4.4', sha256='93c74b6054c69c86fa49d0fce7c50061fc907cb198a7237b8dd058298fd40c0e') provides('tbb') @@ -113,7 +116,7 @@ class IntelTbb(Package): # 4.4.6 --> 4.4.6.tar.gz # def url_for_version(self, version): - url = 'https://github.com/intel/tbb/archive/{0}.tar.gz' + url = self.url_prefix + 'archive/{0}.tar.gz' if version[0] >= 2020: name = 'v{0}'.format(version) elif version[0] >= 2017 and len(version) > 1: diff --git a/var/spack/repos/builtin/packages/intel/package.py b/var/spack/repos/builtin/packages/intel/package.py index 5f3c19d5788..3bb9ea5c0e2 100644 --- a/var/spack/repos/builtin/packages/intel/package.py +++ b/var/spack/repos/builtin/packages/intel/package.py @@ -13,6 +13,7 @@ class Intel(IntelPackage): # Same as in ../intel-parallel-studio/package.py, Composer Edition, # but the version numbering in Spack differs. + version('19.1.1', sha256='26c7e7da87b8a83adfd408b2a354d872be97736abed837364c1bf10f4469b01e', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16530/parallel_studio_xe_2020_update1_composer_edition.tgz') version('19.1.0', sha256='9168045466139b8e280f50f0606b9930ffc720bbc60bc76f5576829ac15757ae', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16229/parallel_studio_xe_2020_composer_edition.tgz') version('19.0.5', sha256='e8c8e4b9b46826a02c49325c370c79f896858611bf33ddb7fb204614838ad56c', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15813/parallel_studio_xe_2019_update5_composer_edition.tgz') version('19.0.4', sha256='1915993445323e1e78d6de73702a88fa3df2036109cde03d74ee38fef9f1abf2', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15537/parallel_studio_xe_2019_update4_composer_edition.tgz') diff --git a/var/spack/repos/builtin/packages/iwyu/iwyu-013-cmake.patch b/var/spack/repos/builtin/packages/iwyu/iwyu-013-cmake.patch new file mode 100644 index 00000000000..78cb1a2d101 --- /dev/null +++ b/var/spack/repos/builtin/packages/iwyu/iwyu-013-cmake.patch @@ -0,0 +1,34 @@ +--- include-what-you-use/CMakeLists.txt 2019-10-24 15:47:52.000000000 -0400 ++++ spack-src/CMakeLists.txt 2020-03-26 07:16:53.284593661 -0400 +@@ -60,21 +60,22 @@ + # Use only major.minor.patch for the resource directory structure; some + # platforms include suffix in LLVM_VERSION. 
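Stepping back to the intel-tbb hunk above: the recipe now builds its download URLs from a shared url_prefix pointing at the renamed oneTBB GitHub repository, and url_for_version prepends a 'v' for 2020-series releases. A tiny standalone illustration (plain Python, not part of the recipe) of the URL this produces:

    # Mirrors the recipe's default url: url_prefix + 'archive/v2020.1.tar.gz'
    url_prefix = 'https://github.com/oneapi-src/oneTBB/'
    version = '2020.1'
    print(url_prefix + 'archive/v{0}.tar.gz'.format(version))
    # -> https://github.com/oneapi-src/oneTBB/archive/v2020.1.tar.gz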
+ set(llvm_ver ${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}) +- set(clang_headers_src ${CMAKE_PREFIX_PATH}/lib/clang/${llvm_ver}/include) +- set(clang_headers_dst ${CMAKE_BINARY_DIR}/lib/clang/${llvm_ver}/include) ++ set(clang_headers_src "${LLVM_INSTALL_PREFIX}/lib/clang/${llvm_ver}/include") ++ set(clang_headers_dst "${CMAKE_BINARY_DIR}/lib/clang/${llvm_ver}/include") + +- file(GLOB_RECURSE in_files RELATIVE ${clang_headers_src} ${clang_headers_src}/*) ++ file(GLOB_RECURSE in_files RELATIVE "${clang_headers_src}" ++ "${clang_headers_src}/*") + + set(out_files) + foreach (file ${in_files}) +- set(src ${clang_headers_src}/${file}) +- set(dst ${clang_headers_dst}/${file}) ++ set(src "${clang_headers_src}/${file}") ++ set(dst "${clang_headers_dst}/${file}") + +- add_custom_command(OUTPUT ${dst} +- DEPENDS ${src} +- COMMAND ${CMAKE_COMMAND} -E copy_if_different ${src} ${dst} ++ add_custom_command(OUTPUT "${dst}" ++ DEPENDS "${src}" ++ COMMAND ${CMAKE_COMMAND} -E copy_if_different "${src}" "${dst}" + COMMENT "Copying clang's ${file}...") +- list(APPEND out_files ${dst}) ++ list(APPEND out_files "${dst}") + endforeach() + + add_custom_target(clang-resource-headers ALL DEPENDS ${out_files}) diff --git a/var/spack/repos/builtin/packages/iwyu/package.py b/var/spack/repos/builtin/packages/iwyu/package.py new file mode 100644 index 00000000000..a05ff073b7b --- /dev/null +++ b/var/spack/repos/builtin/packages/iwyu/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Iwyu(CMakePackage): + """include-what-you-use: A tool for use with clang to analyze #includes in + C and C++ source files + """ + + homepage = "https://include-what-you-use.org" + url = "https://include-what-you-use.org/downloads/include-what-you-use-0.13.src.tar.gz" + + maintainers = ['sethrj'] + + version('0.13', sha256='49294270aa64e8c04182369212cd919f3b3e0e47601b1f935f038c761c265bc9') + version('0.12', sha256='a5892fb0abccb820c394e4e245c00ef30fc94e4ae58a048b23f94047c0816025') + version('0.11', sha256='2d2877726c4aed9518cbb37673ffbc2b7da9c239bf8fe29432da35c1c0ec367a') + + patch('iwyu-013-cmake.patch', when='@0.13') + + depends_on('llvm+clang@9.0:9.999', when='@0.13') + depends_on('llvm+clang@8.0:8.999', when='@0.12') + depends_on('llvm+clang@7.0:7.999', when='@0.11') diff --git a/var/spack/repos/builtin/packages/jasper/package.py b/var/spack/repos/builtin/packages/jasper/package.py index f8351bbbc63..9599aec8f4f 100644 --- a/var/spack/repos/builtin/packages/jasper/package.py +++ b/var/spack/repos/builtin/packages/jasper/package.py @@ -10,13 +10,11 @@ class Jasper(Package): """Library for manipulating JPEG-2000 images""" homepage = "https://www.ece.uvic.ca/~frodo/jasper/" - url = "https://www.ece.uvic.ca/~frodo/jasper/software/jasper-2.0.14.tar.gz" - list_url = homepage + url = "https://github.com/mdadams/jasper/archive/version-2.0.16.tar.gz" - version('2.0.14', sha256='2a1f61e55afe8b4ce8115e1508c5d7cb314d56dfcc2dd323f90c072f88ccf57b', - url="https://www.ece.uvic.ca/~frodo/jasper/software/jasper-2.0.14.tar.gz") - version('1.900.1', sha256='6b905a9c2aca2e275544212666eefc4eb44d95d0a57e4305457b407fe63f9494', - url="https://www.ece.uvic.ca/~frodo/jasper/software/jasper-1.900.1.zip") + version('2.0.16', sha256='f1d8b90f231184d99968f361884e2054a1714fdbbd9944ba1ae4ebdcc9bbfdb1') + version('2.0.14', 
sha256='85266eea728f8b14365db9eaf1edc7be4c348704e562bb05095b9a077cf1a97b') + version('1.900.1', sha256='c2b03f28166f9dc8ae434918839ae9aa9962b880fcfd24eebddd0a2daeb9192c') variant('jpeg', default=True, description='Enable the use of the JPEG library') variant('opengl', default=False, description='Enable the use of the OpenGL and GLUT libraries') diff --git a/var/spack/repos/builtin/packages/jblob/package.py b/var/spack/repos/builtin/packages/jblob/package.py new file mode 100644 index 00000000000..1aa1738230a --- /dev/null +++ b/var/spack/repos/builtin/packages/jblob/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Jblob(Package): + """ + The German Climate Computing Center + (DKRZ: Deutsches Klimarechenzentrum GmbH) + provides a Long Term Archiving Service for large research + data sets which are relevant for climate or Earth system research. + """ + + homepage = "https://cera-www.dkrz.de/WDCC/ui/cerasearch" + url = "https://cera-www.dkrz.de/jblob/jblob-3.0.zip" + + maintainers = ['ajkotobi'] + + version('3.0', sha256='576b5956358386a8832c6d1d13c410705e54888354a10cfd4f094513458067e4') + + depends_on('java@8:', type='run') + + def setup_run_environment(self, env): + env.set('JAVA_HOME', self.spec['java'].prefix) + + def install(self, spec, prefix): + filter_file('/opt/jblob-' + self.version, prefix, 'jblob') + + mkdir(prefix.bin) + install('jblob', prefix.bin) + install_tree('lib', prefix.lib) + install_tree('docs', prefix.docs) diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 6e9c0329dc3..025d12be659 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -24,9 +24,7 @@ class Jdk(Package): # automate this process, we need to utilize these additional curl # command-line options. See: # http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux - fetch_options = { - 'cookie': 'oraclelicense=accept-securebackup-cookie' - } + fetch_options = {'cookie': 'oraclelicense=accept-securebackup-cookie'} # To add the latest version, go to the homepage listed above, # click "JDK Download", click "Accept License Agreement", right-click the @@ -34,29 +32,29 @@ class Jdk(Package): # found in a link above. The build number can be deciphered from the URL. # Alternatively, run `bin/java -version` after extracting. 
Replace '+' # symbol in version with '_', otherwise it will be interpreted as a variant - version('12.0.2_10', sha256='2dde6fda89a4ec6e6560ed464e917861c9e40bf576e7a64856dafc55abaaff51', fetch_options=fetch_options, + version('12.0.2_10', sha256='2dde6fda89a4ec6e6560ed464e917861c9e40bf576e7a64856dafc55abaaff51', url='https://download.oracle.com/otn-pub/java/jdk/12.0.2+10/e482c34c86bd4bf8b56c0b35558996b9/jdk-12.0.2_linux-x64_bin.tar.gz') - version('12.0.1_12', sha256='9fd6dcdaf2cfca7da59e39b009a0f5bcd53bec2fb16105f7ca8d689cdab68d75', fetch_options=fetch_options, + version('12.0.1_12', sha256='9fd6dcdaf2cfca7da59e39b009a0f5bcd53bec2fb16105f7ca8d689cdab68d75', url='https://download.oracle.com/otn-pub/java/jdk/12.0.1+12/69cfe15208a647278a19ef0990eea691/jdk-12.0.1_linux-x64_bin.tar.gz') - version('11.0.2_9', sha256='7b4fd8ffcf53e9ff699d964a80e4abf9706b5bdb5644a765c2b96f99e3a2cdc8', fetch_options=fetch_options, + version('11.0.2_9', sha256='7b4fd8ffcf53e9ff699d964a80e4abf9706b5bdb5644a765c2b96f99e3a2cdc8', url='https://download.oracle.com/otn-pub/java/jdk/11.0.2+9/f51449fcd52f4d52b93a989c5c56ed3c/jdk-11.0.2_linux-x64_bin.tar.gz') - version('11.0.1_13', sha256='e7fd856bacad04b6dbf3606094b6a81fa9930d6dbb044bbd787be7ea93abc885', fetch_options=fetch_options, + version('11.0.1_13', sha256='e7fd856bacad04b6dbf3606094b6a81fa9930d6dbb044bbd787be7ea93abc885', url='https://download.oracle.com/otn-pub/java/jdk/11.0.1+13/90cf5d8f270a4347a95050320eef3fb7/jdk-11.0.1_linux-x64_bin.tar.gz') - version('10.0.2_13', sha256='6633c20d53c50c20835364d0f3e172e0cbbce78fff81867488f22a6298fa372b', fetch_options=fetch_options, + version('10.0.2_13', sha256='6633c20d53c50c20835364d0f3e172e0cbbce78fff81867488f22a6298fa372b', url='https://download.oracle.com/otn-pub/java/jdk/10.0.2+13/19aef61b38124481863b1413dce1855f/jdk-10.0.2_linux-x64_bin.tar.gz') - version('10.0.1_10', sha256='ae8ed645e6af38432a56a847597ac61d4283b7536688dbab44ab536199d1e5a4', fetch_options=fetch_options, + version('10.0.1_10', sha256='ae8ed645e6af38432a56a847597ac61d4283b7536688dbab44ab536199d1e5a4', url='https://download.oracle.com/otn-pub/java/jdk/10.0.1+10/fb4372174a714e6b8c52526dc134031e/jdk-10.0.1_linux-x64_bin.tar.gz') - version('1.8.0_241-b07', sha256='419d32677855f676076a25aed58e79432969142bbd778ff8eb57cb618c69e8cb', fetch_options=fetch_options, + version('1.8.0_241-b07', sha256='419d32677855f676076a25aed58e79432969142bbd778ff8eb57cb618c69e8cb', url='https://download.oracle.com/otn-pub/java/jdk/8u241-b07/1f5b5a70bf22433b84d0e960903adac8/jdk-8u241-linux-x64.tar.gz') - version('1.8.0_231-b11', sha256='a011584a2c9378bf70c6903ef5fbf101b30b08937441dc2ec67932fb3620b2cf', fetch_options=fetch_options, + version('1.8.0_231-b11', sha256='a011584a2c9378bf70c6903ef5fbf101b30b08937441dc2ec67932fb3620b2cf', url='https://download.oracle.com/otn-pub/java/jdk/8u231-b11/5b13a193868b4bf28bcb45c792fce896/jdk-8u231-linux-x64.tar.gz') - version('1.8.0_212-b10', sha256='3160c50aa8d8e081c8c7fe0f859ea452922eca5d2ae8f8ef22011ae87e6fedfb', fetch_options=fetch_options, + version('1.8.0_212-b10', sha256='3160c50aa8d8e081c8c7fe0f859ea452922eca5d2ae8f8ef22011ae87e6fedfb', url='https://download.oracle.com/otn-pub/java/jdk/8u212-b10/59066701cf1a433da9770636fbc4c9aa/jdk-8u212-linux-x64.tar.gz') - version('1.8.0_202-b08', sha256='9a5c32411a6a06e22b69c495b7975034409fa1652d03aeb8eb5b6f59fd4594e0', fetch_options=fetch_options, + version('1.8.0_202-b08', sha256='9a5c32411a6a06e22b69c495b7975034409fa1652d03aeb8eb5b6f59fd4594e0', 
url='https://download.oracle.com/otn-pub/java/jdk/8u202-b08/1961070e4c9b4e26a04e7f5a083f551e/jdk-8u202-linux-x64.tar.gz') - version('1.8.0_141-b15', sha256='041d5218fbea6cd7e81c8c15e51d0d32911573af2ed69e066787a8dc8a39ba4f', fetch_options=fetch_options, + version('1.8.0_141-b15', sha256='041d5218fbea6cd7e81c8c15e51d0d32911573af2ed69e066787a8dc8a39ba4f', url='https://download.oracle.com/otn-pub/java/jdk/8u141-b15/336fa29ff2bb4ef291e347e091f7f4a7/jdk-8u141-linux-x64.tar.gz') - version('1.8.0_131-b11', sha256='62b215bdfb48bace523723cdbb2157c665e6a25429c73828a32f00e587301236', fetch_options=fetch_options, + version('1.8.0_131-b11', sha256='62b215bdfb48bace523723cdbb2157c665e6a25429c73828a32f00e587301236', url='https://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz') provides('java@12', when='@12.0:12.999') diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 9dae676d9af..ec86c04886f 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -18,6 +18,7 @@ class Julia(Package): maintainers = ['glennpj'] version('master', branch='master') + version('1.4.0', sha256='880c73a08296ce8d94ad9605149f2a2b2b028e7202a700ef725da899300b8be9') version('1.3.1', sha256='053908ec2706eb76cfdc998c077de123ecb1c60c945b4b5057aa3be19147b723') version('1.2.0', sha256='2419b268fc5c3666dd9aeb554815fe7cf9e0e7265bc9b94a43957c31a68d9184') version('1.1.1', sha256='3c5395dd3419ebb82d57bcc49dc729df3b225b9094e74376f8c649ee35ed79c2') diff --git a/var/spack/repos/builtin/packages/kealib/package.py b/var/spack/repos/builtin/packages/kealib/package.py index e164dac4b4a..3e050a213cb 100644 --- a/var/spack/repos/builtin/packages/kealib/package.py +++ b/var/spack/repos/builtin/packages/kealib/package.py @@ -23,12 +23,13 @@ class Kealib(CMakePackage): Development work on this project has been funded by Landcare Research. """ homepage = "http://www.kealib.org/" - url = "https://bitbucket.org/chchrsc/kealib/downloads/kealib-1.4.11.tar.gz" - hg = "https://bitbucket.org/chchrsc/kealib" + url = "https://github.com/ubarsc/kealib/releases/download/kealib-1.4.12/kealib-1.4.12.tar.gz" + git = "https://github.com/ubarsc/kealib" maintainers = ['gillins'] - version('develop', hg=hg) + version('develop', git=git) + version('1.4.12', sha256='0b100e36b3e25e57487aa197d7be47f22e1b30afb16a57fdaa5f877696ec321e') version('1.4.11', sha256='3d64cdec560c7a338ccb38e3a456db4e3b176ac62f945daa6e332e60fe4eca90') version('1.4.10', sha256='b1bd2d6834d2fe09ba456fce77f7a9452b406dbe302f7ef1aabe924e45e6bb5e') version('1.4.9', sha256='1c80489f17114a229097c2e8c61d5e4c82ea63ae631c81a817fef95cfd527174') diff --git a/var/spack/repos/builtin/packages/keyutils/package.py b/var/spack/repos/builtin/packages/keyutils/package.py new file mode 100644 index 00000000000..049dbb1c228 --- /dev/null +++ b/var/spack/repos/builtin/packages/keyutils/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import glob + + +class Keyutils(MakefilePackage): + """These tools are used to control the key management system built + into the Linux kernel.""" + + homepage = "https://git.kernel.org/pub/scm/linux/kernel/git/dhowells/keyutils.git/" + url = "https://git.kernel.org/pub/scm/linux/kernel/git/dhowells/keyutils.git/snapshot/keyutils-1.6.1.tar.gz" + + version('1.6.1', sha256='3c71dcfc6900d07b02f4e061d8fb218a4ae6519c1d283d6a57b8e27718e2f557') + version('1.6', sha256='c6a27b4e3d0122d921f3dcea4b1f02a8616ca844535960d6af76ef67d015b5cf') + version('1.5.10', sha256='e1fdbde234c786b65609a4cf080a2c5fbdb57f049249c139160c85fc3dfa7da9') + version('1.5.9', sha256='2dc0bdb099ab8331e02e5dbbce320359bef76eda0a4ddbd2ba1d1b9d3a8cdff8') + + def install(self, spec, prefix): + install_tree('.', prefix) + mkdirp(prefix.include) + headers = glob.glob(join_path(prefix, '*.h')) + for h in headers: + install(h, prefix.include) diff --git a/var/spack/repos/builtin/packages/kmod/package.py b/var/spack/repos/builtin/packages/kmod/package.py new file mode 100644 index 00000000000..0ee203e7a2d --- /dev/null +++ b/var/spack/repos/builtin/packages/kmod/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Kmod(AutotoolsPackage): + """kmod is a set of tools to handle common tasks with Linux kernel modules + like insert, remove, list, check properties, resolve dependencies and + aliases.""" + + homepage = "https://github.com/lucasdemarchi/kmod" + url = "https://github.com/lucasdemarchi/kmod/archive/v27.tar.gz" + + version('27', sha256='969c4573b01f4c9e1d3e3c9d179bd16ec999bbb99dd55b7623f42551328478c3') + version('26', sha256='f28bc40ead548dce4a8e956fccfc36fd80f2b40884d270b812f1bfbd886e858c') + version('25', sha256='16a8bbd3ee321d0847847256ea2fd124f6250257c055c8cf97e78f18bf27559c') + version('24', sha256='f7a5ee07d4901c87711880536604de7e31c182d85a72de7b8d7dd04d4ee0aa59') + version('23', sha256='8f139543d82e8ccc2227dec4c016d6656e9789365a6dce73f90b620a53e62ee6') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + + def autoreconf(self, spec, prefix): + bash = which("bash") + bash('autogen.sh') diff --git a/var/spack/repos/builtin/packages/kvtree/package.py b/var/spack/repos/builtin/packages/kvtree/package.py index be265f5ecdc..40e57ed0ee6 100644 --- a/var/spack/repos/builtin/packages/kvtree/package.py +++ b/var/spack/repos/builtin/packages/kvtree/package.py @@ -10,8 +10,8 @@ class Kvtree(CMakePackage): """KVTree provides a fully extensible C datastructure modeled after perl hashes.""" - homepage = "https://github.com/ECP-VeloC/KVTree" - url = "https://github.com/ECP-VeloC/KVTree/archive/v1.0.2.zip" + homepage = "https://github.com/ecp-veloc/KVTree" + url = "https://github.com/ecp-veloc/KVTree/archive/v1.0.2.zip" git = "https://github.com/ecp-veloc/kvtree.git" tags = ['ecp'] diff --git a/var/spack/repos/builtin/packages/laghos/package.py b/var/spack/repos/builtin/packages/laghos/package.py index 7edb886a83c..438da2f5cf2 100644 --- a/var/spack/repos/builtin/packages/laghos/package.py +++ b/var/spack/repos/builtin/packages/laghos/package.py @@ -18,15 +18,15 @@ class Laghos(MakefilePackage): url = "https://github.com/CEED/Laghos/archive/v1.0.tar.gz" git = 
"https://github.com/CEED/Laghos.git" - version('develop', branch='master') + version('master', branch='master') + version('3.0', sha256='4db56286e15b42ecdc8d540c4888a7dec698b019df9c7ccb8319b7ea1f92d8b4') version('2.0', sha256='dd3632d5558889beec2cd3c49eb60f633f99e6d886ac868731610dd006c44c14') version('1.1', sha256='53b9bfe2af263c63eb4544ca1731dd26f40b73a0d2775a9883db51821bf23b7f') version('1.0', sha256='af50a126355a41c758fcda335a43fdb0a3cd97e608ba51c485afda3dd84a5b34') variant('metis', default=True, description='Enable/disable METIS support') - depends_on('mfem@develop+mpi+metis', when='@develop+metis') - depends_on('mfem@develop+mpi~metis', when='@develop~metis') + depends_on('metis@4.0.3:', when='+metis') # Recommended mfem version for laghos v2.0 is: ^mfem@3.4.1-laghos-v2.0 depends_on('mfem@3.4.0:+mpi+metis', when='@2.0+metis') @@ -36,6 +36,11 @@ class Laghos(MakefilePackage): depends_on('mfem@3.3.1-laghos-v1.0:+mpi+metis', when='@1.0,1.1+metis') depends_on('mfem@3.3.1-laghos-v1.0:+mpi~metis', when='@1.0,1.1~metis') + # 3.0 requirements + depends_on('hypre@2.11.2', when='@3.0:') + depends_on('mfem@develop+mpi+metis', when='@3.0:+metis') + depends_on('mfem@develop+mpi~metis', when='@3.0:~metis') + @property def build_targets(self): targets = [] @@ -46,6 +51,11 @@ def build_targets(self): targets.append('TEST_MK=%s' % spec['mfem'].package.test_mk) targets.append('CXX=%s' % spec['mpi'].mpicxx) + if self.version >= ver('3.0'): + targets.append('HYPRE_DIR=%s' % spec['hypre'].prefix) + if '+metis' in self.spec: + targets.append('METIS_DIR=%s' % spec['metis'].prefix) + return targets # See lib/spack/spack/build_systems/makefile.py diff --git a/var/spack/repos/builtin/packages/lesstif/package.py b/var/spack/repos/builtin/packages/lesstif/package.py new file mode 100644 index 00000000000..c36b90e9b70 --- /dev/null +++ b/var/spack/repos/builtin/packages/lesstif/package.py @@ -0,0 +1,51 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lesstif(AutotoolsPackage): + """LessTif is the Hungry Programmers' version of OSF/Motif.""" + + homepage = "https://sourceforge.net/projects/lesstif" + url = "https://sourceforge.net/projects/lesstif/files/lesstif/0.95.2/lesstif-0.95.2.tar.bz2/download" + + version('0.95.2', sha256='eb4aa38858c29a4a3bcf605cfe7d91ca41f4522d78d770f69721e6e3a4ecf7e3') + + variant('shared', default=True, description='Build shared libraries') + variant('static', default=False, description='Build static libraries') + + depends_on('libice') + depends_on('libsm') + depends_on('libxt') + + def patch(self): + filter_file("ACLOCALDIR=.*", + "ACLOCALDIR='${datarootdir}/aclocal'", + "configure") + + def setup_build_environment(self, env): + # 'sed' fails if LANG=en_US.UTF-8 as is often the case on Macs. + # The configure script finds our superenv sed wrapper, sets + # SED, but then doesn't use that variable. + env.set('LANG', 'C') + + def configure_args(self): + spec = self.spec + + args = [ + '--disable-debug', + '--enable-production', + '--disable-dependency-tracking', + '--enable-shared' if '+shared' in spec else '--disable-shared', + '--enable-static' if '+static' in spec else '--disable-static', + ] + + return args + + # LessTif won't install in parallel 'cause several parts of the + # Makefile will try to make the same directory and `mkdir` will fail. 
+ def install(self, spec, prefix): + make('install', parallel=False) diff --git a/var/spack/repos/builtin/packages/lftp/package.py b/var/spack/repos/builtin/packages/lftp/package.py index 508dbdd5869..b67b79bd5b6 100644 --- a/var/spack/repos/builtin/packages/lftp/package.py +++ b/var/spack/repos/builtin/packages/lftp/package.py @@ -18,7 +18,7 @@ class Lftp(AutotoolsPackage): version('4.6.4', sha256='791e783779d3d6b519d0c23155430b9785f2854023eb834c716f5ba78873b15a') depends_on('expat') - depends_on('libiconv') + depends_on('iconv') depends_on('ncurses') depends_on('openssl') depends_on('readline') @@ -27,7 +27,7 @@ class Lftp(AutotoolsPackage): def configure_args(self): return [ '--with-expat={0}'.format(self.spec['expat'].prefix), - '--with-libiconv={0}'.format(self.spec['libiconv'].prefix), + '--with-libiconv={0}'.format(self.spec['iconv'].prefix), '--with-openssl={0}'.format(self.spec['openssl'].prefix), '--with-readline={0}'.format(self.spec['readline'].prefix), '--with-zlib={0}'.format(self.spec['zlib'].prefix), diff --git a/var/spack/repos/builtin/packages/libbacktrace/package.py b/var/spack/repos/builtin/packages/libbacktrace/package.py new file mode 100644 index 00000000000..64e58321148 --- /dev/null +++ b/var/spack/repos/builtin/packages/libbacktrace/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libbacktrace(AutotoolsPackage): + """A C library that may be linked into a C/C++ program to produce + symbolic backtraces.""" + + homepage = "https://github.com/ianlancetaylor/libbacktrace" + git = "https://github.com/ianlancetaylor/libbacktrace.git" + maintainers = ['trahay'] + + version('master', branch='master') + version('2020-02-19', commit='ca0de0517f3be44fedf5a2c01cfaf6437d4cae68') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') diff --git a/var/spack/repos/builtin/packages/libc/package.py b/var/spack/repos/builtin/packages/libc/package.py new file mode 100644 index 00000000000..a4fa9ac3a64 --- /dev/null +++ b/var/spack/repos/builtin/packages/libc/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libc(Package): + """Dummy libc package to provide `iconv` virtual package""" + + homepage = "https://en.wikipedia.org/wiki/C_standard_library" + url = "" + has_code = False + phases = [] + + version('1.0') # Dummy + variant('iconv', default=False, description='Set to True if libc provides iconv') + provides('iconv', when='+iconv') diff --git a/var/spack/repos/builtin/packages/libfabric/package.py b/var/spack/repos/builtin/packages/libfabric/package.py index bb1730993f3..cddaee99a73 100644 --- a/var/spack/repos/builtin/packages/libfabric/package.py +++ b/var/spack/repos/builtin/packages/libfabric/package.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import os.path from spack import * @@ -16,9 +15,9 @@ class Libfabric(AutotoolsPackage): git = "https://github.com/ofiwg/libfabric.git" version('master', branch='master') - version('1.9.1rc1', sha256='fdf89a0797f0d923aaef2c41cc70be45716e4d07dc5d318365b9c17795eb49ab') + version('1.9.1', sha256='c305c6035c992523e08c7591a6a3707225ba3e72de40443eaed837a10df6771a') version('1.9.0', sha256='559bfb7376c38253c936d0b104591c3394880376d676894895706c4f5f88597c') - version('1.8.1', sha256='3c560b997f9eafd89f961dd8e8a29a81aad3e39aee888e3f3822da419047dc88', preferred=True) + version('1.8.1', sha256='3c560b997f9eafd89f961dd8e8a29a81aad3e39aee888e3f3822da419047dc88') version('1.8.0', sha256='c4763383a96af4af52cd81b3b094227f5cf8e91662f861670965994539b7ee37') version('1.7.1', sha256='f4e9cc48319763cff4943de96bf527b737c9f1d6ac3088b8b5c75d07bd719569') version('1.7.0', sha256='b3dd9cc0fa36fe8c3b9997ba279ec831a905704816c25fe3c4c09fc7eeceaac4') @@ -74,39 +73,6 @@ class Libfabric(AutotoolsPackage): depends_on('automake', when='@develop', type='build') depends_on('libtool', when='@develop', type='build') - resource(name='fabtests', - url='https://github.com/ofiwg/libfabric/releases/download/v1.9.0/fabtests-1.9.0.tar.bz2', - sha256='60cc21db7092334904cbdafd142b2403572976018a22218e7c453195caef366e', - placement='fabtests', when='@1.9.0') - resource(name='fabtests', - url='https://github.com/ofiwg/libfabric/releases/download/v1.8.0/fabtests-1.8.0.tar.gz', - sha256='4b9af18c9c7c8b28eaeac4e6e9148bd2ea7dc6b6f00f8e31c90a6fc536c5bb6c', - placement='fabtests', when='@1.8.0') - resource(name='fabtests', - url='https://github.com/ofiwg/libfabric/releases/download/v1.7.0/fabtests-1.7.0.tar.gz', - sha256='ebb4129dc69dc0e1f48310ce1abb96673d8ddb18166bc595312ebcb96e803de9', - placement='fabtests', when='@1.7.0') - resource(name='fabtests', - url='https://github.com/ofiwg/fabtests/releases/download/v1.6.1/fabtests-1.6.1.tar.gz', - sha256='d357466b868fdaf1560d89ffac4c4e93a679486f1b4221315644d8d3e21174bf', - placement='fabtests', when='@1.6.1') - resource(name='fabtests', - url='https://github.com/ofiwg/fabtests/releases/download/v1.6.0/fabtests-1.6.0.tar.gz', - sha256='dc3eeccccb005205017f5af60681ede15782ce202a0103450a6d56a7ff515a67', - placement='fabtests', when='@1.6.0') - resource(name='fabtests', - url='https://github.com/ofiwg/fabtests/releases/download/v1.5.3/fabtests-1.5.3.tar.gz', - sha256='3835b3bf86cd00d23df0ddba8bf317e4a195e8d5c3c2baa918b373d548f77f29', - placement='fabtests', when='@1.5.3') - resource(name='fabtests', - url='https://github.com/ofiwg/fabtests/releases/download/v1.5.0/fabtests-1.5.0.tar.gz', - sha256='1dddd446c3f1df346899f9a8636f1b4265de5b863103ae24876e9f0c1e40a69d', - placement='fabtests', when='@1.5.0') - resource(name='fabtests', - 
url='https://github.com/ofiwg/fabtests/releases/download/v1.4.2/fabtests-1.4.2.tar.gz', - sha256='3b78d0ca1b223ff21b7f5b3627e67e358e3c18b700f86b017e2233fee7e88c2e', - placement='fabtests', when='@1.4.2') - conflicts('@1.9.0', when='platform=darwin', msg='This distribution is missing critical files') @@ -119,10 +85,6 @@ def autoreconf(self, spec, prefix): bash = which('bash') bash('./autogen.sh') - if self.run_tests: - with working_dir('fabtests'): - bash('./autogen.sh') - def configure_args(self): args = [] @@ -139,29 +101,6 @@ def configure_args(self): return args - def install(self, spec, prefix): - # Call main install method - super(Libfabric, self).install(spec, prefix) - - # Build and install fabtests, if available - if not os.path.isdir('fabtests'): - return - with working_dir('fabtests'): - configure = Executable('./configure') - configure('--prefix={0}'.format(self.prefix), - '--with-libfabric={0}'.format(self.prefix)) - make() - make('install') - def installcheck(self): fi_info = Executable(self.prefix.bin.fi_info) fi_info() - - # Run fabtests test suite if available - if not os.path.isdir('fabtests'): - return - if self.spec.satisfies('@1.8.0,1.9.0'): - # make test seems broken. - return - with working_dir('fabtests'): - make('test') diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py index 028c5913550..07ec319fb82 100644 --- a/var/spack/repos/builtin/packages/libffi/package.py +++ b/var/spack/repos/builtin/packages/libffi/package.py @@ -17,8 +17,7 @@ class Libffi(AutotoolsPackage): fetch_options = {'timeout': 60} version('3.2.1', sha256='d06ebb8e1d9a22d19e38d63fdb83954253f39bedc5d46232a05645685722ca37', - url="https://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz", - fetch_options=fetch_options) + url="https://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz") @property def headers(self): diff --git a/var/spack/repos/builtin/packages/libgd/package.py b/var/spack/repos/builtin/packages/libgd/package.py index 232b826bbf5..d4907ae3e19 100644 --- a/var/spack/repos/builtin/packages/libgd/package.py +++ b/var/spack/repos/builtin/packages/libgd/package.py @@ -31,7 +31,7 @@ class Libgd(AutotoolsPackage): depends_on('gettext', type='build') depends_on('pkgconfig', type='build') - depends_on('libiconv') + depends_on('iconv') depends_on('libpng') depends_on('jpeg') depends_on('libtiff') diff --git a/var/spack/repos/builtin/packages/libgit2/package.py b/var/spack/repos/builtin/packages/libgit2/package.py index 58ea638de0e..dcd5259a27c 100644 --- a/var/spack/repos/builtin/packages/libgit2/package.py +++ b/var/spack/repos/builtin/packages/libgit2/package.py @@ -16,8 +16,83 @@ class Libgit2(CMakePackage): homepage = "https://libgit2.github.com/" url = "https://github.com/libgit2/libgit2/archive/v0.26.0.tar.gz" - version('0.28.2', sha256='42b5f1e9b9159d66d86fff0394215c5733b6ef8f9b9d054cdd8c73ad47177fc3') - version('0.26.0', sha256='6a62393e0ceb37d02fe0d5707713f504e7acac9006ef33da1e88960bd78b6eac') + maintainers = ["AndrewGaspar"] - depends_on('cmake@2.8:', type='build') - depends_on('libssh2') + version('1.0.0', sha256='6a1fa16a7f6335ce8b2630fbdbb5e57c4027929ebc56fcd1ac55edb141b409b4') + version('0.99.0', sha256='174024310c1563097a6613a0d3f7539d11a9a86517cd67ce533849065de08a11') + version('0.28.5', sha256='2b7b68aee6f123bc84cc502a9c12738435b8054e7d628962e091cd2a25be4f42') + version('0.28.4', sha256='30f3877469d09f2e4a21be933b4e2800560d16646028dd800744dc5f7fb0c749') + version('0.28.3', 
sha256='ee5344730fe11ce7c86646e19c2d257757be293f5a567548d398fb3af8b8e53b') + version('0.28.2', sha256='42b5f1e9b9159d66d86fff0394215c5733b6ef8f9b9d054cdd8c73ad47177fc3') + version('0.28.1', sha256='0ca11048795b0d6338f2e57717370208c2c97ad66c6d5eac0c97a8827d13936b') + version('0.28.0', sha256='9d60d64dc77085e8e530e5c66314057eafe0c06e4a7a61149a70ff3e0688f284') + version('0.27.10', sha256='f6fd26378ff71bd7a4b17b576c82c774a2e9c2d6b74b24718a8fb29551e1c4a5') + version('0.27.9', sha256='adf17310b590e6e7618f070c742b5ee028aeeed2c60099bc4190c386b5060de1') + version('0.27.8', sha256='8313873d49dc01e8b880ec334d7430ae67496a89aaa8c6e7bbd3affb47a00c76') + version('0.27.7', sha256='1a5435a483759b1cd96feb12b11abb5231b0688016db506ce5947178f6ba2531') + version('0.27.6', sha256='d98db2ed11ec82fee94dce4819b466524613852c2c9c3426d351c57729ec49da') + version('0.27.5', sha256='15f2775f4f325951d9139ed906502b6c71fee6787cada9b045f5994072ccbd33') + version('0.27.4', sha256='0b7ca31cb959ff1b22afa0da8621782afe61f99242bf716c403802ffbdb21d51') + version('0.27.3', sha256='50a57bd91f57aa310fb7d5e2a340b3779dc17e67b4e7e66111feac5c2432f1a5') + version('0.27.2', sha256='ffacdbd5588aeb03e98e3866a7e2ceace468723a439bdc9bb01362fe140fa9e5') + version('0.27.1', sha256='837b11927bc5f64e7f9ab0376f57cfe3ca5aa52ffd2007ac41184b21124fb086') + version('0.27.0', sha256='545b0458292c786aba334f1bf1c8f73600ae73dd7205a7bb791a187ee48ab8d2') + version('0.26.8', sha256='0f20d7e239be030db33d7350bab38ada2830b3bffab5539730074e71b0267796') + version('0.26.7', sha256='65584ac1f4de2c3ab8491351c8629eb68bad2d65e67f6411bf0333b8976dc4ef') + version('0.26.6', sha256='7669dd47ebdab86ced8888816c552596ec923b6e126704a3445b2081cb0e5662') + version('0.26.5', sha256='52e28a5166564bc4365a2e4112f5e5c6e334708dbf13596241b2fd34efc1b0a9') + version('0.26.4', sha256='292fa2600bbb4e52641793cfcc1c19ffc0bf97b5fd8378d422a6bfe7afffcb97') + version('0.26.3', sha256='0da4e211dfb63c22e5f43f2a4a5373e86a140afa88a25ca6ba3cc2cae58263d2') + version('0.26.2', sha256='747b47d5b02a2387ff81301c694763785181b895690b6eb91ed1ae4b7904307b') + version('0.26.1', sha256='68cd0f8ee9e0ca84dcf0f0267d0a8297471d3365622d22d3da67c57165bb0722') + version('0.26.0', sha256='6a62393e0ceb37d02fe0d5707713f504e7acac9006ef33da1e88960bd78b6eac') + + # Backends + variant( + 'https', default='system', description='HTTPS support', + values=('system', 'openssl', 'none'), multi=False) + variant('ssh', default=True, description='Enable SSH support') + variant('curl', default=False, description='Enable libcurl support (only supported through v0.27)') + + # Build Dependencies + depends_on('cmake@2.8:', type='build', when="@:0.28") + depends_on('cmake@3.5:', type='build', when="@0.99:") + + # Runtime Dependencies + depends_on('libssh2', when='+ssh') + depends_on('openssl', when='https=system platform=linux') + depends_on('openssl', when='https=system platform=cray') + depends_on('openssl', when='https=openssl') + depends_on('curl', when='+curl') + + conflicts('+curl', when='@0.28:') + + def cmake_args(self): + args = [] + if 'https=system' in self.spec: + if 'platform=linux' in self.spec or 'platform=cray' in self.spec: + args.append('-DUSE_HTTPS=OpenSSL') + elif 'platform=darwin' in self.spec: + args.append('-DUSE_HTTPS=SecureTransport') + else: + # Let CMake try to find an HTTPS implementation. 
Mileage on + # your platform may vary + args.append('-DUSE_HTTPS=ON') + elif 'https=openssl' in self.spec: + args.append('-DUSE_HTTPS=OpenSSL') + else: + args.append('-DUSE_HTTPS=OFF') + + args.append( + '-DUSE_SSH={0}'.format('ON' if '+ssh' in self.spec else 'OFF')) + + # The curl backed is not supported after 0.27.x + if '@:0.27 +curl' in self.spec: + args.append( + '-DCURL={0}'.format('ON' if '+curl' in self.spec else 'OFF')) + + # Control tests + args.append( + '-DBUILD_CLAR={0}'.format('ON' if self.run_tests else 'OFF')) + + return args diff --git a/var/spack/repos/builtin/packages/libiconv/package.py b/var/spack/repos/builtin/packages/libiconv/package.py index 642ad289fd7..9331dfb399d 100644 --- a/var/spack/repos/builtin/packages/libiconv/package.py +++ b/var/spack/repos/builtin/packages/libiconv/package.py @@ -20,6 +20,7 @@ class Libiconv(AutotoolsPackage, GNUMirrorPackage): # We cannot set up a warning for gets(), since gets() is not part # of C11 any more and thus might not exist. patch('gets.patch', when='@1.14') + provides('iconv') conflicts('@1.14', when='%gcc@5:') diff --git a/var/spack/repos/builtin/packages/libiscsi/package.py b/var/spack/repos/builtin/packages/libiscsi/package.py new file mode 100644 index 00000000000..621cc875d64 --- /dev/null +++ b/var/spack/repos/builtin/packages/libiscsi/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libiscsi(AutotoolsPackage): + """Libiscsi is a client-side library to implement the iSCSI protocol that can + be used to access the resources of an iSCSI target.""" + + homepage = "https://github.com/sahlberg/libiscsi" + url = "https://github.com/sahlberg/libiscsi/archive/1.19.0.tar.gz" + + version('1.19.0', sha256='c7848ac722c8361d5064654bc6e926c2be61ef11dd3875020a63931836d806df') + version('1.18.0', sha256='464d104e12533dc11f0dd7662cbc2f01c132f94aa4f5bd519e3413ef485830e8') + version('1.17.0', sha256='80a7f75bfaffc8bec9920ba7af3f1d14cd862c35c3c5f2c9617b45b975232112') + version('1.16.0', sha256='35c7be63a8c3a7cee7b697901b6d2dd464e098e1881671eb67462983053b3c7b') + version('1.15.0', sha256='489e625e58c1e6da2fa3536f9c4b12290f2d3fb4ce14edc0583b8ba500605c34') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + + def autoreconf(self, spec, prefix): + autoreconf('--install', '--force') diff --git a/var/spack/repos/builtin/packages/libnetworkit/0001-Name-agnostic-import-of-tlx-library.patch b/var/spack/repos/builtin/packages/libnetworkit/0001-Name-agnostic-import-of-tlx-library.patch new file mode 100644 index 00000000000..900a298d9b4 --- /dev/null +++ b/var/spack/repos/builtin/packages/libnetworkit/0001-Name-agnostic-import-of-tlx-library.patch @@ -0,0 +1,17 @@ +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -203,10 +203,11 @@ if(NOT NETWORKIT_EXT_TLX) + "Please run `git submodule update --init` to fetch the submodule.") + endif() + else() ++ file(GLOB tlx_path ${NETWORKIT_EXT_TLX}/lib/**.a) + add_library(tlx STATIC IMPORTED) + set_target_properties(tlx PROPERTIES +- IMPORTED_LOCATION "${NETWORKIT_EXT_TLX}/lib/libtlx.a" +- INTERFACE_INCLUDE_DIRECTORIES "${NETWORKIT_EXT_TLX}/include/") ++ IMPORTED_LOCATION "${tlx_path}" ++ INTERFACE_INCLUDE_DIRECTORIES "${NETWORKIT_EXT_TLX}/include/") + endif() + + 
################################################################################
+ 
diff --git a/var/spack/repos/builtin/packages/libnetworkit/package.py b/var/spack/repos/builtin/packages/libnetworkit/package.py
new file mode 100644
index 00000000000..5d7f58eb906
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libnetworkit/package.py
@@ -0,0 +1,44 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Libnetworkit(CMakePackage):
+    """NetworKit is a growing open-source toolkit for large-scale network
+    analysis. Its aim is to provide tools for the analysis of large networks
+    in the size range from thousands to billions of edges. For this purpose,
+    it implements efficient graph algorithms, many of them parallel to
+    utilize multicore architectures. These are meant to compute standard
+    measures of network analysis, such as degree sequences, clustering
+    coefficients, and centrality measures. In this respect, NetworKit is
+    comparable to packages such as NetworkX, albeit with a focus on
+    parallelism and scalability."""
+
+    homepage = "https://networkit.github.io/"
+    url      = "https://github.com/networkit/networkit/archive/6.1.tar.gz"
+
+    maintainers = ['fabratu']
+
+    version('6.1', sha256='22c953ea1054c356663b31c77114c2f0c8fec17e0e707aeec23026241beab9b2')
+
+    variant('static', default=False, description='Enables the build of static libraries')
+    variant('doc', default=False, description='Enables the build with sphinx documentation')
+
+    depends_on('libtlx')
+    depends_on('py-sphinx', when='+doc', type='build')
+
+    patch('0001-Name-agnostic-import-of-tlx-library.patch', when='@6.1')
+
+    def cmake_args(self):
+        spec = self.spec
+
+        tlx_libs = spec['libtlx'].prefix
+
+        args = ['-DNETWORKIT_EXT_TLX=%s' % tlx_libs,
+                '-DNETWORKIT_STATIC=%s' %
+                ('ON' if '+static' in spec else 'OFF')]
+
+        return args
diff --git a/var/spack/repos/builtin/packages/libnotify/docbook-location.patch b/var/spack/repos/builtin/packages/libnotify/docbook-location.patch
new file mode 100644
index 00000000000..5fcaf2ddb77
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libnotify/docbook-location.patch
@@ -0,0 +1,11 @@
+--- a/meson.build	2020-03-30 14:00:26.851258170 -0400
++++ b/meson.build	2020-03-30 14:03:43.134195999 -0400
+@@ -54,7 +54,7 @@
+ 
+   if get_option('man')
+     xsltproc = find_program('xsltproc', required: true)
+-    stylesheet = 'http://docbook.sourceforge.net/release/xsl-ns/current/manpages/docbook.xsl'
++    stylesheet = 'http://cdn.docbook.org/release/xsl/current/manpages/docbook.xsl'
+     xsltproc_command = [
+       xsltproc,
+       '--nonet',
diff --git a/var/spack/repos/builtin/packages/libnotify/package.py b/var/spack/repos/builtin/packages/libnotify/package.py
new file mode 100644
index 00000000000..6407fd6406b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libnotify/package.py
@@ -0,0 +1,57 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Libnotify(MesonPackage):
+    """libnotify is a library for sending desktop notifications"""
+
+    homepage = "https://github.com/GNOME/libnotify"
+    url      = "https://github.com/GNOME/libnotify/archive/0.7.9.tar.gz"
+
+    version('0.7.9', sha256='9bd4f5fa911d27567e7cc2d2d09d69356c16703c4e8d22c0b49a5c45651f3af0')
+
+    # Libnotify is having trouble with finding the DTD and XSLT for docbook,
+    # which is required for both of these variants.
+    # variant('docbook', default=False,
+    #         description='Build docbook docs. Currently broken')
+    # variant('gtkdoc', default=False,
+    #         description='Build with gtkdoc. Currently broken')
+
+    depends_on('pkgconfig', type='build')
+    depends_on('glib@2.26.0:')
+    depends_on('gtkplus@2.90:')
+    depends_on('gobject-introspection')
+    depends_on('libxslt', type='build')
+    depends_on('docbook-xsl', type='build')
+    # depends_on('gtk-doc', when='+gtkdoc', type='build')
+    # depends_on('xmlto', when='+docbook', type='build')
+
+    patch('docbook-location.patch')
+
+    def meson_args(self):
+        # spec = self.spec
+        args = []
+
+        # if '+docbook' in spec:
+        #     args.append('-Ddocbook_docs=enabled')
+        # else:
+        #     args.append('-Ddocbook_docs=disabled')
+        args.append('-Ddocbook_docs=disabled')
+
+        # if self.run_tests:
+        #     args.append('-Dtests=true')
+        # else:
+        #     args.append('-Dtests=false')
+        args.append('-Dtests=false')
+
+        # if '+gtkdoc' in spec:
+        #     args.append('-Dgtk_doc=true')
+        # else:
+        #     args.append('-Dgtk_doc=false')
+        args.append('-Dgtk_doc=false')
+
+        return args
diff --git a/var/spack/repos/builtin/packages/libspatialite/package.py b/var/spack/repos/builtin/packages/libspatialite/package.py
index e725fdfe0f0..45fe1f6109c 100644
--- a/var/spack/repos/builtin/packages/libspatialite/package.py
+++ b/var/spack/repos/builtin/packages/libspatialite/package.py
@@ -21,5 +21,5 @@ class Libspatialite(AutotoolsPackage):
     depends_on('proj@:5')
     depends_on('geos')
     depends_on('freexl')
-    depends_on('libiconv')
+    depends_on('iconv')
     depends_on('libxml2')
diff --git a/var/spack/repos/builtin/packages/libtlx/package.py b/var/spack/repos/builtin/packages/libtlx/package.py
new file mode 100644
index 00000000000..619f90dfb54
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libtlx/package.py
@@ -0,0 +1,28 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+
+
+class Libtlx(CMakePackage):
+    """ tlx is a collection of C++ helpers and extensions universally needed,
+    but not found in the STL.
+    The most important design goals and conventions are:
+    1) high modularity with as few dependencies between
+    modules as possible.
+    2) attempt to never break existing interfaces.
+    3) compile on all platforms with C++ - smartphones, supercomputers,
+    windows, etc.
+    4) zero external dependencies: no additional libraries are required.
+    5) warning and bug-freeness on all compilers.
+ 6) keep overhead down - small overall size such that is can be included + without bloating applications.""" + + homepage = "https://tlx.github.io/" + url = "https://github.com/tlx/tlx/archive/v0.5.20191212.tar.gz" + + maintainers = ['fabratu'] + + version('0.5.20191212', sha256='5e67d3042a390dbb831b6d46437e3c7fadf738bff362aa7376b210b10ecd532d') diff --git a/var/spack/repos/builtin/packages/libunistring/package.py b/var/spack/repos/builtin/packages/libunistring/package.py index 94b1c24b984..9812fb32cb1 100644 --- a/var/spack/repos/builtin/packages/libunistring/package.py +++ b/var/spack/repos/builtin/packages/libunistring/package.py @@ -19,7 +19,7 @@ class Libunistring(AutotoolsPackage, GNUMirrorPackage): version('0.9.7', sha256='2e3764512aaf2ce598af5a38818c0ea23dedf1ff5460070d1b6cee5c3336e797') version('0.9.6', sha256='2df42eae46743e3f91201bf5c100041540a7704e8b9abfd57c972b2d544de41b') - depends_on('libiconv') + depends_on('iconv') # glibc 2.28+ removed libio.h and thus _IO_ftrylockfile patch('removed_libio.patch', when='@:0.9.9') diff --git a/var/spack/repos/builtin/packages/libwebp/package.py b/var/spack/repos/builtin/packages/libwebp/package.py index 9c5f56a8043..fd8dee8e9eb 100644 --- a/var/spack/repos/builtin/packages/libwebp/package.py +++ b/var/spack/repos/builtin/packages/libwebp/package.py @@ -14,7 +14,7 @@ class Libwebp(AutotoolsPackage): homepage = "https://developers.google.com/speed/webp/" url = "https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-1.0.3.tar.gz" - version('1.0.3', sha256='082d114bcb18a0e2aafc3148d43367c39304f86bf18ba0b2e766447e111a4a91') + version('1.0.3', sha256='e20a07865c8697bba00aebccc6f54912d6bc333bb4d604e6b07491c1a226b34f') variant('libwebpmux', default=False, description='Build libwebpmux') variant('libwebpdemux', default=False, description='Build libwebpdemux') diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index 8c91852ae32..8c024854ea1 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -23,7 +23,7 @@ class Libxml2(AutotoolsPackage): variant('python', default=False, description='Enable Python support') depends_on('pkgconfig@0.9.0:', type='build') - depends_on('libiconv') + depends_on('iconv') depends_on('zlib') depends_on('xz') @@ -49,7 +49,7 @@ def configure_args(self): spec = self.spec args = ['--with-lzma={0}'.format(spec['xz'].prefix), - '--with-iconv={0}'.format(spec['libiconv'].prefix)] + '--with-iconv={0}'.format(spec['iconv'].prefix)] if '+python' in spec: args.extend([ diff --git a/var/spack/repos/builtin/packages/libxpresent/package.py b/var/spack/repos/builtin/packages/libxpresent/package.py index 0fddba389a1..1305623472c 100644 --- a/var/spack/repos/builtin/packages/libxpresent/package.py +++ b/var/spack/repos/builtin/packages/libxpresent/package.py @@ -15,10 +15,15 @@ class Libxpresent(AutotoolsPackage): version('1.0.0', sha256='92f1bdfb67ae2ffcdb25ad72c02cac5e4912dc9bc792858240df1d7f105946fa') - depends_on('libx11') + depends_on('libx11', type='link') + depends_on('libxext', type='link') + depends_on('libxfixes', type='link') + depends_on('libxrandr', type='link') - depends_on('xproto', type='build') - depends_on('presentproto@1.0:', type='build') - depends_on('xextproto', type='build') + depends_on('xproto', type='link') + depends_on('presentproto@1.0:', type='link') + depends_on('xextproto', type='link') + depends_on('fixesproto', type='link') + 
depends_on('randrproto', type='link') depends_on('pkgconfig', type='build') depends_on('util-macros', type='build') diff --git a/var/spack/repos/builtin/packages/libxslt/package.py b/var/spack/repos/builtin/packages/libxslt/package.py index 7cdb0e41b22..5da6272d833 100644 --- a/var/spack/repos/builtin/packages/libxslt/package.py +++ b/var/spack/repos/builtin/packages/libxslt/package.py @@ -26,7 +26,7 @@ class Libxslt(AutotoolsPackage): variant('python', default=False, description='Build Python bindings') depends_on('pkgconfig@0.9.0:', type='build') - depends_on('libiconv') + depends_on('iconv') depends_on('libxml2') depends_on('libxml2+python', when='+python') depends_on('xz') diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index 070b24396f4..12cfc32dfd2 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -14,10 +14,11 @@ class Libxsmm(MakefilePackage): and deep learning primitives.""" homepage = 'https://github.com/hfp/libxsmm' - url = 'https://github.com/hfp/libxsmm/archive/1.14.tar.gz' + url = 'https://github.com/hfp/libxsmm/archive/1.15.tar.gz' git = 'https://github.com/hfp/libxsmm.git' - version('develop', branch='master') + version('master', branch='master') + version('1.15', sha256='499e5adfbf90cd3673309243c2b56b237d54f86db2437e1ac06c8746b55ab91c') version('1.14', sha256='9c0af4509ea341d1ee2c6c19fc6f19289318c3bd4b17844efeb9e7f9691abf76') version('1.13', sha256='47c034e169820a9633770eece0e0fdd8d4a744e09b81da2af8c2608a4625811e') version('1.12.1', sha256='3687fb98da00ba92cd50b5f0d18b39912c7886dad3856843573aee0cb34e9791') diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py index c24d5e720a8..8556fb0d8e9 100644 --- a/var/spack/repos/builtin/packages/likwid/package.py +++ b/var/spack/repos/builtin/packages/likwid/package.py @@ -19,7 +19,9 @@ class Likwid(Package): homepage = "https://github.com/RRZE-HPC/likwid" url = "https://github.com/RRZE-HPC/likwid/archive/v5.0.0.tar.gz" git = "https://github.com/RRZE-HPC/likwid.git" + maintainers = ['TomTheBear'] + version('5.0.1', sha256='3757b0cb66e8af0116f9288c7f90543acbd8e2af8f72f77aef447ca2b3e76453') version('5.0.0', sha256='26623f5a1a5fec19d798f0114774a5293d1c93a148538b9591a13e50930fa41e') version('4.3.4', sha256='5c0d1c66b25dac8292a02232f06454067f031a238f010c62f40ef913c6609a83') version('4.3.3', sha256='a681378cd66c1679ca840fb5fac3136bfec93c01b3d78cc1d00a641db325a9a3') @@ -40,8 +42,6 @@ class Likwid(Package): depends_on('perl', type=('build', 'run')) - supported_compilers = {'clang': 'CLANG', 'gcc': 'GCC', 'intel': 'ICC'} - def patch(self): files = glob.glob('perl/*.*') + glob.glob('bench/perl/*.*') @@ -59,13 +59,18 @@ def filter_sbang(self): *files) def install(self, spec, prefix): - if self.compiler.name not in self.supported_compilers: + supported_compilers = {'clang': 'CLANG', 'gcc': 'GCC', 'intel': 'ICC'} + if spec.target.family == 'aarch64': + supported_compilers = {'gcc': 'GCCARMv8', 'clang': 'ARMCLANG'} + elif spec.target.family == 'ppc64' or spec.target.family == 'ppc64le': + supported_compilers = {'gcc': 'GCCPOWER'} + if self.compiler.name not in supported_compilers: raise RuntimeError('{0} is not a supported compiler \ to compile Likwid'.format(self.compiler.name)) filter_file('^COMPILER .*', 'COMPILER = ' + - self.supported_compilers[self.compiler.name], + supported_compilers[self.compiler.name], 'config.mk') 
filter_file('^PREFIX .*', 'PREFIX = ' + diff --git a/var/spack/repos/builtin/packages/lksctp-tools/package.py b/var/spack/repos/builtin/packages/lksctp-tools/package.py new file mode 100644 index 00000000000..ca168aa320d --- /dev/null +++ b/var/spack/repos/builtin/packages/lksctp-tools/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class LksctpTools(AutotoolsPackage): + """A Linux SCTP helper library""" + + homepage = "https://github.com/sctp/lksctp-tools" + url = "https://github.com/sctp/lksctp-tools/archive/v1.0.18.tar.gz" + + version('1.0.18', sha256='3e9ab5b3844a8b65fc8152633aafe85f406e6da463e53921583dfc4a443ff03a') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index 1644a8e7501..810203a9f00 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -15,14 +15,17 @@ class Llvm(CMakePackage): is not an acronym; it is the full name of the project. """ - homepage = 'http://llvm.org/' - url = "https://github.com/llvm/llvm-project/archive/llvmorg-7.1.0.tar.gz" - list_url = 'http://releases.llvm.org/download.html' - git = 'https://github.com/llvm/llvm-project' + homepage = "http://llvm.org/" + url = "https://github.com/llvm/llvm-project/archive/llvmorg-7.1.0.tar.gz" + list_url = "http://releases.llvm.org/download.html" + git = "https://github.com/llvm/llvm-project" + maintainers = ['trws', 'naromero77'] - family = 'compiler' # Used by lmod + family = "compiler" # Used by lmod + # fmt: off version('master', branch='master') + version('10.0.0', sha256='b81c96d2f8f40dc61b14a167513d87c0d813aae0251e06e11ae8a4384ca15451') version('9.0.1', sha256='be7b034641a5fda51ffca7f5d840b1a768737779f75f7c4fd18fe2d37820289a') version('9.0.0', sha256='7807fac25330e24e9955ca46cd855dd34bbc9cc4fdba8322366206654d1036f2') version('8.0.0', sha256='d81238b4a69e93e29f74ce56f8107cbfcf0c7d7b40510b7879e98cc031e25167') @@ -43,123 +46,173 @@ class Llvm(CMakePackage): version('3.7.0', sha256='dc00bc230be2006fb87b84f6fe4800ca28bc98e6692811a98195da53c9cb28c6') version('3.6.2', sha256='f75d703a388ba01d607f9cf96180863a5e4a106827ade17b221d43e6db20778a') version('3.5.1', sha256='5d739684170d5b2b304e4fb521532d5c8281492f71e1a8568187bfa38eb5909d') + # fmt: on # NOTE: The debug version of LLVM is an order of magnitude larger than # the release version, and may take up 20-30 GB of space. If you want # to save space, build with `build_type=Release`. - variant('clang', default=True, - description="Build the LLVM C/C++/Objective-C compiler frontend") + variant( + "clang", + default=True, + description="Build the LLVM C/C++/Objective-C compiler frontend", + ) - # TODO: The current version of this package unconditionally disables CUDA. - # Better would be to add a "cuda" variant that: - # - Adds dependency on the "cuda" package when enabled - # - Sets the necessary CMake flags when enabled - # - Disables CUDA (as this current version does) only when the - # variant is also disabled. 
+    variant(
+        "cuda",
+        default=False,
+        description="Build LLVM with CUDA, required for nvptx offload",
+    )
+    variant(
+        "nvptx_offload_ccs",
+        default="35,60,70,75",
+        multi=True,
+        description="NVIDIA compute capabilities to make inlining capable",
+    )
 
-    # variant('cuda', default=False,
-    #         description="Build the LLVM with CUDA features enabled")
+    variant(
+        "omp_debug",
+        default=False,
+        description="Include debugging code in OpenMP runtime libraries",
+    )
+    variant("lldb", default=True, description="Build the LLVM debugger")
+    variant("lld", default=True, description="Build the LLVM linker")
+    variant("mlir", default=False, description="Build with MLIR support")
+    variant(
+        "internal_unwind",
+        default=True,
+        description="Build the libcxxabi libunwind",
+    )
+    variant(
+        "polly",
+        default=True,
+        description="Build the LLVM polyhedral optimization plugin, "
+        "only builds for 3.7.0+",
+    )
+    variant(
+        "libcxx",
+        default=True,
+        description="Build the LLVM C++ standard library",
+    )
+    variant(
+        "compiler-rt",
+        default=True,
+        description="Build LLVM compiler runtime, including sanitizers",
+    )
+    variant(
+        "gold",
+        default=(sys.platform != "darwin"),
+        description="Add support for LTO with the gold linker plugin",
+    )
+    variant(
+        "split_dwarf",
+        default=False,
+        description="Build with split dwarf information",
+    )
+    variant(
+        "shared_libs",
+        default=False,
+        description="Build all components as shared libraries, faster, "
+        "less memory to build, less stable",
+    )
+    variant(
+        "all_targets",
+        default=False,
+        description="Build all supported targets, default targets "
+        ",NVPTX,AMDGPU,CppBackend",
+    )
+    variant(
+        "build_type",
+        default="Release",
+        description="CMake build type",
+        values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
+    )
+    variant(
+        "omp_tsan",
+        default=False,
+        description="Build with OpenMP capable thread sanitizer",
+    )
+    variant("python", default=False, description="Install python bindings")
 
-    variant('lldb', default=True, description="Build the LLVM debugger")
-    variant('lld', default=True, description="Build the LLVM linker")
-    variant('internal_unwind', default=True,
-            description="Build the libcxxabi libunwind")
-    variant('polly', default=True,
-            description="Build the LLVM polyhedral optimization plugin, "
-            "only builds for 3.7.0+")
-    variant('libcxx', default=True,
-            description="Build the LLVM C++ standard library")
-    variant('compiler-rt', default=True,
-            description="Build LLVM compiler runtime, including sanitizers")
-    variant('gold', default=(sys.platform != 'darwin'),
-            description="Add support for LTO with the gold linker plugin")
-    variant('shared_libs', default=False,
-            description="Build all components as shared libraries, faster, "
-            "less memory to build, less stable")
-    variant('link_dylib', default=False,
-            description="Build and link the libLLVM shared library rather "
-            "than static")
-    variant('all_targets', default=False,
-            description="Build all supported targets, default targets "
-            ",NVPTX,AMDGPU,CppBackend")
-    variant('build_type', default='Release',
-            description='CMake build type',
-            values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
-    variant('omp_tsan', default=False,
-            description="Build with OpenMP capable thread sanitizer")
-    variant('python', default=False, description="Install python bindings")
-
-    extends('python', when='+python')
+    extends("python", when="+python")
 
     # Build dependency
-    depends_on('cmake@3.4.3:', type='build')
-    depends_on('python@2.7:2.8', when='@:4.999 ~python', 
type='build') - depends_on('python', when='@5: ~python', type='build') + depends_on("cmake@3.4.3:", type="build") + depends_on("python@2.7:2.8", when="@:4.999 ~python", type="build") + depends_on("python", when="@5: ~python", type="build") # Universal dependency - depends_on('python@2.7:2.8', when='@:4.999+python') - depends_on('python', when='@5:+python') + depends_on("python@2.7:2.8", when="@:4.999+python") + depends_on("python", when="@5:+python") + depends_on("z3", when="@9:") + + # CUDA dependency + depends_on("cuda", when="+cuda") # openmp dependencies - depends_on('perl-data-dumper', type=('build')) + depends_on("perl-data-dumper", type=("build")) + depends_on("hwloc") + depends_on("libelf", when="+cuda") # libomptarget + depends_on("libffi", when="+cuda") # libomptarget # ncurses dependency - depends_on('ncurses+termlib') + depends_on("ncurses+termlib") # lldb dependencies - depends_on('swig', when='+lldb') - depends_on('libedit', when='+lldb') - depends_on('py-six', when='@5.0.0: +lldb +python') + depends_on("swig", when="+lldb") + depends_on("libedit", when="+lldb") + depends_on("py-six", when="@5.0.0: +lldb +python") - # gold support - depends_on('binutils+gold', when='+gold') + # gold support, required for some features + depends_on("binutils+gold", when="+gold") # polly plugin - depends_on('gmp', when='@:3.6.999 +polly') - depends_on('isl', when='@:3.6.999 +polly') + depends_on("gmp", when="@:3.6.999 +polly") + depends_on("isl", when="@:3.6.999 +polly") - conflicts('+clang_extra', when='~clang') - conflicts('+lldb', when='~clang') - conflicts('+libcxx', when='~clang') - conflicts('+internal_unwind', when='~clang') - conflicts('+compiler-rt', when='~clang') + conflicts("+clang_extra", when="~clang") + conflicts("+lldb", when="~clang") + conflicts("+libcxx", when="~clang") + conflicts("+internal_unwind", when="~clang") + conflicts("+compiler-rt", when="~clang") # LLVM 4 and 5 does not build with GCC 8 - conflicts('%gcc@8:', when='@:5') - conflicts('%gcc@:5.0.999', when='@8:') + conflicts("%gcc@8:", when="@:5") + conflicts("%gcc@:5.0.999", when="@8:") # OMP TSAN exists in > 5.x - conflicts('+omp_tsan', when='@:5.99') + conflicts("+omp_tsan", when="@:5.99") + + # MLIR exists in > 10.x + conflicts("+mlir", when="@:9") # Github issue #4986 - patch('llvm_gcc7.patch', when='@4.0.0:4.0.1+lldb %gcc@7.0:') + patch("llvm_gcc7.patch", when="@4.0.0:4.0.1+lldb %gcc@7.0:") # Backport from llvm master + additional fix # see https://bugs.llvm.org/show_bug.cgi?id=39696 # for a bug report about this problem in llvm master. - patch('constexpr_longdouble.patch', when='@6:8+libcxx') - patch('constexpr_longdouble_9.0.patch', when='@9+libcxx') + patch("constexpr_longdouble.patch", when="@6:8+libcxx") + patch("constexpr_longdouble_9.0.patch", when="@9+libcxx") # Backport from llvm master; see # https://bugs.llvm.org/show_bug.cgi?id=38233 # for a bug report about this problem in llvm master. 
- patch('llvm_py37.patch', when='@4:6 ^python@3.7:') + patch("llvm_py37.patch", when="@4:6 ^python@3.7:") # https://bugs.llvm.org/show_bug.cgi?id=39696 - patch('thread-p9.patch', when='@develop+libcxx') + patch("thread-p9.patch", when="@develop+libcxx") - @run_before('cmake') + @run_before("cmake") def check_darwin_lldb_codesign_requirement(self): - if not self.spec.satisfies('+lldb platform=darwin'): + if not self.spec.satisfies("+lldb platform=darwin"): return - codesign = which('codesign') - mkdir('tmp') - llvm_check_file = join_path('tmp', 'llvm_check') - copy('/usr/bin/false', llvm_check_file) + codesign = which("codesign") + mkdir("tmp") + llvm_check_file = join_path("tmp", "llvm_check") + copy("/usr/bin/false", llvm_check_file) try: - codesign('-f', '-s', 'lldb_codesign', '--dryrun', - llvm_check_file) + codesign("-f", "-s", "lldb_codesign", "--dryrun", llvm_check_file) except ProcessError: # Newer LLVM versions have a simple script that sets up @@ -170,140 +223,202 @@ def check_darwin_lldb_codesign_requirement(self): except Exception: raise RuntimeError( 'The "lldb_codesign" identity must be available to build ' - 'LLVM with LLDB. See https://lldb.llvm.org/resources/' - 'build.html#code-signing-on-macos for details on how to ' - 'create this identity.' + "LLVM with LLDB. See https://lldb.llvm.org/resources/" + "build.html#code-signing-on-macos for details on how to " + "create this identity." ) def setup_build_environment(self, env): - env.append_flags('CXXFLAGS', self.compiler.cxx11_flag) + env.append_flags("CXXFLAGS", self.compiler.cxx11_flag) def setup_run_environment(self, env): - if '+clang' in self.spec: - env.set('CC', join_path(self.spec.prefix.bin, 'clang')) - env.set('CXX', join_path(self.spec.prefix.bin, 'clang++')) + if "+clang" in self.spec: + env.set("CC", join_path(self.spec.prefix.bin, "clang")) + env.set("CXX", join_path(self.spec.prefix.bin, "clang++")) - root_cmakelists_dir = 'llvm' + root_cmakelists_dir = "llvm" def cmake_args(self): spec = self.spec cmake_args = [ - '-DLLVM_REQUIRES_RTTI:BOOL=ON', - '-DLLVM_ENABLE_RTTI:BOOL=ON', - '-DLLVM_ENABLE_EH:BOOL=ON', - '-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp', - '-DPYTHON_EXECUTABLE:PATH={0}'.format(spec['python'].command.path), + "-DLLVM_REQUIRES_RTTI:BOOL=ON", + "-DLLVM_ENABLE_RTTI:BOOL=ON", + "-DLLVM_ENABLE_EH:BOOL=ON", + "-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp", + "-DPYTHON_EXECUTABLE:PATH={0}".format(spec["python"].command.path), + "-DLIBOMP_USE_HWLOC=On", ] projects = [] - # TODO: Instead of unconditionally disabling CUDA, add a "cuda" variant - # (see TODO above), and set the paths if enabled. 
- cmake_args.extend([ - '-DCUDA_TOOLKIT_ROOT_DIR:PATH=IGNORE', - '-DCUDA_SDK_ROOT_DIR:PATH=IGNORE', - '-DCUDA_NVCC_EXECUTABLE:FILEPATH=IGNORE', - '-DLIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES:STRING=IGNORE']) + if "+cuda" in spec: + cmake_args.extend( + [ + "-DCUDA_TOOLKIT_ROOT_DIR:PATH=" + spec["cuda"].prefix, + "-DLIBOMPTARGET_NVPTX_COMPUTE_CAPABILITIES={0}".format( + ",".join(spec.variants["nvptx_offload_ccs"].value) + ), + "-DCLANG_OPENMP_NVPTX_DEFAULT_ARCH=sm_{0}".format( + spec.variants["nvptx_offload_ccs"].value[-1] + ), + ] + ) + else: + # still build libomptarget but disable cuda + cmake_args.extend( + [ + "-DCUDA_TOOLKIT_ROOT_DIR:PATH=IGNORE", + "-DCUDA_SDK_ROOT_DIR:PATH=IGNORE", + "-DCUDA_NVCC_EXECUTABLE:FILEPATH=IGNORE", + "-DLIBOMPTARGET_DEP_CUDA_DRIVER_LIBRARIES:STRING=IGNORE", + ] + ) - if '+python' in spec and '+lldb' in spec and spec.satisfies('@5.0.0:'): - cmake_args.append('-DLLDB_USE_SYSTEM_SIX:Bool=TRUE') + if "+omp_debug" in spec: + cmake_args.append("-DLIBOMPTARGET_ENABLE_DEBUG:Bool=ON") - if '~python' in spec and '+lldb' in spec: - cmake_args.append('-DLLDB_DISABLE_PYTHON:Bool=TRUE') + if "+python" in spec and "+lldb" in spec and spec.satisfies("@5.0.0:"): + cmake_args.append("-DLLDB_USE_SYSTEM_SIX:Bool=TRUE") - if '+gold' in spec: - cmake_args.append('-DLLVM_BINUTILS_INCDIR=' + - spec['binutils'].prefix.include) + if "~python" in spec and "+lldb" in spec: + cmake_args.append("-DLLDB_DISABLE_PYTHON:Bool=TRUE") - if '+clang' in spec: - projects.append('clang') - projects.append('clang-tools-extra') - projects.append('openmp') - if '+lldb' in spec: - projects.append('lldb') - if '+lld' in spec: - projects.append('lld') - if '+compiler-rt' in spec: - projects.append('compiler-rt') - if '+libcxx' in spec: - projects.append('libcxx') - projects.append('libcxxabi') - if spec.satisfies('@3.9.0:'): - cmake_args.append('-DCLANG_DEFAULT_CXX_STDLIB=libc++') - if '+internal_unwind' in spec: - projects.append('libunwind') - if '+polly' in spec: - projects.append('polly') - cmake_args.append('-DLINK_POLLY_INTO_TOOLS:Bool=ON') + if "+gold" in spec: + cmake_args.append( + "-DLLVM_BINUTILS_INCDIR=" + spec["binutils"].prefix.include + ) - if '+shared_libs' in spec: - cmake_args.append('-DBUILD_SHARED_LIBS:Bool=ON') + if "+clang" in spec: + projects.append("clang") + projects.append("clang-tools-extra") + projects.append("openmp") + if "+lldb" in spec: + projects.append("lldb") + if "+lld" in spec: + projects.append("lld") + if "+compiler-rt" in spec: + projects.append("compiler-rt") + if "+libcxx" in spec: + projects.append("libcxx") + projects.append("libcxxabi") + if spec.satisfies("@3.9.0:"): + cmake_args.append("-DCLANG_DEFAULT_CXX_STDLIB=libc++") + if "+mlir" in spec: + projects.append("mlir") + if "+internal_unwind" in spec: + projects.append("libunwind") + if "+polly" in spec: + projects.append("polly") + cmake_args.append("-DLINK_POLLY_INTO_TOOLS:Bool=ON") - if '+link_dylib' in spec: - cmake_args.append('-DLLVM_LINK_LLVM_DYLIB:Bool=ON') + if "+shared_libs" in spec: + cmake_args.append("-DBUILD_SHARED_LIBS:Bool=ON") + if "+omp_debug" in spec: + cmake_args.append("-DLIBOMPTARGET_ENABLE_DEBUG:Bool=ON") - if '+all_targets' not in spec: # all is default on cmake + if "+split_dwarf" in spec: + cmake_args.append("-DLLVM_USE_SPLIT_DWARF:Bool=ON") - targets = ['NVPTX', 'AMDGPU'] - if (spec.version < Version('3.9.0')): + if "+all_targets" not in spec: # all is default on cmake + + targets = ["NVPTX", "AMDGPU"] + if spec.version < Version("3.9.0"): # Starting in 3.9.0 CppBackend is no 
longer a target (see # LLVM_ALL_TARGETS in llvm's top-level CMakeLists.txt for # the complete list of targets) - targets.append('CppBackend') + targets.append("CppBackend") - if spec.target.family == 'x86' or spec.target.family == 'x86_64': - targets.append('X86') - elif spec.target.family == 'arm': - targets.append('ARM') - elif spec.target.family == 'aarch64': - targets.append('AArch64') - elif (spec.target.family == 'sparc' or - spec.target.family == 'sparc64'): - targets.append('Sparc') - elif (spec.target.family == 'ppc64' or - spec.target.family == 'ppc64le' or - spec.target.family == 'ppc' or - spec.target.family == 'ppcle'): - targets.append('PowerPC') + if spec.target.family == "x86" or spec.target.family == "x86_64": + targets.append("X86") + elif spec.target.family == "arm": + targets.append("ARM") + elif spec.target.family == "aarch64": + targets.append("AArch64") + elif ( + spec.target.family == "sparc" + or spec.target.family == "sparc64" + ): + targets.append("Sparc") + elif ( + spec.target.family == "ppc64" + or spec.target.family == "ppc64le" + or spec.target.family == "ppc" + or spec.target.family == "ppcle" + ): + targets.append("PowerPC") cmake_args.append( - '-DLLVM_TARGETS_TO_BUILD:STRING=' + ';'.join(targets)) + "-DLLVM_TARGETS_TO_BUILD:STRING=" + ";".join(targets) + ) - if '+omp_tsan' in spec: - cmake_args.append('-DLIBOMP_TSAN_SUPPORT=ON') + if "+omp_tsan" in spec: + cmake_args.append("-DLIBOMP_TSAN_SUPPORT=ON") - if self.compiler.name == 'gcc': + if self.compiler.name == "gcc": gcc_prefix = ancestor(self.compiler.cc, 2) - cmake_args.append('-DGCC_INSTALL_PREFIX=' + gcc_prefix) + cmake_args.append("-DGCC_INSTALL_PREFIX=" + gcc_prefix) - if spec.satisfies('@4.0.0:'): - if spec.satisfies('platform=cray') or \ - spec.satisfies('platform=linux'): - cmake_args.append('-DCMAKE_BUILD_WITH_INSTALL_RPATH=1') + if spec.satisfies("@4.0.0:"): + if spec.satisfies("platform=cray") or spec.satisfies( + "platform=linux" + ): + cmake_args.append("-DCMAKE_BUILD_WITH_INSTALL_RPATH=1") # Semicolon seperated list of projects to enable cmake_args.append( - '-DLLVM_ENABLE_PROJECTS:STRING={0}'.format(';'.join(projects))) + "-DLLVM_ENABLE_PROJECTS:STRING={0}".format(";".join(projects)) + ) return cmake_args - @run_before('build') + @run_before("build") def pre_install(self): with working_dir(self.build_directory): # When building shared libraries these need to be installed first - make('install-LLVMTableGen') - if self.spec.version >= Version('4.0.0'): + make("install-LLVMTableGen") + if self.spec.version >= Version("4.0.0"): # LLVMDemangle target was added in 4.0.0 - make('install-LLVMDemangle') - make('install-LLVMSupport') + make("install-LLVMDemangle") + make("install-LLVMSupport") - @run_after('install') + @run_after("install") def post_install(self): - if '+python' in self.spec: - install_tree('llvm/bindings/python', site_packages_dir) + spec = self.spec - if '+clang' in self.spec: - install_tree('clang/bindings/python', site_packages_dir) + # unnecessary if we get bootstrap builds in here + if "+cuda" in self.spec: + ompdir = "build-bootstrapped-omp" + # rebuild libomptarget to get bytecode runtime library files + with working_dir(ompdir, create=True): + cmake_args = [ + self.stage.source_path + "/openmp", + "-DCMAKE_C_COMPILER:PATH={0}".format( + spec.prefix.bin + "/clang" + ), + "-DCMAKE_CXX_COMPILER:PATH={0}".format( + spec.prefix.bin + "/clang++" + ), + "-DCMAKE_INSTALL_PREFIX:PATH={0}".format(spec.prefix), + ] + cmake_args.extend(self.cmake_args()) + 
cmake_args.append('-DLIBOMPTARGET_NVPTX_ENABLE_BCLIB=true') + + # work around bad libelf detection in libomptarget + cmake_args.append( + "-DCMAKE_CXX_FLAGS:String=-I{0} -I{1}".format( + spec["libelf"].prefix.include, + spec["hwloc"].prefix.include, + ) + ) + + cmake(*cmake_args) + make() + make("install") + if "+python" in self.spec: + install_tree("llvm/bindings/python", site_packages_dir) + + if "+clang" in self.spec: + install_tree("clang/bindings/python", site_packages_dir) with working_dir(self.build_directory): - install_tree('bin', join_path(self.prefix, 'libexec', 'llvm')) + install_tree("bin", join_path(self.prefix, "libexec", "llvm")) diff --git a/var/spack/repos/builtin/packages/lmbench/package.py b/var/spack/repos/builtin/packages/lmbench/package.py new file mode 100644 index 00000000000..e38ca29e242 --- /dev/null +++ b/var/spack/repos/builtin/packages/lmbench/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lmbench(MakefilePackage): + """lmbench is a suite of simple, portable, ANSI/C microbenchmarks for + UNIX/POSIX. In general, it measures two key features: latency and + bandwidth. lmbench is intended to give system developers insight into + basic costs of key operations.""" + + homepage = "http://lmbench.sourceforge.net/" + git = "https://github.com/intel/lmbench.git" + + version('master', branch='master') + + depends_on('libtirpc') + + def setup_build_environment(self, env): + env.prepend_path('CPATH', self.spec['libtirpc'].prefix.include.tirpc) + env.append_flags('LDFLAGS', '-ltirpc') + + def build(self, spec, prefix): + make('build') + + def install(self, spec, prefix): + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index 146ef9e3b4e..2f12d47587d 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -24,6 +24,7 @@ class Lmod(AutotoolsPackage): version('8.1.5', sha256='3e5846d3d8e593cbcdfa0aed1474569bf5b5cfd19fd288de22051823d449d344') version('8.0.9', sha256='9813c22ae4dd21eb3dc480f6ce307156512092b4bca954bf8aacc15944f23673') version('7.8.15', sha256='00a257f5073d656adc73045997c28f323b7a4f6d901f1c57b7db2b0cd6bee6e6') + version('7.8.1', sha256='74244c22cecd72777e75631f357d2e20ff7f2b9c2ef59e4e38b5a171b7b6eeea') version('7.8', sha256='40388380a36a00c3ce929a9f88c8fffc93deeabf87a7c3f8864a82acad38c3ba') version('7.7.29', sha256='269235d07d8ea387a2578f90bb64cf8ad16b4f28dcce196b293eb48cf1f71fb4') version('7.7.13', sha256='6145f075e5d49e12fcf0e75bb38afb27f205d23ba3496c1ff6c8b2cbaa9908be') @@ -45,6 +46,9 @@ class Lmod(AutotoolsPackage): depends_on('lua-luafilesystem', type=('build', 'run')) depends_on('tcl', type=('build', 'link', 'run')) + variant('auto_swap', default=False, description='Enable auto swapping conflicting modules') + variant('redirect', default=True, description='Enables redirect instead of pager') + patch('fix_tclsh_paths.patch', when='@:6.4.3') patch('0001-fix-problem-with-MODULESHOME-and-issue-271.patch', when='@7.3.28:7.4.10') @@ -64,3 +68,18 @@ def patch(self): if self.spec.version <= Version('6.4.3'): for tclscript in glob('src/*.tcl'): filter_file(r'^#!.*tclsh', '#!@path_to_tclsh@', tclscript) + + def configure_args(self): + args = [] + + if '+auto_swap' in self.spec: + 
args.append('--with-autoSwap=yes') + else: + args.append('--with-autoSwap=no') + + if '+redirect' in self.spec: + args.append('--with-redirect=yes') + else: + args.append('--with-redirect=no') + + return args diff --git a/var/spack/repos/builtin/packages/lua-luaposix/package.py b/var/spack/repos/builtin/packages/lua-luaposix/package.py index f69c2a36826..f5290eae1c9 100644 --- a/var/spack/repos/builtin/packages/lua-luaposix/package.py +++ b/var/spack/repos/builtin/packages/lua-luaposix/package.py @@ -13,6 +13,7 @@ class LuaLuaposix(Package): url = "https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz" version('33.4.0', sha256='e66262f5b7fe1c32c65f17a5ef5ffb31c4d1877019b4870a5d373e2ab6526a21') + version('33.2.1', sha256='4fb34dfea67f4cf3194cdecc6614c9aea67edc3c4093d34137669ea869c358e1') extends("lua") diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py index ea6265e28a0..420e3b041a4 100644 --- a/var/spack/repos/builtin/packages/lua/package.py +++ b/var/spack/repos/builtin/packages/lua/package.py @@ -27,6 +27,9 @@ class Lua(Package): version('5.1.4', sha256='b038e225eaf2a5b57c9bcc35cd13aa8c6c8288ef493d52970c9545074098af3a') version('5.1.3', sha256='6b5df2edaa5e02bf1a2d85e1442b2e329493b30b0c0780f77199d24f087d296d') + variant('shared', default=True, + description='Builds a shared version of the library') + extendable = True depends_on('ncurses') @@ -58,31 +61,33 @@ def install(self, spec, prefix): make('INSTALL_TOP=%s' % prefix, 'install') - static_to_shared_library(join_path(prefix.lib, 'liblua.a'), - arguments=['-lm', '-ldl'], - version=self.version, - compat_version=self.version.up_to(2)) + if '+shared' in spec: + static_to_shared_library(join_path(prefix.lib, 'liblua.a'), + arguments=['-lm', '-ldl'], + version=self.version, + compat_version=self.version.up_to(2)) # compatibility with ax_lua.m4 from autoconf-archive # https://www.gnu.org/software/autoconf-archive/ax_lua.html - with working_dir(prefix.lib): - # e.g., liblua.so.5.1.5 - src_path = 'liblua.{0}.{1}'.format(dso_suffix, - str(self.version.up_to(3))) + if '+shared' in spec: + with working_dir(prefix.lib): + # e.g., liblua.so.5.1.5 + src_path = 'liblua.{0}.{1}'.format(dso_suffix, + str(self.version.up_to(3))) - # For lua version 5.1.X, the symlinks should be: - # liblua5.1.so - # liblua51.so - # liblua-5.1.so - # liblua-51.so - version_formats = [str(self.version.up_to(2)), - Version(str(self.version.up_to(2))).joined] - for version_str in version_formats: - for joiner in ['', '-']: - dest_path = 'liblua{0}{1}.{2}'.format(joiner, - version_str, - dso_suffix) - os.symlink(src_path, dest_path) + # For lua version 5.1.X, the symlinks should be: + # liblua5.1.so + # liblua51.so + # liblua-5.1.so + # liblua-51.so + version_formats = [str(self.version.up_to(2)), + Version(str(self.version.up_to(2))).joined] + for version_str in version_formats: + for joiner in ['', '-']: + dest_path = 'liblua{0}{1}.{2}'.format(joiner, + version_str, + dso_suffix) + os.symlink(src_path, dest_path) with working_dir(os.path.join('luarocks', 'luarocks')): configure('--prefix=' + prefix, '--with-lua=' + prefix) @@ -92,7 +97,8 @@ def install(self, spec, prefix): def append_paths(self, paths, cpaths, path): paths.append(os.path.join(path, '?.lua')) paths.append(os.path.join(path, '?', 'init.lua')) - cpaths.append(os.path.join(path, '?.so')) + if '+shared' in self.spec: + cpaths.append(os.path.join(path, '?.so')) def _setup_dependent_env_helper(self, env, dependent_spec): lua_paths = [] @@ 
-122,7 +128,8 @@ def setup_dependent_build_environment(self, env, dependent_spec): env, dependent_spec) env.set('LUA_PATH', ';'.join(lua_patterns), separator=';') - env.set('LUA_CPATH', ';'.join(lua_cpatterns), separator=';') + if '+shared' in self.spec: + env.set('LUA_CPATH', ';'.join(lua_cpatterns), separator=';') def setup_dependent_run_environment(self, env, dependent_spec): # For run time environment set only the path for dependent_spec and @@ -132,8 +139,9 @@ def setup_dependent_run_environment(self, env, dependent_spec): if dependent_spec.package.extends(self.spec): env.prepend_path('LUA_PATH', ';'.join(lua_patterns), separator=';') - env.prepend_path('LUA_CPATH', ';'.join(lua_cpatterns), - separator=';') + if '+shared' in spec: + env.prepend_path('LUA_CPATH', ';'.join(lua_cpatterns), + separator=';') def setup_run_environment(self, env): env.prepend_path( @@ -152,10 +160,11 @@ def setup_run_environment(self, env): 'LUA_PATH', os.path.join(self.spec.prefix, self.lua_lib_dir, '?', 'init.lua'), separator=';') - env.prepend_path( - 'LUA_CPATH', - os.path.join(self.spec.prefix, self.lua_lib_dir, '?.so'), - separator=';') + if '+shared' in self.spec: + env.prepend_path( + 'LUA_CPATH', + os.path.join(self.spec.prefix, self.lua_lib_dir, '?.so'), + separator=';') @property def lua_lib_dir(self): diff --git a/var/spack/repos/builtin/packages/lvm2/package.py b/var/spack/repos/builtin/packages/lvm2/package.py index c552fe3ef59..55fa1f1d39e 100644 --- a/var/spack/repos/builtin/packages/lvm2/package.py +++ b/var/spack/repos/builtin/packages/lvm2/package.py @@ -20,6 +20,9 @@ class Lvm2(AutotoolsPackage): homepage = "https://www.sourceware.org/lvm2" url = "https://sourceware.org/pub/lvm2/releases/LVM2.2.03.05.tgz" + # The server is sometimes a bit slow to respond + fetch_options = {'timeout': 60} + version('2.03.05', sha256='ca52815c999b20c6d25e3192f142f081b93d01f07b9d787e99664b169dba2700') version('2.03.04', sha256='f151f36fc0039997d2d9369b607b9262568b1a268afe19fd1535807355402142') version('2.03.03', sha256='cedefa63ec5ae1b62fedbfddfc30706c095be0fc7c6aaed6fd1c50bc8c840dde') diff --git a/var/spack/repos/builtin/packages/magma/package.py b/var/spack/repos/builtin/packages/magma/package.py index 27f2f438e97..2e864630188 100644 --- a/var/spack/repos/builtin/packages/magma/package.py +++ b/var/spack/repos/builtin/packages/magma/package.py @@ -7,10 +7,10 @@ from spack import * -class Magma(CMakePackage): - """The MAGMA project aims to develop a dense linear algebra library similar to - LAPACK but for heterogeneous/hybrid architectures, starting with current - "Multicore+GPU" systems. +class Magma(CMakePackage, CudaPackage): + """The MAGMA project aims to develop a dense linear algebra library similar + to LAPACK but for heterogeneous/hybrid architectures, starting with + current "Multicore+GPU" systems. 
""" homepage = "http://icl.cs.utk.edu/magma/" @@ -28,13 +28,15 @@ class Magma(CMakePackage): description='Enable Fortran bindings support') variant('shared', default=True, description='Enable shared library') + variant('cuda', default=True, description='Build with CUDA') depends_on('blas') depends_on('lapack') - depends_on('cuda') + depends_on('cuda@8:', when='@2.5.1:') # See PR #14471 - conflicts('%gcc@6:', when='^cuda@:8') - conflicts('%gcc@7:', when='^cuda@:9') + conflicts('~cuda', msg='Magma requires cuda') + conflicts('cuda_arch=none', + msg='Please indicate a CUDA arch value or values') patch('ibm-xl.patch', when='@2.2:2.5.0%xl') patch('ibm-xl.patch', when='@2.2:2.5.0%xl_r') @@ -69,11 +71,14 @@ def cmake_args(self): '-DCMAKE_Fortran_COMPILER=%s' % self.compiler.f77 ]) - if spec.satisfies('^cuda@9.0:'): + if spec.satisfies('^cuda'): + cuda_arch = self.spec.variants['cuda_arch'].value if '@:2.2.0' in spec: - options.extend(['-DGPU_TARGET=sm30']) + capabilities = ' '.join('sm{0}'.format(i) for i in cuda_arch) + options.extend(['-DGPU_TARGET=' + capabilities]) else: - options.extend(['-DGPU_TARGET=sm_30']) + capabilities = ' '.join('sm_{0}'.format(i) for i in cuda_arch) + options.extend(['-DGPU_TARGET=' + capabilities]) if '@2.5.0' in spec: options.extend(['-DMAGMA_SPARSE=OFF']) diff --git a/var/spack/repos/builtin/packages/med/package.py b/var/spack/repos/builtin/packages/med/package.py index 3b416c02742..c4be8dfbea4 100644 --- a/var/spack/repos/builtin/packages/med/package.py +++ b/var/spack/repos/builtin/packages/med/package.py @@ -15,15 +15,17 @@ class Med(CMakePackage): maintainers = ['likask'] + version('4.0.0', sha256='a474e90b5882ce69c5e9f66f6359c53b8b73eb448c5f631fa96e8cd2c14df004') version('3.2.0', sha256='d52e9a1bdd10f31aa154c34a5799b48d4266dc6b4a5ee05a9ceda525f2c6c138') variant('api23', default=True, description='Enable API2.3') depends_on('mpi') - depends_on('hdf5@:1.8.19+mpi') + depends_on('hdf5@:1.8.19+mpi', when='@3.2.0') + depends_on('hdf5@:1.10.2+mpi', when='@4.0.0') # C++11 requires a space between literal and identifier - patch('add_space.patch') + patch('add_space.patch', when='@3.2.0') # FIXME This is minimal installation. 
diff --git a/var/spack/repos/builtin/packages/mesa-glu/package.py b/var/spack/repos/builtin/packages/mesa-glu/package.py index 9593c562711..bd3c90b0203 100644 --- a/var/spack/repos/builtin/packages/mesa-glu/package.py +++ b/var/spack/repos/builtin/packages/mesa-glu/package.py @@ -12,6 +12,7 @@ class MesaGlu(AutotoolsPackage): homepage = "https://www.mesa3d.org" url = "https://www.mesa3d.org/archive/glu/glu-9.0.0.tar.gz" + version('9.0.1', sha256='f6f484cfcd51e489afe88031afdea1e173aa652697e4c19ddbcb8260579a10f7') version('9.0.0', sha256='4387476a1933f36fec1531178ea204057bbeb04cc2d8396c9ea32720a1f7e264') depends_on('gl@3:') diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index d46c75796ed..4132ba0ff49 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -174,3 +174,11 @@ def configure_args(self): args.append('--with-dri-drivers=' + ','.join(args_dri_drivers)) return args + + @property + def libs(self): + for dir in ['lib64', 'lib']: + libs = find_libraries('libGL', join_path(self.prefix, dir), + shared=True, recursive=False) + if libs: + return libs diff --git a/var/spack/repos/builtin/packages/meson/package.py b/var/spack/repos/builtin/packages/meson/package.py index 2878c712fcb..59edec8167f 100644 --- a/var/spack/repos/builtin/packages/meson/package.py +++ b/var/spack/repos/builtin/packages/meson/package.py @@ -14,6 +14,7 @@ class Meson(PythonPackage): homepage = "http://mesonbuild.com/" url = "https://github.com/mesonbuild/meson/archive/0.49.0.tar.gz" + version('0.53.2', sha256='eab4f5d5dde12d002b7ddd958a9a0658589b63622b6cea2715e0235b95917888') version('0.49.1', sha256='a944e7f25a2bc8e4ba3502ab5835d8a8b8f2530415c9d6fcffb53e0abaea2ced') version('0.49.0', sha256='11bc959e7173e714e4a4e85dd2bd9d0149b0a51c8ba82d5f44cc63735f603c74') version('0.42.0', sha256='6c318a2da3859326a37f8a380e3c50e97aaabff6990067218dffffea674ed76f') @@ -23,7 +24,7 @@ class Meson(PythonPackage): variant('ninjabuild', default=True) depends_on('python@3:', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('py-setuptools', type=('build', 'run')) depends_on('ninja', when='+ninjabuild', type=('build', 'run')) # By default, Meson strips the rpath on installation. This patch disables diff --git a/var/spack/repos/builtin/packages/minigan/package.py b/var/spack/repos/builtin/packages/minigan/package.py new file mode 100644 index 00000000000..5d3b553bcf1 --- /dev/null +++ b/var/spack/repos/builtin/packages/minigan/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Minigan(Package): + """miniGAN is a generative adversarial network code developed as part of the + Exascale Computing Project's (ECP) ExaLearn project at + Sandia National Laboratories.""" + + homepage = "https://github.com/SandiaMLMiniApps/miniGAN" + url = "https://github.com/SandiaMLMiniApps/miniGAN/archive/1.0.0.tar.gz" + + version('1.0.0', sha256='ef6d5def9c7040af520acc64b7a8b6c8ec4b7901721b11b0cb25a583ea0c8ae3') + + depends_on('python', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-torch', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-horovod@master', type=('build', 'run')) + depends_on('py-torchvision', type=('build', 'run')) + depends_on('py-matplotlib@3.0.0', type=('build', 'run')) + + def install(self, spec, prefix): + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/mofem-cephas/package.py b/var/spack/repos/builtin/packages/mofem-cephas/package.py index 28e00565d2f..24e88150841 100644 --- a/var/spack/repos/builtin/packages/mofem-cephas/package.py +++ b/var/spack/repos/builtin/packages/mofem-cephas/package.py @@ -42,7 +42,7 @@ class MofemCephas(CMakePackage): depends_on("parmetis") # Fixed version of hdf5, to remove some problems with dependent # packages, f.e. MED format - depends_on("hdf5@:1.8.19+hl+mpi") + depends_on("hdf5@:1.8.19+hl+mpi+fortran") depends_on("petsc@:3.9.3+mumps+mpi") depends_on('slepc', when='+slepc') depends_on("moab") diff --git a/var/spack/repos/builtin/packages/mono/package.py b/var/spack/repos/builtin/packages/mono/package.py index 73464f0fb94..6a7a96300ac 100644 --- a/var/spack/repos/builtin/packages/mono/package.py +++ b/var/spack/repos/builtin/packages/mono/package.py @@ -25,7 +25,7 @@ class Mono(AutotoolsPackage): # Spack's openssl interacts badly with mono's vendored # "boringssl", don't drag it in w/ cmake depends_on('cmake~openssl', type=('build')) - depends_on('libiconv') + depends_on('iconv') depends_on('perl', type=('build')) version('6.8.0.105', sha256='578799c44c3c86a9eb5daf6dec6c60a24341940fd376371956d4a46cf8612178', @@ -50,6 +50,6 @@ def patch(self): def configure_args(self): args = [] - li = self.spec['libiconv'].prefix + li = self.spec['iconv'].prefix args.append('--with-libiconv-prefix={p}'.format(p=li)) return args diff --git a/var/spack/repos/builtin/packages/mothur/package.py b/var/spack/repos/builtin/packages/mothur/package.py index 3f2283191a4..4e25a4dd3e1 100644 --- a/var/spack/repos/builtin/packages/mothur/package.py +++ b/var/spack/repos/builtin/packages/mothur/package.py @@ -23,7 +23,7 @@ class Mothur(MakefilePackage): depends_on('boost') depends_on('readline') - depends_on('vsearch', when='+vsearch', type='run') + depends_on('vsearch@2.13.3', when='+vsearch', type='run') def edit(self, spec, prefix): makefile = FileFilter('Makefile') diff --git a/var/spack/repos/builtin/packages/motif/package.py b/var/spack/repos/builtin/packages/motif/package.py index 7a6bc0de8f2..c64d35fc430 100644 --- a/var/spack/repos/builtin/packages/motif/package.py +++ b/var/spack/repos/builtin/packages/motif/package.py @@ -33,6 +33,7 @@ class Motif(AutotoolsPackage): depends_on("autoconf", type="build") depends_on("m4", type="build") depends_on("libtool", type="build") + depends_on("pkgconfig", type="build") patch('add_xbitmaps_dependency.patch') diff --git a/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_0.patch 
b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_0.patch new file mode 100644 index 00000000000..18d5c2ad657 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_0.patch @@ -0,0 +1,29 @@ +--- a/configure ++++ b/configure +@@ -11563,6 +11563,8 @@ _LT_EOF + lf95*) # Lahey Fortran 8.1 + whole_archive_flag_spec= + tmp_sharedflag='--shared' ;; ++ nagfor*) # NAGFOR 5.3 ++ tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' + tmp_addflag= ;; +@@ -19036,6 +19038,8 @@ _LT_EOF + lf95*) # Lahey Fortran 8.1 + whole_archive_flag_spec_F77= + tmp_sharedflag='--shared' ;; ++ nagfor*) # NAGFOR 5.3 ++ tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' + tmp_addflag= ;; +@@ -22116,6 +22120,8 @@ _LT_EOF + lf95*) # Lahey Fortran 8.1 + whole_archive_flag_spec_FC= + tmp_sharedflag='--shared' ;; ++ nagfor*) # NAGFOR 5.3 ++ tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' + tmp_addflag= ;; diff --git a/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_1.patch b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_1.patch new file mode 100644 index 00000000000..8fb671093bf --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_1.patch @@ -0,0 +1,71 @@ +--- a/confdb/ltmain.sh ++++ b/confdb/ltmain.sh +@@ -180,6 +180,20 @@ func_basename () + func_basename_result=`$ECHO "${1}" | $SED "$basename"` + } # func_basename may be replaced by extended shell implementation + ++# Calculate cc_basename. Skip known compiler wrappers and cross-prefix. ++func_cc_basename () ++{ ++ for cc_temp in $*""; do ++ case $cc_temp in ++ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; ++ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; ++ \-*) ;; ++ *) break;; ++ esac ++ done ++ func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` ++} ++ + + # func_dirname_and_basename file append nondir_replacement + # perform func_basename and func_dirname in a single function +@@ -6419,6 +6433,13 @@ func_mode_link () + # Convert "-framework foo" to "foo.ltframework" + if test -n "$inherited_linker_flags"; then + tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'` ++ ++ # Additionally convert " -pthread" to " -Wl,-pthread" for nagfor ++ func_cc_basename $CC ++ case $func_cc_basename_result in ++ nagfor*) tmp_inherited_linker_flags=`$ECHO "$tmp_inherited_linker_flags" | $SED 's/ -pthread/ -Wl,-pthread/g'` ;; ++ esac ++ + for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do + case " $new_inherited_linker_flags " in + *" $tmp_inherited_linker_flag "*) ;; +@@ -8001,6 +8022,13 @@ EOF + ;; + esac + ++ # Time to revert the changes made for nagfor. 
++ func_cc_basename $CC ++ case $func_cc_basename_result in ++ nagfor*) ++ new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% -Wl,-pthread% -pthread%g'` ;; ++ esac ++ + # move library search paths that coincide with paths to not yet + # installed libraries to the beginning of the library search list + new_libs= +--- a/configure ++++ b/configure +@@ -11614,6 +11614,8 @@ _LT_EOF + whole_archive_flag_spec= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ compiler_needs_object=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' +@@ -19246,6 +19248,8 @@ _LT_EOF + whole_archive_flag_spec_FC= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ compiler_needs_object_FC=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' diff --git a/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_2.patch b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_2.patch new file mode 100644 index 00000000000..871ffe6dd25 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.2_2.patch @@ -0,0 +1,11 @@ +--- a/configure ++++ b/configure +@@ -22237,6 +22237,8 @@ _LT_EOF + whole_archive_flag_spec_F77= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec_F77='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ compiler_needs_object_F77=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' diff --git a/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.6.patch b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.6.patch new file mode 100644 index 00000000000..50f73f2dde3 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/nag_libtool_2.4.6.patch @@ -0,0 +1,69 @@ +--- a/confdb/ltmain.sh ++++ b/confdb/ltmain.sh +@@ -7867,6 +7867,13 @@ func_mode_link () + # Convert "-framework foo" to "foo.ltframework" + if test -n "$inherited_linker_flags"; then + tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'` ++ ++ # Additionally convert " -pthread" to " -Wl,-pthread" for nagfor ++ func_cc_basename $CC ++ case $func_cc_basename_result in ++ nagfor*) tmp_inherited_linker_flags=`$ECHO "$tmp_inherited_linker_flags" | $SED 's/ -pthread/ -Wl,-pthread/g'` ;; ++ esac ++ + for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do + case " $new_inherited_linker_flags " in + *" $tmp_inherited_linker_flag "*) ;; +@@ -8886,7 +8893,8 @@ func_mode_link () + xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" + verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" + # On Darwin other compilers +- case $CC in ++ func_cc_basename $CC ++ case 
$func_cc_basename_result in + nagfor*) + verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" + ;; +@@ -9498,6 +9506,13 @@ EOF + ;; + esac + ++ # Time to revert the changes made for nagfor. ++ func_cc_basename $CC ++ case $func_cc_basename_result in ++ nagfor*) ++ new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% -Wl,-pthread% -pthread%g'` ;; ++ esac ++ + # move library search paths that coincide with paths to not yet + # installed libraries to the beginning of the library search list + new_libs= +--- a/configure ++++ b/configure +@@ -12583,6 +12583,8 @@ _LT_EOF + whole_archive_flag_spec= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ++ compiler_needs_object=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' +@@ -20684,6 +20686,8 @@ _LT_EOF + whole_archive_flag_spec_FC= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec_FC='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ++ compiler_needs_object_FC=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' +@@ -23958,6 +23962,8 @@ _LT_EOF + whole_archive_flag_spec_F77= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec_F77='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ++ compiler_needs_object_F77=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index 0980c668185..8360046ab43 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -63,6 +63,9 @@ class Mpich(AutotoolsPackage): ) variant('pci', default=(sys.platform != 'darwin'), description="Support analyzing devices on PCI bus") + variant('libxml2', default=True, + description='Use libxml2 for XML support instead of the custom ' + 'minimalistic implementation') provides('mpi') provides('mpi@:3.0', when='@3:') @@ -86,6 +89,23 @@ class Mpich(AutotoolsPackage): sha256='c7d4ecf865dccff5b764d9c66b6a470d11b0b1a5b4f7ad1ffa61079ad6b5dede', when='@3.3:3.3.0') + # This patch for Libtool 2.4.2 enables shared libraries for NAG and is + # applied by MPICH starting version 3.1. + patch('nag_libtool_2.4.2_0.patch', when='@:3.0%nag') + + # This patch for Libtool 2.4.2 fixes the problem with '-pthread' flag and + # enables convenience libraries for NAG. Starting version 3.1, the order of + # checks for FC and F77 is changed, therefore we need to apply the patch in + # two steps (the patch files can be merged once the support for versions + # 3.1 and older is dropped). 
+ patch('nag_libtool_2.4.2_1.patch', when='@:3.1.3%nag') + patch('nag_libtool_2.4.2_2.patch', when='@:3.1.3%nag') + + # This patch for Libtool 2.4.6 does the same as the previous two. The + # problem is not fixed upstream yet and the upper version constraint is + # given just to avoid application of the patch to the develop version. + patch('nag_libtool_2.4.6.patch', when='@3.1.4:3.3%nag') + depends_on('findutils', type='build') depends_on('pkgconfig', type='build') @@ -96,8 +116,14 @@ class Mpich(AutotoolsPackage): depends_on('ucx', when='netmod=ucx') - depends_on('libpciaccess', when="+pci") - depends_on('libxml2') + # The dependencies on libpciaccess and libxml2 come from the embedded + # hwloc, which, before version 3.3, was used only for Hydra. + depends_on('libpciaccess', when="@:3.2+hydra+pci") + depends_on('libxml2', when='@:3.2+hydra+libxml2') + + # Starting with version 3.3, MPICH uses hwloc directly. + depends_on('libpciaccess', when="@3.3:+pci") + depends_on('libxml2', when='@3.3:+libxml2') # Starting with version 3.3, Hydra can use libslurm for nodelist parsing depends_on('slurm', when='+slurm') @@ -119,6 +145,12 @@ class Mpich(AutotoolsPackage): conflicts('pmi=pmi2', when='device=ch3 netmod=ofi') conflicts('pmi=pmix', when='device=ch3') + # MPICH does not require libxml2 and libpciaccess for versions before 3.3 + # when ~hydra is set: prevent users from setting +libxml2 and +pci in this + # case to avoid generating an identical MPICH installation. + conflicts('+pci', when='@:3.2~hydra') + conflicts('+libxml2', when='@:3.2~hydra') + def setup_build_environment(self, env): env.unset('F90') env.unset('F90FLAGS') @@ -181,6 +213,7 @@ def die_without_fortran(self): def configure_args(self): spec = self.spec config_args = [ + '--disable-silent-rules', '--enable-shared', '--with-pm={0}'.format('hydra' if '+hydra' in spec else 'no'), '--{0}-romio'.format('enable' if '+romio' in spec else 'disable'), @@ -234,4 +267,11 @@ def configure_args(self): config_args.append('--with-ucx={0}'.format( spec['ucx'].prefix)) + # In other cases the argument is redundant. + if '@:3.2+hydra' in spec or '@3.3:' in spec: + # The root configure script passes the argument to the configure + # scripts of all instances of hwloc (there are three copies of it: + # for hydra, for hydra2, and for MPICH itself). 
+        config_args += self.enable_or_disable('libxml2')
+
         return config_args
diff --git a/var/spack/repos/builtin/packages/mpip/package.py b/var/spack/repos/builtin/packages/mpip/package.py
index 36fe6e31a7a..65c2453c25c 100644
--- a/var/spack/repos/builtin/packages/mpip/package.py
+++ b/var/spack/repos/builtin/packages/mpip/package.py
@@ -16,14 +16,38 @@ class Mpip(AutotoolsPackage):
     version('master', branch='master')
     version("3.4.1", sha256="688bf37d73211e6a915f9fc59c358282a266d166c0a10af07a38a01a473296f0")
 
+    variant('shared', default=False, description="Build the shared library")
+    variant('demangling', default=False, description="Build with demangling support")
+    variant('setjmp',
+            default=False,
+            description="Replace glibc backtrace() with setjmp for stack trace")
+
     depends_on("elf")
     depends_on("libdwarf")
     depends_on('libunwind', when=os.uname()[4] == "x86_64")
     depends_on("mpi")
 
+    @property
+    def build_targets(self):
+        targets = []
+        if '+shared' in self.spec:
+            targets.append('shared')
+
+        return targets
+
     def configure_args(self):
         config_args = ['--without-f77']
         config_args.append("--with-cc=%s" % self.spec['mpi'].mpicc)
         config_args.append("--with-cxx=%s" % self.spec['mpi'].mpicxx)
 
+        if '+demangling' in self.spec:
+            config_args.append('--enable-demangling')
+        else:
+            config_args.append('--disable-demangling')
+
+        if '+setjmp' in self.spec:
+            config_args.append('--enable-setjmp')
+        else:
+            config_args.append('--disable-setjmp')
+
         return config_args
diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py
index a56bc111e29..de34b6aa190 100644
--- a/var/spack/repos/builtin/packages/mumps/package.py
+++ b/var/spack/repos/builtin/packages/mumps/package.py
@@ -241,7 +241,7 @@ def write_makefile_inc(self):
         else:
             makefile_conf.extend([
                 'LIBEXT = .a',
-                'AR = ar vr',
+                'AR = ar vr ',
                 'RANLIB = ranlib'
             ])
 
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
index 87e9f77848b..f6301a564a8 100644
--- a/var/spack/repos/builtin/packages/mvapich2/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -10,10 +10,11 @@ class Mvapich2(AutotoolsPackage):
     """MVAPICH2 is an MPI implementation for Infiniband networks."""
 
     homepage = "http://mvapich.cse.ohio-state.edu/"
-    url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.3.2.tar.gz"
+    url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.3.3.tar.gz"
    list_url = "http://mvapich.cse.ohio-state.edu/downloads/"
 
     # Prefer the latest stable release
+    version('2.3.3', sha256='41d3261be57e5bc8aabf4e32981543c015c5443ff032a26f18205985e18c2b73')
     version('2.3.2', sha256='30cc0d7bcaa075d204692f76bca4d65a539e0f661c7460ffa9f835d6249e1ebf')
     version('2.3.1', sha256='314e12829f75f3ed83cd4779a972572d1787aac6543a3d024ea7c6080e0ee3bf')
     version('2.3', sha256='01d5fb592454ddd9ecc17e91c8983b6aea0e7559aa38f410b111c8ef385b50dd')
diff --git a/var/spack/repos/builtin/packages/mxnet/package.py b/var/spack/repos/builtin/packages/mxnet/package.py
index f3b7cddb23f..492a2573bc1 100644
--- a/var/spack/repos/builtin/packages/mxnet/package.py
+++ b/var/spack/repos/builtin/packages/mxnet/package.py
@@ -13,6 +13,8 @@ class Mxnet(MakefilePackage):
     homepage = "http://mxnet.io"
     url = "https://github.com/apache/incubator-mxnet/releases/download/1.3.0/apache-mxnet-src-1.3.0-incubating.tar.gz"
 
+    maintainers = ['adamjstewart']
+
     version('1.3.0', sha256='c00d6fbb2947144ce36c835308e603f002c1eb90a9f4c5a62f4d398154eed4d2')
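# A minimal sketch of what the `self.enable_or_disable('libxml2')` call added
# to MPICH's configure_args above expands to for a boolean variant; the helper
# below only mimics Spack's AutotoolsPackage method and is not taken from it.
def enable_or_disable(name, active):
    """Return the single configure switch matching the variant's value."""
    return ['--{0}-{1}'.format('enable' if active else 'disable', name)]

assert enable_or_disable('libxml2', True) == ['--enable-libxml2']
assert enable_or_disable('libxml2', False) == ['--disable-libxml2']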
variant('cuda', default=False, description='Enable CUDA support') diff --git a/var/spack/repos/builtin/packages/nanomsg/package.py b/var/spack/repos/builtin/packages/nanomsg/package.py new file mode 100644 index 00000000000..c3c5bb05e6e --- /dev/null +++ b/var/spack/repos/builtin/packages/nanomsg/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Nanomsg(CMakePackage): + """The nanomsg library is a simple high-performance + implementation of several 'scalability protocols'""" + + homepage = "https://nanomsg.org/" + url = "https://github.com/nanomsg/nanomsg/archive/1.0.0.tar.gz" + + version('1.1.5', sha256='218b31ae1534ab897cb5c419973603de9ca1a5f54df2e724ab4a188eb416df5a') + version('1.0.0', sha256='24afdeb71b2e362e8a003a7ecc906e1b84fd9f56ce15ec567481d1bb33132cc7') diff --git a/var/spack/repos/builtin/packages/ncl/package.py b/var/spack/repos/builtin/packages/ncl/package.py index 0fa063c52cd..e6e09faae1e 100644 --- a/var/spack/repos/builtin/packages/ncl/package.py +++ b/var/spack/repos/builtin/packages/ncl/package.py @@ -48,7 +48,7 @@ class Ncl(Package): # Extra dependencies that may be missing from build system: depends_on('bison', type='build') depends_on('flex+lex') - depends_on('libiconv') + depends_on('iconv') depends_on('tcsh') # Also, the manual says that ncl requires zlib, but that comes as a @@ -57,7 +57,7 @@ class Ncl(Package): # The following dependencies are required, otherwise several components # fail to compile: depends_on('curl') - depends_on('libiconv') + depends_on('iconv') depends_on('libx11') depends_on('libxaw') depends_on('libxmu') diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py index bee4fd655ce..9df2de49b76 100644 --- a/var/spack/repos/builtin/packages/ncurses/package.py +++ b/var/spack/repos/builtin/packages/ncurses/package.py @@ -20,6 +20,7 @@ class Ncurses(AutotoolsPackage, GNUMirrorPackage): # URL must remain http:// so Spack can bootstrap curl gnu_mirror_path = "ncurses/ncurses-6.1.tar.gz" + version('6.2', sha256='30306e0c76e0f9f1f0de987cf1c82a5c21e1ce6568b9227f7da5b71cbea86c9d') version('6.1', sha256='aa057eeeb4a14d470101eff4597d5833dcef5965331be3528c08d99cebaa0d17') version('6.0', sha256='f551c24b30ce8bfb6e96d9f59b42fbea30fa3a6123384172f9e7284bcf647260') version('5.9', sha256='9046298fb440324c9d4135ecea7879ffed8546dd1b58e59430ea07a4633f563b') diff --git a/var/spack/repos/builtin/packages/netcdf-c/package.py b/var/spack/repos/builtin/packages/netcdf-c/package.py index 01f2acf4253..e7ce1696a8b 100644 --- a/var/spack/repos/builtin/packages/netcdf-c/package.py +++ b/var/spack/repos/builtin/packages/netcdf-c/package.py @@ -5,16 +5,6 @@ from spack import * -import numbers - - -def is_integral(x): - """Any integer value""" - try: - return isinstance(int(x), numbers.Integral) and not isinstance(x, bool) - except ValueError: - return False - class NetcdfC(AutotoolsPackage): """NetCDF (network Common Data Form) is a set of software libraries and @@ -76,22 +66,6 @@ def url_for_version(self, version): # variant('cdmremote', default=False, # description='Enable CDM Remote support') - # These variants control the number of dimensions (i.e. coordinates and - # attributes) and variables (e.g. time, entity ID, number of coordinates) - # that can be used in any particular NetCDF file. 
- variant( - 'maxdims', - default=1024, - description='Defines the maximum dimensions of NetCDF files.', - values=is_integral - ) - variant( - 'maxvars', - default=8192, - description='Defines the maximum variables of NetCDF files.', - values=is_integral - ) - # The patch for 4.7.0 touches configure.ac. See force_autoreconf below. depends_on('autoconf', type='build', when='@4.7.0') depends_on('automake', type='build', when='@4.7.0') @@ -157,20 +131,6 @@ def force_autoreconf(self): # The patch for 4.7.0 touches configure.ac. return self.spec.satisfies('@4.7.0') - def patch(self): - try: - max_dims = int(self.spec.variants['maxdims'].value) - max_vars = int(self.spec.variants['maxvars'].value) - except (ValueError, TypeError): - raise TypeError('NetCDF variant values max[dims|vars] must be ' - 'integer values.') - - ff = FileFilter(join_path('include', 'netcdf.h')) - ff.filter(r'^(#define\s+NC_MAX_DIMS\s+)\d+(.*)$', - r'\1{0}\2'.format(max_dims)) - ff.filter(r'^(#define\s+NC_MAX_VARS\s+)\d+(.*)$', - r'\1{0}\2'.format(max_vars)) - def configure_args(self): cflags = [] cppflags = [] diff --git a/var/spack/repos/builtin/packages/nfs-ganesha/package.py b/var/spack/repos/builtin/packages/nfs-ganesha/package.py new file mode 100644 index 00000000000..1ea286270db --- /dev/null +++ b/var/spack/repos/builtin/packages/nfs-ganesha/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class NfsGanesha(CMakePackage): + """NFS-Ganesha is an NFSv3,v4,v4.1 fileserver that runs in user mode + on most UNIX/Linux systems. It also supports the 9p.2000L protocol.""" + + homepage = "https://github.com/nfs-ganesha/nfs-ganesha/wiki" + url = "https://github.com/nfs-ganesha/nfs-ganesha/archive/V3.2.tar.gz" + + version('3.2', sha256='1e3635f0eb0bc32868ea7d923d061d0f6b1bd03b45da34356c7c53d4c0ebafbd') + version('3.1', sha256='c4cf78929f39b8af44b05e813783b2c39e348b485043c6290c4bca705bb5015f') + version('3.0.3', sha256='fcc0361b9a2752be7eb4e990230765e17de373452ac24514be22c81a5447a460') + version('3.0', sha256='136c5642ff21ec6e8a4e77c037f6218a39b2eeba77798b13556f1abbb0923ccd') + + depends_on('bison', type='build') + depends_on('flex', type='build') + depends_on('userspace-rcu') + depends_on('ntirpc') + + root_cmakelists_dir = 'src' + + def setup_build_environment(self, env): + env.prepend_path('CPATH', self.spec['ntirpc'].prefix.include.ntirpc) diff --git a/var/spack/repos/builtin/packages/nfs-utils/package.py b/var/spack/repos/builtin/packages/nfs-utils/package.py new file mode 100644 index 00000000000..be46c57030b --- /dev/null +++ b/var/spack/repos/builtin/packages/nfs-utils/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class NfsUtils(AutotoolsPackage): + """The NFS Utilities package contains the userspace server and client tools + necessary to use the kernel's NFS abilities. 
NFS is a protocol that allows + sharing file systems over the network.""" + + homepage = "http://linux-nfs.org/" + url = "https://sourceforge.net/projects/nfs/files/nfs-utils/2.4.2/nfs-utils-2.4.2.tar.gz/download" + + version('2.4.2', sha256='bb08106cd7bd397c6cc34e2461bc7818a664450d2805da08b07e1ced88e5155f') + version('2.4.1', sha256='c0dda96318af554881f4eb1590bfe91f1aba2fba59ed2ac3ba099f80fdf838e9') + version('2.3.4', sha256='36e70b0a583751ead0034ebe5d8826caf2dcc7ee7c0beefe94d6ee5a3b0b2484') + + depends_on('libtirpc') + depends_on('libevent') + depends_on('libdmx') + depends_on('lvm2') + depends_on('keyutils') + depends_on('sqlite') + depends_on('util-linux') + + def setup_build_environment(self, env): + env.append_flags('LDFLAGS', '-lintl') + + def configure_args(self): + args = ['--disable-gss', '--with-rpcgen=internal'] + return args diff --git a/var/spack/repos/builtin/packages/nlopt/package.py b/var/spack/repos/builtin/packages/nlopt/package.py index c533bf60e80..509218ea839 100644 --- a/var/spack/repos/builtin/packages/nlopt/package.py +++ b/var/spack/repos/builtin/packages/nlopt/package.py @@ -17,6 +17,7 @@ class Nlopt(CMakePackage): git = "https://github.com/stevengj/nlopt.git" version('master', branch='master') + version('2.6.1', sha256='66d63a505187fb6f98642703bd0ef006fedcae2f9a6d1efa4f362ea919a02650') version('2.5.0', sha256='c6dd7a5701fff8ad5ebb45a3dc8e757e61d52658de3918e38bab233e7fd3b4ae') variant('shared', default=True, description='Enables the build of shared libraries') diff --git a/var/spack/repos/builtin/packages/ntirpc/package.py b/var/spack/repos/builtin/packages/ntirpc/package.py new file mode 100644 index 00000000000..fa29056e16b --- /dev/null +++ b/var/spack/repos/builtin/packages/ntirpc/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ntirpc(CMakePackage): + """New development on tirpc""" + + homepage = "https://github.com/nfs-ganesha/ntirpc" + url = "https://github.com/nfs-ganesha/ntirpc/archive/v3.2.tar.gz" + + version('3.2', sha256='db1639ca2f15df7e30d8c0a820ed9adf4eb623798db03b56a3659eedff49af76') + version('3.1', sha256='280b57db3a37c5b05116a7850460152b1ac53c050fd61ce190f5a5eb55ed3ba1') + version('3.0', sha256='9a6b11c1aa3e7f5f1f491bca0275e759de5bed2d73c8a028af7b6aadb68ac795') + version('1.8.0', sha256='3bb642dccc8f2506b57a03b5d3358654f59f47b33fddfaa5a7330df4cf336f9f') + version('1.7.3', sha256='8713ef095efc44df426bbd2b260ad457e5335bf3008fb97f01b0775c8042e54b') + + depends_on('libnsl') + depends_on('userspace-rcu') diff --git a/var/spack/repos/builtin/packages/numamma/package.py b/var/spack/repos/builtin/packages/numamma/package.py new file mode 100644 index 00000000000..cc714954498 --- /dev/null +++ b/var/spack/repos/builtin/packages/numamma/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Numamma(CMakePackage): + """NumaMMa is a lightweight memory profiler that reports the + memory access patterns of applications.""" + + homepage = "https://numamma.github.io/numamma/" + url = "https://github.com/numamma/numamma/archive/numamma-1.1.1.tar.gz" + maintainers = ['trahay'] + + version('1.1.1', sha256='f79ca22a95df33a1af529ddd653d043f7f0d32a6d196e559aee8bef8fc74771f') + + depends_on('numap') + depends_on('libbacktrace') + depends_on('numactl') + depends_on('libelf') + + def cmake_args(self): + spec = self.spec + cmake_args = [ + "-DBACKTRACE_DIR:PATH={0}".format(spec["libbacktrace"].prefix) + ] + return cmake_args diff --git a/var/spack/repos/builtin/packages/numap/package.py b/var/spack/repos/builtin/packages/numap/package.py new file mode 100644 index 00000000000..2df4cfd9950 --- /dev/null +++ b/var/spack/repos/builtin/packages/numap/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Numap(CMakePackage): + """Numap is a Linux library dedicated to memory profiling based on + hardware performance monitoring unit (PMU).""" + + homepage = "https://github.com/numap-library/numap" + git = "https://github.com/numap-library/numap.git" + maintainers = ['trahay'] + + version('master', branch='master') + version('2019-09-06', commit='ffcdb88c64b59b7a3220eb1077d2b237029ca96a') + + depends_on('libpfm4') diff --git a/var/spack/repos/builtin/packages/openblas/lapack-0.3.9-xerbl.patch b/var/spack/repos/builtin/packages/openblas/lapack-0.3.9-xerbl.patch new file mode 100644 index 00000000000..a61f3b3c009 --- /dev/null +++ b/var/spack/repos/builtin/packages/openblas/lapack-0.3.9-xerbl.patch @@ -0,0 +1,14 @@ +diff --git a/lapack-netlib/SRC/sorhr_col.f b/lapack-netlib/SRC/sorhr_col.f +index 38976245..600c19fb 100644 +--- a/lapack-netlib/SRC/sorhr_col.f ++++ b/lapack-netlib/SRC/sorhr_col.f +@@ -282,7 +282,8 @@ + $ NPLUSONE + * .. + * .. External Subroutines .. +- EXTERNAL SCOPY, SLAORHR_COL_GETRFNP, SSCAL, STRSM, XERBLA ++ EXTERNAL SCOPY, SLAORHR_COL_GETRFNP, SSCAL, STRSM, ++ $ XERBLA + * .. + * .. Intrinsic Functions .. 
+ INTRINSIC MAX, MIN diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index b98085c19c1..71d2ec4a6bd 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -38,7 +38,7 @@ class Openblas(MakefilePackage): variant('ilp64', default=False, description='Force 64-bit Fortran native integers') variant('pic', default=True, description='Build position independent code') variant('shared', default=True, description='Build shared libraries') - variant('consistentFPCSR', default=False, description='Synchronize FP CSR between threads (x86/x86_64 only)') + variant('consistent_fpcsr', default=False, description='Synchronize FP CSR between threads (x86/x86_64 only)') variant( 'threads', default='none', @@ -60,6 +60,8 @@ class Openblas(MakefilePackage): patch('openblas_icc_openmp.patch', when='@:0.2.20%intel@16.0:') patch('openblas_icc_fortran.patch', when='%intel@16.0:') patch('openblas_icc_fortran2.patch', when='%intel@18.0:') + # See https://github.com/spack/spack/issues/15385 + patch('lapack-0.3.9-xerbl.patch', when='@0.3.8: %intel') # Fixes compilation error on POWER8 with GCC 7 # https://github.com/xianyi/OpenBLAS/pull/1098 @@ -94,7 +96,10 @@ class Openblas(MakefilePackage): # Add conditions to f_check to determine the Fujitsu compiler patch('openblas_fujitsu.patch', when='%fj') + # See https://github.com/spack/spack/issues/3036 conflicts('%intel@16', when='@0.2.15:0.2.19') + conflicts('+consistent_fpcsr', when='threads=none', + msg='FPCSR consistency only applies to multithreading') @property def parallel(self): @@ -137,12 +142,23 @@ def _read_targets(target_file): return micros - @staticmethod - def _microarch_target_args(microarch, available_targets): + def _microarch_target_args(self): """Given a spack microarchitecture and a list of targets found in OpenBLAS' TargetList.txt, determine the best command-line arguments. 
""" - args = [] # Return value + # Read available openblas targets + targetlist_name = join_path(self.stage.source_path, "TargetList.txt") + if os.path.exists(targetlist_name): + with open(targetlist_name) as f: + available_targets = self._read_targets(f) + else: + available_targets = [] + + # Get our build microarchitecture + microarch = self.spec.target + + # List of arguments returned by this function + args = [] # List of available architectures, and possible aliases openblas_arch = set(['alpha', 'arm', 'ia64', 'mips', 'mips64', @@ -166,11 +182,14 @@ def _microarch_target_args(microarch, available_targets): if microarch.name in available_targets: break - arch_name = microarch.family.name - if arch_name in openblas_arch: - # Apply possible spack->openblas arch name mapping - arch_name = openblas_arch_map.get(arch_name, arch_name) - args.append('ARCH=' + arch_name) + if self.version >= Version("0.3"): + # 'ARCH' argument causes build errors in older OpenBLAS + # see https://github.com/spack/spack/issues/15385 + arch_name = microarch.family.name + if arch_name in openblas_arch: + # Apply possible spack->openblas arch name mapping + arch_name = openblas_arch_map.get(arch_name, arch_name) + args.append('ARCH=' + arch_name) if microarch.vendor == 'generic': # User requested a generic platform, or we couldn't find a good @@ -190,8 +209,6 @@ def _microarch_target_args(microarch, available_targets): @property def make_defs(self): - spec = self.spec - # Configure fails to pick up fortran from FC=/abs/path/to/fc, but # works fine with FC=/abs/path/to/gfortran. # When mixing compilers make sure that @@ -209,13 +226,7 @@ def make_defs(self): make_defs.append('MAKE_NB_JOBS=0') # flag provided by OpenBLAS # Add target and architecture flags - targetlist_name = join_path(self.stage.source_path, "TargetList.txt") - if os.path.exists(targetlist_name): - with open(targetlist_name) as f: - avail_targets = self._read_targets(f) - else: - avail_targets = [] - make_defs += self._microarch_target_args(spec.target, avail_targets) + make_defs += self._microarch_target_args() if '~shared' in self.spec: if '+pic' in self.spec: @@ -242,7 +253,7 @@ def make_defs(self): # Synchronize floating-point control and status register (FPCSR) # between threads (x86/x86_64 only). - if '+consistentFPCSR' in self.spec: + if '+consistent_fpcsr' in self.spec: make_defs += ['CONSISTENT_FPCSR=1'] # Prevent errors in `as` assembler from newer instructions diff --git a/var/spack/repos/builtin/packages/opendx/package.py b/var/spack/repos/builtin/packages/opendx/package.py new file mode 100644 index 00000000000..2cf5a9e5e36 --- /dev/null +++ b/var/spack/repos/builtin/packages/opendx/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class Opendx(AutotoolsPackage): + """Open Visualization Data Explorer.""" + + homepage = "https://github.com/Mwoolsey/OpenDX" + git = "https://github.com/Mwoolsey/OpenDX.git" + + version('master', branch='master') + + depends_on('motif') # lesstif also works, but exhibits odd behaviors + depends_on('gl') + + @run_before('autoreconf') + def distclean(self): + make('distclean') diff --git a/var/spack/repos/builtin/packages/openjdk/package.py b/var/spack/repos/builtin/packages/openjdk/package.py index 9d0b3d54e40..437c1701ddb 100644 --- a/var/spack/repos/builtin/packages/openjdk/package.py +++ b/var/spack/repos/builtin/packages/openjdk/package.py @@ -5,6 +5,32 @@ from spack import * import os +import platform + + +# If you need to add a new version, please be aware that: +# - versions in the following dict are automatically added to the package +# - version tuple must be in the form (checksum, url) +# - checksum must be sha256 +# - package key must be in the form '{os}-{arch}' where 'os' is in the +# format returned by platform.system() and 'arch' by platform.machine() + +_versions = { + '11.0.0-2020-01-01': { + 'Linux-aarch64': ('05c7d9c90edacd853850fbb0f52f8aa482809d0452c599cb9fe0b28b3b4bf329', 'https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk11u-2020-01-01-06-13/OpenJDK11U-jdk_aarch64_linux_hotspot_2020-01-01-06-13.tar.gz')}, + '11.0.2': { + 'Linux-x86_64': ('99be79935354f5c0df1ad293620ea36d13f48ec3ea870c838f20c504c9668b57', 'https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz')}, + '11.0.1': { + 'Linux-x86_64': ('7a6bb980b9c91c478421f865087ad2d69086a0583aeeb9e69204785e8e97dcfd', 'https://download.java.net/java/GA/jdk11/13/GPL/openjdk-11.0.1_linux-x64_bin.tar.gz')}, + '1.8.0_191-b12': { + 'Linux-aarch64': ('8eee0aede947b804f9a5f49c8a38b52aace8a30a9ebd9383b7d06042fb5a237c', 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u191-b12/OpenJDK8U-jdk_aarch64_linux_hotspot_8u191b12.tar.gz')}, + '1.8.0_222-b10': { + 'Linux-x86_64': ('20cff719c6de43f8bb58c7f59e251da7c1fa2207897c9a4768c8c669716dc819', 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u222-b10_openj9-0.15.1/OpenJDK8U-jdk_x64_linux_openj9_8u222b10_openj9-0.15.1.tar.gz')}, + '1.8.0_202-b08': { + 'Linux-x86_64': ('533dcd8d9ca15df231a1eb392fa713a66bca85a8e76d9b4ee30975f3823636b7', 'https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u202-b08/OpenJDK8U-jdk_x64_linux_openj9_8u202b08_openj9-0.12.0.tar.gz')}, + '1.8.0_40-b25': { + 'Linux-x86_64': ('79e96dce03a14271040023231a7d0ae374b755d48adf68bbdaec30294e4e2b88', 'https://download.java.net/openjdk/jdk8u40/ri/jdk_ri-8u40-b25-linux-x64-10_feb_2015.tar.gz')}, +} class Openjdk(Package): @@ -12,20 +38,17 @@ class Openjdk(Package): homepage = "https://jdk.java.net" - version("11.0.2", sha256="99be79935354f5c0df1ad293620ea36d13f48ec3ea870c838f20c504c9668b57", - url="https://download.java.net/java/GA/jdk11/9/GPL/openjdk-11.0.2_linux-x64_bin.tar.gz") - version("11.0.1", sha256="7a6bb980b9c91c478421f865087ad2d69086a0583aeeb9e69204785e8e97dcfd", - url="https://download.java.net/java/GA/jdk11/13/GPL/openjdk-11.0.1_linux-x64_bin.tar.gz") - version("1.8.0_202-b08", sha256="533dcd8d9ca15df231a1eb392fa713a66bca85a8e76d9b4ee30975f3823636b7", - url="https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/jdk8u202-b08/OpenJDK8U-jdk_x64_linux_openj9_8u202b08_openj9-0.12.0.tar.gz") - version('1.8.0_40-b25', 
sha256='79e96dce03a14271040023231a7d0ae374b755d48adf68bbdaec30294e4e2b88', - url='https://download.java.net/openjdk/jdk8u40/ri/jdk_ri-8u40-b25-linux-x64-10_feb_2015.tar.gz') + for ver, packages in _versions.items(): + key = "{0}-{1}".format(platform.system(), platform.machine()) + pkg = packages.get(key) + if pkg: + version(ver, sha256=pkg[0], url=pkg[1], expand=False) - provides('java@11', when='@11.0:11.99') provides('java@8', when='@1.8.0:1.8.999') + provides('java@11', when='@11.0:11.99') - conflicts('target=ppc64:', msg='openjdk is only available for x86_64') - conflicts('target=ppc64le:', msg='openjdk is only available for x86_64') + conflicts('target=ppc64:', msg='openjdk is only available for x86_64 and aarch64') + conflicts('target=ppc64le:', msg='openjdk is only available for x86_64 and aarch64') # FIXME: # 1. `extends('java')` doesn't work, you need to use `extends('openjdk')` diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index efb7198444a..a2b75bd67ee 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -80,9 +80,10 @@ class Openmpi(AutotoolsPackage): version('develop', branch='master') # Current - version('4.0.2', sha256='900bf751be72eccf06de9d186f7b1c4b5c2fa9fa66458e53b77778dffdfe4057') # libmpi.so.40.20.2 + version('4.0.3', sha256='1402feced8c3847b3ab8252165b90f7d1fa28c23b6b2ca4632b6e4971267fd03') # libmpi.so.40.20.3 # Still supported + version('4.0.2', sha256='900bf751be72eccf06de9d186f7b1c4b5c2fa9fa66458e53b77778dffdfe4057') # libmpi.so.40.20.2 version('4.0.1', sha256='cce7b6d20522849301727f81282201d609553103ac0b09162cf28d102efb9709') # libmpi.so.40.20.1 version('4.0.0', sha256='2f0b8a36cfeb7354b45dda3c5425ef8393c9b04115570b615213faaa3f97366b') # libmpi.so.40.20.0 version('3.1.5', preferred=True, sha256='fbf0075b4579685eec8d56d34d4d9c963e6667825548554f5bf308610af72133') # libmpi.so.40.10.4 @@ -193,7 +194,7 @@ class Openmpi(AutotoolsPackage): patch('llnl-platforms.patch', when="@1.6.5") patch('configure.patch', when="@1.10.1") patch('fix_multidef_pmi_class.patch', when="@2.0.0:2.0.1") - patch('fix-ucx-1.7.0-api-instability.patch', when='@4.0.0:4.0.3') + patch('fix-ucx-1.7.0-api-instability.patch', when='@4.0.0:4.0.2') # Vader Bug: https://github.com/open-mpi/ompi/issues/5375 # Haven't release fix for 2.1.x diff --git a/var/spack/repos/builtin/packages/openpmd-api/hdf5-1.12.0.patch b/var/spack/repos/builtin/packages/openpmd-api/hdf5-1.12.0.patch new file mode 100644 index 00000000000..f190f12ff1b --- /dev/null +++ b/var/spack/repos/builtin/packages/openpmd-api/hdf5-1.12.0.patch @@ -0,0 +1,23 @@ +From 61ccc18cdd478c6281466f1f77de416559234dd8 Mon Sep 17 00:00:00 2001 +From: Axel Huebl +Date: Tue, 17 Mar 2020 10:51:20 -0700 +Subject: [PATCH] HDF5: H5Oget_info Compatibility + +Update to work with HDF5 1.12.0 signature. +Macro for older releases. 
+--- + src/IO/HDF5/HDF5IOHandler.cpp | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/src/IO/HDF5/HDF5IOHandler.cpp b/src/IO/HDF5/HDF5IOHandler.cpp +index 7043861b..c125e1f4 100644 +--- a/src/IO/HDF5/HDF5IOHandler.cpp ++++ b/src/IO/HDF5/HDF5IOHandler.cpp +@@ -1535,3 +1535,7 @@ void HDF5IOHandlerImpl::listAttributes(Writable* writable, + H5O_info_t object_info; + herr_t status; ++#if H5_VERSION_GE(1,12,0) ++ status = H5Oget_info(node_id, &object_info, H5O_INFO_NUM_ATTRS); ++#else + status = H5Oget_info(node_id, &object_info); ++#endif diff --git a/var/spack/repos/builtin/packages/openpmd-api/package.py b/var/spack/repos/builtin/packages/openpmd-api/package.py index 0def9bc1f4c..644758923ee 100644 --- a/var/spack/repos/builtin/packages/openpmd-api/package.py +++ b/var/spack/repos/builtin/packages/openpmd-api/package.py @@ -15,6 +15,7 @@ class OpenpmdApi(CMakePackage): maintainers = ['ax3l'] version('dev', branch='dev') + version('0.11.1', tag='0.11.1-alpha') version('0.11.0', tag='0.11.0-alpha') version('0.10.3', tag='0.10.3-alpha') version('0.10.2', tag='0.10.2-alpha') @@ -55,6 +56,10 @@ class OpenpmdApi(CMakePackage): extends('python', when='+python') + # Fix breaking HDF5 1.12.0 API + # https://github.com/openPMD/openPMD-api/pull/696 + patch('hdf5-1.12.0.patch', when='@:0.11.0 +hdf5') + def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py index 56f9b0c6891..a0cf6e46cdd 100644 --- a/var/spack/repos/builtin/packages/openspeedshop/package.py +++ b/var/spack/repos/builtin/packages/openspeedshop/package.py @@ -96,7 +96,7 @@ class Openspeedshop(CMakePackage): depends_on("libxml2") - depends_on("qt@3", when='gui=qt3') + depends_on("qt@3:3.9", when='gui=qt3') # Dependencies for the openspeedshop cbtf packages. depends_on("cbtf@develop", when='@develop', type=('build', 'link', 'run')) diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index 450990c179a..fe650f71337 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -25,6 +25,8 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package # The latest stable version is the 1.1.1 series. This is also our Long Term # Support (LTS) version, supported until 11th September 2023. + version('1.1.1f', sha256='186c6bfe6ecfba7a5b48c47f8a1673d0f3b0e5ba2e25602dd23b629975da3f35') + version('1.1.1e', sha256='694f61ac11cb51c9bf73f54e771ff6022b0327a43bbdfa1b2f19de1662a6dcbe') version('1.1.1d', sha256='1e3a91bc1f9dfce01af26026f856e064eab4c8ee0a8f457b5ae30b40b8b711f2') version('1.1.1c', sha256='f6fb3079ad15076154eda9413fed42877d668e7069d9b87396d0804fdb3f4c90') version('1.1.1b', sha256='5c557b023230413dfb0756f3137a13e6d726838ccd1430888ad15bfb2b43ea4b') @@ -45,6 +47,7 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package # Our previous LTS version (1.0.2 series) will continue to be supported # until 31st December 2019 (security fixes only during the last year of # support). 
+ version('1.0.2u', sha256='ecd0c6ffb493dd06707d38b14bb4d8c2288bb7033735606569d8f90f89669d16') version('1.0.2t', sha256='14cb464efe7ac6b54799b34456bd69558a749a4931ecfd9cf9f71d7881cac7bc') version('1.0.2s', sha256='cabd5c9492825ce5bd23f3c3aeed6a97f8142f606d893df216411f07d1abab96') version('1.0.2r', sha256='ae51d08bba8a83958e894946f15303ff894d75c2b8bbd44a852b64e3fe11d0d6') diff --git a/var/spack/repos/builtin/packages/otf2/package.py b/var/spack/repos/builtin/packages/otf2/package.py index a44ebc44512..a123f0e8871 100644 --- a/var/spack/repos/builtin/packages/otf2/package.py +++ b/var/spack/repos/builtin/packages/otf2/package.py @@ -32,5 +32,6 @@ def configure_args(self): 'F77={0}'.format(spack_f77), 'FC={0}'.format(spack_fc), 'CFLAGS={0}'.format(self.compiler.pic_flag), - 'CXXFLAGS={0}'.format(self.compiler.pic_flag) + 'CXXFLAGS={0}'.format(self.compiler.pic_flag), + 'PYTHON_FOR_GENERATOR=:' ] diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py index c6dc408eb69..7ec1217640a 100644 --- a/var/spack/repos/builtin/packages/papi/package.py +++ b/var/spack/repos/builtin/packages/papi/package.py @@ -23,6 +23,8 @@ class Papi(Package): maintainers = ['G-Ragghianti'] url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.4.1.tar.gz" + version('6.0.0.1', sha256='3cd7ed50c65b0d21d66e46d0ba34cd171178af4bbf9d94e693915c1aca1e287f') + version('6.0.0', sha256='3442709dae3405c2845b304c06a8b15395ecf4f3899a89ceb4d715103cb4055f') version('5.7.0', sha256='d1a3bb848e292c805bc9f29e09c27870e2ff4cda6c2fba3b7da8b4bba6547589') version('5.6.0', sha256='49b7293f9ca2d74d6d80bd06b5c4be303663123267b4ac0884cbcae4c914dc47') version('5.5.1', sha256='49dc2c2323f6164c4a7e81b799ed690ee73158671205e71501f849391dd2c2d4') @@ -36,6 +38,7 @@ class Papi(Package): variant('powercap', default=False, description='Enable powercap interface support') variant('rapl', default=False, description='Enable RAPL support') variant('lmsensors', default=False, description='Enable lm_sensors support') + variant('sde', default=False, description='Enable software defined events') depends_on('lm-sensors', when='+lmsensors') @@ -43,15 +46,24 @@ class Papi(Package): # https://bitbucket.org/icl/papi/issues/46/cannot-compile-on-arch-linux patch('https://bitbucket.org/icl/papi/commits/53de184a162b8a7edff48fed01a15980664e15b1/raw', sha256='64c57b3ad4026255238cc495df6abfacc41de391a0af497c27d0ac819444a1f8', when='@5.4.0:5.6.99%gcc@8:') + def setup_build_environment(self, env): + if '+lmsensors' in self.spec and self.version >= Version('6'): + env.set('PAPI_LMSENSORS_ROOT', self.spec['lm-sensors'].prefix) + + def setup_run_environment(self, env): + if '+lmsensors' in self.spec and self.version >= Version('6'): + env.set('PAPI_LMSENSORS_ROOT', self.spec['lm-sensors'].prefix) + def install(self, spec, prefix): if '+lmsensors' in spec: - with working_dir("src/components/lmsensors"): - configure_args = [ - "--with-sensors_incdir=%s/sensors" % - spec['lm-sensors'].headers.directories[0], - "--with-sensors_libdir=%s" % - spec['lm-sensors'].libs.directories[0]] - configure(*configure_args) + if self.version < Version('6'): + with working_dir("src/components/lmsensors"): + configure_args = [ + "--with-sensors_incdir=%s/sensors" % + spec['lm-sensors'].headers.directories[0], + "--with-sensors_libdir=%s" % + spec['lm-sensors'].libs.directories[0]] + configure(*configure_args) with working_dir("src"): configure_args = ["--prefix=%s" % prefix] diff --git 
a/var/spack/repos/builtin/packages/parallel-netcdf/nag_libtool.patch b/var/spack/repos/builtin/packages/parallel-netcdf/nag_libtool.patch new file mode 100644 index 00000000000..f391c1dc5ef --- /dev/null +++ b/var/spack/repos/builtin/packages/parallel-netcdf/nag_libtool.patch @@ -0,0 +1,69 @@ +--- a/configure ++++ b/configure +@@ -11745,6 +11745,8 @@ _LT_EOF + whole_archive_flag_spec= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ++ compiler_needs_object=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' +@@ -21030,6 +21032,8 @@ _LT_EOF + whole_archive_flag_spec_F77= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec_F77='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ++ compiler_needs_object_F77=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' +@@ -24765,6 +24769,8 @@ _LT_EOF + whole_archive_flag_spec_FC= + tmp_sharedflag='--shared' ;; + nagfor*) # NAGFOR 5.3 ++ whole_archive_flag_spec_FC='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ++ compiler_needs_object_FC=yes + tmp_sharedflag='-Wl,-shared' ;; + xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) + tmp_sharedflag='-qmkshrobj' +--- a/scripts/ltmain.sh ++++ b/scripts/ltmain.sh +@@ -7862,6 +7862,13 @@ func_mode_link () + # Convert "-framework foo" to "foo.ltframework" + if test -n "$inherited_linker_flags"; then + tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'` ++ ++ # Additionally convert " -pthread" to " -Wl,-pthread" for nagfor ++ func_cc_basename $CC ++ case $func_cc_basename_result in ++ nagfor*) tmp_inherited_linker_flags=`$ECHO "$tmp_inherited_linker_flags" | $SED 's/ -pthread/ -Wl,-pthread/g'` ;; ++ esac ++ + for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do + case " $new_inherited_linker_flags " in + *" $tmp_inherited_linker_flag "*) ;; +@@ -8881,7 +8888,8 @@ func_mode_link () + xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" + verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" + # On Darwin other compilers +- case $CC in ++ func_cc_basename $CC ++ case $func_cc_basename_result in + nagfor*) + verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" + ;; +@@ -9493,6 +9501,13 @@ EOF + ;; + esac + ++ # Time to revert the changes made for nagfor. 
++ func_cc_basename $CC ++ case $func_cc_basename_result in ++ nagfor*) ++ new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% -Wl,-pthread% -pthread%g'` ;; ++ esac ++ + # move library search paths that coincide with paths to not yet + # installed libraries to the beginning of the library search list + new_libs= diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py index 34a1af59845..384717d05ef 100644 --- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py +++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py @@ -25,7 +25,6 @@ def url_for_version(self, version): return url.format(version.dotted) - version('develop', branch='develop') version('master', branch='master') version('1.12.1', sha256='56f5afaa0ddc256791c405719b6436a83b92dcd5be37fe860dea103aee8250a2') version('1.11.2', sha256='d2c18601b364c35b5acb0a0b46cd6e14cae456e0eb854e5c789cf65f3cd6a2a7') @@ -42,41 +41,74 @@ def url_for_version(self, version): variant('fortran', default=True, description='Build the Fortran Interface') variant('pic', default=True, description='Produce position-independent code (for shared libs)') + variant('shared', default=True, description='Enable shared library') depends_on('mpi') depends_on('m4', type='build') + depends_on('autoconf', when='@master', type='build') + depends_on('automake', when='@master', type='build') + depends_on('libtool', when='@master', type='build') + + conflicts('+shared', when='@:1.9%nag+fortran') + conflicts('+shared', when='@:1.8') + + patch('nag_libtool.patch', when='@1.9:1.12.1%nag') + + @property + def libs(self): + libraries = ['libpnetcdf'] + + query_parameters = self.spec.last_query.extra_parameters + + if 'shared' in query_parameters: + shared = True + elif 'static' in query_parameters: + shared = False + else: + shared = '+shared' in self.spec + + libs = find_libraries( + libraries, root=self.prefix, shared=shared, recursive=True + ) + + if libs: + return libs + + msg = 'Unable to recursively locate {0} {1} libraries in {2}' + raise spack.error.NoLibrariesError( + msg.format('shared' if shared else 'static', + self.spec.name, + self.spec.prefix)) + + @when('@master') + def autoreconf(self, spec, prefix): + with working_dir(self.configure_directory): + # We do not specify '-f' because we need to use libtool files from + # the repository. 
+ autoreconf('-iv') - # See: - # https://trac.mcs.anl.gov/projects/parallel-netcdf/browser/trunk/INSTALL def configure_args(self): - spec = self.spec + args = ['--with-mpi=%s' % self.spec['mpi'].prefix, + 'SEQ_CC=%s' % spack_cc] - args = ['--with-mpi={0}'.format(spec['mpi'].prefix)] - args.append('MPICC={0}'.format(spec['mpi'].mpicc)) - args.append('MPICXX={0}'.format(spec['mpi'].mpicxx)) - args.append('MPIF77={0}'.format(spec['mpi'].mpifc)) - args.append('MPIF90={0}'.format(spec['mpi'].mpifc)) - args.append('SEQ_CC={0}'.format(spack_cc)) + args += self.enable_or_disable('cxx') + args += self.enable_or_disable('fortran') - if '+pic' in spec: - args.extend([ - 'CFLAGS={0}'.format(self.compiler.pic_flag), - 'CXXFLAGS={0}'.format(self.compiler.pic_flag), - 'FFLAGS={0}'.format(self.compiler.pic_flag) - ]) + if '+pic' in self.spec: + args.extend(['{0}FLAGS={1}'.format(lang, self.compiler.pic_flag) + for lang in ['C', 'CXX', 'F', 'FC']]) - if '~cxx' in spec: - args.append('--disable-cxx') - - if '~fortran' in spec: - args.append('--disable-fortran') - - if spec.satisfies('@1.8.0:'): + if self.version >= Version('1.8'): args.append('--enable-relax-coord-bound') - return args + if self.version >= Version('1.9'): + args += self.enable_or_disable('shared') + args.extend(['--enable-static', + '--disable-silent-rules']) - def install(self, spec, prefix): - # Installation fails in parallel - make('install', parallel=False) + if self.spec.satisfies('%nag+fortran+shared'): + args.extend(['ac_cv_prog_fc_v=-Wl,-v', + 'ac_cv_prog_f77_v=-Wl,-v']) + + return args diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 111c8df7a49..48dff877e0a 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -20,6 +20,7 @@ class Paraview(CMakePackage, CudaPackage): maintainers = ['chuckatkins', 'danlipsa'] version('develop', branch='master', submodules=True) + version('5.8.0', sha256='219e4107abf40317ce054408e9c3b22fb935d464238c1c00c0161f1c8697a3f9') version('5.7.0', sha256='e41e597e1be462974a03031380d9e5ba9a7efcdb22e4ca2f3fec50361f310874') version('5.6.2', sha256='1f3710b77c58a46891808dbe23dc59a1259d9c6b7bb123aaaeaa6ddf2be882ea') version('5.6.0', sha256='cb8c4d752ad9805c74b4a08f8ae6e83402c3f11e38b274dba171b99bb6ac2460') diff --git a/var/spack/repos/builtin/packages/perfstubs/package.py b/var/spack/repos/builtin/packages/perfstubs/package.py new file mode 100644 index 00000000000..9162e749fce --- /dev/null +++ b/var/spack/repos/builtin/packages/perfstubs/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Perfstubs(CMakePackage): + """Profiling API for adding tool instrumentation support to any project. + + This was motivated by the need to quickly add instrumentation to the + [ADIOS2](https://github.com/ornladios/ADIOS2) library without adding a build + dependency, or tying to a specific measurement tool. + + The initial prototype implementation was tied to TAU, but evolved to this more + generic version, which was extracted as a separate repository for testing and + demonstration purposes. 
+ """ + + homepage = "https://github.com/khuck/perfstubs" + git = "https://github.com/khuck/perfstubs.git" + + version('master', branch='master') + variant('static', default=False, description='Build static executable support') + + def cmake_args(self): + spec = self.spec + + args = [ + '-DPERFSTUBS_USE_STATIC:BOOL={0}'.format( + 'ON' if '+static' in spec else 'OFF') + ] + return args diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 88332caec98..6c5ab4a5641 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -16,14 +16,16 @@ class Petsc(Package): """ homepage = "http://www.mcs.anl.gov/petsc/index.html" - url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.5.3.tar.gz" - git = "https://gitlab.com/petsc/petsc.git" - + url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-lite-3.13.0.tar.gz" + git = "https://gitlab.com/petsc/petsc.git" maintainers = ['balay', 'barrysmith', 'jedbrown'] version('develop', branch='master') version('xsdk-0.2.0', tag='xsdk-0.2.0') + version('3.13.0', sha256='f0ea543a54145c5d1387e25b121c3fd1b1ca834032c5a33f6f1d929e95bdf0e5') + version('3.12.5', sha256='d676eb67e79314d6cca6422d7c477d2b192c830b89d5edc6b46934f7453bcfc0') + version('3.12.4', sha256='56a941130da93bbacb3cfa74dcacea1e3cd8e36a0341f9ced09977b1457084c3') version('3.12.3', sha256='91f77d7b0f54056f085b9e27938922db3d9bb1734a2e2a6d26f43d3e6c0cf631') version('3.12.2', sha256='d874b2e198c4cb73551c2eca1d2c5d27da710be4d00517adb8f9eb3d6d0375e8') version('3.12.1', sha256='b72d895d0f4a79acb13ebc782b47b26d10d4e5706d399f533afcd5b3dba13737') @@ -99,6 +101,8 @@ class Petsc(Package): description='Activate X support') variant('batch', default=False, description='Enable when mpiexec is not available to run binaries') + variant('valgrind', default=False, + description='Enable Valgrind Client Request mechanism') # 3.8.0 has a build issue with MKL - so list this conflict explicitly conflicts('^intel-mkl', when='@3.8.0') @@ -143,9 +147,11 @@ class Petsc(Package): depends_on('metis@5:~int64', when='@3.8:+metis~int64') depends_on('metis@5:+int64', when='@3.8:+metis+int64') - depends_on('hdf5+mpi+hl+fortran', when='+hdf5+mpi') + depends_on('hdf5@:1.10.99+mpi+hl+fortran', when='@:3.12.99+hdf5+mpi') + depends_on('hdf5+mpi+hl+fortran', when='@3.13:+hdf5+mpi') depends_on('zlib', when='+hdf5') depends_on('parmetis', when='+metis+mpi') + depends_on('valgrind', when='+valgrind') # Hypre does not support complex numbers. 
# Also PETSc prefer to build it without internal superlu, likely due to # conflict in headers see @@ -160,14 +166,16 @@ class Petsc(Package): depends_on('hypre@develop~internal-superlu~int64', when='@develop+hypre+mpi~complex~int64') depends_on('superlu-dist@:4.3~int64', when='@3.4.4:3.6.4+superlu-dist+mpi~int64') depends_on('superlu-dist@:4.3+int64', when='@3.4.4:3.6.4+superlu-dist+mpi+int64') - depends_on('superlu-dist@5.0.0:~int64', when='@3.7:3.7.99+superlu-dist+mpi~int64') - depends_on('superlu-dist@5.0.0:+int64', when='@3.7:3.7.99+superlu-dist+mpi+int64') + depends_on('superlu-dist@5.0.0:5.1.3~int64', when='@3.7:3.7.99+superlu-dist+mpi~int64') + depends_on('superlu-dist@5.0.0:5.1.3+int64', when='@3.7:3.7.99+superlu-dist+mpi+int64') depends_on('superlu-dist@5.2:5.2.99~int64', when='@3.8:3.9.99+superlu-dist+mpi~int64') depends_on('superlu-dist@5.2:5.2.99+int64', when='@3.8:3.9.99+superlu-dist+mpi+int64') depends_on('superlu-dist@5.4:5.4.99~int64', when='@3.10:3.10.2+superlu-dist+mpi~int64') depends_on('superlu-dist@5.4:5.4.99+int64', when='@3.10:3.10.2+superlu-dist+mpi+int64') depends_on('superlu-dist@6.1:6.1.99~int64', when='@3.10.3:3.12.99+superlu-dist+mpi~int64') depends_on('superlu-dist@6.1:6.1.99+int64', when='@3.10.3:3.12.99+superlu-dist+mpi+int64') + depends_on('superlu-dist@6.1:6.3.99~int64', when='@3.13.0:3.13.99+superlu-dist+mpi~int64') + depends_on('superlu-dist@6.1:6.3.99+int64', when='@3.13.0:3.13.99+superlu-dist+mpi+int64') depends_on('superlu-dist@xsdk-0.2.0~int64', when='@xsdk-0.2.0+superlu-dist+mpi~int64') depends_on('superlu-dist@xsdk-0.2.0+int64', when='@xsdk-0.2.0+superlu-dist+mpi+int64') depends_on('superlu-dist@develop~int64', when='@develop+superlu-dist+mpi~int64') @@ -181,6 +189,13 @@ class Petsc(Package): depends_on('suite-sparse', when='+suite-sparse') depends_on('libx11', when='+X') + def url_for_version(self, version): + if version >= Version('3.13.0'): + # petsc-lite tarballs are smaller by skipping docs + return "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-lite-{0}.tar.gz".format(version) + else: + return "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-{0}.tar.gz".format(version) + def mpi_dependent_options(self): if '~mpi' in self.spec: compiler_opts = [ @@ -280,7 +295,7 @@ def install(self, spec, prefix): # Activates library support if needed for library in ('metis', 'hdf5', 'hypre', 'parmetis', - 'mumps', 'trilinos', 'fftw'): + 'mumps', 'trilinos', 'fftw', 'valgrind'): options.append( '--with-{library}={value}'.format( library=library, value=('1' if library in spec else '0')) diff --git a/var/spack/repos/builtin/packages/pfunit/package.py b/var/spack/repos/builtin/packages/pfunit/package.py index 30704ac62ed..ab6bc53d2c5 100644 --- a/var/spack/repos/builtin/packages/pfunit/package.py +++ b/var/spack/repos/builtin/packages/pfunit/package.py @@ -30,10 +30,14 @@ class Pfunit(CMakePackage): variant('shared', default=True, description='Build shared library in addition to static') variant('mpi', default=False, description='Enable MPI') - variant('use_comm_world', default=False, description='Enable MPI_COMM_WORLD for testing') + variant('use_comm_world', default=False, + description='Enable MPI_COMM_WORLD for testing') variant('openmp', default=False, description='Enable OpenMP') variant('docs', default=False, description='Build docs') + variant('max_array_rank', values=int, default=5, + description='Max number of Fortran dimensions of array asserts') + depends_on('python@2.7:', type=('build', 'run')) # python3 too! 
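# [Editor's aside -- illustrative sketch, not part of the patch.] The new
# integer-valued 'max_array_rank' variant added to pfunit above is chosen on
# the spec (e.g. `spack install pfunit max_array_rank=7`), and its value is
# read back as a string inside the package, which is what the cmake_args()
# hunk a few lines below does. A minimal sketch of that pattern, using a
# hypothetical helper name:

def _max_rank_flag(spec):
    # Integer-valued variant values are stored as strings on the spec.
    return '-DMAX_RANK=%s' % spec.variants['max_array_rank'].value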
depends_on('mpi', when='+mpi') @@ -53,7 +57,8 @@ def cmake_args(self): '-DBUILD_SHARED=%s' % ('YES' if '+shared' in spec else 'NO'), '-DCMAKE_Fortran_MODULE_DIRECTORY=%s' % spec.prefix.include, '-DBUILD_DOCS=%s' % ('YES' if '+docs' in spec else 'NO'), - '-DOPENMP=%s' % ('YES' if '+openmp' in spec else 'NO')] + '-DOPENMP=%s' % ('YES' if '+openmp' in spec else 'NO'), + '-DMAX_RANK=%s' % spec.variants['max_array_rank'].value] if spec.satisfies('+mpi'): args.extend(['-DMPI=YES', '-DMPI_USE_MPIEXEC=YES', diff --git a/var/spack/repos/builtin/packages/pgi/package.py b/var/spack/repos/builtin/packages/pgi/package.py index 5be94303355..aa6e60ddcc4 100644 --- a/var/spack/repos/builtin/packages/pgi/package.py +++ b/var/spack/repos/builtin/packages/pgi/package.py @@ -91,6 +91,9 @@ def install(self, spec, prefix): def setup_run_environment(self, env): prefix = Prefix(join_path(self.prefix, 'linux86-64', self.version)) + env.prepend_path('PATH', prefix.bin) + env.prepend_path('MANPATH', prefix.man) + env.prepend_path('LD_LIBRARY_PATH', prefix.lib) env.set('CC', join_path(prefix.bin, 'pgcc')) env.set('CXX', join_path(prefix.bin, 'pgc++')) env.set('F77', join_path(prefix.bin, 'pgfortran')) diff --git a/var/spack/repos/builtin/packages/php/package.py b/var/spack/repos/builtin/packages/php/package.py new file mode 100644 index 00000000000..714bfb0e8f6 --- /dev/null +++ b/var/spack/repos/builtin/packages/php/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Php(AutotoolsPackage): + """ + PHP is a popular general-purpose scripting language that is especially + suited to web development. Fast, flexible and pragmatic, PHP powers + everything from your blog to the most popular websites in the world. 
+ """ + + homepage = "https://php.net/" + url = "https://github.com/php/php-src/archive/php-7.3.13.tar.gz" + + version('7.4.1', sha256='4d9d7c5681bec3af38a935d033657dce09a9913498f8022d7ca163a7f2f493a7') + version('7.4.0', sha256='91d34b48025ab9789216df89e247b6904912eeeaeff38c300ef314bdda8920b0') + version('7.3.13', sha256='e68b8d9e659f2993eee912f05860e546fdc18e459f31cd2771f404df21285f0b') + version('7.3.12', sha256='d0672ea84c0ab184f636acff3230d376d89a2067d59a87a2f1842361ee1f97d6') + version('7.3.11', sha256='4d861b2f3bc640ded8b591ce87250161392a6244a3c84042da0c06fd8c500eb2') + version('7.2.26', sha256='da132a836cec8021c00f22952e6044d91628ee3d2ef92a95d65cf91bad810600') + version('7.2.25', sha256='049b2d291c45cb889d15fcd2bac6da7d15ca5d535d272d2f8879fb834bbf276e') + version('7.2.24', sha256='334c9915733f6a29e1462f64038b1b4b1b21cb18f4f5f980add86792b5550ab3') + version('7.1.33', sha256='f80a795a09328a9441bae4a8a60fa0d6d43ec5adc98f5aa5f51d06f4522c07fe') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + depends_on('pkgconfig', type='build') + depends_on('bison', type='build') + depends_on('re2c', type='build') + depends_on('libxml2') + depends_on('sqlite') + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./buildconf', '--force') diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py index c8b1f8e9b96..b51d3c12c2d 100644 --- a/var/spack/repos/builtin/packages/plumed/package.py +++ b/var/spack/repos/builtin/packages/plumed/package.py @@ -25,8 +25,9 @@ class Plumed(AutotoolsPackage): git = 'https://github.com/plumed/plumed2.git' version('master', branch='master') - version('2.6b', sha256='3ecda9d46967c8ddd08e820aed974794d926cffb78b262f9d42cdbece3b15677') - version('2.5.3', preferred=True, sha256='543288be667dc4201fc461ecd2dd4878ddfbeac682d0c021c99ea8e501c7c9dc') + version('2.6.0', sha256='3d57ae460607a49547ef38a52c4ac93493a3966857c352280a9c05f5dcdb1820') + version('2.5.4', preferred=True, sha256='a1647e598191f261e75d06351e607475d395af481315052a4c28563ac9989a7f') + version('2.5.3', sha256='543288be667dc4201fc461ecd2dd4878ddfbeac682d0c021c99ea8e501c7c9dc') version('2.5.2', sha256='85d10cc46e2e37c7719cf51c0931278f56c2c8f8a9d86188b2bf97c2535a2ab4') version('2.5.1', sha256='de309980dcfd6f6e0e70e138856f4bd9eb4d8a513906a5e6389f18a5af7f2eba') version('2.5.0', sha256='53e08187ec9f8af2326fa84407e34644a7c51d2af93034309fb70675eee5e4f7') @@ -68,6 +69,7 @@ class Plumed(AutotoolsPackage): depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') + depends_on('py-cython', type='build', when='@2.5:') force_autoreconf = True diff --git a/var/spack/repos/builtin/packages/pmix/package.py b/var/spack/repos/builtin/packages/pmix/package.py index 0ebdce79872..2cb5126b535 100644 --- a/var/spack/repos/builtin/packages/pmix/package.py +++ b/var/spack/repos/builtin/packages/pmix/package.py @@ -39,6 +39,7 @@ class Pmix(AutotoolsPackage): version('3.0.2', sha256='df68f35a3ed9517eeade80b13855cebad8fde2772b36a3f6be87559b6d430670') version('3.0.1', sha256='b81055d2c0d61ef5a451b63debc39c820bcd530490e2e4dcb4cdbacb618c157c') version('3.0.0', sha256='ee8f68107c24b706237a53333d832445315ae37de6773c5413d7fda415a6e2ee') + version('2.2.3', sha256='6fa5d45eb089e29101190c645e986342a24a03a4ea3a936db0b120aafa45b1f0') version('2.2.2', sha256='cd951dbda623fadc5b32ae149d8cc41f9462eac4d718d089340911b1a7c20714') 
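# [Editor's aside -- illustrative sketch, not part of the patch.] The pmix
# dependency constraints a few lines below use Spack's version-range syntax:
# 'libevent@2.0.20:2.0.22,2.1.8' means "2.0.20 through 2.0.22, or exactly
# 2.1.8", and 'hwloc@1.11.0:1.11.99,2.0.1:' means "1.11.0 through 1.11.99
# (effectively any 1.11.x), or anything at 2.0.1 and newer"; ranges are
# inclusive at both ends. A rough check of how such a constraint resolves,
# assuming Spack's Spec class is importable in the running interpreter:

from spack.spec import Spec

Spec('hwloc@1.11.5').satisfies('hwloc@1.11.0:1.11.99,2.0.1:')   # True
Spec('hwloc@2.1.0').satisfies('hwloc@1.11.0:1.11.99,2.0.1:')    # True
Spec('hwloc@1.10.1').satisfies('hwloc@1.11.0:1.11.99,2.0.1:')   # False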
version('2.1.4', sha256='eb72d292e76e200f02cf162a477eecea2559ef3ac2edf50ee95b3fe3983d033e') version('2.1.3', sha256='281283133498e7e5999ed5c6557542c22408bc9eb51ecbcf7696160616782a41') @@ -49,6 +50,10 @@ class Pmix(AutotoolsPackage): depends_on('libevent@2.0.20:2.0.22,2.1.8') depends_on('hwloc@1.11.0:1.11.99,2.0.1:', when='@3.0.0:') + variant('pmi_backwards_compatibility', + default=True, + description="Toggle pmi backwards compatibility") + def configure_args(self): spec = self.spec @@ -57,6 +62,11 @@ def configure_args(self): '--enable-static' ] + if '+pmi_backwards_compatibility' in self.spec: + config_args.append('--enable-pmi-backward-compatibility') + else: + config_args.append('--disable-pmi-backward-compatibility') + # libevent support config_args.append( '--with-libevent={0}'.format(spec['libevent'].prefix)) diff --git a/var/spack/repos/builtin/packages/pocl/package.py b/var/spack/repos/builtin/packages/pocl/package.py index f9b343271a7..42a862d97b6 100644 --- a/var/spack/repos/builtin/packages/pocl/package.py +++ b/var/spack/repos/builtin/packages/pocl/package.py @@ -20,6 +20,9 @@ class Pocl(CMakePackage): git = "https://github.com/pocl/pocl.git" version("master", branch="master") + version('1.4', sha256='ec237faa83bb1c803fbdf7c6e83d8a2ad68b6f0ed1879c3aa16c0e1dcc478742') + version('1.3', sha256='6527e3f47fab7c21e96bc757c4ae3303901f35e23f64642d6da5cc4c4fcc915a') + version('1.2', sha256='0c43e68f336892f3a64cba19beb99d9212f529bedb77f7879c0331450b982d46') version('1.1', sha256='1e8dd0693a88c84937754df947b202871a40545b1b0a97ebefa370b0281c3c53') version('1.0', sha256='94bd86a2f9847c03e6c3bf8dca12af3734f8b272ffeacbc3fa8fcca58844b1d4') version('0.14', sha256='2127bf925a91fbbe3daf2f1bac0da5c8aceb16e2a9434977a3057eade974106a') @@ -48,6 +51,9 @@ class Pocl(CMakePackage): # (see #1616) # These are the supported LLVM versions depends_on("llvm +clang @6.0:7.0", when="@master") + depends_on("llvm +clang @6.0:9.0", when="@1.4") + depends_on("llvm +clang @5.0:8.0", when="@1.3") + depends_on("llvm +clang @5.0:7.0", when="@1.2") depends_on("llvm +clang @5.0:6.0", when="@1.1") depends_on("llvm +clang @4.0:5.0", when="@1.0") depends_on("llvm +clang @3.7:4.0", when="@0.14") diff --git a/var/spack/repos/builtin/packages/poppler/package.py b/var/spack/repos/builtin/packages/poppler/package.py index 8689f817e51..affb9e5359a 100644 --- a/var/spack/repos/builtin/packages/poppler/package.py +++ b/var/spack/repos/builtin/packages/poppler/package.py @@ -49,7 +49,7 @@ class Poppler(CMakePackage): depends_on('qt@4.0:', when='+qt') depends_on('zlib', when='+zlib') depends_on('cairo@1.10.0:', when='+glib') - depends_on('libiconv', when='+iconv') + depends_on('iconv', when='+iconv') depends_on('jpeg', when='+jpeg') depends_on('libpng', when='+png') depends_on('libtiff', when='+tiff') diff --git a/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch b/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch new file mode 100755 index 00000000000..d21d9364e58 --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-bem-at/duplicate_defs.patch @@ -0,0 +1,13 @@ +diff --git a/framework_with_templates/OAT_bem-bb-fw-dense-0.1.0.f90 b/framework_with_templates/OAT_bem-bb-fw-dense-0.1.0.f90 +index edabbed..16c6a52 100644 +--- a/framework_with_templates/OAT_bem-bb-fw-dense-0.1.0.f90 ++++ b/framework_with_templates/OAT_bem-bb-fw-dense-0.1.0.f90 +@@ -1201,8 +1201,6 @@ contains + integer i, j + + include "OAT.h" +- integer iusw1_ppohBEMresidual_direct +- integer 
iusw1_ppohBEMmatvec_direct
+ 
+ character*100 ctmp
+ 
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-bem-at/package.py b/var/spack/repos/builtin/packages/ppopen-appl-bem-at/package.py
new file mode 100644
index 00000000000..cba923eeeac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ppopen-appl-bem-at/package.py
@@ -0,0 +1,58 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+import os
+
+
+class PpopenApplBemAt(MakefilePackage):
+    """
+    ppOpen-APPL/BEM-AT is ppOpen-APPL/BEM with auto-tuning.
+    If you want to use ppOpen-APPL/BEM-AT, please copy the files in
+    src/framework_with_template from the ppOpen-APPL/BEM install directory.
+    """
+
+    homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/"
+    url = "file://{0}/ppohBEM_AT_0.1.0.tar.gz".format(os.getcwd())
+
+    version('0.1.0', sha256='215034fea7d9f64e6361d8e605e04c7f5d302c87ce048dcd6d146b14d22c17f9')
+    # OAT_bem-bb-fw-dense-0.1.0.f90 defines two variables that are
+    # already defined in the include file OAT.h. This patch removes
+    # the duplicate definitions from
+    # OAT_bem-bb-fw-dense-0.1.0.f90.
+    patch('duplicate_defs.patch', when="@0.1.0")
+
+    depends_on('mpi')
+    depends_on('ppopen-appl-bem', type='run')
+
+    parallel = False
+    build_directory = 'framework_with_templates'
+    build_targets = ['SYSTEM=spack']
+
+    def edit(self, spec, prefix):
+        flags = ['-O3', self.compiler.openmp_flag]
+        fflags = flags[:]
+        if spec.satisfies('%gcc'):
+            fflags.append('-ffree-line-length-none')
+        with open(join_path(self.build_directory, 'Makefile'), 'a') as m:
+            m.write('ifeq ($(SYSTEM),spack)\n')
+            m.write(' CC = {0}\n'.format(spec['mpi'].mpicc))
+            m.write(' F90 = {0}\n'.format(spec['mpi'].mpifc))
+            m.write(' CCFLAGS = {0}\n'.format(' '.join(flags)))
+            m.write(' F90FLAGS = {0}\n'.format(' '.join(fflags)))
+            m.write(' FFLAGS = {0}\n'.format(' '.join(fflags)))
+            m.write(' LDFLAGS = {0}\n'.format(' '.join(flags)))
+            m.write('endif\n')
+
+    def install(self, spec, prefix):
+        install_src_dir = join_path(prefix.src, self.build_directory)
+        mkdir(prefix.bin)
+        mkdirp(install_src_dir)
+        for f in find(self.build_directory, '*.out'):
+            copy(f, prefix.bin)
+        install_src = join_path(prefix.src, self.build_directory)
+        install_tree(self.build_directory, install_src_dir)
+        with working_dir(install_src):
+            make('clean')
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-bem/package.py b/var/spack/repos/builtin/packages/ppopen-appl-bem/package.py
new file mode 100644
index 00000000000..7260e1befcd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ppopen-appl-bem/package.py
@@ -0,0 +1,79 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack import *
+import os
+
+
+class PpopenApplBem(MakefilePackage):
+    """ppOpen-APPL/BEM is software used to support a boundary element analysis
+    executed on a parallel computer.
+
+    The current version includes a software framework for a parallel BEM
+    analysis and an H-matrix library.
+    If you want to use the framework based on dense matrix computations,
+    please move to the directories 'src/framework' and
+    'src/framework_with_template'.
+    If you want to use the H-matrix library, please
+    move to the directory 'src/HACApK_with_BEM-BB-framework_1.0.0'.
+    """
+
+    homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/"
+    url = "file://{0}/ppohBEM_0.5.0.tar.gz".format(os.getcwd())
+
+    version('0.5.0', sha256='bf5e32902c97674c99353ee35de9c89206659e82b9c3d1f6edc9beffbb7c9d5f')
+
+    depends_on('mpi')
+
+    parallel = False
+    hacapk_src_dir = join_path(
+        'HACApK_1.0.0',
+        'src',
+        'HACApK_with_BEM-BB-framework_1.0.0'
+    )
+    src_directories = [
+        join_path('bem-bb-framework_dense', 'src', 'framework_with_templates'),
+        join_path('bem-bb-framework_dense', 'src', 'framework'),
+        hacapk_src_dir
+    ]
+
+    def edit(self, spec, prefix):
+        flags = [self.compiler.openmp_flag]
+        fflags = flags[:]
+        if spec.satisfies('%gcc'):
+            fflags.append('-ffree-line-length-none')
+        filter_file(
+            'bem-bb-SCM.out',
+            'HACApK-bem-bb-sSCM.out',
+            join_path(self.hacapk_src_dir, 'Makefile')
+        )
+        for d in self.src_directories:
+            with working_dir(d):
+                with open('Makefile', 'a') as m:
+                    m.write('ifeq ($(SYSTEM),spack)\n')
+                    m.write(' CC = {0}\n'.format(spec['mpi'].mpicc))
+                    m.write(' F90 = {0}\n'.format(spec['mpi'].mpifc))
+                    m.write(' CCFLAGS = {0}\n'.format(' '.join(flags)))
+                    m.write(' F90FLAGS = {0}\n'.format(' '.join(fflags)))
+                    m.write(' FFLAGS = {0}\n'.format(' '.join(fflags)))
+                    m.write(' LDFLAGS = {0}\n'.format(' '.join(flags)))
+                    m.write('endif\n')
+
+    def build(self, spec, prefix):
+        for d in self.src_directories:
+            with working_dir(d):
+                make('SYSTEM=spack')
+
+    def install(self, spec, prefix):
+        mkdir(prefix.bin)
+        mkdir(prefix.src)
+        for d in self.src_directories:
+            for f in find(d, '*.out'):
+                copy(f, prefix.bin)
+            install_src = join_path(prefix.src, os.path.basename(d))
+            mkdir(install_src)
+            install_tree(d, install_src)
+            with working_dir(install_src):
+                make('clean')
diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fvm/package.py b/var/spack/repos/builtin/packages/ppopen-appl-fvm/package.py
new file mode 100644
index 00000000000..2dd6872cc29
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ppopen-appl-fvm/package.py
@@ -0,0 +1,63 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+import os
+
+
+class PpopenApplFvm(MakefilePackage):
+    """
+    ppOpen-APPL/FVM is a coupling library that enables weak
+    coupling of various simulation models, such as an
+    atmospheric model and an ocean model, or a seismic model
+    and a structure model. To achieve very wide
+    applicability, ppohMATHMP is designed so that it is
+    independent of the grid structure. Instead of a grid
+    structure, ppOpen-APPL/FVM requires a data set
+    called a 'mapping table'. A mapping table is composed of
+    a correspondence table of grid indexes between a send
+    model and a receive model, plus interpolation coefficients.
+    A subroutine for making a mapping table file is provided
+    by the ppohMATHMP API.
+
+    The current version of ppohMATHMP is ver.1.0, which targets
+    scalar data exchange.
+    An exchange code of vector data, which requires rotation
+    calculation, is under development.
+    """
+
+    homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/"
+    url = "file://{0}/ppohFVM_0.3.0.tar.gz".format(os.getcwd())
+
+    version('0.3.0', sha256='4e05dd71f4eeda62c9683b7c3069a2537f3c2c7e86ba50a00d4963f41d9cbe29')
+
+    depends_on('mpi')
+    depends_on('metis@:4')
+
+    def edit(self, spec, prefix):
+        fflags = ['-O3']
+        if spec.satisfies('%gcc'):
+            fflags.append('-ffree-line-length-none')
+        makefile_in = FileFilter('Makefile.in')
+        makefile_in.filter(
+            r'^PREFIX *=.*$',
+            'PREFIX = {0}'.format(prefix)
+        )
+        makefile_in.filter(
+            r'^METISDIR *=.*$',
+            'METISDIR = {0}'.format(spec['metis'].prefix.lib)
+        )
+        makefile_in.filter('mpifrtpx', spec['mpi'].mpifc)
+        makefile_in.filter('frtpx', spack_fc)
+        makefile_in.filter('-Kfast', ' '.join(fflags))
+        makefile_in.filter(
+            ',openmp',
+            ' {0}'.format(self.compiler.openmp_flag)
+        )
+
+    def install(self, spec, prefix):
+        make('install')
+        install_tree('examples', prefix.examples)
+        install_tree('doc', prefix.doc)
diff --git a/var/spack/repos/builtin/packages/ppopen-at/package.py b/var/spack/repos/builtin/packages/ppopen-at/package.py
new file mode 100644
index 00000000000..92a8c302de6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ppopen-at/package.py
@@ -0,0 +1,28 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+import os
+
+
+class PpopenAt(MakefilePackage):
+    """ppOpen-AT is a part of the ppOpen-HPC project."""
+
+    homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/"
+    url = "file://{0}/ppohAT_1.0.0.tar.gz".format(os.getcwd())
+
+    version('1.0.0', sha256='2b5664839762c941e0b2dd7c15416e2dcfd5d909558cf7e4347a79ce535f3887')
+
+    def edit(self, spec, prefix):
+        makefile_in = FileFilter('Makefile.in')
+        makefile_in.filter('gcc', spack_cxx)
+        makefile_in.filter('~/ppohAT_1.0.0', prefix)
+        makefile_in.filter('mkdir', 'mkdir -p')
+
+    def install(self, spec, prefix):
+        make('install')
+        install_tree('examples', prefix.examples)
+        install_tree('doc', prefix.doc)
diff --git a/var/spack/repos/builtin/packages/ppopen-math-mp/package.py b/var/spack/repos/builtin/packages/ppopen-math-mp/package.py
new file mode 100644
index 00000000000..8f622644850
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ppopen-math-mp/package.py
@@ -0,0 +1,51 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+import os
+
+
+class PpopenMathMp(MakefilePackage):
+    """
+    ppOpen-Math/MP is a coupling library that enables weak coupling of various
+    simulation models, such as an atmospheric model and an ocean model, or
+    a seismic model and a structure model. To achieve very wide applicability,
+    ppOpen-Math/MP is designed so that it is independent of the grid
+    structure. Instead of a grid structure, ppohMATHMP requires a data set
+    called a 'mapping table'. A mapping table is composed of a correspondence
+    table of grid indexes between a send model and a receive model, plus
+    interpolation coefficients. A subroutine for making a mapping table
+    file is provided by the ppOpen-Math/MP API.
+
+    The current version of ppOpen-Math/MP is ver.1.0, which targets scalar
+    data exchange.
+    An exchange code of vector data, which requires rotation
+    calculation, is under development and will be released in the next
+    version.
+    """
+
+    homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/"
+    url = "file://{0}/ppohMATHMP_1.0.0.tar.gz".format(os.getcwd())
+
+    version('1.0.0', sha256='eb85a181286e4e7d071bd7c106fa547d38cfd16df87753e9d4e38da1a84a8f22')
+
+    depends_on('mpi')
+
+    build_directory = 'src'
+    build_targets = ['FC_XXX=spack']
+    parallel = False
+
+    def edit(self, spec, prefix):
+        flags = ['-I.']
+        if spec.satisfies('%gcc'):
+            flags.append('-ffree-line-length-none')
+        with open('src/Makefile', 'a') as makefile:
+            makefile.write('FC_spack = {0}\n'.format(spec['mpi'].mpifc))
+            makefile.write('FFLAGS_spack = {0}\n'.format(' '.join(flags)))
+            makefile.write('AR_spack = ar cr\n')
+
+    def install(self, spec, prefix):
+        for d in ['include', 'lib', 'doc', 'test']:
+            mkdir(join_path(prefix, d))
+            copy_tree(d, join_path(prefix, d))
diff --git a/var/spack/repos/builtin/packages/ppopen-math-vis/package.py b/var/spack/repos/builtin/packages/ppopen-math-vis/package.py
new file mode 100644
index 00000000000..ad2ff3a7c8f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ppopen-math-vis/package.py
@@ -0,0 +1,45 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack import *
+import os
+
+
+class PpopenMathVis(MakefilePackage):
+    """
+    ppOpen-MATH/VIS is a set of libraries for parallel visualization.
+
+    Capabilities of ppOpen-MATH/VIS (ver.0.2.0) are as follows:
+
+    Using background voxels with adaptive mesh refinement (AMR).
+    Single UCD file.
+    Flat MPI parallel programming models
+    (OpenMP/MPI hybrid will be supported in the future).
+    Can be called from programs written in both Fortran 90 and C.
+    Only FDM-type structured meshes are supported.
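# [Editor's aside -- not part of the patch.] Like the other ppopen-* packages
# above, ppopen-math-vis uses a file:// URL rooted at os.getcwd(), so Spack
# does not download the tarball itself; it must be obtained separately (see
# the homepage) and placed in the directory spack is invoked from. What the
# url attribute below expands to, for example:

import os

url = "file://{0}/ppohVIS_0.2.0.tar.gz".format(os.getcwd())
# e.g. file:///home/user/work/ppohVIS_0.2.0.tar.gz when run from that directory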
+ """ + + homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/" + url = "file://{0}/ppohVIS_0.2.0.tar.gz".format(os.getcwd()) + + version('0.2.0', sha256='f816885cb9fab4802f9df55c1f1e7f8505867dc8862562bce26d193d6a0dc29d') + + depends_on('mpi') + + def edit(self, spec, prefix): + makefile_in = FileFilter('Makefile.in') + makefile_in.filter('mpifccpx', spec['mpi'].mpicc) + makefile_in.filter('mpiFCCpx', spec['mpi'].mpicxx) + makefile_in.filter('mpifrtpx', spec['mpi'].mpifc) + makefile_in.filter('-Kfast', '-O3') + makefile_in.filter(r'~/ppOpen-HPC/.*', prefix) + + def install(self, spec, prefix): + make('install') + mkdir(join_path(prefix, 'examples')) + copy_tree('examples', join_path(prefix, 'examples')) + mkdir(join_path(prefix, 'doc')) + copy_tree('doc', join_path(prefix, 'doc')) diff --git a/var/spack/repos/builtin/packages/precice/package.py b/var/spack/repos/builtin/packages/precice/package.py index 40661e43291..986bfa7f146 100644 --- a/var/spack/repos/builtin/packages/precice/package.py +++ b/var/spack/repos/builtin/packages/precice/package.py @@ -19,6 +19,7 @@ class Precice(CMakePackage): maintainers = ['fsimonis', 'MakisH'] version('develop', branch='develop') + version('2.0.2', sha256='72864480f32696e7b6da94fd404ef5cd6586e2e1640613e46b75f1afac8569ed') version('2.0.1', sha256='e4fe2d2063042761ab325f8c802f88ae088c90862af288ad1a642967d074bd50') version('2.0.0', sha256='c8979d366f06e35626a8da08a1c589df77ec13972eb524a1ba99a011e245701f') version('1.6.1', sha256='7d0c54faa2c69e52304f36608d93c408629868f16f3201f663a0f9b2008f0763') diff --git a/var/spack/repos/builtin/packages/procps/package.py b/var/spack/repos/builtin/packages/procps/package.py index f8b4ed26336..e18d6206cf8 100644 --- a/var/spack/repos/builtin/packages/procps/package.py +++ b/var/spack/repos/builtin/packages/procps/package.py @@ -23,7 +23,7 @@ class Procps(AutotoolsPackage): depends_on('m4', type='build') depends_on('pkgconfig@0.9.0:', type='build') depends_on('dejagnu', type='test') - depends_on('libiconv') + depends_on('iconv') depends_on('gettext') depends_on('ncurses') @@ -35,7 +35,7 @@ def autoreconf(self, spec, prefix): def configure_args(self): return [ - '--with-libiconv-prefix={0}'.format(self.spec['libiconv'].prefix), + '--with-libiconv-prefix={0}'.format(self.spec['iconv'].prefix), '--with-libintl-prefix={0}'.format(self.spec['gettext'].prefix), '--with-ncurses', # Required to avoid libintl linking errors diff --git a/var/spack/repos/builtin/packages/py-apptools/package.py b/var/spack/repos/builtin/packages/py-apptools/package.py new file mode 100644 index 00000000000..4ac8da1c791 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-apptools/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyApptools(PythonPackage): + """The apptools project includes a set of packages that Enthought has found + useful in creating a number of applications. 
They implement functionality + that is commonly needed by many applications.""" + + homepage = "https://docs.enthought.com/apptools" + url = "https://pypi.io/packages/source/a/apptools/apptools-4.5.0.tar.gz" + + version('4.5.0', sha256='260ae0e2a86cb2df2fede631ab6ac8ece694a58a1def78cd015c890c57140582') + + depends_on('py-setuptools', type='build') + depends_on('py-configobj', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-traitsui', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-astpretty/package.py b/var/spack/repos/builtin/packages/py-astpretty/package.py new file mode 100644 index 00000000000..b6b4e9855fa --- /dev/null +++ b/var/spack/repos/builtin/packages/py-astpretty/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyAstpretty(PythonPackage): + """Pretty print the output of python stdlib `ast.parse`.""" + + homepage = "https://github.com/asottile/astpretty" + url = "https://pypi.io/packages/source/a/astpretty/astpretty-2.0.0.tar.gz" + + version('2.0.0', sha256='e4724bfd753636ba4a84384702e9796e5356969f40af2596d846ce64addde086') + + variant('typed', default=False, description='Add support for typed comments') + + depends_on('python@3.6.1:', type=('build', 'run')) + depends_on('py-setuptools', type=('build', 'run')) + depends_on('py-typed-ast', type=('build', 'run'), when='+typed') diff --git a/var/spack/repos/builtin/packages/py-astropy-helpers/package.py b/var/spack/repos/builtin/packages/py-astropy-helpers/package.py new file mode 100644 index 00000000000..636fcb8b305 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-astropy-helpers/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyAstropyHelpers(PythonPackage): + """The astropy-helpers package includes many build, + installation, and documentation-related tools used by the + Astropy project, but packaged separately for use by other + projects that wish to leverage this work.""" + + homepage = "https://github.com/astropy/astropy-helpers" + url = "https://github.com/astropy/astropy-helpers/archive/v4.0.1.tar.gz" + + version('4.0.1', sha256='88602971c3b63d6aaa6074d013f995d1e234acb3d517d70d7fcebd30cdaf5c89') + + depends_on('python@3.6:', type=('build', 'run')) + depends_on('py-setuptools@30.3:', type='build') diff --git a/var/spack/repos/builtin/packages/py-attrs/package.py b/var/spack/repos/builtin/packages/py-attrs/package.py index cf9950c1ce2..64c8aa5426e 100644 --- a/var/spack/repos/builtin/packages/py-attrs/package.py +++ b/var/spack/repos/builtin/packages/py-attrs/package.py @@ -14,6 +14,7 @@ class PyAttrs(PythonPackage): import_modules = ['attr'] + version('19.3.0', sha256='f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72') version('19.2.0', sha256='f913492e1663d3c36f502e5e9ba6cd13cf19d7fab50aa13239e420fef95e1396') version('19.1.0', sha256='f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399') version('18.1.0', sha256='e0d0eb91441a3b53dab4d9b743eafc1ac44476296a2053b6ca3af0b139faf87b') @@ -26,5 +27,6 @@ class PyAttrs(PythonPackage): depends_on('py-hypothesis', type='test') depends_on('py-pympler', type='test') depends_on('py-pytest', type='test') + depends_on('py-pytest@4.3.0:', type='test', when='@19.3.0:') depends_on('py-six', type='test') depends_on('py-zope-interface', type='test') diff --git a/var/spack/repos/builtin/packages/py-backports-weakref/package.py b/var/spack/repos/builtin/packages/py-backports-weakref/package.py index b3e5ad65bca..ef88d792566 100644 --- a/var/spack/repos/builtin/packages/py-backports-weakref/package.py +++ b/var/spack/repos/builtin/packages/py-backports-weakref/package.py @@ -10,7 +10,7 @@ class PyBackportsWeakref(PythonPackage): """Backports of new features in Python's weakref module""" homepage = "https://github.com/PiDelport/backports.weakref" - url = "https://pypi.org/packages/source/b/backports.weakref/backports.weakref-1.0.post1.tar.gz" + url = "https://pypi.io/packages/source/b/backports.weakref/backports.weakref-1.0.post1.tar.gz" version('1.0.post1', sha256='bc4170a29915f8b22c9e7c4939701859650f2eb84184aee80da329ac0b9825c2', preferred=True) version('1.0rc1', sha256='8813bf712a66b3d8b85dc289e1104ed220f1878cf981e2fe756dfaabe9a82892') diff --git a/var/spack/repos/builtin/packages/py-colorlog/package.py b/var/spack/repos/builtin/packages/py-colorlog/package.py index 7eb42c2976a..05268384f29 100644 --- a/var/spack/repos/builtin/packages/py-colorlog/package.py +++ b/var/spack/repos/builtin/packages/py-colorlog/package.py @@ -10,7 +10,7 @@ class PyColorlog(PythonPackage): """A colored formatter for the python logging module""" homepage = "https://github.com/borntyping/python-colorlog" - url = "https://pypi.org/packages/source/c/colorlog/colorlog-4.0.2.tar.gz" + url = "https://pypi.io/packages/source/c/colorlog/colorlog-4.0.2.tar.gz" version('4.0.2', sha256='3cf31b25cbc8f86ec01fef582ef3b840950dea414084ed19ab922c8b493f9b42') version('3.1.4', sha256='418db638c9577f37f0fae4914074f395847a728158a011be2a193ac491b9779d') diff --git a/var/spack/repos/builtin/packages/py-colorpy/package.py b/var/spack/repos/builtin/packages/py-colorpy/package.py index da11077eaf3..909ad673efc 
100644 --- a/var/spack/repos/builtin/packages/py-colorpy/package.py +++ b/var/spack/repos/builtin/packages/py-colorpy/package.py @@ -15,7 +15,7 @@ class PyColorpy(PythonPackage): """ homepage = "http://markkness.net/colorpy/ColorPy.html" - url = "https://pypi.org/packages/source/c/colorpy/colorpy-0.1.1.tar.gz" + url = "https://pypi.io/packages/source/c/colorpy/colorpy-0.1.1.tar.gz" version('0.1.1', sha256='e400a7e879adc83c6098dde13cdd093723f3936778c245b1caf88f5f1411170d') diff --git a/var/spack/repos/builtin/packages/py-d2to1/package.py b/var/spack/repos/builtin/packages/py-d2to1/package.py new file mode 100644 index 00000000000..040d0b8d4b5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-d2to1/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyD2to1(PythonPackage): + """d2to1 (the 'd' is for 'distutils') allows + using distutils2-like setup.cfg files for a package's + metadata with a distribute/setuptools setup.py script.""" + + homepage = "https://github.com/embray/d2to1" + url = "https://github.com/embray/d2to1/archive/0.2.12.tar.gz" + + version('0.2.12.post1', sha256='80e026ccc604850d8171fd8599b3130d234c0d443e1dc4e2039be0b204cea9b4') + version('0.2.12', sha256='04ab9f3ac255d367ecda1eb59379e5031816740c3a3eda95d0dba9f6bb3b7ca4') + + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-decorator/package.py b/var/spack/repos/builtin/packages/py-decorator/package.py index 80174932ee1..d174fc8a1f7 100644 --- a/var/spack/repos/builtin/packages/py-decorator/package.py +++ b/var/spack/repos/builtin/packages/py-decorator/package.py @@ -12,8 +12,9 @@ class PyDecorator(PythonPackage): various non-trivial examples.""" homepage = "https://github.com/micheles/decorator" - url = "https://pypi.io/packages/source/d/decorator/decorator-4.4.0.tar.gz" + url = "https://pypi.io/packages/source/d/decorator/decorator-4.4.2.tar.gz" + version('4.4.2', sha256='e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7') version('4.4.0', sha256='86156361c50488b84a3f148056ea716ca587df2f0de1d34750d35c21312725de') version('4.3.2', sha256='33cd704aea07b4c28b3eb2c97d288a06918275dac0ecebdaf1bc8a48d98adb9e') version('4.3.0', sha256='c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c') diff --git a/var/spack/repos/builtin/packages/py-deprecated/package.py b/var/spack/repos/builtin/packages/py-deprecated/package.py new file mode 100644 index 00000000000..7d1fe4af933 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-deprecated/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyDeprecated(PythonPackage): + """Python @deprecated decorator to deprecate old python classes, + functions or methods.""" + + homepage = "https://github.com/tantale/deprecated" + url = "https://github.com/tantale/deprecated/archive/v1.2.7.tar.gz" + + version("1.2.7", sha256="7db3c814ddcac9d79c5bae8a0e82a5bba55cb8e46f3d611d0d8611c34a72a783") + + depends_on("python@2.7:2.8,3.4:", type=("build", "run")) + depends_on("py-wrapt@1.10:1.99999", type=("build", "run")) + depends_on("py-setuptools", type="build") diff --git a/var/spack/repos/builtin/packages/py-dgl/package.py b/var/spack/repos/builtin/packages/py-dgl/package.py new file mode 100644 index 00000000000..b1b1c37586f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dgl/package.py @@ -0,0 +1,128 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyDgl(CMakePackage): + """Deep Graph Library (DGL). + + DGL is an easy-to-use, high performance and scalable Python package for + deep learning on graphs. DGL is framework agnostic, meaning if a deep graph + model is a component of an end-to-end application, the rest of the logics + can be implemented in any major frameworks, such as PyTorch, Apache MXNet + or TensorFlow.""" + + homepage = "https://www.dgl.ai/" + git = "https://github.com/dmlc/dgl.git" + + maintainers = ['adamjstewart'] + + version('master', branch='master', submodules=True) + version('0.4.2', tag='0.4.2', submodules=True) + + variant('cuda', default=True, description='Build with CUDA') + variant('openmp', default=True, description='Build with OpenMP') + variant('backend', default='pytorch', description='Default backend', + values=['pytorch', 'mxnet', 'tensorflow'], multi=False) + + depends_on('cmake@3.5:', type='build') + depends_on('cuda', when='+cuda') + depends_on('llvm-openmp', when='%clang platform=darwin +openmp') + + # Python dependencies + extends('python') + depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-cython', type='build') + depends_on('py-numpy@1.14.0:', type=('build', 'run')) + depends_on('py-scipy@1.1.0:', type=('build', 'run')) + depends_on('py-networkx@2.1:', type=('build', 'run')) + + # Backends + depends_on('py-torch@0.4.1:', when='backend=pytorch', type='run') + depends_on('mxnet@1.5:', when='backend=mxnet', type='run') + depends_on('py-tensorflow@2.0:', when='backend=tensorflow', type='run') + depends_on('py-tfdlpack', when='backend=tensorflow', type='run') + + build_directory = 'build' + + # https://docs.dgl.ai/install/index.html#install-from-source + def cmake_args(self): + args = [] + + if '+cuda' in self.spec: + args.append('-DUSE_CUDA=ON') + else: + args.append('-DUSE_CUDA=OFF') + + if '+openmp' in self.spec: + args.append('-DUSE_OPENMP=ON') + + if self.spec.satisfies('%clang platform=darwin'): + args.extend([ + '-DOpenMP_CXX_FLAGS=' + + self.spec['llvm-openmp'].headers.include_flags, + '-DOpenMP_CXX_LIB_NAMES=' + + self.spec['llvm-openmp'].libs.names[0], + '-DOpenMP_C_FLAGS=' + + self.spec['llvm-openmp'].headers.include_flags, + '-DOpenMP_C_LIB_NAMES=' + + self.spec['llvm-openmp'].libs.names[0], + '-DOpenMP_omp_LIBRARY=' + + self.spec['llvm-openmp'].libs[0], + ]) + else: + args.append('-DUSE_OPENMP=OFF') + + if self.run_tests: + args.append('-DBUILD_CPP_TEST=ON') + else: + 
args.append('-DBUILD_CPP_TEST=OFF') + + return args + + def install(self, spec, prefix): + with working_dir('python'): + setup_py('install', '--prefix=' + prefix, + '--single-version-externally-managed', '--root=/') + + # Work around installation bug: https://github.com/dmlc/dgl/issues/1379 + install_tree(prefix.dgl, prefix.lib) + + def setup_run_environment(self, env): + # https://docs.dgl.ai/install/backend.html + backend = self.spec.variants['backend'].value + env.set('DGLBACKEND', backend) + + @property + def import_modules(self): + modules = [ + 'dgl', 'dgl.nn', 'dgl.runtime', 'dgl.backend', 'dgl.function', + 'dgl.contrib', 'dgl._ffi', 'dgl.data', 'dgl.runtime.ir', + 'dgl.backend.numpy', 'dgl.contrib.sampling', 'dgl._ffi._cy2', + 'dgl._ffi._cy3', 'dgl._ffi._ctypes', + ] + + if 'backend=pytorch' in self.spec: + modules.extend([ + 'dgl.nn.pytorch', 'dgl.nn.pytorch.conv', 'dgl.backend.pytorch' + ]) + elif 'backend=mxnet' in self.spec: + modules.extend([ + 'dgl.nn.mxnet', 'dgl.nn.mxnet.conv', 'dgl.backend.mxnet' + ]) + elif 'backend=tensorflow' in self.spec: + modules.extend([ + 'dgl.nn.tensorflow', 'dgl.nn.tensorflow.conv', + 'dgl.backend.tensorflow' + ]) + + return modules + + @run_after('install') + @on_package_attributes(run_tests=True) + def import_module_test(self): + with working_dir('spack-test', create=True): + for module in self.import_modules: + python('-c', 'import {0}'.format(module)) diff --git a/var/spack/repos/builtin/packages/py-dill/package.py b/var/spack/repos/builtin/packages/py-dill/package.py index 485dfd9d668..0afa8d329ce 100644 --- a/var/spack/repos/builtin/packages/py-dill/package.py +++ b/var/spack/repos/builtin/packages/py-dill/package.py @@ -12,6 +12,7 @@ class PyDill(PythonPackage): homepage = "https://github.com/uqfoundation/dill" url = "https://pypi.io/packages/source/d/dill/dill-0.2.7.tar.gz" + version('0.3.1', sha256='d3ddddf2806a7bc9858b20c02dc174396795545e9d62f243b34481fd26eb3e2c') version('0.2.9', sha256='f6d6046f9f9195206063dd0415dff185ad593d6ee8b0e67f12597c0f4df4986f') version('0.2.7', sha256='ddda0107e68e4eb1772a9f434f62a513c080c7171bd0dd6fb65d992788509812') version('0.2.6', sha256='6c1ccca68be483fa8c66e85a89ffc850206c26373aa77a97b83d8d0994e7f1fd') @@ -23,6 +24,7 @@ class PyDill(PythonPackage): version('0.2', sha256='aba8d4c81c4136310e6ce333bd6f4f3ea2d53bd367e2f69c864428f260c0308c') depends_on('python@2.5:2.8,3.1:', type=('build', 'run')) + depends_on('python@2.6:2.8,3.1:', type=('build', 'run'), when='@0.3.0:') depends_on('py-setuptools@0.6:', type='build') diff --git a/var/spack/repos/builtin/packages/py-diskcache/package.py b/var/spack/repos/builtin/packages/py-diskcache/package.py new file mode 100644 index 00000000000..042183cdf32 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-diskcache/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyDiskcache(PythonPackage): + """Disk Cache -- Disk and file backed persistent cache.""" + + homepage = "http://www.grantjenks.com/docs/diskcache/" + url = "https://pypi.io/packages/source/d/diskcache/diskcache-4.1.0.tar.gz" + + version('4.1.0', sha256='bcee5a59f9c264e2809e58d01be6569a3bbb1e36a1e0fb83f7ef9b2075f95ce0') + + depends_on('py-setuptools', type='build') + depends_on('py-tox', type='test') diff --git a/var/spack/repos/builtin/packages/py-ecdsa/package.py b/var/spack/repos/builtin/packages/py-ecdsa/package.py index 18d6d7eddbf..2401b9bd276 100644 --- a/var/spack/repos/builtin/packages/py-ecdsa/package.py +++ b/var/spack/repos/builtin/packages/py-ecdsa/package.py @@ -10,11 +10,12 @@ class PyEcdsa(PythonPackage): """ECDSA cryptographic signature library (pure python)""" homepage = "https://github.com/warner/python-ecdsa" - url = "https://files.pythonhosted.org/packages/source/e/ecdsa/ecdsa-0.13.2.tar.gz" + url = "https://pypi.io/packages/source/e/ecdsa/ecdsa-0.15.tar.gz" + version('0.15', sha256='8f12ac317f8a1318efa75757ef0a651abe12e51fc1af8838fb91079445227277') version('0.13.2', sha256='5c034ffa23413ac923541ceb3ac14ec15a0d2530690413bff58c12b80e56d884') - depends_on('py-setuptools', type='build') - depends_on('py-six', type=('build', 'run')) depends_on('python@2.6:2.8,3.3:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-six@1.9.0:', type=('build', 'run')) depends_on('openssl', type='test') diff --git a/var/spack/repos/builtin/packages/py-envisage/package.py b/var/spack/repos/builtin/packages/py-envisage/package.py new file mode 100644 index 00000000000..06783c8fcb2 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-envisage/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyEnvisage(PythonPackage): + """Envisage is a Python-based framework for building extensible + applications, that is, applications whose functionality can be extended by + adding "plug-ins". Envisage provides a standard mechanism for features to + be added to an application, whether by the original developer or by someone + else. In fact, when you build an application using Envisage, the entire + application consists primarily of plug-ins. In this respect, it is similar + to the Eclipse and Netbeans frameworks for Java applications.""" + + homepage = "https://docs.enthought.com/envisage" + url = "https://pypi.io/packages/source/e/envisage/envisage-4.9.2.tar.gz" + + version('4.9.2', sha256='ed9580ac6ea17b333f1cce5b94656aed584798d56d8bd364f996a06fe1ac32eb') + + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) + depends_on('py-apptools', type=('build', 'run')) + depends_on('py-setuptools', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-traits', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-ephem/package.py b/var/spack/repos/builtin/packages/py-ephem/package.py new file mode 100644 index 00000000000..b3f62ae3cf7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ephem/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyEphem(PythonPackage): + """PyEphem provides an ephem Python package for + performing high-precision astronomy computations.""" + + homepage = "https://rhodesmill.org/pyephem/" + url = "https://github.com/brandon-rhodes/pyephem/archive/v3.7.7.1.tar.gz" + + version('3.7.7.1', sha256='d9d05d85c0d38a79169acaef25964ac9df2d808f0d833354545b9ef681ff584d') diff --git a/var/spack/repos/builtin/packages/py-espressopp/package.py b/var/spack/repos/builtin/packages/py-espressopp/package.py index a4524da78eb..f243fcb4d28 100644 --- a/var/spack/repos/builtin/packages/py-espressopp/package.py +++ b/var/spack/repos/builtin/packages/py-espressopp/package.py @@ -27,7 +27,7 @@ class PyEspressopp(CMakePackage): depends_on("cmake@2.8:", type='build') depends_on("mpi") - depends_on("boost+serialization+filesystem+system+python+mpi", when='@1.9.4:') + depends_on("boost+serialization+filesystem+system+python+mpi cxxstd=11", when='@1.9.4:') extends("python") depends_on("python@2:2.8") depends_on("py-mpi4py@2.0.0:", when='@1.9.4', type=('build', 'run')) @@ -35,7 +35,7 @@ class PyEspressopp(CMakePackage): depends_on("fftw") depends_on("py-sphinx", when="+ug", type='build') depends_on("py-sphinx", when="+pdf", type='build') - depends_on('py-numpy', type=('build', 'run')) + depends_on('py-numpy@:1.16.6', type=('build', 'run')) depends_on('py-matplotlib', when="+ug", type='build') depends_on('py-matplotlib', when="+pdf", type='build') depends_on("texlive", when="+pdf", type='build') diff --git a/var/spack/repos/builtin/packages/py-exodus-bundler/package.py b/var/spack/repos/builtin/packages/py-exodus-bundler/package.py index b14570f54b5..c13f30eb469 100644 --- a/var/spack/repos/builtin/packages/py-exodus-bundler/package.py +++ b/var/spack/repos/builtin/packages/py-exodus-bundler/package.py @@ -11,7 +11,7 @@ class PyExodusBundler(PythonPackage): ELF binaries from one system to another.""" homepage = "https://github.com/intoli/exodus" - url = "https://pypi.org/packages/source/e/exodus-bundler/exodus-bundler-2.0.2.tar.gz" + url = "https://pypi.io/packages/source/e/exodus-bundler/exodus-bundler-2.0.2.tar.gz" version('2.0.2', sha256='4e896a2034b94cf7b4fb33d86a68e29a7d3b08e57541e444db34dddc6ac1ef68') diff --git a/var/spack/repos/builtin/packages/py-flake8-polyfill/package.py b/var/spack/repos/builtin/packages/py-flake8-polyfill/package.py index 8de1f375051..cc61a2e936b 100644 --- a/var/spack/repos/builtin/packages/py-flake8-polyfill/package.py +++ b/var/spack/repos/builtin/packages/py-flake8-polyfill/package.py @@ -11,10 +11,10 @@ class PyFlake8Polyfill(PythonPackage): for Flake8 plugins that intend to support Flake8 2.x and 3.x simultaneously. 
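# [Editor's aside -- illustrative sketch, not part of the patch.] Several of
# the py-* packages in this section, including flake8-polyfill just below,
# switch their 'url' to the pypi.io source-download convention:
# https://pypi.io/packages/source/<first letter>/<name>/<name>-<version>.tar.gz
# A small helper showing the pattern (hypothetical function, for illustration):

def pypi_source_url(name, version):
    return ("https://pypi.io/packages/source/"
            "{0}/{1}/{1}-{2}.tar.gz".format(name[0], name, version))

# pypi_source_url('colorlog', '4.0.2')
# -> https://pypi.io/packages/source/c/colorlog/colorlog-4.0.2.tar.gz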
""" - homepage = "https://pypi.org/project/flake8-polyfill/" - url = "https://files.pythonhosted.org/packages/e6/67/1c26634a770db5c442e361311bee73cb3a177adb2eb3f7af8953cfd9f553/flake8-polyfill-1.0.2.tar.gz" + homepage = "https://gitlab.com/pycqa/flake8-polyfill" + url = "https://pypi.io/packages/source/f/flake8-polyfill/flake8-polyfill-1.0.2.tar.gz" version('1.0.2', sha256='e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda') - extends('python', ignore='bin/(flake8|pyflakes|pycodestyle)') - depends_on('py-flake8', type='run') + depends_on('py-setuptools', type='build') + depends_on('py-flake8', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-gensim/package.py b/var/spack/repos/builtin/packages/py-gensim/package.py index 06eaacfe945..a08cbbe98d1 100644 --- a/var/spack/repos/builtin/packages/py-gensim/package.py +++ b/var/spack/repos/builtin/packages/py-gensim/package.py @@ -8,21 +8,20 @@ class PyGensim(PythonPackage): """Gensim is a Python library for topic modelling, document indexing and - similarity retrieval with large corpora. Target audience is the natural - language processing (NLP) and information retrieval (IR) community.""" + similarity retrieval with large corpora. Target audience is the natural + language processing (NLP) and information retrieval (IR) community.""" - homepage = "https://pypi.org/project/gensim/" - url = "https://files.pythonhosted.org/packages/3a/bc/1415be59292a23ff123298b4b46ec4be80b3bfe72c8d188b58ab2653dee4/gensim-3.8.0.tar.gz" + homepage = "https://radimrehurek.com/gensim" + url = "https://pypi.io/packages/source/g/gensim/gensim-3.8.1.tar.gz" - version('3.8.1', sha256='33277fc0a8d7b0c7ce70fcc74bb82ad39f944c009b334856c6e86bf552b1dfdc', - url='https://files.pythonhosted.org/packages/73/f2/e9af000df6419bf1a63ffed3e6033a1b1d8fcf2f971fcdac15296619aff8/gensim-3.8.1.tar.gz') + maintainers = ['adamjstewart'] + + version('3.8.1', sha256='33277fc0a8d7b0c7ce70fcc74bb82ad39f944c009b334856c6e86bf552b1dfdc') version('3.8.0', sha256='ec5de7ff2bfa8692fa96a846bb5aae52f267fc322fbbe303c1f042d258af5766') - depends_on('python@3.5:', type=('build', 'run')) - + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') - - depends_on('py-numpy', type=('build', 'run')) - depends_on('py-scipy', type=('build', 'run')) - depends_on('py-smart-open', type=('build', 'run')) - depends_on('py-six', type=('build', 'run')) + depends_on('py-numpy@1.11.3:', type=('build', 'run')) + depends_on('py-scipy@0.18.1:', type=('build', 'run')) + depends_on('py-six@1.5.0:', type=('build', 'run')) + depends_on('py-smart-open@1.8.1:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-gluoncv/no-unicode-readme.patch b/var/spack/repos/builtin/packages/py-gluoncv/no-unicode-readme.patch new file mode 100644 index 00000000000..43d0a6c5e87 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-gluoncv/no-unicode-readme.patch @@ -0,0 +1,11 @@ +--- a/README.md 2020-04-03 11:56:41.820271316 -0400 ++++ b/README.md 2020-04-03 11:56:55.901335611 -0400 +@@ -72,7 +72,7 @@ + + There are multiple versions of MXNet pre-built package available. Please refer to [mxnet packages](https://gluon-crash-course.mxnet.io/mxnet_packages.html) if you need more details about MXNet versions. + +-# Docs 📖 ++# Docs + GluonCV documentation is available at [our website](https://gluon-cv.mxnet.io/index.html). 
+ + # Examples diff --git a/var/spack/repos/builtin/packages/py-gluoncv/package.py b/var/spack/repos/builtin/packages/py-gluoncv/package.py new file mode 100644 index 00000000000..05966766543 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-gluoncv/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyGluoncv(PythonPackage): + """GluonCV provides implementations of state-of-the-art + (SOTA) deep learning algorithms in computer vision. It aims + to help engineers, researchers, and students quickly + prototype products, validate new ideas and learn computer + vision.""" + + homepage = "https://gluon-cv.mxnet.io/" + url = "https://github.com/dmlc/gluon-cv/archive/v0.6.0.tar.gz" + + version('0.6.0', sha256='5ac89d73f34d02b2e60595a5cc35f46d0a69376567fae3a9518005dd89161305') + + depends_on('py-setuptools', type='build') + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-tqdm', type=('build', 'run')) + depends_on('py-requests', type=('build', 'run')) + depends_on('py-matplotlib', type=('build', 'run')) + depends_on('py-portalocker', type=('build', 'run')) + depends_on('py-pillow', type=('build', 'run')) + depends_on('py-scipy', type=('build', 'run')) + depends_on('py-cython', type='build') + + patch('no-unicode-readme.patch') + + def build_args(self, spec, prefix): + args = [] + args.append('--with-cython') + return args diff --git a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py index bdda7c85e37..18981f20e45 100644 --- a/var/spack/repos/builtin/packages/py-grpcio/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio/package.py @@ -10,8 +10,9 @@ class PyGrpcio(PythonPackage): """HTTP/2-based RPC framework.""" homepage = "https://grpc.io/" - url = "https://pypi.io/packages/source/g/grpcio/grpcio-1.25.0.tar.gz" + url = "https://pypi.io/packages/source/g/grpcio/grpcio-1.27.2.tar.gz" + version('1.27.2', sha256='5ae532b93cf9ce5a2a549b74a2c35e3b690b171ece9358519b3039c7b84c887e') version('1.25.0', sha256='c948c034d8997526011960db54f512756fb0b4be1b81140a15b4ef094c6594a4') depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-h5glance/package.py b/var/spack/repos/builtin/packages/py-h5glance/package.py index 93a1be40289..9d3c3c91716 100644 --- a/var/spack/repos/builtin/packages/py-h5glance/package.py +++ b/var/spack/repos/builtin/packages/py-h5glance/package.py @@ -15,7 +15,9 @@ class PyH5glance(PythonPackage): homepage = "https://github.com/European-XFEL/h5glance" url = "https://pypi.io/packages/source/h/h5glance/h5glance-0.4.tar.gz" - version('0.4', sha256='03babaee0d481991062842796126bc9e6b11e2e6e7daba57c26f2b58bf3bbd32') + version('0.6', sha256='203369ab614273aaad3419f151e234609bb8390b201b65f678d7e17c57633e35') + version('0.5', sha256='bc34ee42429f0440b329083e3f67fbf3d7016a4aed9e8b30911e5905217bc8d9') + version('0.4', sha256='03babaee0d481991062842796126bc9e6b11e2e6e7daba57c26f2b58bf3bbd32') depends_on('python@3.5:', type=('build', 'run')) depends_on('py-h5py', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-hatchet/package.py b/var/spack/repos/builtin/packages/py-hatchet/package.py new file mode 100644 index 00000000000..91df70b61b3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-hatchet/package.py @@ -0,0 +1,27 @@ +# Copyright 
2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyHatchet(PythonPackage): + """Hatchet is an analysis tool for structured tree or graph performance data + using an indexed Pandas dataframe.""" + + homepage = "https://github.com/LLNL/hatchet" + url = "https://github.com/LLNL/hatchet/archive/v1.0.0.tar.gz" + + maintainers = ["slabasan", "bhatele", "tgamblin"] + + version('1.0.0', sha256='efd218bc9152abde0a8006489a2c432742f00283a114c1eeb6d25abc10f5862d') + + depends_on('python@2.7,3:', type=('build', 'run')) + + depends_on('py-setuptools', type='build') + depends_on('py-matplotlib', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-pandas', type=('build', 'run')) + depends_on('py-pydot', type=('build', 'run')) + depends_on('py-pyyaml', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-hdfs/package.py b/var/spack/repos/builtin/packages/py-hdfs/package.py index 599a00fad8f..c84c3faca92 100644 --- a/var/spack/repos/builtin/packages/py-hdfs/package.py +++ b/var/spack/repos/builtin/packages/py-hdfs/package.py @@ -9,7 +9,7 @@ class PyHdfs(PythonPackage): """API and command line interface for HDFS""" homepage = "https://hdfscli.readthedocs.io/en/latest/" - url = "https://pypi.org/packages/source/h/hdfs/hdfs-2.1.0.tar.gz" + url = "https://pypi.io/packages/source/h/hdfs/hdfs-2.1.0.tar.gz" version('2.1.0', sha256='a40fe99ccb03b5c3247b33a4110eb21b57405dd7c3f1b775e362e66c19b44bc6') diff --git a/var/spack/repos/builtin/packages/py-horovod/fma.patch b/var/spack/repos/builtin/packages/py-horovod/fma.patch new file mode 100644 index 00000000000..13ccb8c9c5d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-horovod/fma.patch @@ -0,0 +1,52 @@ +From 717e72f91f02d1dc3c859719ef1d804b10f88017 Mon Sep 17 00:00:00 2001 +From: Nicolas V Castet +Date: Mon, 30 Mar 2020 12:47:50 -0500 +Subject: [PATCH] Add extra preprocessor guard for FMA optimization + +Fixes #1832 + +Signed-off-by: Nicolas V Castet +--- + horovod/common/ops/adasum/adasum.h | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/horovod/common/ops/adasum/adasum.h b/horovod/common/ops/adasum/adasum.h +index 0330f5850..876f7f12b 100644 +--- a/horovod/common/ops/adasum/adasum.h ++++ b/horovod/common/ops/adasum/adasum.h +@@ -19,7 +19,7 @@ + #include + #include + +-#if __AVX__ && __F16C__ ++#if __AVX__ && __F16C__ && __FMA__ + #include + #include + #endif +@@ -104,7 +104,7 @@ template class Adasum { + int count, double& dotProduct, + double& anormsq, double& bnormsq, + int layerid) { +-#if __AVX__ && __F16C__ ++#if __AVX__ && __F16C__ && __FMA__ + if (horovod_datatype == DataType::HOROVOD_FLOAT16) { + ComputeDotAndNormSqrdsfp16((uint16_t*)a, (uint16_t*)b, count, dotProduct, + anormsq, bnormsq, layerid); +@@ -125,7 +125,7 @@ template class Adasum { + double acoeff, void* __restrict__ a, + double bcoeff, void* __restrict__ b, + int layerid) { +-#if __AVX__ && __F16C__ ++#if __AVX__ && __F16C__ && __FMA__ + if (horovod_datatype == DataType::HOROVOD_FLOAT16) { + ScaledAddfp16(count, acoeff, (uint16_t*)a, bcoeff, (uint16_t*)b, layerid); + } else +@@ -425,7 +425,7 @@ template class Adasum { + } + + +-#if __AVX__ && __F16C__ ++#if __AVX__ && __F16C__ && __FMA__ + inline void ComputeDotAndNormSqrdsfp16(const uint16_t* __restrict__ a, + const uint16_t* __restrict__ b, + int len, double& dotProduct, diff 
--git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py
new file mode 100644
index 00000000000..c740486853d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-horovod/package.py
@@ -0,0 +1,130 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+class PyHorovod(PythonPackage):
+    """Horovod is a distributed deep learning training framework for
+    TensorFlow, Keras, PyTorch, and Apache MXNet."""
+
+    homepage = "https://github.com/horovod"
+    git = "https://github.com/horovod/horovod.git"
+
+    maintainers = ['adamjstewart']
+
+    version('master', branch='master', submodules=True)
+    version('0.19.1', tag='v0.19.1', submodules=True)
+    version('0.19.0', tag='v0.19.0', submodules=True)
+    version('0.18.2', tag='v0.18.2', submodules=True)
+    version('0.18.1', tag='v0.18.1', submodules=True)
+    version('0.18.0', tag='v0.18.0', submodules=True)
+    version('0.17.1', tag='v0.17.1', submodules=True)
+    version('0.17.0', tag='v0.17.0', submodules=True)
+    version('0.16.4', tag='v0.16.4', submodules=True)
+    version('0.16.3', tag='v0.16.3', submodules=True)
+    version('0.16.2', tag='v0.16.2', submodules=True)
+
+    # https://github.com/horovod/horovod/blob/master/docs/install.rst
+    variant('frameworks', default='pytorch',
+            description='Deep learning frameworks to build support for',
+            values=('tensorflow', 'pytorch', 'mxnet', 'keras', 'spark'),
+            multi=True)
+    variant('controllers', default='mpi',
+            description='Controllers to coordinate work between processes',
+            values=('mpi', 'gloo'), multi=True)
+    variant('tensor_ops', default='nccl',
+            description='Framework to use for GPU/CPU operations',
+            values=('nccl', 'mpi', 'gloo', 'ccl'), multi=False)
+
+    # Required dependencies
+    depends_on('py-setuptools', type='build')
+    depends_on('py-cloudpickle', type=('build', 'run'))
+    depends_on('py-psutil', type=('build', 'run'))
+    depends_on('py-pyyaml', type=('build', 'run'))
+    depends_on('py-six', type=('build', 'run'))
+
+    # Framework dependencies
+    depends_on('py-tensorflow@1.1.0:', type=('build', 'link', 'run'), when='frameworks=tensorflow')
+    depends_on('py-torch@0.4.0:', type=('build', 'run'), when='frameworks=pytorch')
+    depends_on('py-torchvision', type=('build', 'run'), when='frameworks=pytorch')
+    depends_on('py-cffi@1.4.0:', type=('build', 'run'), when='frameworks=pytorch')
+    depends_on('mxnet@1.4.1:+python', type=('build', 'link', 'run'), when='frameworks=mxnet')
+    depends_on('py-keras@2.0.8,2.1.2:', type=('build', 'run'), when='frameworks=keras')
+    depends_on('py-h5py@2.9:', type=('build', 'run'), when='frameworks=spark')
+    depends_on('py-numpy', type=('build', 'run'), when='frameworks=spark')
+    depends_on('py-petastorm@0.8.2', type=('build', 'run'), when='frameworks=spark')
+    depends_on('py-pyarrow@0.15.0:', type=('build', 'run'), when='frameworks=spark')
+    depends_on('py-pyspark@2.3.2:', type=('build', 'run'), when='frameworks=spark')
+
+    # Controller dependencies
+    depends_on('mpi', when='controllers=mpi')
+    # There does not appear to be a way to use an external Gloo installation
+    depends_on('cmake', type='build', when='controllers=gloo')
+
+    # Tensor Operations dependencies
+    depends_on('nccl', when='tensor_ops=nccl')
+    depends_on('mpi', when='tensor_ops=mpi')
+    # There does not appear to be a way to use an external Gloo installation
+    depends_on('cmake', type='build',
+               when='tensor_ops=gloo')
+
+    # Test dependencies
+    depends_on('py-mock', type='test')
+    depends_on('py-pytest', type='test')
+    depends_on('py-pytest-forked', type='test')
+
+    conflicts('controllers=gloo', when='platform=darwin', msg='Gloo cannot be compiled on MacOS')
+
+    # https://github.com/horovod/horovod/pull/1835
+    patch('fma.patch', when='@0.19.0:0.19.1')
+
+    def setup_build_environment(self, env):
+        # Frameworks
+        if 'frameworks=tensorflow' in self.spec:
+            env.set('HOROVOD_WITH_TENSORFLOW', 1)
+        else:
+            env.set('HOROVOD_WITHOUT_TENSORFLOW', 1)
+        if 'frameworks=pytorch' in self.spec:
+            env.set('HOROVOD_WITH_PYTORCH', 1)
+        else:
+            env.set('HOROVOD_WITHOUT_PYTORCH', 1)
+        if 'frameworks=mxnet' in self.spec:
+            env.set('HOROVOD_WITH_MXNET', 1)
+        else:
+            env.set('HOROVOD_WITHOUT_MXNET', 1)
+
+        # Controllers
+        if 'controllers=mpi' in self.spec:
+            env.set('HOROVOD_WITH_MPI', 1)
+        else:
+            env.set('HOROVOD_WITHOUT_MPI', 1)
+        if 'controllers=gloo' in self.spec:
+            env.set('HOROVOD_WITH_GLOO', 1)
+        else:
+            env.set('HOROVOD_WITHOUT_GLOO', 1)
+
+        # Tensor Operations
+        if 'tensor_ops=nccl' in self.spec:
+            env.set('HOROVOD_GPU', 'CUDA')
+
+            env.set('HOROVOD_CUDA_HOME', self.spec['cuda'].prefix)
+            env.set('HOROVOD_CUDA_INCLUDE',
+                    self.spec['cuda'].headers.directories[0])
+            env.set('HOROVOD_CUDA_LIB', self.spec['cuda'].libs.directories[0])
+
+            env.set('HOROVOD_NCCL_HOME', self.spec['nccl'].prefix)
+            env.set('HOROVOD_NCCL_INCLUDE',
+                    self.spec['nccl'].headers.directories[0])
+            env.set('HOROVOD_NCCL_LIB', self.spec['nccl'].libs.directories[0])
+
+            env.set('HOROVOD_GPU_ALLREDUCE', 'NCCL')
+            env.set('HOROVOD_GPU_BROADCAST', 'NCCL')
+        else:
+            env.set('HOROVOD_CPU_OPERATIONS',
+                    self.spec.variants['tensor_ops'].value.upper())
+
+    @run_after('install')
+    @on_package_attributes(run_tests=True)
+    def install_test(self):
+        horovodrun = Executable(self.prefix.bin.horovodrun)
+        horovodrun('--check-build')
diff --git a/var/spack/repos/builtin/packages/py-humanfriendly/package.py b/var/spack/repos/builtin/packages/py-humanfriendly/package.py
index a1b63e82332..c023e2e41bf 100644
--- a/var/spack/repos/builtin/packages/py-humanfriendly/package.py
+++ b/var/spack/repos/builtin/packages/py-humanfriendly/package.py
@@ -8,9 +8,12 @@ class PyHumanfriendly(PythonPackage):
     """Human friendly output for text interfaces using Python"""
 
-    homepage = "https://pypi.org/project/humanfriendly/"
-    url = "https://files.pythonhosted.org/packages/26/71/e7daf57e819a70228568ff5395fdbc4de81b63067b93167e07825fcf0bcf/humanfriendly-4.18.tar.gz"
+    homepage = "https://humanfriendly.readthedocs.io/"
+    url = "https://pypi.io/packages/source/h/humanfriendly/humanfriendly-8.1.tar.gz"
 
+    version('8.1', sha256='25c2108a45cfd1e8fbe9cdb30b825d34ef5d5675c8e11e4775c9aedbfb0bdee2')
     version('4.18', sha256='33ee8ceb63f1db61cce8b5c800c531e1a61023ac5488ccde2ba574a85be00a85')
 
+    depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
     depends_on('py-setuptools', type='build')
+    depends_on('py-monotonic', when='^python@:2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-ics/package.py b/var/spack/repos/builtin/packages/py-ics/package.py
index 7c91e2c5ff0..c2c4a71346a 100644
--- a/var/spack/repos/builtin/packages/py-ics/package.py
+++ b/var/spack/repos/builtin/packages/py-ics/package.py
@@ -29,6 +29,7 @@ class PyIcs(PythonPackage):
     homepage = "https://github.com/C4ptainCrunch/ics.py"
     url = "https://github.com/C4ptainCrunch/ics.py/archive/v0.6.tar.gz"
 
+    version('0.7',
sha256='48c637e5eb8dfc817b1f3f6b3f662ba19cfcc25f8f71eb42f5d07e6f2c573994') version('0.6', sha256='4947263136202d0489d4f5e5c7175dfd2db5d3508b8b003ddeaef96347f68830') depends_on('python@3.6:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-jprops/package.py b/var/spack/repos/builtin/packages/py-jprops/package.py index 16fed619ddd..2e6749077f3 100644 --- a/var/spack/repos/builtin/packages/py-jprops/package.py +++ b/var/spack/repos/builtin/packages/py-jprops/package.py @@ -10,7 +10,7 @@ class PyJprops(PythonPackage): """Java properties file parser for Python""" homepage = "https://github.com/mgood/jprops/" - url = "https://pypi.org/packages/source/j/jprops/jprops-2.0.2.tar.gz" + url = "https://pypi.io/packages/source/j/jprops/jprops-2.0.2.tar.gz" version('2.0.2', sha256='d297231833b6cd0a3f982a48fe148a7f9817f2895661743d166b267e4d3d5b2c') diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index e5d898f1df4..4e5dbf91356 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -13,7 +13,7 @@ class PyMatplotlib(PythonPackage): and interactive visualizations in Python.""" homepage = "https://matplotlib.org/" - url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-3.2.0.tar.gz" + url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-3.2.1.tar.gz" maintainers = ['adamjstewart'] @@ -27,6 +27,7 @@ class PyMatplotlib(PythonPackage): 'matplotlib.testing.jpl_units' ] + version('3.2.1', sha256='ffe2f9cdcea1086fc414e82f42271ecf1976700b8edd16ca9d376189c6d93aee') version('3.2.0', sha256='651d76daf9168250370d4befb09f79875daa2224a9096d97dfc3ed764c842be4') version('3.1.3', sha256='db3121f12fb9b99f105d1413aebaeb3d943f269f3d262b45586d12765866f0c6') version('3.1.2', sha256='8e8e2c2fe3d873108735c6ee9884e6f36f467df4a143136209cff303b183bada') diff --git a/var/spack/repos/builtin/packages/py-mayavi/package.py b/var/spack/repos/builtin/packages/py-mayavi/package.py new file mode 100644 index 00000000000..1fe477bbc70 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mayavi/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyMayavi(PythonPackage): + """Mayavi: 3D visualization of scientific data in Python.""" + + homepage = "https://docs.enthought.com/mayavi/mayavi/index.html" + url = "https://pypi.io/packages/source/m/mayavi/mayavi-4.7.1.tar.bz2" + + version('4.7.1', sha256='be51fb6f886f304f7c593c907e6a2e88d7919f8f446cdccfcd184fa35b3db724') + + depends_on('py-setuptools', type='build') + depends_on('py-apptools', type=('build', 'run')) + depends_on('py-envisage', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-pyface@6.1.1:', type=('build', 'run')) + depends_on('py-pygments', type=('build', 'run')) + depends_on('py-traits@4.6.0:', type=('build', 'run')) + depends_on('py-traitsui@6.0.0:', type=('build', 'run')) + depends_on('vtk+python', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-merlin/package.py b/var/spack/repos/builtin/packages/py-merlin/package.py index 7ae05a8014c..c4185e9193c 100644 --- a/var/spack/repos/builtin/packages/py-merlin/package.py +++ b/var/spack/repos/builtin/packages/py-merlin/package.py @@ -7,15 +7,30 @@ class PyMerlin(PythonPackage): - """A custom version of Phillip J. Eby's setuptools.""" + """Merlin Workflow for HPC.""" - homepage = "https://pypi.org/project/merlin/" - url = "https://pypi.io/packages/source/m/merlin/merlin-1.8.tar.gz" + homepage = "https://github.com/LLNL/merlin" + url = "https://pypi.io/packages/source/m/merlin/merlin-1.4.1.tar.gz" + git = "https://github.com/LLNL/merlin.git" - version('1.8', sha256='a1ba9c13c74daa1724dd3820f1c241d7594d487b11f35347606986028c1881fd') + version('1.4.1', sha256='9d515cfdbcde2443892afd92b78dbc5bf2aed2060ed3a336e683188e015bca7c') + version('master', branch='master') + version('develop', branch='develop') - depends_on('python@:2', type=('build', 'run')) + depends_on('python@3.6:', type=('build', 'run')) + depends_on('py-setuptools', type=('build', 'run')) - def test(self): - # Unit tests are missing from tarball - pass + depends_on('py-pytest', type='test') + + depends_on('py-cached-property', type=('build', 'run')) + depends_on('py-celery@4.3.0:+redis', type=('build', 'run')) + depends_on('py-coloredlogs@10.0:', type=('build', 'run')) + depends_on('py-cryptography', type=('build', 'run')) + depends_on('py-importlib-resources', when="^python@3.0:3.6.99", type=('build', 'run')) + depends_on('py-maestrowf@1.1.7dev0:', when="@1.2.0:", type=('build', 'run')) + depends_on('py-maestrowf@1.1.6:', when="@:1.1.99", type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-parse', type=('build', 'run')) + depends_on('py-psutil@5.1.0:', type=('build', 'run')) + depends_on('py-pyyaml@5.1.2:', type=('build', 'run')) + depends_on('py-tabulate', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-merlinwf/package.py b/var/spack/repos/builtin/packages/py-merlinwf/package.py deleted file mode 100644 index 2082e779978..00000000000 --- a/var/spack/repos/builtin/packages/py-merlinwf/package.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - - -class PyMerlinwf(PythonPackage): - """Merlin Workflow for HPC.""" - - homepage = "https://github.com/LLNL/merlin" - url = "https://pypi.io/packages/source/m/merlinwf/merlinwf-1.2.3.tar.gz" - git = "https://github.com/LLNL/merlin.git" - - version('1.2.3', sha256='6b13a315f3e8e2894ea05d9cc072639f02eaf71ae0fdbd2bafebd1c20c8470ab') - version('1.1.1', sha256='306055a987e42a79ce348a3f9d71293ed8a9b7f5909c26b6fd233d6a176fff6d') - version('1.0.5', sha256='d66f50eac84ff9d7aa484f2d9655dc60f0352196d333284d81b6623a6f0aa180') - version('master', branch='master') - version('develop', branch='develop') - - depends_on('python@3.6:', type=('build', 'run')) - depends_on('py-setuptools', type=('build', 'run')) - - depends_on('py-pytest', type='test') - - depends_on('py-cached-property', type=('build', 'run')) - depends_on('py-celery@4.3.0:+redis', type=('build', 'run')) - depends_on('py-coloredlogs@10.0:', type=('build', 'run')) - depends_on('py-cryptography', type=('build', 'run')) - depends_on('py-importlib-resources', when="^python@3.0:3.6.99", type=('build', 'run')) - depends_on('py-maestrowf@1.1.7dev0:', when="@1.2.0:", type=('build', 'run')) - depends_on('py-maestrowf@1.1.6:', when="@:1.1.99", type=('build', 'run')) - depends_on('py-numpy', type=('build', 'run')) - depends_on('py-parse', type=('build', 'run')) - depends_on('py-psutil@5.1.0:', type=('build', 'run')) - depends_on('py-pyyaml@5.1.2:', type=('build', 'run')) - depends_on('py-tabulate', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-multiprocess/package.py b/var/spack/repos/builtin/packages/py-multiprocess/package.py index 97a58a905d9..f382ff050fa 100644 --- a/var/spack/repos/builtin/packages/py-multiprocess/package.py +++ b/var/spack/repos/builtin/packages/py-multiprocess/package.py @@ -12,15 +12,17 @@ class PyMultiprocess(PythonPackage): homepage = "https://github.com/uqfoundation/multiprocess" url = "https://pypi.io/packages/source/m/multiprocess/multiprocess-0.70.5.zip" + version('0.70.9', sha256='9fd5bd990132da77e73dec6e9613408602a4612e1d73caf2e2b813d2b61508e5') version('0.70.7', sha256='3394f1fbd0d87112690a877e49eb7917d851ee8d822294d522dd4deae12febdb') version('0.70.5', sha256='c4c196f3c4561dc1d78139c3e73709906a222d2fc166ef3eef895d8623df7267') version('0.70.4', sha256='a692c6dc8392c25b29391abb58a9fbdc1ac38bca73c6f27d787774201e68e12c') - depends_on('python@2.6:2.8,3.1:') + depends_on('python@2.5:2.8,3.1:', type=('build', 'run')) depends_on('py-setuptools@0.6:', type='build') depends_on('py-dill@0.2.6:', type=('build', 'run')) depends_on('py-dill@0.2.9:', type=('build', 'run'), when='@0.70.7:') + depends_on('py-dill@0.3.1:', type=('build', 'run'), when='@0.70.9:') def url_for_version(self, version): url = self.url.rsplit('/', 1)[0] diff --git a/var/spack/repos/builtin/packages/py-neobolt/package.py b/var/spack/repos/builtin/packages/py-neobolt/package.py new file mode 100644 index 00000000000..7e19e529fd9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-neobolt/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyNeobolt(PythonPackage): + """Neo4j Bolt connector for Python""" + + homepage = "https://github.com/neo4j-drivers/neobolt" + url = "https://pypi.io/packages/source/n/neobolt/neobolt-1.7.16.tar.gz" + + version('1.7.16', sha256='ca4e87679fe3ed39aec23638658e02dbdc6bbc3289a04e826f332e05ab32275d') + + depends_on('py-setuptools', type='build') + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-neotime/package.py b/var/spack/repos/builtin/packages/py-neotime/package.py new file mode 100644 index 00000000000..31769361c14 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-neotime/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyNeotime(PythonPackage): + """Nanosecond resolution temporal types""" + + homepage = "https://neotime.readthedocs.io/" + url = "https://pypi.io/packages/source/n/neotime/neotime-1.7.4.tar.gz" + + version('1.7.4', sha256='4e0477ba0f24e004de2fa79a3236de2bd941f20de0b5db8d976c52a86d7363eb') + + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-pytz', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py index da616eefd4e..04f82dac03a 100644 --- a/var/spack/repos/builtin/packages/py-networkx/package.py +++ b/var/spack/repos/builtin/packages/py-networkx/package.py @@ -9,16 +9,29 @@ class PyNetworkx(PythonPackage): """NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks.""" - homepage = "http://networkx.github.io/" - url = "https://pypi.io/packages/source/n/networkx/networkx-1.11.tar.gz" - version('2.2', sha256='45e56f7ab6fe81652fb4bc9f44faddb0e9025f469f602df14e3b2551c2ea5c8b', - url='https://pypi.io/packages/source/n/networkx/networkx-2.2.zip') - version('2.1', sha256='64272ca418972b70a196cb15d9c85a5a6041f09a2f32e0d30c0255f25d458bb1', - url='https://pypi.io/packages/source/n/networkx/networkx-2.1.zip') + homepage = "http://networkx.github.io/" + url = "https://pypi.io/packages/source/n/networkx/networkx-2.4.tar.gz" + + version('2.4', sha256='f8f4ff0b6f96e4f9b16af6b84622597b5334bf9cae8cf9b2e42e7985d5c95c64') + version('2.3', sha256='8311ddef63cf5c5c5e7c1d0212dd141d9a1fe3f474915281b73597ed5f1d4e3d') + version('2.2', sha256='45e56f7ab6fe81652fb4bc9f44faddb0e9025f469f602df14e3b2551c2ea5c8b') + version('2.1', sha256='64272ca418972b70a196cb15d9c85a5a6041f09a2f32e0d30c0255f25d458bb1') + version('2.0', sha256='cd5ff8f75d92c79237f067e2f0876824645d37f017cfffa5b7c9678cae1454aa') version('1.11', sha256='0d0e70e10dfb47601cbb3425a00e03e2a2e97477be6f80638fef91d54dd1e4b8') version('1.10', sha256='ced4095ab83b7451cec1172183eff419ed32e21397ea4e1971d92a5808ed6fb8') - depends_on('py-decorator', type=('build', 'run')) - depends_on('py-decorator@4.1.0:', type=('build', 'run'), when='@2.1:') + depends_on('python@2.7:', type=('build', 'run')) + depends_on('python@3.5:', type=('build', 'run'), when='@2.3:') depends_on('py-setuptools', type='build') + depends_on('py-decorator@3.4.0:', type=('build', 'run')) + depends_on('py-decorator@4.1.0:', type=('build', 'run'), 
+               when='@2.0:')
+    depends_on('py-decorator@4.3.0:', type=('build', 'run'), when='@2.2:')
+
+    def url_for_version(self, version):
+        ext = 'tar.gz'
+        if Version('2.0') <= version <= Version('2.3'):
+            ext = 'zip'
+
+        url = 'https://pypi.io/packages/source/n/networkx/networkx-{0}.{1}'
+        return url.format(version, ext)
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
index c685a6fb4cc..639ff48d8e5 100644
--- a/var/spack/repos/builtin/packages/py-numpy/package.py
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -16,7 +16,7 @@ class PyNumpy(PythonPackage):
     number capabilities"""
 
     homepage = "https://numpy.org/"
-    url = "https://pypi.io/packages/source/n/numpy/numpy-1.18.1.zip"
+    url = "https://pypi.io/packages/source/n/numpy/numpy-1.18.2.zip"
     git = "https://github.com/numpy/numpy.git"
 
     maintainers = ['adamjstewart']
@@ -30,6 +30,7 @@ class PyNumpy(PythonPackage):
     ]
 
     version('master', branch='master')
+    version('1.18.2', sha256='e7894793e6e8540dbeac77c87b489e331947813511108ae097f1715c018b8f3d')
     version('1.18.1', sha256='b6ff59cee96b454516e47e7721098e6ceebef435e3e21ac2d6c3b8b02628eb77')
     version('1.18.0', sha256='a9d72d9abaf65628f0f31bbb573b7d9304e43b1e6bbae43149c17737a42764c4')
     version('1.17.5', sha256='16507ba6617f62ae3c6ab1725ae6f550331025d4d9a369b83f6d5a470446c342')
diff --git a/var/spack/repos/builtin/packages/py-onnx/package.py b/var/spack/repos/builtin/packages/py-onnx/package.py
index fd6d6b0b6db..b92e910db27 100644
--- a/var/spack/repos/builtin/packages/py-onnx/package.py
+++ b/var/spack/repos/builtin/packages/py-onnx/package.py
@@ -28,3 +28,7 @@ class PyOnnx(PythonPackage):
     depends_on('py-six', type=('build', 'run'))
     depends_on('py-typing@3.6.4:', type=('build', 'run'))
     depends_on('py-typing-extensions@3.6.4:', type=('build', 'run'))
+    depends_on('cmake@3.1:', type='build')
+
+    # 'python_out' does not recognize dllexport_decl.
+ patch('remove_dllexport_decl.patch', when='@:1.6.0') diff --git a/var/spack/repos/builtin/packages/py-onnx/remove_dllexport_decl.patch b/var/spack/repos/builtin/packages/py-onnx/remove_dllexport_decl.patch new file mode 100644 index 00000000000..ede1fe66f2a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-onnx/remove_dllexport_decl.patch @@ -0,0 +1,11 @@ +--- spack-src/CMakeLists.txt.org 2020-03-24 14:01:58.856142450 +0900 ++++ spack-src/CMakeLists.txt 2020-03-24 14:01:05.715872685 +0900 +@@ -204,7 +204,7 @@ + ${ONNX_DLLEXPORT_STR}${CMAKE_CURRENT_BINARY_DIR}) + if(BUILD_ONNX_PYTHON) + list(APPEND PROTOC_ARGS --python_out +- ${ONNX_DLLEXPORT_STR}${CMAKE_CURRENT_BINARY_DIR}) ++ ${CMAKE_CURRENT_BINARY_DIR}) + if(ONNX_GEN_PB_TYPE_STUBS) + # Haven't figured out how to generate mypy stubs on Windows yet + if(NOT WIN32) diff --git a/var/spack/repos/builtin/packages/py-pep8-naming/package.py b/var/spack/repos/builtin/packages/py-pep8-naming/package.py index ed123b773d1..fe3e929996c 100644 --- a/var/spack/repos/builtin/packages/py-pep8-naming/package.py +++ b/var/spack/repos/builtin/packages/py-pep8-naming/package.py @@ -9,10 +9,11 @@ class PyPep8Naming(PythonPackage): """Check PEP-8 naming conventions, plugin for flake8.""" - homepage = "https://pypi.org/project/pep8-naming/" - url = "https://files.pythonhosted.org/packages/3e/4a/125425d6b1e017f48dfc9c961f4bb9510168db7a090618906c750184ed03/pep8-naming-0.7.0.tar.gz" + homepage = "https://github.com/PyCQA/pep8-naming" + url = "https://pypi.io/packages/source/p/pep8-naming/pep8-naming-0.10.0.tar.gz" - extends('python', ignore='bin/(flake8|pyflakes|pycodestyle)') - version('0.7.0', sha256='624258e0dd06ef32a9daf3c36cc925ff7314da7233209c5b01f7e5cdd3c34826') + version('0.10.0', sha256='f3b4a5f9dd72b991bf7d8e2a341d2e1aa3a884a769b5aaac4f56825c1763bf3a') + version('0.7.0', sha256='624258e0dd06ef32a9daf3c36cc925ff7314da7233209c5b01f7e5cdd3c34826') - depends_on('py-flake8-polyfill', type='run') + depends_on('py-setuptools', type='build') + depends_on('py-flake8-polyfill@1.0.2:1.999', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-petastorm/package.py b/var/spack/repos/builtin/packages/py-petastorm/package.py new file mode 100644 index 00000000000..2c6cc5e34ff --- /dev/null +++ b/var/spack/repos/builtin/packages/py-petastorm/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyPetastorm(PythonPackage): + """Petastorm is a library enabling the use of Parquet storage from + Tensorflow, Pytorch, and other Python-based ML training frameworks.""" + + homepage = "https://github.com/uber/petastorm" + url = "https://pypi.io/packages/source/p/petastorm/petastorm-0.8.2.tar.gz" + + maintainers = ['adamjstewart'] + + version('0.8.2', sha256='7782c315e1ee8d15c7741e3eea41e77b9efce661cf58aa0220a801db64f52f91') + + depends_on('py-setuptools', type='build') + depends_on('py-dill@0.2.1:', type=('build', 'run')) + depends_on('py-diskcache@3.0.0:', type=('build', 'run')) + depends_on('py-future@0.10.2:', type=('build', 'run')) + depends_on('py-futures@2.0:', type=('build', 'run'), when='^python@:2') + depends_on('py-numpy@1.13.3:', type=('build', 'run')) + depends_on('py-packaging@15.0:', type=('build', 'run')) + depends_on('py-pandas@0.19.0:', type=('build', 'run')) + depends_on('py-psutil@4.0.0:', type=('build', 'run')) + depends_on('py-pyspark@2.1.0:', type=('build', 'run')) + depends_on('py-pyzmq@14.0.0:', type=('build', 'run')) + depends_on('py-pyarrow@0.12.0:', type=('build', 'run')) + depends_on('py-six@1.5.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-portalocker/package.py b/var/spack/repos/builtin/packages/py-portalocker/package.py new file mode 100644 index 00000000000..f4aa5145509 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-portalocker/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPortalocker(PythonPackage): + """Portalocker is a library to provide an easy API to file + locking.""" + + homepage = "https://github.com/WoLpH/portalocker" + url = "https://github.com/WoLpH/portalocker/archive/v1.6.0.tar.gz" + + version('1.6.0', sha256='084ff315ccb9fb38a7c06155d409da5df29647da7c6d2bc2b24637f9f79001ff') + + depends_on('py-setuptools@38.3.0:', type='build') diff --git a/var/spack/repos/builtin/packages/py-progress/package.py b/var/spack/repos/builtin/packages/py-progress/package.py index 3ae16e5e59e..cff57e9f779 100644 --- a/var/spack/repos/builtin/packages/py-progress/package.py +++ b/var/spack/repos/builtin/packages/py-progress/package.py @@ -10,7 +10,7 @@ class PyProgress(PythonPackage): """Easy progress reporting for Python""" homepage = "https://github.com/verigak/progress/" - url = "https://pypi.org/packages/source/p/progress/progress-1.4.tar.gz" + url = "https://pypi.io/packages/source/p/progress/progress-1.4.tar.gz" version('1.4', sha256='5e2f9da88ed8236a76fffbee3ceefd259589cf42dfbc2cec2877102189fae58a') diff --git a/var/spack/repos/builtin/packages/py-progressbar2/package.py b/var/spack/repos/builtin/packages/py-progressbar2/package.py index 31f2688a570..196d3035916 100644 --- a/var/spack/repos/builtin/packages/py-progressbar2/package.py +++ b/var/spack/repos/builtin/packages/py-progressbar2/package.py @@ -11,10 +11,11 @@ class PyProgressbar2(PythonPackage): """A progress bar for Python 2 and Python 3""" homepage = "https://github.com/WoLpH/python-progressbar" - url = "https://files.pythonhosted.org/packages/source/p/progressbar2/progressbar2-3.39.3.tar.gz" + url = "https://pypi.io/packages/source/p/progressbar2/progressbar2-3.50.1.tar.gz" + version('3.50.1', sha256='2c21c14482016162852c8265da03886c2b4dea6f84e5a817ad9b39f6bd82a772') 
version('3.39.3', sha256='8e5b5419e04193bb7c3fea71579937bbbcd64c26472b929718c2fe7ec420fe39') depends_on('py-setuptools', type='build') depends_on('py-six', type=('build', 'run')) - depends_on('py-python-utils', type=('build', 'run')) + depends_on('py-python-utils@2.3.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-py2neo/package.py b/var/spack/repos/builtin/packages/py-py2neo/package.py index ef4ed46cd59..69e98d7b0b6 100644 --- a/var/spack/repos/builtin/packages/py-py2neo/package.py +++ b/var/spack/repos/builtin/packages/py-py2neo/package.py @@ -11,12 +11,22 @@ class PyPy2neo(PythonPackage): within Python applications and from the command line.""" homepage = "http://py2neo.org/" - url = "https://github.com/nigelsmall/py2neo/archive/py2neo-2.0.8.tar.gz" + url = "https://pypi.io/packages/source/p/py2neo/py2neo-2.0.8.tar.gz" - version('2.0.8', sha256='57b4a1c4aa800e03904b2adfd7c8ec467b072bae2d24baf150fd580916255f2e') - version('2.0.7', sha256='aa7c86fec70823111d2f932cb20a978889f1c47c2f58461309f644ecb9a22204') - version('2.0.6', sha256='bcf00ebc82a80c7e2da00288e8f90f81682abfc991e19d92d21726c2deac823f') - version('2.0.5', sha256='024b42261b06e5e2c92a1f24e62398847f090862005add0b5c69a79a7e1e87b5') - version('2.0.4', sha256='19074b7b892f2e989f39eae21fc59b26a05e1a820adad8aa58bc470b70d9056d') + version('4.3.0', sha256='a218ccb4b636e3850faa6b74ebad80f00600217172a57f745cf223d38a219222') + version('2.0.8', sha256='06167f5a91a0d9b9b73431baacd876f2d507650a681fdce1fcf3b383a9b991c1') + version('2.0.7', sha256='9b154053eb93c7f5fb3ebd48b6a5b99df450d3f2e9c6682153c6f8d59369378c') + version('2.0.6', sha256='6bb828d6d3e48b4d095b3f7d79dbb690a47633f0a9812eb62f141b042bab3186') + version('2.0.5', sha256='2c04d4223d2d356c4800c586f30c048757334f9391553c852c29aebf2368d101') + version('2.0.4', sha256='727726b87268ca1e929191b960a5473409e5bd81559ee83a304951104bb6b866') depends_on("py-setuptools", type='build') + depends_on("py-certifi", type=('build', 'run'), when='@4.3.0:') + depends_on("py-click@7.0", type=('build', 'run'), when='@4.3.0:') + depends_on("py-colorama", type=('build', 'run'), when='@4.3.0:') + depends_on("py-neobolt@1.7.12:1.7.999", type=('build', 'run'), when='@4.3.0:') + depends_on("py-neotime@1.7.4:1.7.999", type=('build', 'run'), when='@4.3.0:') + depends_on("py-prompt-toolkit@2.0.7:2.0.999", type=('build', 'run'), when='@4.3.0:') + depends_on("py-pygments@2.3.1:2.3.999", type=('build', 'run'), when='@4.3.0:') + depends_on("py-pytz", type=('build', 'run'), when='@4.3.0:') + depends_on("py-urllib3@1.23:1.24", type=('build', 'run'), when='@4.3.0:') diff --git a/var/spack/repos/builtin/packages/py-pyarrow/package.py b/var/spack/repos/builtin/packages/py-pyarrow/package.py index ed104821453..4daa0e00614 100644 --- a/var/spack/repos/builtin/packages/py-pyarrow/package.py +++ b/var/spack/repos/builtin/packages/py-pyarrow/package.py @@ -13,7 +13,7 @@ class PyPyarrow(PythonPackage): """ homepage = "http://arrow.apache.org" - url = 'https://pypi.org/packages/source/p/pyarrow/pyarrow-0.15.1.tar.gz' + url = 'https://pypi.io/packages/source/p/pyarrow/pyarrow-0.15.1.tar.gz' version('0.15.1', sha256='7ad074690ba38313067bf3bbda1258966d38e2037c035d08b9ffe3cce07747a5') version('0.12.1', sha256='10db6e486c918c3af999d0114a22d92770687e3a6607ea3f14e6748854824c2a') diff --git a/var/spack/repos/builtin/packages/py-pybind11/package.py b/var/spack/repos/builtin/packages/py-pybind11/package.py index cda6cada32f..3fe7402a0f5 100644 --- a/var/spack/repos/builtin/packages/py-pybind11/package.py +++ 
b/var/spack/repos/builtin/packages/py-pybind11/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import os class PyPybind11(CMakePackage): @@ -23,6 +24,7 @@ class PyPybind11(CMakePackage): maintainers = ['ax3l'] version('master', branch='master') + version('2.5.0', sha256='97504db65640570f32d3fdf701c25a340c8643037c3b69aec469c10c93dc8504') version('2.4.3', sha256='1eed57bc6863190e35637290f97a20c81cfe4d9090ac0a24f3bbf08f265eb71d') version('2.3.0', sha256='0f34838f2c8024a6765168227ba587b3687729ebf03dc912f88ff75c7aa9cfe8') version('2.2.4', sha256='b69e83658513215b8d1443544d0549b7d231b9f201f6fc787a2b2218b408181e') @@ -56,6 +58,8 @@ def cmake_args(self): def setup_build_environment(self, env): env.set('PYBIND11_USE_CMAKE', 1) + # https://github.com/pybind/pybind11/pull/1995 + @when('@:2.4.99') def patch(self): """ see https://github.com/spack/spack/issues/13559 """ filter_file('import sys', @@ -74,9 +78,11 @@ def test(self): with working_dir('spack-test', create=True): # test include helper points to right location python = self.spec['python'].command - inc = python( + py_inc = python( '-c', 'import pybind11 as py; ' + self.spec['python'].package.print_string('py.get_include()'), - output=str) - assert inc.strip() == str(self.prefix.include) + output=str).strip() + for inc in [py_inc, self.prefix.include]: + inc_file = join_path(inc, 'pybind11', 'pybind11.h') + assert os.path.isfile(inc_file) diff --git a/var/spack/repos/builtin/packages/py-pyelftools/package.py b/var/spack/repos/builtin/packages/py-pyelftools/package.py index a69d0a2e7e9..0607560e0b3 100644 --- a/var/spack/repos/builtin/packages/py-pyelftools/package.py +++ b/var/spack/repos/builtin/packages/py-pyelftools/package.py @@ -10,6 +10,9 @@ class PyPyelftools(PythonPackage): """A pure-Python library for parsing and analyzing ELF files and DWARF debugging information""" homepage = "https://pypi.python.org/pypi/pyelftools" - url = "https://pypi.io/packages/source/p/pyelftools/pyelftools-0.23.tar.gz" + url = "https://pypi.io/packages/source/p/pyelftools/pyelftools-0.26.tar.gz" + version('0.26', sha256='86ac6cee19f6c945e8dedf78c6ee74f1112bd14da5a658d8c9d4103aed5756a2') version('0.23', sha256='fc57aadd096e8f9b9b03f1a9578f673ee645e1513a5ff0192ef439e77eab21de') + + depends_on('py-setuptools', when='@0.25:', type='build') diff --git a/var/spack/repos/builtin/packages/py-pyface/package.py b/var/spack/repos/builtin/packages/py-pyface/package.py new file mode 100644 index 00000000000..197e3d9ab24 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyface/package.py @@ -0,0 +1,33 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyPyface(PythonPackage): + """The pyface project contains a toolkit-independent GUI abstraction layer, + which is used to support the "visualization" features of the Traits + package. 
Thus, you can write code in terms of the Traits API (views, items, + editors, etc.), and let pyface and your selected toolkit and back-end take + care of the details of displaying them.""" + + homepage = "https://docs.enthought.com/pyface" + url = "https://pypi.io/packages/source/p/pyface/pyface-6.1.2.tar.gz" + + version('6.1.2', sha256='7c2ac3d5cbec85e8504b3b0b63e9307be12c6d710b46bae372ce6562d41f4fbc') + + variant('backend', default='pyqt5', description='Default backend', + values=('wx', 'pyqt', 'pyqt5', 'pyside'), multi=False) + + depends_on('py-setuptools', type='build') + depends_on('py-traits', type=('build', 'run')) + + # Backends + depends_on('py-wxpython@2.8.10:', when='backend=wx', type=('build', 'run')) + depends_on('py-numpy', when='backend=wx', type=('build', 'run')) + depends_on('py-pyqt4@4.10:', when='backend=pyqt', type=('build', 'run')) + depends_on('py-pygments', when='backend=pyqt', type=('build', 'run')) + depends_on('py-pyqt5@5:', when='backend=pyqt5', type=('build', 'run')) + depends_on('py-pygments', when='backend=pyqt5', type=('build', 'run')) + depends_on('py-pyside@1.2:', when='backend=pyside', type=('build', 'run')) + depends_on('py-pygments', when='backend=pyside', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-pyfits/package.py b/var/spack/repos/builtin/packages/py-pyfits/package.py new file mode 100644 index 00000000000..f013175d339 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyfits/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPyfits(PythonPackage): + """The PyFITS module is a Python library providing access to + FITS(Flexible Image Transport System) files.""" + + homepage = "https://github.com/spacetelescope/pyfits" + url = "https://github.com/spacetelescope/PyFITS/archive/3.5.tar.gz" + + version('3.5', sha256='fd32596ee09170a70ddc87d0dfc5503d860ef6b68abcff486d7aa6993dff6162') + + depends_on('py-setuptools', type='build') + depends_on('py-numpy', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-pyqt4/package.py b/var/spack/repos/builtin/packages/py-pyqt4/package.py index c0e3ae1085b..6c5af83cb26 100644 --- a/var/spack/repos/builtin/packages/py-pyqt4/package.py +++ b/var/spack/repos/builtin/packages/py-pyqt4/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * -import os class PyPyqt4(SIPPackage): @@ -29,19 +28,12 @@ class PyPyqt4(SIPPackage): version('4.11.3', sha256='853780dcdbe2e6ba785d703d059b096e1fc49369d3e8d41a060be874b8745686', url='http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz') - variant('qsci', default=False, description='Build with QScintilla python bindings') + # API files can be installed regardless if QScintilla is installed or not + variant('qsci_api', default=False, description='Install PyQt API file for QScintilla') # Supposedly can also be built with Qt 5 compatibility layer depends_on('qt@:4') - depends_on('qscintilla', when='+qsci') - - # For building Qscintilla python bindings - resource(name='qscintilla', - url='https://www.riverbankcomputing.com/static/Downloads/QScintilla/2.10.2/QScintilla_gpl-2.10.2.tar.gz', - sha256='14b31d20717eed95ea9bea4cd16e5e1b72cee7ebac647cba878e0f6db6a65ed0', - destination='spack-resource-qscintilla', - when='^qscintilla@2.10.2' - ) + depends_on('py-sip 
module=PyQt4.sip') # https://www.riverbankcomputing.com/static/Docs/PyQt4/installation.html def configure_file(self): @@ -53,49 +45,7 @@ def configure_args(self): '--sipdir', self.prefix.share.sip.PyQt4, '--stubsdir', join_path(site_packages_dir, 'PyQt4') ] - if '+qsci' in self.spec: - args.extend(['--qsci-api-destdir', self.prefix.share.qsci]) + if '+qsci_api' in self.spec: + args.extend(['--qsci-api', + '--qsci-api-destdir', self.prefix.share.qsci]) return args - - @run_after('install') - def make_qsci(self): - if '+qsci' in self.spec: - rsrc_py_path = os.path.join( - self.stage.source_path, - 'spack-resource-qscintilla/QScintilla_gpl-' + - str(self.spec['qscintilla'].version), 'Python') - with working_dir(rsrc_py_path): - pydir = join_path(site_packages_dir, 'PyQt4') - python = self.spec['python'].command - python('configure.py', - '--sip=' + self.prefix.bin.sip, - '--qsci-incdir=' + - self.spec['qscintilla'].prefix.include, - '--qsci-libdir=' + self.spec['qscintilla'].prefix.lib, - '--qsci-sipdir=' + self.prefix.share.sip.PyQt4, - '--apidir=' + self.prefix.share.qsci, - '--destdir=' + pydir, - '--pyqt-sipdir=' + self.prefix.share.sip.PyQt4, - '--sip-incdir=' + python_include_dir, - '--stubsdir=' + pydir) - - # Fix build errors - # "QAbstractScrollArea: No such file or directory" - # "qprinter.h: No such file or directory" - # ".../Qsci.so: undefined symbol: _ZTI10Qsci...." - qscipro = FileFilter('Qsci/Qsci.pro') - link_qscilibs = 'LIBS += -L' + self.prefix.lib +\ - ' -lqscintilla2_qt4' - qscipro.filter('TEMPLATE = lib', - 'TEMPLATE = lib\nQT += widgets' + - '\nQT += printsupport\n' + link_qscilibs) - - make() - - # Fix installation prefixes - makefile = FileFilter('Makefile') - makefile.filter(r'\$\(INSTALL_ROOT\)', '') - makefile = FileFilter('Qsci/Makefile') - makefile.filter(r'\$\(INSTALL_ROOT\)', '') - - make('install') diff --git a/var/spack/repos/builtin/packages/py-pyqt5/package.py b/var/spack/repos/builtin/packages/py-pyqt5/package.py index b91833389b8..e26c66a891c 100644 --- a/var/spack/repos/builtin/packages/py-pyqt5/package.py +++ b/var/spack/repos/builtin/packages/py-pyqt5/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * -import os class PyPyqt5(SIPPackage): @@ -26,26 +25,20 @@ class PyPyqt5(SIPPackage): 'PyQt5.QtXmlPatterns' ] + version('5.13.1', sha256='54b7f456341b89eeb3930e786837762ea67f235e886512496c4152ebe106d4af') version('5.13.0', sha256='0cdbffe5135926527b61cc3692dd301cd0328dd87eeaf1313e610787c46faff9') version('5.12.3', sha256='0db0fa37debab147450f9e052286f7a530404e2aaddc438e97a7dcdf56292110') - variant('qsci', default=False, description='Build with QScintilla python bindings') + # API files can be installed regardless if Qscintilla is installed or not + variant('qsci_api', default=False, description='Install PyQt API file for QScintilla') # Without opengl support, I got the following error: # sip: QOpenGLFramebufferObject is undefined depends_on('qt@5:+opengl') depends_on('python@2.6:', type=('build', 'run')) depends_on('py-enum34', type=('build', 'run'), when='^python@:3.3') - - depends_on('qscintilla', when='+qsci') - - # For building Qscintilla python bindings - resource(name='qscintilla', - url='https://www.riverbankcomputing.com/static/Downloads/QScintilla/2.10.2/QScintilla_gpl-2.10.2.tar.gz', - sha256='14b31d20717eed95ea9bea4cd16e5e1b72cee7ebac647cba878e0f6db6a65ed0', - destination='spack-resource-qscintilla', - when='^qscintilla@2.10.2' - ) + depends_on('py-sip module=PyQt5.sip', type=('build', 'run')) + 
depends_on('py-sip@:4.19.18 module=PyQt5.sip', type=('build', 'run'), when='@:5.13.0') # https://www.riverbankcomputing.com/static/Docs/PyQt5/installation.html def configure_args(self): @@ -57,52 +50,7 @@ def configure_args(self): self.spec['python'].package.site_packages_dir, 'PyQt5'), ] - if '+qsci' in self.spec: - args.extend(['--qsci-api-destdir', self.prefix.share.qsci]) + if '+qsci_api' in self.spec: + args.extend(['--qsci-api', + '--qsci-api-destdir', self.prefix.share.qsci]) return args - - @run_after('install') - def make_qsci(self): - if '+qsci' in self.spec: - rsrc_py_path = os.path.join( - self.stage.source_path, - 'spack-resource-qscintilla/QScintilla_gpl-' + - str(self.spec['qscintilla'].version), 'Python') - with working_dir(rsrc_py_path): - pydir = join_path( - self.prefix, - self.spec['python'].package.site_packages_dir, - 'PyQt5') - python = self.spec['python'].command - python('configure.py', '--pyqt=PyQt5', - '--sip=' + self.prefix.bin.sip, - '--qsci-incdir=' + - self.spec['qscintilla'].prefix.include, - '--qsci-libdir=' + self.spec['qscintilla'].prefix.lib, - '--qsci-sipdir=' + self.prefix.share.sip.PyQt5, - '--apidir=' + self.prefix.share.qsci, - '--destdir=' + pydir, - '--pyqt-sipdir=' + self.prefix.share.sip.PyQt5, - '--sip-incdir=' + python_include_dir, - '--stubsdir=' + pydir) - - # Fix build errors - # "QAbstractScrollArea: No such file or directory" - # "qprinter.h: No such file or directory" - # ".../Qsci.so: undefined symbol: _ZTI10Qsci...." - qscipro = FileFilter('Qsci/Qsci.pro') - link_qscilibs = 'LIBS += -L' + self.prefix.lib +\ - ' -lqscintilla2_qt5' - qscipro.filter('TEMPLATE = lib', - 'TEMPLATE = lib\nQT += widgets' + - '\nQT += printsupport\n' + link_qscilibs) - - make() - - # Fix installation prefixes - makefile = FileFilter('Makefile') - makefile.filter(r'\$\(INSTALL_ROOT\)', '') - makefile = FileFilter('Qsci/Makefile') - makefile.filter(r'\$\(INSTALL_ROOT\)', '') - - make('install') diff --git a/var/spack/repos/builtin/packages/py-pyspark/package.py b/var/spack/repos/builtin/packages/py-pyspark/package.py index 15285d3c304..7a7cf1d286a 100644 --- a/var/spack/repos/builtin/packages/py-pyspark/package.py +++ b/var/spack/repos/builtin/packages/py-pyspark/package.py @@ -10,7 +10,7 @@ class PyPyspark(PythonPackage): """Python bindings for Apache Spark""" homepage = "http://spark.apache.org" - url = "https://pypi.org/packages/source/p/pyspark/pyspark-2.3.0.tar.gz" + url = "https://pypi.io/packages/source/p/pyspark/pyspark-2.3.0.tar.gz" version('2.3.0', sha256='0b3536910e154c36a94239f0ba0a201f476aadc72006409e5787198ffd01986e') diff --git a/var/spack/repos/builtin/packages/py-pythia/package.py b/var/spack/repos/builtin/packages/py-pythia/package.py deleted file mode 100644 index 721d7470faf..00000000000 --- a/var/spack/repos/builtin/packages/py-pythia/package.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - - -class PyPythia(PythonPackage): - """Pythia refers to the Pyre framework and a collection of packages that - interact with it, such as an interface to the ACIS solid modelling package. 
- """ - - homepage = "https://geodynamics.org/cig/software/pythia/" - url = "https://geodynamics.org/cig/software/github/pythia/v0.8.1.18/pythia-0.8.1.18.tar.gz" - - version('0.8.1.18', sha256='f6025e6d70046dc71e375eded3d731506f8dd79e2e53b7e1436754439dcdef1e') - - depends_on('python@:2', type=('build', 'run')) - depends_on('py-merlin', type='build') diff --git a/var/spack/repos/builtin/packages/py-python-swiftclient/package.py b/var/spack/repos/builtin/packages/py-python-swiftclient/package.py new file mode 100644 index 00000000000..bd8f766624b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-python-swiftclient/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPythonSwiftclient(PythonPackage): + """This is a python client for the Swift API.""" + + homepage = "https://docs.openstack.org/python-swiftclient" + url = "https://pypi.io/packages/source/p/python-swiftclient/python-swiftclient-3.9.0.tar.gz" + + maintainers = ['ajkotobi'] + + import_modules = ['sys', 'setuptools', 'requests'] + + version('3.9.0', sha256='4f2097492e4c76e948882fc859bfa033ade09bed72f8e6b328e34a3467d9a377') + version('3.8.1', sha256='3a013303643f77a99befa05582dfb93671e1fba1aed9f4a517418129700aedb8') + version('3.8.0', sha256='107a9d5356663365a9f7c0b3a2b55da97a0a9ba7f10da2319b3972481510f33d') + version('3.7.1', sha256='06bda5a6f81ea132e5cb52d0eb0616a0ab0958b4ec0d1cb7f850f04bf178852f') + + depends_on('py-setuptools', type=('build', 'run')) + depends_on('python@2.7:', type=('build', 'run')) + depends_on('py-futures@3:', type=('build', 'run'), when='^python@:2') + depends_on('py-requests@1.1.0:', type=('build', 'run')) + depends_on('py-six@1.9:', type=('build', 'run')) + depends_on('py-pbr', type='build') diff --git a/var/spack/repos/builtin/packages/py-python-utils/package.py b/var/spack/repos/builtin/packages/py-python-utils/package.py index bc6057a0780..6c8e3aa732d 100644 --- a/var/spack/repos/builtin/packages/py-python-utils/package.py +++ b/var/spack/repos/builtin/packages/py-python-utils/package.py @@ -12,8 +12,12 @@ class PyPythonUtils(PythonPackage): which make common patterns shorter and easier.""" homepage = "https://github.com/WoLpH/python-utils" - url = "https://files.pythonhosted.org/packages/source/p/python-utils/python-utils-2.3.0.tar.gz" + url = "https://pypi.io/packages/source/p/python-utils/python-utils-2.4.0.tar.gz" + version('2.4.0', sha256='f21fc09ff58ea5ebd1fd2e8ef7f63e39d456336900f26bdc9334a03a3f7d8089') version('2.3.0', sha256='34aaf26b39b0b86628008f2ae0ac001b30e7986a8d303b61e1357dfcdad4f6d3') depends_on('py-setuptools', type='build') + depends_on('py-six', type=('build', 'run')) + depends_on('py-pytest', type='test') + depends_on('py-pytest-runner', type='test') diff --git a/var/spack/repos/builtin/packages/py-pywcs/package.py b/var/spack/repos/builtin/packages/py-pywcs/package.py new file mode 100644 index 00000000000..dea2adb2d92 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pywcs/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPywcs(PythonPackage): + """pywcs is a set of routines for + handling the FITS World Coordinate System (WCS) standard.""" + + homepage = "https://github.com/spacetelescope/pywcs" + url = "https://github.com/spacetelescope/pywcs/archive/1.12.1.tar.gz" + + version('1.12.1', sha256='efd4e0ea190e3a2521ebcde583452e126acdeac85cc8a9c78c8a96f10805b5e1') + + depends_on('python@2.6:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-d2to1@0.2.3:', type='build') + depends_on('py-stsci-distutils@0.3.2:', type='build') + depends_on('py-numpy@1.5.1:', type=('build', 'run')) + depends_on('py-pyfits@1.4:', type=('build', 'run')) + depends_on('py-astropy@0.3.1:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-requests-futures/package.py b/var/spack/repos/builtin/packages/py-requests-futures/package.py index 4c79bcf871a..a11d577e1cc 100644 --- a/var/spack/repos/builtin/packages/py-requests-futures/package.py +++ b/var/spack/repos/builtin/packages/py-requests-futures/package.py @@ -10,7 +10,7 @@ class PyRequestsFutures(PythonPackage): """Asynchronous Python HTTP Requests for Humans using Futures""" homepage = "https://github.com/ross/requests-futures" - url = "https://pypi.org/packages/source/r/requests-futures/requests-futures-1.0.0.tar.gz" + url = "https://pypi.io/packages/source/r/requests-futures/requests-futures-1.0.0.tar.gz" version('1.0.0', sha256='35547502bf1958044716a03a2f47092a89efe8f9789ab0c4c528d9c9c30bc148') diff --git a/var/spack/repos/builtin/packages/py-resultsfile/package.py b/var/spack/repos/builtin/packages/py-resultsfile/package.py new file mode 100644 index 00000000000..6f00a3ab1d7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-resultsfile/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyResultsfile(PythonPackage): + """Python module to read output files of quantum chemistry programs""" + + homepage = "https://gitlab.com/scemama/resultsFile" + url = "https://gitlab.com/scemama/resultsFile/-/archive/v1.0/resultsFile-v1.0.tar.gz" + git = "https://gitlab.com/scemama/resultsFile.git" + + maintainers = ['scemama'] + + version('2.0', sha256='2a34208254e4bea155695690437f6a59bf5f7b0ddb421d6c1a2d377510f018f7') + version('1.0', sha256='e029054b2727131da9684fa2ec9fb8b6a3225dc7f648216a9390267b2d5d60c3') + + depends_on('python@2.7:2.8.999', type=('build', 'run'), when='@1.0:1.999') + depends_on('python@3:', type=('build', 'run'), when='@2.0:') diff --git a/var/spack/repos/builtin/packages/py-scoop/package.py b/var/spack/repos/builtin/packages/py-scoop/package.py index 2fc309bbd63..e1e25abd019 100644 --- a/var/spack/repos/builtin/packages/py-scoop/package.py +++ b/var/spack/repos/builtin/packages/py-scoop/package.py @@ -13,10 +13,11 @@ class PyScoop(PythonPackage): environments, from heterogeneous grids to supercomputers.""" homepage = "https://github.com/soravux/scoop" - url = "https://files.pythonhosted.org/packages/source/s/scoop/scoop-0.7.1.1.tar.gz" + url = "https://pypi.io/packages/source/s/scoop/scoop-0.7.1.1.tar.gz" version('0.7.1.1', sha256='d8b6444c7bac901171e3327a97e241dde63f060354e162a65551fd8083ca62b4') depends_on('py-setuptools', type='build') depends_on('py-greenlet@0.3.4:', type=('build', 'run')) depends_on('py-pyzmq@13.1.0:', type=('build', 'run')) + depends_on('py-argparse@1.1:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-sentencepiece/package.py b/var/spack/repos/builtin/packages/py-sentencepiece/package.py new file mode 100644 index 00000000000..9d9552a47b9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sentencepiece/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PySentencepiece(PythonPackage): + """Unsupervised text tokenizer for Neural Network-based text generation. 
+ + These are the Python bindings.""" + + homepage = "https://github.com/google/sentencepiece/blob/master/python/README.md" + url = "https://github.com/google/sentencepiece/archive/v0.1.85.tar.gz" + + maintainers = ['adamjstewart'] + + version('0.1.85', sha256='dd4956287a1b6af3cbdbbd499b7227a859a4e3f41c9882de5e6bdd929e219ae6') + + depends_on('sentencepiece') + depends_on('sentencepiece@0.1.85', when='@0.1.85') + depends_on('pkgconfig', type='build') + depends_on('py-setuptools', type='build') + + build_directory = 'python' diff --git a/var/spack/repos/builtin/packages/py-sip/package.py b/var/spack/repos/builtin/packages/py-sip/package.py index 09c6b743b14..d454f05740c 100644 --- a/var/spack/repos/builtin/packages/py-sip/package.py +++ b/var/spack/repos/builtin/packages/py-sip/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import os class PySip(Package): @@ -16,6 +17,9 @@ class PySip(Package): hg = "https://www.riverbankcomputing.com/hg/sip" version('develop', hg=hg) # wasn't actually able to clone this + version('4.19.21', sha256='6af9979ab41590e8311b8cc94356718429ef96ba0e3592bdd630da01211200ae') + version('4.19.20', sha256='04cc2f87ac97e8718d8e1ef036e3ec26050ab44c21f9277618d5b67432fcbfd6') + version('4.19.19', sha256='5436b61a78f48c7e8078e93a6b59453ad33780f80c644e5f3af39f94be1ede44') version('4.19.18', sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e') version('4.19.15', sha256='2b5c0b2c0266b467b365c21376d50dde61a3236722ab87ff1e8dacec283eb610') version('4.19.13', sha256='e353a7056599bf5fbd5d3ff9842a6ab2ea3cf4e0304a0f925ec5862907c0d15e') @@ -53,3 +57,14 @@ def build(self, spec, prefix): def install(self, spec, prefix): make('install') + + @run_after('install') + def extend_path_setup(self): + # See github issue #14121 and PR #15297 + module = self.spec.variants['module'].value + if module != 'sip': + module = module.split('.')[0] + with working_dir(site_packages_dir): + with open(os.path.join(module, '__init__.py'), 'w') as f: + f.write('from pkgutil import extend_path\n') + f.write('__path__ = extend_path(__path__, __name__)\n') diff --git a/var/spack/repos/builtin/packages/py-smart-open/package.py b/var/spack/repos/builtin/packages/py-smart-open/package.py index 5def4a84f38..fd2ed92ef2b 100644 --- a/var/spack/repos/builtin/packages/py-smart-open/package.py +++ b/var/spack/repos/builtin/packages/py-smart-open/package.py @@ -13,9 +13,13 @@ class PySmartOpen(PythonPackage): different formats.""" homepage = "https://github.com/piskvorky/smart_open" - url = "https://github.com/RaRe-Technologies/smart_open/archive/1.8.4.tar.gz" + url = "https://pypi.io/packages/source/s/smart_open/smart_open-1.10.0.tar.gz" - version('1.8.4', sha256='788e07f035defcbb62e3c1e313329a70b0976f4f65406ee767db73ad5d2d04f9') + version('1.10.0', sha256='bea5624c0c2e49987c227bdf3596573157eccd96fd1d53198856c8d53948fa2c') + version('1.8.4', sha256='788e07f035defcbb62e3c1e313329a70b0976f4f65406ee767db73ad5d2d04f9') depends_on('py-setuptools', type='build') + depends_on('py-requests', type=('build', 'run')) depends_on('py-boto3', type=('build', 'run')) + depends_on('py-google-cloud-storage', type=('build', 'run')) + depends_on('py-bz2file', when='^python@:2', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-spatialist/package.py b/var/spack/repos/builtin/packages/py-spatialist/package.py index e1beee57e5f..ff7386fd00f 100644 --- a/var/spack/repos/builtin/packages/py-spatialist/package.py +++ 
b/var/spack/repos/builtin/packages/py-spatialist/package.py @@ -12,16 +12,24 @@ class PySpatialist(PythonPackage): processing using GDAL and OGR.""" homepage = "https://github.com/johntruckenbrodt/spatialist" - url = "https://files.pythonhosted.org/packages/source/s/spatialist/spatialist-0.2.8.tar.gz" + url = "https://pypi.io/packages/source/s/spatialist/spatialist-0.4.tar.gz" + maintainers = ['adamjstewart'] + + version('0.4', sha256='153b118022c06ad2d1d51fb6cd9ecbfc8020bc1995b643ec7fa689a8c5dde7e9') version('0.2.8', sha256='97de7f9c0fbf28497ef28970bdf8093a152e691a783e7edad22998cb235154c6') + depends_on('python@2.7.9:', type=('build', 'run')) depends_on('py-setuptools', type='build') + depends_on('py-setuptools-scm', type='build') depends_on('py-progressbar2', type=('build', 'run')) - depends_on('py-pathos@0.2.0:', type=('build', 'run')) - depends_on('py-numpy', type=('build', 'run')) - depends_on('py-matplotlib', type=('build', 'run')) - depends_on('py-jupyter-core', type=('build', 'run')) + depends_on('py-jupyter', type=('build', 'run')) depends_on('py-ipython', type=('build', 'run')) depends_on('py-ipywidgets', type=('build', 'run')) + depends_on('py-matplotlib', type=('build', 'run')) + depends_on('py-prompt-toolkit@2.0.10:2.0.999', type=('build', 'run')) + depends_on('py-pathos@0.2:', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-scoop', type=('build', 'run')) depends_on('py-tblib', type=('build', 'run')) + depends_on('py-pyyaml', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-spatialite/package.py b/var/spack/repos/builtin/packages/py-spatialite/package.py new file mode 100644 index 00000000000..e78cff81ad3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-spatialite/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PySpatialite(PythonPackage): + """Wrapper for standard Python module "sqlite3" which adds SpatiaLite + support. 
+ """ + + homepage = "https://github.com/malexer/spatialite" + url = "https://pypi.io/packages/source/s/spatialite/spatialite-0.0.3.tar.gz" + + version('0.0.3', sha256='a0761f239a52f326b14ce41ba61b6614dfcc808b978a0bec4a37c1de9ad9071e') + + depends_on('py-setuptools', type='build') + depends_on('libspatialite') diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py index 550b503e6c8..5aed1f39eb7 100644 --- a/var/spack/repos/builtin/packages/py-sphinx/package.py +++ b/var/spack/repos/builtin/packages/py-sphinx/package.py @@ -10,7 +10,7 @@ class PySphinx(PythonPackage): """Sphinx Documentation Generator.""" homepage = "http://sphinx-doc.org" - url = "https://pypi.io/packages/source/S/Sphinx/Sphinx-2.2.0.tar.gz" + url = "https://pypi.io/packages/source/S/Sphinx/Sphinx-3.0.0.tar.gz" import_modules = [ 'sphinx', 'sphinx.testing', 'sphinx.ext', 'sphinx.pycode', @@ -22,6 +22,7 @@ class PySphinx(PythonPackage): 'sphinx.environment.collectors', 'sphinx.environment.adapters' ] + version('3.0.0', sha256='6a099e6faffdc3ceba99ca8c2d09982d43022245e409249375edf111caf79ed3') version('2.2.0', sha256='0d586b0f8c2fc3cc6559c5e8fd6124628110514fda0e5d7c82e682d749d2e845') version('1.8.4', sha256='c1c00fc4f6e8b101a0d037065043460dffc2d507257f2f11acaed71fd2b0c83c') version('1.8.2', sha256='120732cbddb1b2364471c3d9f8bfd4b0c5b550862f99a65736c77f970b142aea') diff --git a/var/spack/repos/builtin/packages/py-statsmodels/package.py b/var/spack/repos/builtin/packages/py-statsmodels/package.py index eebc2066406..26c006bccff 100644 --- a/var/spack/repos/builtin/packages/py-statsmodels/package.py +++ b/var/spack/repos/builtin/packages/py-statsmodels/package.py @@ -13,31 +13,32 @@ class PyStatsmodels(PythonPackage): homepage = "http://www.statsmodels.org" url = "https://pypi.io/packages/source/s/statsmodels/statsmodels-0.8.0.tar.gz" + version('0.10.2', sha256='9cd2194c6642a8754e85f9a6e6912cdf996bebf6ff715d3cc67f65dadfd37cc9') version('0.10.1', sha256='320659a80f916c2edf9dfbe83512d9004bb562b72eedb7d9374562038697fa10') version('0.8.0', sha256='26431ab706fbae896db7870a0892743bfbb9f5c83231644692166a31d2d86048') variant('plotting', default=False, description='With matplotlib') - depends_on('python@:3.6', when='@:0.8.0', type=('build', 'run')) + depends_on('python@:3.6', when='@:0.8.0', type=('build', 'run')) + depends_on('python@2.7:2.8,3.4:', when='@0.10.1:', type=('build', 'run')) # according to http://www.statsmodels.org/dev/install.html earlier versions # might work. 
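The py-statsmodels changes around this point replace exact-version constraints with open-ended ranges such as `when='@0.10.1:'` (above for `python`, below for `py-numpy` and friends), so newer releases inherit the older floor and add a tighter one. A minimal sketch of that pattern, using a hypothetical package and version numbers that are not part of this changeset:

```python
from spack import *


class PyExample(PythonPackage):
    """Hypothetical recipe with stacked version-conditional dependencies."""

    homepage = "https://example.org/py-example"
    url = "https://example.org/py-example-0.10.2.tar.gz"

    # Applies to 0.8.0 and every later version of py-example.
    depends_on('py-numpy@1.7.0:', type=('build', 'run'), when='@0.8.0:')
    # Tightens the floor for 0.10.1 and later; both constraints hold there,
    # so the effective requirement becomes py-numpy@1.11.0:.
    depends_on('py-numpy@1.11.0:', type=('build', 'run'), when='@0.10.1:')
```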
depends_on('py-setuptools@0.6c5:', type='build') - depends_on('py-numpy@1.7.0:', type=('build', 'run'), when='@0.8.0') - depends_on('py-pandas@0.12:', type=('build', 'run'), when='@0.8.0') - depends_on('py-patsy@0.2.1:', type=('build', 'run'), when='@0.8.0') - depends_on('py-scipy@0.11:', type=('build', 'run'), when='@0.8.0') - depends_on('py-matplotlib@1.3:', type=('build', 'run'), when='@0.8.0 +plotting') - # patsy@0.5.1 works around a Python change # https://github.com/statsmodels/statsmodels/issues/5343 and # https://github.com/pydata/patsy/pull/131 - depends_on('py-numpy', type=('build', 'run'), when='@0.10.1') - depends_on('py-pandas', type=('build', 'run'), when='@0.10.1') - depends_on('py-patsy', type=('build', 'run'), when='@0.10.1') - depends_on('py-scipy@0.5.1:', type=('build', 'run'), when='@0.10.1') - depends_on('py-matplotlib', type=('build', 'run'), when='@0.10.1 +plotting') + + depends_on('py-numpy@1.7.0:', type=('build', 'run'), when='@0.8.0:') + depends_on('py-numpy@1.11.0:', type=('build', 'run'), when='@0.10.1:') + depends_on('py-pandas@0.12:', type=('build', 'run'), when='@0.8.0:') + depends_on('py-pandas@0.19:', type=('build', 'run'), when='@0.10.1:') + depends_on('py-patsy@0.2.1:', type=('build', 'run'), when='@0.8.0:') + depends_on('py-patsy@0.4.0:', type=('build', 'run'), when='@0.10.1:') + depends_on('py-scipy@0.11:', type=('build', 'run'), when='@0.8.0:') + depends_on('py-scipy@0.18:', type=('build', 'run'), when='@0.10.1:') + depends_on('py-matplotlib@1.3:', type=('build', 'run'), when='@0.8.0 +plotting') depends_on('py-pytest', type='test') diff --git a/var/spack/repos/builtin/packages/py-stsci-distutils/package.py b/var/spack/repos/builtin/packages/py-stsci-distutils/package.py new file mode 100644 index 00000000000..f14a6ebd468 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-stsci-distutils/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyStsciDistutils(PythonPackage): + """This package contains utilities used to + package some of STScI's Python projects.""" + + homepage = "https://github.com/spacetelescope/stsci.distutils" + url = "https://github.com/spacetelescope/stsci.distutils/archive/0.3.8.tar.gz" + + version('0.3.8', sha256='a52f3ec3b392a9cecd98d143b678c27346cbfa8f34c34698821d7e167907edce') + + depends_on('py-setuptools', type='build') + depends_on('py-d2to1', type='build') diff --git a/var/spack/repos/builtin/packages/py-tblib/package.py b/var/spack/repos/builtin/packages/py-tblib/package.py index 5234fde4e04..f32e292c125 100644 --- a/var/spack/repos/builtin/packages/py-tblib/package.py +++ b/var/spack/repos/builtin/packages/py-tblib/package.py @@ -11,8 +11,10 @@ class PyTblib(PythonPackage): """Traceback fiddling library. 
Allows you to pickle tracebacks.""" homepage = "https://github.com/ionelmc/python-tblib" - url = "https://files.pythonhosted.org/packages/source/t/tblib/tblib-1.4.0.tar.gz" + url = "https://pypi.io/packages/source/t/tblib/tblib-1.6.0.tar.gz" + version('1.6.0', sha256='229bee3754cb5d98b4837dd5c4405e80cfab57cb9f93220410ad367f8b352344') version('1.4.0', sha256='bd1ad564564a158ff62c290687f3db446038f9ac11a0bf6892712e3601af3bcd') + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-tensorflow/0001-Remove-contrib-cloud-bigtable-and-storage-ops-kernel.patch b/var/spack/repos/builtin/packages/py-tensorflow/0001-Remove-contrib-cloud-bigtable-and-storage-ops-kernel.patch new file mode 100644 index 00000000000..bb742ff1b42 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow/0001-Remove-contrib-cloud-bigtable-and-storage-ops-kernel.patch @@ -0,0 +1,76 @@ +--- a/tensorflow/core/BUILD.orig 2020-01-22 18:43:57.000000000 -0500 ++++ b/tensorflow/core/BUILD 2020-03-26 16:33:17.318229701 -0400 +@@ -107,8 +107,6 @@ + load( + "//tensorflow/core/platform:default/build_config.bzl", + "tf_additional_all_protos", +- "tf_additional_cloud_kernel_deps", +- "tf_additional_cloud_op_deps", + "tf_additional_core_deps", + "tf_additional_cupti_wrapper_deps", + "tf_additional_device_tracer_cuda_deps", +@@ -1427,7 +1425,7 @@ + ]) + if_tensorrt([ + "//tensorflow/compiler/tf2tensorrt:trt_engine_resource_ops_op_lib", + "//tensorflow/compiler/tf2tensorrt:trt_op_libs", +- ]) + tf_additional_cloud_op_deps(), ++ ]), + alwayslink = 1, + ) + +@@ -1590,7 +1588,7 @@ + "//tensorflow/core/kernels:summary_kernels", + "//tensorflow/core/kernels:training_ops", + "//tensorflow/core/kernels:word2vec_kernels", +- ] + tf_additional_cloud_kernel_deps() + if_not_windows([ ++ ] + if_not_windows([ + "//tensorflow/core/kernels:fact_op", + "//tensorflow/core/kernels:array_not_windows", + "//tensorflow/core/kernels:math_not_windows", + +diff --git a/tensorflow/core/platform/default/build_config.bzl b/tensorflow/core/platform/default/build_config.bzl +index b822effa14e5..61a19bdf128f 100644 +--- a/tensorflow/core/platform/default/build_config.bzl ++++ b/tensorflow/core/platform/default/build_config.bzl +@@ -682,38 +682,6 @@ def tf_additional_core_deps(): + ], + }) + +-# TODO(jart, jhseu): Delete when GCP is default on. +-def tf_additional_cloud_op_deps(): +- return select({ +- "//tensorflow:android": [], +- "//tensorflow:ios": [], +- "//tensorflow:linux_s390x": [], +- "//tensorflow:windows": [], +- "//tensorflow:api_version_2": [], +- "//tensorflow:windows_and_api_version_2": [], +- "//tensorflow:no_gcp_support": [], +- "//conditions:default": [ +- "//tensorflow/contrib/cloud:bigquery_reader_ops_op_lib", +- "//tensorflow/contrib/cloud:gcs_config_ops_op_lib", +- ], +- }) +- +-# TODO(jhseu): Delete when GCP is default on. 
+-def tf_additional_cloud_kernel_deps(): +- return select({ +- "//tensorflow:android": [], +- "//tensorflow:ios": [], +- "//tensorflow:linux_s390x": [], +- "//tensorflow:windows": [], +- "//tensorflow:api_version_2": [], +- "//tensorflow:windows_and_api_version_2": [], +- "//tensorflow:no_gcp_support": [], +- "//conditions:default": [ +- "//tensorflow/contrib/cloud/kernels:bigquery_reader_ops", +- "//tensorflow/contrib/cloud/kernels:gcs_config_ops", +- ], +- }) +- + def tf_lib_proto_parsing_deps(): + return [ + ":protos_all_cc", +-- +2.19.1 + diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index d493c32283f..64b97b05b26 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -3,8 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import glob -import os import sys @@ -241,6 +239,11 @@ class PyTensorflow(Package, CudaPackage): patch('io_bazel_rules_docker2.patch', when='@1.15:2.0') # Avoide build error: "name 'new_http_archive' is not defined" patch('http_archive.patch', when='@1.12.3') + # Backport of 837c8b6b upstream + # "Remove contrib cloud bigtable and storage ops/kernels." + # Allows 2.0.* releases to build with '--config=nogcp' + patch('0001-Remove-contrib-cloud-bigtable-and-storage-ops-kernel.patch', + when='@2.0.0:2.0.1') phases = ['configure', 'build', 'install'] @@ -581,6 +584,12 @@ def post_configure_fixes(self): spec['nccl'].prefix.include + '"', '.tf_configure.bazelrc') + # see tensorflow issue #31187 on github + if spec.satisfies('@2.0.0:2.0.1'): + filter_file(r'\#define RUY_DONOTUSEDIRECTLY_AVX512 1', + '#define RUY_DONOTUSEDIRECTLY_AVX512 0', + 'tensorflow/lite/experimental/ruy/platform.h') + if spec.satisfies('+cuda'): libs = spec['cuda'].libs.directories libs.extend(spec['cudnn'].libs.directories) @@ -679,26 +688,12 @@ def build(self, spec, prefix): build_pip_package = Executable( 'bazel-bin/tensorflow/tools/pip_package/build_pip_package') - build_pip_package(tmp_path) + buildpath = join_path(self.stage.source_path, 'spack-build') + build_pip_package('--src', buildpath) def install(self, spec, prefix): - with working_dir('spack-build', create=True): - for fn in glob.iglob(join_path( - '../bazel-bin/tensorflow/tools/pip_package', - 'build_pip_package.runfiles/org_tensorflow/*')): - dst = os.path.basename(fn) - if not os.path.exists(dst): - os.symlink(fn, dst) - for fn in glob.iglob('../tensorflow/tools/pip_package/*'): - dst = os.path.basename(fn) - if not os.path.exists(dst): - os.symlink(fn, dst) - - # macOS is case-insensitive, and BUILD file in directory - # containing setup.py causes the following error message: - # error: could not create 'build': File exists - # Delete BUILD file to prevent this. 
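The `post_configure_fixes` shown above patch generated files in place with Spack's `filter_file` helper, gated on `spec.satisfies('@2.0.0:2.0.1')`. As a rough standalone sketch of what that helper amounts to (simplified; the real helper also supports backups, multiple files, and non-regex filtering), it is essentially an in-place regular-expression substitution:

```python
import re


def filter_file_sketch(regex, replacement, path):
    """In-place regex substitution, roughly what Spack's filter_file does."""
    with open(path) as f:
        text = f.read()
    with open(path, 'w') as f:
        f.write(re.sub(regex, replacement, text))


# The RUY fix from the hunk above, expressed with this sketch:
# filter_file_sketch(r'#define RUY_DONOTUSEDIRECTLY_AVX512 1',
#                    '#define RUY_DONOTUSEDIRECTLY_AVX512 0',
#                    'tensorflow/lite/experimental/ruy/platform.h')
```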
- os.remove('BUILD') + buildpath = join_path(self.stage.source_path, 'spack-build') + with working_dir(buildpath): setup_py('install', '--prefix={0}'.format(prefix), '--single-version-externally-managed', '--root=/') diff --git a/var/spack/repos/builtin/packages/py-testinfra/package.py b/var/spack/repos/builtin/packages/py-testinfra/package.py index bae58db30df..40e7cd440cf 100644 --- a/var/spack/repos/builtin/packages/py-testinfra/package.py +++ b/var/spack/repos/builtin/packages/py-testinfra/package.py @@ -12,7 +12,7 @@ class PyTestinfra(PythonPackage): Chef and so on.""" homepage = "https://testinfra.readthedocs.io" - url = "https://pypi.python.org/packages/source/t/testinfra/testinfra-1.11.1.tar.gz" + url = "https://pypi.io/packages/source/t/testinfra/testinfra-1.11.1.tar.gz" version('1.18.0', sha256='4a0a70355b007729d78446c86bffd80bcea4ffe9adc9571f9c9779476c49153d') version('1.13.0', sha256='b5afa23d71ee49ad81aed104e4a0f1c02819ef791291cd308fe27aa7f3d3b01f') diff --git a/var/spack/repos/builtin/packages/py-tfdlpack/package.py b/var/spack/repos/builtin/packages/py-tfdlpack/package.py new file mode 100644 index 00000000000..678cc2d6db4 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tfdlpack/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyTfdlpack(CMakePackage): + """Tensorflow plugin for DLPack.""" + + homepage = "https://github.com/VoVAllen/tf-dlpack" + git = "https://github.com/VoVAllen/tf-dlpack.git" + + maintainers = ['adamjstewart'] + + version('master', branch='master', submodules=True) + version('0.1.1', tag='v0.1.1', submodules=True) + + variant('cuda', default=True, description='Build with CUDA support') + + depends_on('cmake@3.5:', type='build') + depends_on('cuda', when='+cuda') + + # Python dependencies + extends('python') + depends_on('py-setuptools', type='build') + depends_on('py-tensorflow', type=('build', 'run')) + + def cmake_args(self): + args = ['-DPYTHON_EXECUTABLE=' + self.spec['python'].command.path] + + if '+cuda' in self.spec: + args.append('-DUSE_CUDA=ON') + else: + args.append('-DUSE_CUDA=OFF') + + return args + + def install(self, spec, prefix): + with working_dir('python'): + setup_py('install', '--prefix=' + prefix, + '--single-version-externally-managed', '--root=/') + + def setup_run_environment(self, env): + # Prevent TensorFlow from taking over the whole GPU + env.set('TF_FORCE_GPU_ALLOW_GROWTH', 'true') diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index e40bc840feb..a60486a64ca 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -105,22 +105,22 @@ class PyTorch(PythonPackage, CudaPackage): cuda_arch_conflict = ('This version of Torch/Caffe2 only supports compute ' 'capabilities ') - conflicts('cuda_arch=none', when='+cuda+caffe2', + conflicts('cuda_arch=none', when='+cuda', msg='Must specify CUDA compute capabilities of your GPU, see ' 'https://developer.nvidia.com/cuda-gpus') - conflicts('cuda_arch=52', when='@1.3.0:+cuda+caffe2', + conflicts('cuda_arch=52', when='@1.3.0:+cuda', msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=50', when='@1.3.0:+cuda+caffe2', + conflicts('cuda_arch=50', when='@1.3.0:+cuda', msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=35', when='@1.3.0:+cuda+caffe2', 
+ conflicts('cuda_arch=35', when='@1.3.0:+cuda', msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=32', when='@1.3.0:+cuda+caffe2', + conflicts('cuda_arch=32', when='@1.3.0:+cuda', msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=30', when='@1.3.0:+cuda+caffe2', + conflicts('cuda_arch=30', when='@1.3.0:+cuda', msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=30', when='@1.2.0:+cuda+caffe2', + conflicts('cuda_arch=30', when='@1.2.0:+cuda', msg=cuda_arch_conflict + '>=3.2') - conflicts('cuda_arch=20', when='@1.0.0:+cuda+caffe2', + conflicts('cuda_arch=20', when='@1.0.0:+cuda', msg=cuda_arch_conflict + '>=3.0') # Required dependencies @@ -154,7 +154,7 @@ class PyTorch(PythonPackage, CudaPackage): # TODO: See if there is a way to use an external mkldnn installation. # Currently, only older versions of py-torch use an external mkldnn # library. - depends_on('intel-mkl-dnn', when='@0.4:0.4.1+mkldnn') + depends_on('dnnl', when='@0.4:0.4.1+mkldnn') # TODO: add dependency: https://github.com/Maratyszcza/NNPACK # depends_on('nnpack', when='+nnpack') depends_on('qnnpack', when='+qnnpack') @@ -287,9 +287,11 @@ def enable_or_disable(variant, keyword='USE', var=None, newer=False): enable_or_disable('zstd', newer=True) enable_or_disable('tbb', newer=True) - def test(self): - pass - def install_test(self): with working_dir('test'): python('run_test.py') + + # Tests need to be re-added since `phases` was overridden + run_after('install')( + PythonPackage._run_default_install_time_test_callbacks) + run_after('install')(PythonPackage.sanity_check_prefix) diff --git a/var/spack/repos/builtin/packages/py-torchtext/package.py b/var/spack/repos/builtin/packages/py-torchtext/package.py new file mode 100644 index 00000000000..6bcae3d1e9d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-torchtext/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyTorchtext(PythonPackage): + """Text utilities and datasets for PyTorch.""" + + homepage = "https://github.com/pytorch/text" + url = "https://pypi.io/packages/source/t/torchtext/torchtext-0.5.0.tar.gz" + + maintainers = ['adamjstewart'] + + version('0.5.0', sha256='7f22e24e9b939fff56b9118c78dc07aafec8dcc67164de15b9b5ed339e4179c6') + + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-tqdm', type=('build', 'run')) + depends_on('py-requests', type=('build', 'run')) + depends_on('py-torch@0.4.0:', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-sentencepiece', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-traits/package.py b/var/spack/repos/builtin/packages/py-traits/package.py new file mode 100644 index 00000000000..868e1de0a2f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-traits/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyTraits(PythonPackage): + """Explicitly typed attributes for Python.""" + + homepage = "https://docs.enthought.com/traits" + url = "https://pypi.io/packages/source/t/traits/traits-6.0.0.tar.gz" + + version('6.0.0', sha256='dbcd70166feca434130a1193284d5819ca72ffbc8dbce8deeecc0cebb41a3bfb') + + depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-traitsui/package.py b/var/spack/repos/builtin/packages/py-traitsui/package.py new file mode 100644 index 00000000000..f77e92cc4fd --- /dev/null +++ b/var/spack/repos/builtin/packages/py-traitsui/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyTraitsui(PythonPackage): + """The TraitsUI project contains a toolkit-independent GUI abstraction + layer, which is used to support the "visualization" features of the Traits + package. Thus, you can write model in terms of the Traits API and specify a + GUI in terms of the primitives supplied by TraitsUI (views, items, editors, + etc.), and let TraitsUI and your selected toolkit and back-end take care of + the details of displaying them.""" + + homepage = "https://docs.enthought.com/traitsui" + url = "https://pypi.io/packages/source/t/traitsui/traitsui-6.1.3.tar.gz" + + version('6.1.3', sha256='48381763b181efc58eaf288431d1d92d028d0d97dfdd33eba7809aae8aef814f') + + variant('backend', default='pyqt5', description='Default backend', + values=('wx', 'pyqt', 'pyqt5', 'pyside'), multi=False) + + depends_on('py-setuptools', type='build') + depends_on('py-traits', type=('build', 'run')) + depends_on('py-pyface@6.0.0:', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + + # Backends + depends_on('py-wxpython@2.8.10:', when='backend=wx', type=('build', 'run')) + depends_on('py-numpy', when='backend=wx', type=('build', 'run')) + depends_on('py-pyqt4@4.10:', when='backend=pyqt', type=('build', 'run')) + depends_on('py-pygments', when='backend=pyqt', type=('build', 'run')) + depends_on('py-pyqt5@5:', when='backend=pyqt5', type=('build', 'run')) + depends_on('py-pygments', when='backend=pyqt5', type=('build', 'run')) + depends_on('py-pyside@1.2:', when='backend=pyside', type=('build', 'run')) + depends_on('py-pygments', when='backend=pyside', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py index 95090a26949..9757dd23430 100644 --- a/var/spack/repos/builtin/packages/py-tuiview/package.py +++ b/var/spack/repos/builtin/packages/py-tuiview/package.py @@ -11,11 +11,13 @@ class PyTuiview(PythonPackage): table manipulation abilities. 
""" - homepage = "https://bitbucket.org/chchrsc/tuiview" - url = "https://bitbucket.org/chchrsc/tuiview/get/tuiview-1.1.7.tar.gz" + homepage = "https://github.com/ubarsc/tuiview" + url = "https://github.com/ubarsc/tuiview/releases/download/tuiview-1.2.6/tuiview-1.2.6.tar.gz" + version('1.2.6', sha256='61b136fa31c949d7a7a4dbf8562e6fc677d5b1845b152ec39e337f4eb2e91662') version('1.1.7', sha256='fbf0bf29cc775357dad4f8a2f0c2ffa98bbf69d603a96353e75b321adef67573') - depends_on("py-pyqt4", type=('build', 'run')) + depends_on("py-pyqt4", type=('build', 'run'), when='@:1.1.99') + depends_on("py-pyqt5", type=('build', 'run'), when='@1.2.0:') depends_on("py-numpy", type=('build', 'run')) - depends_on("gdal") + depends_on("gdal@1.11.0:+python") diff --git a/var/spack/repos/builtin/packages/py-usgs/package.py b/var/spack/repos/builtin/packages/py-usgs/package.py index 14d20b62cd4..69f2b970563 100644 --- a/var/spack/repos/builtin/packages/py-usgs/package.py +++ b/var/spack/repos/builtin/packages/py-usgs/package.py @@ -10,7 +10,9 @@ class PyUsgs(PythonPackage): """Client library for interfacing with USGS datasets""" homepage = "https://github.com/kapadia/usgs" - url = "https://pypi.org/packages/source/u/usgs/usgs-0.2.7.tar.gz" + url = "https://pypi.io/packages/source/u/usgs/usgs-0.2.7.tar.gz" + + maintainers = ['adamjstewart'] version('0.2.7', sha256='484e569ea1baf9574e11ccf15219957364690dcf06ee3d09afef030df944e79b') diff --git a/var/spack/repos/builtin/packages/py-uwsgi/package.py b/var/spack/repos/builtin/packages/py-uwsgi/package.py index aed1882b045..fdc990d48d1 100644 --- a/var/spack/repos/builtin/packages/py-uwsgi/package.py +++ b/var/spack/repos/builtin/packages/py-uwsgi/package.py @@ -11,7 +11,7 @@ class PyUwsgi(PythonPackage): """Web Application framework for low overhead web services""" homepage = "https://github.com/unbit/uwsgi/" - url = "https://pypi.org/packages/source/u/uwsgi/uwsgi-2.0.18.tar.gz" + url = "https://pypi.io/packages/source/u/uwsgi/uwsgi-2.0.18.tar.gz" version('2.0.18', sha256='4972ac538800fb2d421027f49b4a1869b66048839507ccf0aa2fda792d99f583') diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index b35584d2c9d..3b675a02b64 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -670,6 +670,11 @@ def libs(self): # to ask Python where its LIBDIR is. 
libdir = self.get_config_var('LIBDIR') + # In Ubuntu 16.04.6 and python 2.7.12 from the system, lib could be + # in LBPL + # https://mail.python.org/pipermail/python-dev/2013-April/125733.html + libpl = self.get_config_var('LIBPL') + # The system Python installation on macOS and Homebrew installations # install libraries into a Frameworks directory frameworkprefix = self.get_config_var('PYTHONFRAMEWORKPREFIX') @@ -679,6 +684,8 @@ def libs(self): if os.path.exists(os.path.join(libdir, ldlibrary)): return LibraryList(os.path.join(libdir, ldlibrary)) + elif os.path.exists(os.path.join(libpl, ldlibrary)): + return LibraryList(os.path.join(libpl, ldlibrary)) elif os.path.exists(os.path.join(frameworkprefix, ldlibrary)): return LibraryList(os.path.join(frameworkprefix, ldlibrary)) else: diff --git a/var/spack/repos/builtin/packages/qca/package.py b/var/spack/repos/builtin/packages/qca/package.py index cbf1da89329..9a040bc6674 100644 --- a/var/spack/repos/builtin/packages/qca/package.py +++ b/var/spack/repos/builtin/packages/qca/package.py @@ -20,8 +20,10 @@ class Qca(CMakePackage): homepage = "https://userbase.kde.org/QCA" url = "https://github.com/KDE/qca/archive/v2.1.3.tar.gz" - version('2.2.1', sha256='c67fc0fa8ae6cb3d0ba0fbd8fca8ee8e4c5061b99f1fd685fd7d9800cef17f6b') - version('2.1.3', sha256='a5135ffb0250a40e9c361eb10cd3fe28293f0cf4e5c69d3761481eafd7968067') + version('2.3.0', sha256='39aa18f0985d82949f4dccce04af3eb8d4b6b64e0c71785786738d38d8183b0a') + version('2.2.90', sha256='074ac753b51a6fa15503be9418f7430effe368fd31dc41567942d832e539b17e') + version('2.2.1', sha256='c67fc0fa8ae6cb3d0ba0fbd8fca8ee8e4c5061b99f1fd685fd7d9800cef17f6b') + version('2.1.3', sha256='a5135ffb0250a40e9c361eb10cd3fe28293f0cf4e5c69d3761481eafd7968067') depends_on('qt@4.2:') diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py index af3fe5db3ea..1edf4c29720 100644 --- a/var/spack/repos/builtin/packages/qgis/package.py +++ b/var/spack/repos/builtin/packages/qgis/package.py @@ -17,9 +17,11 @@ class Qgis(CMakePackage): maintainers = ['adamjstewart', 'Sinan81'] + version('3.12.1', sha256='a7dc7af768b8960c08ce72a06c1f4ca4664f4197ce29c7fe238429e48b2881a8') version('3.12.0', sha256='19e9c185dfe88cad7ee6e0dcf5ab7b0bbfe1672307868a53bf771e0c8f9d5e9c') # Prefer latest long term release - version('3.10.3', sha256='0869704df9120dd642996ff1ed50213ac8247650aa0640b62f8c9c581c05d7a7', preferred=True) + version('3.10.4', sha256='a032e2b8144c2fd825bc26766f586cfb1bd8574bc72efd1aa8ce18dfff8b6c9f', preferred=True) + version('3.10.3', sha256='0869704df9120dd642996ff1ed50213ac8247650aa0640b62f8c9c581c05d7a7') version('3.10.2', sha256='381cb01a8ac2f5379a915b124e9c830d727d2c67775ec49609c7153fe765a6f7') version('3.10.1', sha256='466ac9fad91f266cf3b9d148f58e2adebd5b9fcfc03e6730eb72251e6c34c8ab') version('3.10.0', sha256='25eb1c41d9fb922ffa337a720dfdceee43cf2d38409923f087c2010c9742f012') @@ -63,28 +65,28 @@ class Qgis(CMakePackage): # Ref. 
for dependencies: # http://htmlpreview.github.io/?https://raw.github.com/qgis/QGIS/master/doc/INSTALL.html # https://github.com/qgis/QGIS/blob/master/INSTALL - depends_on('qt+dbus') - depends_on('proj@4.4.0:') - depends_on('geos@3.4.0:') - depends_on('sqlite@3.0.0: +column_metadata') - depends_on('libspatialite@4.2.0:') - depends_on('libspatialindex') + depends_on('exiv2') + depends_on('expat@1.95:') depends_on('gdal@2.1.0: +python', type=('build', 'link', 'run')) + depends_on('geos@3.4.0:') + depends_on('libspatialindex') + depends_on('libspatialite@4.2.0:') + depends_on('libzip') + depends_on('proj@4.4.0:') + depends_on('py-psycopg2', type=('build', 'run')) # TODO: is build dependency necessary? + depends_on('py-pyqt4', when='@2') + depends_on('py-pyqt5@5.3:', when='@3') + depends_on('py-requests', type=('build', 'run')) # TODO: is build dependency necessary? + depends_on('python@2.7:2.8', type=('build', 'run'), when='@2') + depends_on('python@3.0.0:', type=('build', 'run'), when='@3') + depends_on('qca@2.2.1') + depends_on('qjson') + depends_on('qscintilla +python') + depends_on('qt+dbus') + depends_on('qtkeychain@0.5:', when='@3:') depends_on('qwt@5:') depends_on('qwtpolar') - depends_on('expat@1.95:') - depends_on('qca@2.2.1') - depends_on('py-pyqt4 +qsci', when='@2') - depends_on('py-pyqt5@5.3: +qsci', when='@3') - depends_on('qscintilla') - depends_on('qjson') - depends_on('py-requests', type=('build', 'run')) # TODO: is build dependency necessary? - depends_on('py-psycopg2', type=('build', 'run')) # TODO: is build dependency necessary? - depends_on('qtkeychain@0.5:', when='@3:') - depends_on('libzip') - depends_on('exiv2') - depends_on('python@3.0.0:', type=('build', 'run'), when='@3') - depends_on('python@2.7:2.8', type=('build', 'run'), when='@2') + depends_on('sqlite@3.0.0: +column_metadata') # Runtime python dependencies, not mentioned in install instructions depends_on('py-pyyaml', type='run') @@ -114,6 +116,8 @@ class Qgis(CMakePackage): depends_on('qtkeychain@:1.5.99', when='^qt@4') depends_on('qt@:4', when='@2') + patch('pyqt5.patch', when='^qt@5') + def cmake_args(self): spec = self.spec args = [] diff --git a/var/spack/repos/builtin/packages/qgis/pyqt5.patch b/var/spack/repos/builtin/packages/qgis/pyqt5.patch new file mode 100644 index 00000000000..1e6b7e6149c --- /dev/null +++ b/var/spack/repos/builtin/packages/qgis/pyqt5.patch @@ -0,0 +1,25 @@ +diff --git a/cmake/FindPyQt5.py b/cmake/FindPyQt5.py +index 6a55a0f801..d6eda1fa1e 100644 +--- a/cmake/FindPyQt5.py ++++ b/cmake/FindPyQt5.py +@@ -40,6 +40,7 @@ except ImportError: + import sys + cfg = sipconfig.Configuration() + sip_dir = cfg.default_sip_dir ++ pyqt_prefix = os.sep.join(PyQt5.QtCore.__file__.split(os.sep)[0:-5]) + if sys.platform.startswith('freebsd'): + py_version = str(sys.version_info.major) + str(sys.version_info.minor) + sip_dir = sip_dir.replace(py_version, '') +@@ -53,9 +54,9 @@ except ImportError: + 'pyqt_version': PyQt5.QtCore.PYQT_VERSION, + 'pyqt_version_str': PyQt5.QtCore.PYQT_VERSION_STR, + 'pyqt_sip_flags': PyQt5.QtCore.PYQT_CONFIGURATION['sip_flags'], +- 'pyqt_mod_dir': os.path.join(cfg.default_mod_dir, "PyQt5"), +- 'pyqt_sip_dir': sip_dir, +- 'pyqt_bin_dir': cfg.default_bin_dir, ++ 'pyqt_mod_dir': os.path.dirname(PyQt5.QtCore.__file__), ++ 'pyqt_sip_dir': os.path.join(pyqt_prefix,'share','sip','PyQt5'), ++ 'pyqt_bin_dir': os.path.join(pyqt_prefix,'bin'), + } + pyqtcfg = sipconfig.Configuration([cfg]) + diff --git a/var/spack/repos/builtin/packages/qmcpack/package.py 
b/var/spack/repos/builtin/packages/qmcpack/package.py index 64a0e2c2837..9484e8a1693 100644 --- a/var/spack/repos/builtin/packages/qmcpack/package.py +++ b/var/spack/repos/builtin/packages/qmcpack/package.py @@ -44,11 +44,10 @@ class Qmcpack(CMakePackage, CudaPackage): description='Build the complex (general twist/k-point) version') variant('mixed', default=False, description='Build the mixed precision (mixture of single and ' - 'double precision) version for gpu and cpu') + 'double precision) version') variant('soa', default=True, description='Build with Structure-of-Array instead of ' - 'Array-of-Structure code. Only for CPU code' - 'and only in mixed precision') + 'Array-of-Structure code. Only for CPU code') variant('timers', default=False, description='Build with support for timers') variant('da', default=False, @@ -58,11 +57,21 @@ class Qmcpack(CMakePackage, CudaPackage): variant('qe', default=False, description='Install with patched Quantum Espresso 6.4.1') variant('afqmc', default=False, - description='Install with AFQMC support') + description='Install with AFQMC support. NOTE that if used in ' + 'combination with CUDA, only AFQMC will have CUDA.') + variant('ppconvert', default=False, + description='Install with pseudopotential converter.') + # Notes about CUDA-centric peculiarities: + # # cuda variant implies mixed precision variant by default, but there is # no way to express this in variant syntax, need something like # variant('+mixed', default=True, when='+cuda', description="...") + # + # cuda+afqmc variant will not build the legacy CUDA code in real-space + # QMCPACK. This is due to a conflict in the build system. This is not + # worth fixing since the legacy CUDA code, will be superseded + # by the OpenMP 4.5 code. # high-level variant conflicts conflicts( @@ -85,8 +94,9 @@ class Qmcpack(CMakePackage, CudaPackage): conflicts('^openblas+ilp64', msg='QMCPACK does not support OpenBLAS 64-bit integer variant') - conflicts('^intel-mkl+ilp64', - msg='QMCPACK does not support MKL 64-bit integer variant') + # Omitted for now due to concretizer bug + # conflicts('^intel-mkl+ilp64', + # msg='QMCPACK does not support MKL 64-bit integer variant') # QMCPACK 3.6.0 or later requires support for C++14 compiler_warning = 'QMCPACK 3.6.0 or later requires a ' \ @@ -177,6 +187,8 @@ class Qmcpack(CMakePackage, CudaPackage): patch_checksum = 'c066c79901a612cf8848135e0d544efb114534cca70b90bfccc8ed989d3d9dde' patch(patch_url, sha256=patch_checksum, when='@3.1.0:3.3.0') + # the default flag_handler for Spack causes problems for QMCPACK + # https://spack.readthedocs.io/en/latest/packaging_guide.html#the-build-environment: flag_handler = CMakePackage.build_system_flags @when('@:3.7.0') @@ -187,6 +199,15 @@ def patch(self): '${LIBXML2_HOME}/lib $ENV{LIBXML2_HOME}/lib', 'CMake/FindLibxml2QMC.cmake') + @property + def build_targets(self): + spec = self.spec + targets = ['all'] + if '+ppconvert' in spec: + targets.append('ppconvert') + + return targets + def cmake_args(self): spec = self.spec args = [] @@ -251,7 +272,12 @@ def cmake_args(self): # tested. if '+cuda' in spec: - args.append('-DQMC_CUDA=1') + # Cannot support both CUDA builds at the same time, see + # earlier notes in this package. 
+ if '+afqmc' in spec: + args.append('-DENABLE_CUDA=1') + else: + args.append('-DQMC_CUDA=1') cuda_arch_list = spec.variants['cuda_arch'].value cuda_arch = cuda_arch_list[0] if len(cuda_arch_list) > 1: @@ -311,21 +337,29 @@ def cmake_args(self): # Next two environment variables were introduced in QMCPACK 3.5.0 # Prior to v3.5.0, these lines should be benign but CMake # may issue a warning. - if 'intel-mkl' in spec: + if '^mkl' in spec: args.append('-DENABLE_MKL=1') args.append('-DMKL_ROOT=%s' % env['MKLROOT']) else: args.append('-DENABLE_MKL=0') + # ppconvert is not build by default because it may exhibit numerical + # issues on some systems + if '+ppconvert' in spec: + args.append('-DBUILD_PPCONVERT=1') + else: + args.append('-DBUILD_PPCONVERT=0') + return args - # QMCPACK 3.6.0 release and later has a functional 'make install', - # the Spack 'def install' is retained for backwards compatiblity. - # Note that the two install methods differ in their directory - # structure. Additionally, we follow the recommendation on the Spack - # website for defining the compilers to be the MPI compiler wrappers. + # QMCPACK needs custom install method for a couple of reasons: + # Firstly, wee follow the recommendation on the Spack website + # for defining the compilers variables to be the MPI compiler wrappers. # https://spack.readthedocs.io/en/latest/packaging_guide.html#compiler-wrappers - @when('@3.6.0:') + # + # Note that 3.6.0 release and later has a functioning 'make install', + # but still does not install nexus, manual, etc. So, there is no compelling + # reason to use QMCPACK's built-in version at this time. def install(self, spec, prefix): if '+mpi' in spec: env['CC'] = spec['mpi'].mpicc @@ -333,57 +367,25 @@ def install(self, spec, prefix): env['F77'] = spec['mpi'].mpif77 env['FC'] = spec['mpi'].mpifc - with working_dir(self.build_directory): - make('install') - - @when('@:3.5.0') - def install(self, spec, prefix): - if '+mpi' in spec: - env['CC'] = spec['mpi'].mpicc - env['CXX'] = spec['mpi'].mpicxx - env['F77'] = spec['mpi'].mpif77 - env['FC'] = spec['mpi'].mpifc - - # QMCPACK 'make install' does nothing, which causes - # Spack to throw an error. - # - # This install method creates the top level directory - # and copies the bin subdirectory into the appropriate - # location. We do not copy include or lib at this time due - # to technical difficulties in qmcpack itself. - + # create top-level directory mkdirp(prefix) - # We assume cwd is self.stage.source_path - - # install manual + # We assume cwd is self.stage.source_path, then + # install manual, labs, and nexus install_tree('manual', prefix.manual) - - # install nexus + install_tree('labs', prefix.labs) install_tree('nexus', prefix.nexus) + # install binaries with working_dir(self.build_directory): - mkdirp(prefix) - - # install binaries install_tree('bin', prefix.bin) - # QMCPACK 3.6.0 install directory structure changed, thus there - # thus are two version of the setup_run_environment method - @when('@:3.5.0') def setup_run_environment(self, env): """Set-up runtime environment for QMCPACK. - Set PYTHONPATH for basic analysis scripts and for Nexus.""" - env.prepend_path('PYTHONPATH', self.prefix.nexus) + Set PATH and PYTHONPATH for basic analysis scripts for Nexus.""" - @when('@3.6.0:') - def setup_run_environment(self, env): - """Set-up runtime environment for QMCPACK. - Set PYTHONPATH for basic analysis scripts and for Nexus. 
Binaries - are in the 'prefix' directory instead of 'prefix.bin' which is - not set by the default module environment""" - env.prepend_path('PATH', self.prefix) - env.prepend_path('PYTHONPATH', self.prefix) + env.prepend_path('PATH', self.prefix.nexus.bin) + env.prepend_path('PYTHONPATH', self.prefix.nexus.lib) @run_after('build') @on_package_attributes(run_tests=True) diff --git a/var/spack/repos/builtin/packages/qscintilla/package.py b/var/spack/repos/builtin/packages/qscintilla/package.py index 9bee719c136..ec20257676d 100644 --- a/var/spack/repos/builtin/packages/qscintilla/package.py +++ b/var/spack/repos/builtin/packages/qscintilla/package.py @@ -20,10 +20,14 @@ class Qscintilla(QMakePackage): version('2.10.2', sha256='14b31d20717eed95ea9bea4cd16e5e1b72cee7ebac647cba878e0f6db6a65ed0', preferred=True) variant('designer', default=False, description="Enable pluging for Qt-Designer") - # No 'python' variant, since Python bindings will be - # built by PyQt5+qsci instead + variant('python', default=False, description="Build python bindings") depends_on('qt') + depends_on('py-pyqt5 +qsci_api', type=('build', 'run'), when='+python ^qt@5') + depends_on('py-pyqt4 +qsci_api', type=('build', 'run'), when='+python ^qt@4') + depends_on('python', type=('build', 'run'), when='+python') + + extends('python', when='+python') @run_before('qmake') def chdir(self): @@ -66,3 +70,73 @@ def postinstall(self): makefile.filter(r'\$\(INSTALL_ROOT\)' + self.spec['qt'].prefix, '$(INSTALL_ROOT)') make('install') + + @run_after('install') + def make_qsci(self): + if '+python' in self.spec: + if '^py-pyqt4' in self.spec: + py_pyqtx = 'py-pyqt4' + pyqtx = 'PyQt4' + elif '^py-pyqt5' in self.spec: + py_pyqtx = 'py-pyqt5' + pyqtx = 'PyQt5' + + with working_dir(join_path(self.stage.source_path, 'Python')): + pydir = join_path( + self.prefix, + self.spec['python'].package.site_packages_dir, + pyqtx) + mkdirp(os.path.join(self.prefix.share.sip, pyqtx)) + python = self.spec['python'].command + python('configure.py', '--pyqt=' + pyqtx, + '--sip=' + self.spec['py-sip'].prefix.bin.sip, + '--qsci-incdir=' + self.spec.prefix.include, + '--qsci-libdir=' + self.spec.prefix.lib, + '--qsci-sipdir=' + + os.path.join(self.prefix.share.sip, pyqtx), + '--apidir=' + self.prefix.share.qsci, + '--destdir=' + pydir, + '--pyqt-sipdir=' + os.path.join( + self.spec[py_pyqtx].prefix.share.sip, pyqtx), + '--sip-incdir=' + + join_path(self.spec['py-sip'].prefix.include, + 'python' + + str(self.spec['python'].version.up_to(2))), + '--stubsdir=' + pydir) + + # Fix build errors + # "QAbstractScrollArea: No such file or directory" + # "qprinter.h: No such file or directory" + # ".../Qsci.so: undefined symbol: _ZTI10Qsci...." 
+ qscipro = FileFilter('Qsci/Qsci.pro') + if '^qt@4' in self.spec: + qtx = 'qt4' + elif '^qt@5' in self.spec: + qtx = 'qt5' + + link_qscilibs = 'LIBS += -L' + self.prefix.lib +\ + ' -lqscintilla2_' + qtx + qscipro.filter('TEMPLATE = lib', + 'TEMPLATE = lib\nQT += widgets' + + '\nQT += printsupport\n' + link_qscilibs) + + make() + + # Fix installation prefixes + makefile = FileFilter('Makefile') + makefile.filter(r'\$\(INSTALL_ROOT\)', '') + makefile = FileFilter('Qsci/Makefile') + makefile.filter(r'\$\(INSTALL_ROOT\)', '') + + make('install') + + @run_after('install') + def extend_path_setup(self): + # See github issue #14121 and PR #15297 + module = self.spec['py-sip'].variants['module'].value + if module != 'sip': + module = module.split('.')[0] + with working_dir(site_packages_dir): + with open(os.path.join(module, '__init__.py'), 'w') as f: + f.write('from pkgutil import extend_path\n') + f.write('__path__ = extend_path(__path__, __name__)\n') diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index c07aa5c86eb..408ebc9f591 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -24,6 +24,7 @@ class Qt(Package): phases = ['configure', 'build', 'install'] + version('5.14.2', sha256='c6fcd53c744df89e7d3223c02838a33309bd1c291fcb6f9341505fe99f7f19fa') version('5.14.1', sha256='6f17f488f512b39c2feb57d83a5e0a13dcef32999bea2e2a8f832f54a29badb8') version('5.14.0', sha256='be9a77cd4e1f9d70b58621d0753be19ea498e6b0da0398753e5038426f76a8ba') version('5.13.1', sha256='adf00266dc38352a166a9739f1a24a1e36f1be9c04bf72e16e142a256436974e') @@ -128,13 +129,13 @@ class Qt(Package): depends_on("gperf", when='+webkit') depends_on("gtkplus", when='+gtk') depends_on("openssl", when='+ssl') - depends_on("sqlite", when='+sql', type=('build', 'run')) - depends_on("sqlite+column_metadata", when='+sql%intel', type=('build', 'run')) + depends_on("sqlite+column_metadata", when='+sql', type=('build', 'run')) depends_on("libpng@1.2.57", when='@3') + depends_on("libsm", when='@3') depends_on("pcre+multibyte", when='@5.0:5.8') depends_on("inputproto", when='@:5.8') - depends_on("openssl@:1.0.999", when='@:5.9+ssl') + depends_on("openssl@:1.0.999", when='@4:5.9+ssl') depends_on("glib", when='@4:') depends_on("libpng", when='@4:') diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index 403a8a3082c..52fb46c7843 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -76,11 +76,12 @@ class QuantumEspresso(Package): patch('dspev_drv_elpa.patch', when='@6.1.0:+patch+elpa ^elpa@2016.05.003') # Conflicts + # Omitted for now due to concretizer bug # MKL with 64-bit integers not supported. 
- conflicts( - '^mkl+ilp64', - msg='Quantum ESPRESSO does not support MKL 64-bit integer variant' - ) + # conflicts( + # '^mkl+ilp64', + # msg='Quantum ESPRESSO does not support MKL 64-bit integer variant' + # ) # We can't ask for scalapack or elpa if we don't want MPI conflicts( diff --git a/var/spack/repos/builtin/packages/r-openssl/package.py b/var/spack/repos/builtin/packages/r-openssl/package.py index 36ddd5ed7d6..71f648b480c 100644 --- a/var/spack/repos/builtin/packages/r-openssl/package.py +++ b/var/spack/repos/builtin/packages/r-openssl/package.py @@ -30,3 +30,8 @@ class ROpenssl(RPackage): depends_on('r-askpass', when='@1.2:', type=('build', 'run')) depends_on('openssl@1.0.1:') + + def flag_handler(self, name, flags): + if name == 'cflags': + flags.append(self.compiler.c99_flag) + return (flags, None, None) diff --git a/var/spack/repos/builtin/packages/r-remotes/package.py b/var/spack/repos/builtin/packages/r-remotes/package.py index ca51212143c..1c2c1fba383 100644 --- a/var/spack/repos/builtin/packages/r-remotes/package.py +++ b/var/spack/repos/builtin/packages/r-remotes/package.py @@ -10,12 +10,13 @@ class RRemotes(RPackage): """Download and install R packages stored in 'GitHub', 'BitBucket', or plain 'subversion' or 'git' repositories. This package provides the 'install_*' functions in 'devtools'. Indeed most of the code was copied - over from 'devtools'.""" + over from 'devtools'. """ homepage = "https://github.com/r-lib/remotes#readme" url = "https://cloud.r-project.org/src/contrib/remotes_2.1.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/remotes" + version('2.1.1', sha256='4e590746fce618094089372b185e1ea234b3337b23c44c44118e942d0fb5118b') version('2.1.0', sha256='8944c8f6fc9f0cd0ca04d6cf1221b716eee08facef9f4b4c4d91d0346d6d68a7') depends_on('r@3.0.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-sys/package.py b/var/spack/repos/builtin/packages/r-sys/package.py index bd9aff95195..1901e54354b 100644 --- a/var/spack/repos/builtin/packages/r-sys/package.py +++ b/var/spack/repos/builtin/packages/r-sys/package.py @@ -18,3 +18,8 @@ class RSys(RPackage): list_url = "https://cloud.r-project.org/src/contrib/Archive/sys" version('3.2', sha256='2819498461fe2ce83d319d1a47844e86bcea6d01d10861818dba289e7099bbcc') + + def flag_handler(self, name, flags): + if name == 'cflags': + flags.append(self.compiler.c99_flag) + return (flags, None, None) diff --git a/var/spack/repos/builtin/packages/r/change_optflags_tmp.patch b/var/spack/repos/builtin/packages/r/change_optflags_tmp.patch new file mode 100644 index 00000000000..4e39b02be74 --- /dev/null +++ b/var/spack/repos/builtin/packages/r/change_optflags_tmp.patch @@ -0,0 +1,67 @@ +diff -ur R-3.6.3.org/configure R-3.6.3/configure +--- R-3.6.3.org/configure 2020-03-09 11:09:16.060825352 +0900 ++++ R-3.6.3/configure 2020-03-09 11:10:47.011280195 +0900 +@@ -6470,13 +6470,13 @@ + CFLAGS=$ac_save_CFLAGS + elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then +- CFLAGS="-g -O2" ++ CFLAGS="-g -O1" + else + CFLAGS="-g" + fi + else + if test "$GCC" = yes; then +- CFLAGS="-O2" ++ CFLAGS="-O1" + else + CFLAGS= + fi +@@ -7445,13 +7445,13 @@ + FCFLAGS=$ac_save_FCFLAGS + elif test $ac_cv_prog_fc_g = yes; then + if test "x$ac_cv_fc_compiler_gnu" = xyes; then +- FCFLAGS="-g -O2" ++ FCFLAGS="-g -O1" + else + FCFLAGS="-g" + fi + else + if test "x$ac_cv_fc_compiler_gnu" = xyes; then +- FCFLAGS="-O2" ++ FCFLAGS="-O1" + else + FCFLAGS= + fi +@@ -7717,13 +7717,13 @@ + CXXFLAGS=$ac_save_CXXFLAGS + elif 
test $ac_cv_prog_cxx_g = yes; then + if test "$GXX" = yes; then +- CXXFLAGS="-g -O2" ++ CXXFLAGS="-g -O1" + else + CXXFLAGS="-g" + fi + else + if test "$GXX" = yes; then +- CXXFLAGS="-O2" ++ CXXFLAGS="-O1" + else + CXXFLAGS= + fi +@@ -8336,13 +8336,13 @@ + OBJCFLAGS=$ac_save_OBJCFLAGS + elif test $ac_cv_prog_objc_g = yes; then + if test "$GOBJC" = yes; then +- OBJCFLAGS="-g -O2" ++ OBJCFLAGS="-g -O1" + else + OBJCFLAGS="-g" + fi + else + if test "$GOBJC" = yes; then +- OBJCFLAGS="-O2" ++ OBJCFLAGS="-O1" + else + OBJCFLAGS= + fi diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index 9257c5e733d..1893aca9ac3 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -86,6 +86,12 @@ class R(AutotoolsPackage): patch('zlib.patch', when='@:3.3.2') + # R cannot be built with '-O2' optimization + # with Fujitsu Compiler @4.1.0 now. + # Until the Fujitsu compiler resolves this problem, + # temporary fix to lower the optimization level. + patch('change_optflags_tmp.patch', when='%fj@4.1.0') + filter_compiler_wrappers( 'Makeconf', relative_root=os.path.join('rlib', 'R', 'etc') ) diff --git a/var/spack/repos/builtin/packages/range-v3/package.py b/var/spack/repos/builtin/packages/range-v3/package.py index 261922ceb79..9c7a9cd6a32 100644 --- a/var/spack/repos/builtin/packages/range-v3/package.py +++ b/var/spack/repos/builtin/packages/range-v3/package.py @@ -22,6 +22,7 @@ class RangeV3(CMakePackage): maintainers = ['chissg'] version('develop', branch='master') + version('0.10.0', sha256='5a1cd44e7315d0e8dcb1eee4df6802221456a9d1dbeac53da02ac7bd4ea150cd') version('0.5.0', sha256='32e30b3be042246030f31d40394115b751431d9d2b4e0f6d58834b2fd5594280') version('0.4.0', sha256='5dbc878b7dfc500fb04b6b9f99d63993a2731ea34b0a4b8d5f670a5a71a18e39') version('0.3.7', sha256='e6b0fb33bfd07ec32d54bcddd3e8d62e995a3cf0b64b34788ec264da62581207') diff --git a/var/spack/repos/builtin/packages/rankstr/package.py b/var/spack/repos/builtin/packages/rankstr/package.py index 6cf2dc42f9f..98d7aea87fd 100644 --- a/var/spack/repos/builtin/packages/rankstr/package.py +++ b/var/spack/repos/builtin/packages/rankstr/package.py @@ -9,8 +9,8 @@ class Rankstr(CMakePackage): """Assign one-to-one mapping of MPI ranks to strings""" - homepage = "https://github.com/ECP-VeloC/rankstr" - url = "https://github.com/ECP-VeloC/rankstr/archive/v0.0.2.zip" + homepage = "https://github.com/ecp-veloc/rankstr" + url = "https://github.com/ecp-veloc/rankstr/archive/v0.0.2.zip" git = "https://github.com/ecp-veloc/rankstr.git" tags = ['ecp'] diff --git a/var/spack/repos/builtin/packages/redset/package.py b/var/spack/repos/builtin/packages/redset/package.py index d06eaca1087..88ea8b92d49 100644 --- a/var/spack/repos/builtin/packages/redset/package.py +++ b/var/spack/repos/builtin/packages/redset/package.py @@ -9,8 +9,8 @@ class Redset(CMakePackage): """Create MPI communicators for disparate redundancy sets""" - homepage = "https://github.com/ECP-VeloC/redset" - url = "https://github.com/ECP-VeloC/redset/archive/v0.0.3.zip" + homepage = "https://github.com/ecp-veloc/redset" + url = "https://github.com/ecp-veloc/redset/archive/v0.0.3.zip" git = "https://github.com/ecp-veloc/redset.git" tags = ['ecp'] diff --git a/var/spack/repos/builtin/packages/revbayes/package.py b/var/spack/repos/builtin/packages/revbayes/package.py index 7cf707a72eb..5e02dc974da 100644 --- a/var/spack/repos/builtin/packages/revbayes/package.py +++ 
b/var/spack/repos/builtin/packages/revbayes/package.py @@ -15,10 +15,10 @@ class Revbayes(CMakePackage): git = "https://github.com/revbayes/revbayes.git" version('develop', branch='development') - version('1.0.13', sha256='472b4ccc44d813c1ff1b8d27e8ccf3d96388de79aa1688b3714f683ba65038fa') - version('1.0.12', sha256='d79f3a9bc72305cab35009d11e1f027fcaacde7329a4c49b5b8285588a8d3588') - version('1.0.11', sha256='7e81b1952e3a63cb84617fa632f4ccdf246b4d79e7d537a423540de047dadf50') - version('1.0.10', sha256='95e9affe8ca8d62880cf46778b6ec9dd8726e62a185670ebcbadf2eb2bb79f93') + version('1.0.13', sha256='e85e2e1fe182fe9f504900150d936a06d252a362c591b9d3d8272dd085aa85d9') + version('1.0.12', sha256='80c926bb6b37288d02e36e07b44e4663841cd1fe541e2cc0b0e44c89ca929759') + version('1.0.11', sha256='03052194baa220dde7e622a739f09f34393f67ea00a0b163b409d313d7fc7c02') + version('1.0.10', sha256='6a3cf303e7224b0b32637bd8e2c3c2cf2621f5dbe599cd74ce4b0c215d0fcd2d') variant('mpi', default=True, description='Enable MPI parallel support') @@ -27,6 +27,12 @@ class Revbayes(CMakePackage): conflicts('%gcc@7.1.0:', when='@:1.0.12') + def url_for_version(self, version): + if version > Version('1.0.13'): + return 'https://github.com/revbayes/revbayes/archive/v{0}.tar.gz'.format(version) + else: + return 'https://github.com/revbayes/revbayes.archive/archive/v{0}.tar.gz'.format(version) + @property def root_cmakelists_dir(self): if self.spec.version > Version('1.0.13') and '+mpi' in self.spec: @@ -38,6 +44,11 @@ def root_cmakelists_dir(self): def regenerate(self): with working_dir(join_path('projects', 'cmake')): mkdirp('build') + if self.spec.version > Version('1.0.13'): + generate_version = Executable('./generate_version_number.sh') + generate_version() + dest = join_path('..', '..', 'src', 'revlanguage', 'utils') + install('GitVersion.cpp', dest) edit = FileFilter('regenerate.sh') edit.filter('boost="true"', 'boost="false"') if '+mpi' in self.spec: diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index d6275bd882e..9db1a702540 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ b/var/spack/repos/builtin/packages/root/package.py @@ -26,6 +26,8 @@ class Root(CMakePackage): # Development version (when more recent than production). # Production version + version('6.20.02', sha256='0997586bf097c0afbc6f08edbffcebf5eb6a4237262216114ba3f5c8087dcba6') + version('6.20.00', sha256='68421eb0434b38b66346fa8ea6053a0fdc9a6d254e4a72019f4e3633ae118bf0') version('6.18.04', sha256='315a85fc8363f8eb1bffa0decbf126121258f79bd273513ed64795675485cfa4', preferred=True) @@ -110,6 +112,9 @@ class Root(CMakePackage): # otherwise it crashes with the internal minuit library variant('minuit', default=True, description='Automatically search for support libraries') + variant('mlp', default=False, + description="Enable support for TMultilayerPerceptron " + "classes' federation") variant('mysql', default=False) variant('opengl', default=True, description='Enable OpenGL support') @@ -143,7 +148,7 @@ class Root(CMakePackage): description='TBB multi-threading support') variant('threads', default=True, description='Enable using thread library') - variant('tmva', default=True, + variant('tmva', default=False, description='Build TMVA multi variate analysis library') variant('unuran', default=True, description='Use UNURAN for random number generation') @@ -185,6 +190,7 @@ class Root(CMakePackage): depends_on('xxhash', when='@6.13.02:') # See cmake_args, below. 
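The root changes here add an `mlp` variant (above) and, a little further on in this file's diff, tie it to the existing `tmva` variant with a new `conflicts('+tmva', when='~mlp', ...)`. A minimal sketch of that pairing in isolation, using a hypothetical recipe that is not part of this changeset:

```python
from spack import *


class Example(CMakePackage):
    """Hypothetical recipe: a feature variant that requires another one."""

    homepage = "https://example.org/example"
    url = "https://example.org/example-1.0.tar.gz"

    variant('mlp', default=False, description='Enable the MLP classes')
    variant('tmva', default=False, description='Build the TMVA library')

    # Requesting +tmva together with ~mlp is rejected at concretization
    # time with the message below.
    conflicts('+tmva', when='~mlp', msg='TMVA requires MLP')
```

A spec such as `example +tmva~mlp` then fails before any build starts, which is the behaviour the root recipe relies on to guard its TMVA build.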
depends_on('xz') depends_on('zlib') + depends_on('zstd', when='@6.20:') # X-Graphics depends_on('libx11', when="+x") @@ -203,8 +209,14 @@ class Root(CMakePackage): # Qt4 depends_on('qt@:4.999', when='+qt4') - # TMVA - depends_on('py-numpy', when='+tmva') + # Python + depends_on('python@2.7:', when='+python', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run'), when='+tmva') + # This numpy dependency was not intended and will hopefully + # be fixed in 6.20.04. + # See: https://sft.its.cern.ch/jira/browse/ROOT-10626 + depends_on('py-numpy', type=('build', 'run'), + when='@6.20.00:6.20.03 +python') # Optional dependencies depends_on('davix @0.7.1:', when='+davix') @@ -219,7 +231,6 @@ class Root(CMakePackage): depends_on('postgresql', when='+postgres') depends_on('pythia6+root', when='+pythia6') depends_on('pythia8', when='+pythia8') - depends_on('python@2.7:', when='+python', type=('build', 'run')) depends_on('r', when='+r', type=('build', 'run')) depends_on('r-rcpp', when='+r', type=('build', 'run')) depends_on('r-rinside', when='+r', type=('build', 'run')) @@ -255,6 +266,7 @@ class Root(CMakePackage): # Incompatible variants conflicts('+opengl', when='~x', msg='OpenGL requires X') conflicts('+tmva', when='~gsl', msg='TVMA requires GSL') + conflicts('+tmva', when='~mlp', msg='TVMA requires MLP') conflicts('cxxstd=11', when='+root7', msg='root7 requires at least C++14') # Feature removed in 6.18: @@ -357,6 +369,7 @@ def cmake_args(self): ['minimal'], ['minuit'], ['minuit2', 'minuit'], + ['mlp'], ['monalisa', False], ['mysql'], ['odbc'], diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index 2ae30b5c035..59e827f67dc 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -4,59 +4,455 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +from six import iteritems class Rust(Package): - """The rust programming language toolchain""" + """The Rust programming language toolchain - homepage = "http://www.rust-lang.org" - git = "https://github.com/rust-lang/rust.git" + This package can bootstrap any version of the Rust compiler since Rust + 1.23. It does this by downloading the platform-appropriate binary + distribution of the desired version of the rust compiler, and then building + that compiler from source. 
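The docstring above describes the bootstrap scheme: download the binary distribution of the same compiler version for the current platform, then build the sources with it. The `rust_releases` table that begins further below keys per-target checksums by version and target triple; the following is only a hedged sketch of how such a table could be consulted, and the helper name and binary-tarball URL pattern are illustrative assumptions rather than anything taken from the recipe:

```python
def bootstrap_dist(rust_releases, version, target):
    """Return (url, sha256) for the binary rustc used for bootstrapping.

    The static.rust-lang.org naming shown here is an assumption for
    illustration, not part of the recipe.
    """
    sha256 = rust_releases[str(version)][target]
    url = ('https://static.rust-lang.org/dist/'
           'rust-{0}-{1}.tar.gz'.format(version, target))
    return url, sha256


# Example lookup against the table below:
# bootstrap_dist(rust_releases, '1.42.0', 'x86_64-unknown-linux-gnu')
```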
+ """ - version('develop', branch='master') - version('1.41.0', tag='1.41.0') - version('1.34.0', tag='1.34.0') - version('1.32.0', tag='1.32.0') - version('1.31.1', tag='1.31.1') - version('1.31.0', tag='1.31.0') # "Rust 2018" edition - version('1.30.1', tag='1.30.1') + homepage = "https://www.rust-lang.org" + url = "https://static.rust-lang.org/dist/rustc-1.42.0-src.tar.gz" + + maintainers = ["AndrewGaspar"] + + phases = ['configure', 'build', 'install'] extendable = True - # Rust - depends_on("llvm") - depends_on("curl") - depends_on("git") - depends_on("cmake") - depends_on("binutils") - depends_on("python@:2.8") + variant( + 'rustfmt', + default=True, + description='Formatting tool for Rust code' + ) - # Cargo - depends_on("openssl") + variant( + 'analysis', + default=True, + description='Outputs code analysis that can be consumed by other tools' + ) - phases = ['configure', 'install'] + variant( + 'clippy', + default=True, + description='Linting tool for Rust' + ) + + variant( + 'rls', + default=False, + description='The Rust Language Server can be used for IDE integration' + ) + + variant( + 'src', + default=True, + description='Install Rust source files' + ) + + depends_on('cmake', type='build') + depends_on('python@:2.8', type='build') + depends_on('openssl') + depends_on('libssh2') + depends_on('libgit2') + + # Version Notes: + # Here's some information on why your favorite Rust version may be missing. + # + # < 1.23: + # Rust seems to eagerly search for ar next to cc. Spack makes wrappers for + # cc and c++, but not for ar, so no ar is found. In future versions, ar + # can be specified in the config. + # + # < 1.17: + # The `x.py` bootstrapping script did not exist prior to Rust 1.17. It + # would be possible to support both, but for simplicitly, we only support + # Rust 1.17 and newer + version('1.42.0', sha256='d2e8f931d16a0539faaaacd801e0d92c58df190269014b2360c6ab2a90ee3475') + version('1.41.1', sha256='38c93d016e6d3e083aa15e8f65511d3b4983072c0218a529f5ee94dd1de84573') + version('1.41.0', sha256='5546822c09944c4d847968e9b7b3d0e299f143f307c00fa40e84a99fabf8d74b') + version('1.40.0', sha256='dd97005578defc10a482bff3e4e728350d2099c60ffcf1f5e189540c39a549ad') + version('1.39.0', sha256='b4a1f6b6a93931f270691aba4fc85eee032fecda973e6b9c774cd06857609357') + version('1.38.0', sha256='644263ca7c7106f8ee8fcde6bb16910d246b30668a74be20b8c7e0e9f4a52d80') + version('1.37.0', sha256='120e7020d065499cc6b28759ff04153bfdc2ac9b5adeb252331a4eb87cbe38c3') + version('1.36.0', sha256='04c4e4d7213d036d6aaed392841496d272146312c0290f728b7400fccd15bb1b') + version('1.35.0', sha256='5a4d637a716bac18d085f44dd87ef48b32195f71b967d872d80280b38cff712d') + version('1.34.2', sha256='c69a4a85a1c464368597df8878cb9e1121aae93e215616d45ad7d23af3052f56') + version('1.34.1', sha256='b0c785264d17e1dac4598627c248a2d5e07dd39b6666d1881fcfc8e2cf4c40a7') + version('1.34.0', sha256='7ac85acffd79dd3a7c44305d9eaabd1f1e7116e2e6e11e770e4bf5f92c0f1f59') + version('1.33.0', sha256='5a01a8d7e65126f6079042831385e77485fa5c014bf217e9f3e4aff36a485d94') + version('1.32.0', sha256='4c594c7712a0e7e8eae6526c464bf6ea1d82f77b4f61717c3fc28fb27ba2224a') + version('1.31.1', sha256='91d2fc22f08d986adab7a54eb3a6a9b99e490f677d2d092e5b9e4e069c23686a') + version('1.30.1', sha256='36a38902dbd9a3e1240d46ab0f2ca40d2fd07c2ab6508ed7970c6c4c036b5b29') + version('1.30.0', sha256='cd0ba83fcca55b64c0c9f23130fe731dfc1882b73ae21bef96be8f2362c108ee') + version('1.29.2', sha256='5088e796aa2e47478cdf41e7243fc5443fafab0a7c70a11423e57c80c04167c9') + 
version('1.29.1', sha256='f1b0728b66ce6bce6d72bbe5ea9e3a24ea22a045665da2ed8fcdfad14f61a349') + version('1.29.0', sha256='a4eb34ffd47f76afe2abd813f398512d5a19ef00989d37306217c9c9ec2f61e9') + version('1.28.0', sha256='1d5a81729c6f23a0a23b584dd249e35abe9c6f7569cee967cc42b1758ecd6486') + version('1.27.2', sha256='9a818c50cdb7880abeaa68b3d97792711e6c64c1cdfb6efdc23f75b8ced0e15d') + version('1.27.1', sha256='2133beb01ddc3aa09eebc769dd884533c6cfb08ce684f042497e097068d733d1') + version('1.27.0', sha256='2cb9803f690349c9fd429564d909ddd4676c68dc48b670b8ddf797c2613e2d21') + version('1.26.2', sha256='fb9ecf304488c9b56600ab20cfd1937482057f7e5db7899fddb86e0774548700') + version('1.26.1', sha256='70a7961bd8ec43b2c01e9896e90b0a06804a7fbe0a5c05acc7fd6fed19500df0') + version('1.26.0', sha256='4fb09bc4e233b71dcbe08a37a3f38cabc32219745ec6a628b18a55a1232281dd') + version('1.25.0', sha256='eef63a0aeea5147930a366aee78cbde248bb6e5c6868801bdf34849152965d2d') + version('1.24.1', sha256='3ea53d45e8d2e9a41afb3340cf54b9745f845b552d802d607707cf04450761ef') + version('1.24.0', sha256='bb8276f6044e877e447f29f566e4bbf820fa51fea2f912d59b73233ffd95639f') + version('1.23.0', sha256='7464953871dcfdfa8afcc536916a686dd156a83339d8ec4d5cb4eb2fe146cb91') + + # The Rust bootstrapping process requires a bootstrapping compiler. The + # easiest way to do this is to download the binary distribution of the + # same version of the compiler and build with that. + # + # This dictionary contains a version: hash dictionary for each supported + # Rust target. + rust_releases = { + '1.42.0': { + 'x86_64-unknown-linux-gnu': '7d1e07ad9c8a33d8d039def7c0a131c5917aa3ea0af3d0cc399c6faf7b789052', + 'powerpc64le-unknown-linux-gnu': '805b08fa1e0aad4d706301ca1f13e2d80810d385cece2c15070360b3c4bd6e4a', + 'aarch64-unknown-linux-gnu': 'fdd39f856a062af265012861949ff6654e2b7103be034d046bec84ebe46e8d2d', + 'x86_64-apple-darwin': 'db1055c46e0d54b99da05e88c71fea21b3897e74a4f5ff9390e934f3f050c0a8' + }, + '1.41.1': { + 'x86_64-unknown-linux-gnu': 'a6d5a3b3f574aafc8f787fea37aad9fb8a7946b383ae5348146927192ff0bef0', + 'powerpc64le-unknown-linux-gnu': 'f9b53ca636625b3a2dd87600b6274223c11f866c9b5a34b638ea0013186659d3', + 'aarch64-unknown-linux-gnu': 'd54c0f9165b86216b6f1b499f451141407939c5dc6b36c89a3772895a1370242', + 'x86_64-apple-darwin': '16615288cf74239783de1b435d329f3d56ed13803c7c10cd4b207d7c8ffa8f67' + }, + '1.41.0': { + 'x86_64-unknown-linux-gnu': '343ba8ef7397eab7b3bb2382e5e4cb08835a87bff5c8074382c0b6930a41948b', + 'powerpc64le-unknown-linux-gnu': 'ba231b0d8273d6928f61e2be3456e816a1de8050135e20c0623dc7a6ea03ba68', + 'aarch64-unknown-linux-gnu': '79ddfb5e2563d0ee09a567fbbe121a2aed3c3bc61255b2787f2dd42183a10f27', + 'x86_64-apple-darwin': 'b6504003ab70b11f278e0243a43ba9d6bf75e8ad6819b4058a2b6e3991cc8d7a' + }, + '1.40.0': { + 'x86_64-unknown-linux-gnu': 'fc91f8b4bd18314e83a617f2389189fc7959146b7177b773370d62592d4b07d0', + 'powerpc64le-unknown-linux-gnu': 'b1a23e35c383f99e647df6a9239b1dc9313e293deb70a76ba58e8ebe55ef623b', + 'aarch64-unknown-linux-gnu': '639271f59766d291ebdade6050e7d05d61cb5c822a3ef9a1e2ab185fed68d729', + 'x86_64-apple-darwin': '749ca5e0b94550369cc998416b8854c13157f5d11d35e9b3276064b6766bcb83' + }, + '1.39.0': { + 'x86_64-unknown-linux-gnu': 'b10a73e5ba90034fe51f0f02cb78f297ed3880deb7d3738aa09dc5a4d9704a25', + 'powerpc64le-unknown-linux-gnu': '53b3fd942c52709f7e6fe11ea572d086e315a57a40b84b9b3290ac0ec8c7c84a', + 'aarch64-unknown-linux-gnu': 'e27dc8112fe577012bd88f30e7c92dffd8c796478ce386c49465c03b6db8209f', + 'x86_64-apple-darwin': 
'3736d49c5e9592844e1a5d5452883aeaf8f1e25d671c1bc8f01e81c1766603b5' + }, + '1.38.0': { + 'x86_64-unknown-linux-gnu': 'adda26b3f0609dbfbdc2019da4a20101879b9db2134fae322a4e863a069ec221', + 'powerpc64le-unknown-linux-gnu': 'f9ed1bb6525abdd4dd6ef10782ad45d2f71496e0c3c88e806b510c81a91c4ff7', + 'aarch64-unknown-linux-gnu': '06afd6d525326cea95c3aa658aaa8542eab26f44235565bb16913ac9d12b7bda', + 'x86_64-apple-darwin': 'bd301b78ddcd5d4553962b115e1dca5436dd3755ed323f86f4485769286a8a5a' + }, + '1.37.0': { + 'x86_64-unknown-linux-gnu': 'cb573229bfd32928177c3835fdeb62d52da64806b844bc1095c6225b0665a1cb', + 'powerpc64le-unknown-linux-gnu': '27c59ec40e9e9f71490dc00bf165156ae3ea77c20ffa4b5e5fd712e67527b477', + 'aarch64-unknown-linux-gnu': '263ef98fa3a6b2911b56f89c06615cdebf6ef676eb9b2493ad1539602f79b6ba', + 'x86_64-apple-darwin': 'b2310c97ffb964f253c4088c8d29865f876a49da2a45305493af5b5c7a3ca73d' + }, + '1.36.0': { + 'x86_64-unknown-linux-gnu': '15e592ec52f14a0586dcebc87a957e472c4544e07359314f6354e2b8bd284c55', + 'powerpc64le-unknown-linux-gnu': '654a7a18d881811c09f630b0c917825b586e94a6142eceaede6b8046718e4054', + 'aarch64-unknown-linux-gnu': 'db78c24d93756f9fe232f081dbc4a46d38f8eec98353a9e78b9b164f9628042d', + 'x86_64-apple-darwin': '91f151ec7e24f5b0645948d439fc25172ec4012f0584dd16c3fb1acb709aa325' + }, + '1.35.0': { + 'x86_64-unknown-linux-gnu': 'cf600e2273644d8629ed57559c70ca8db4023fd0156346facca9ab3ad3e8f86c', + 'powerpc64le-unknown-linux-gnu': 'a933955adec386d75d126e78df5b9941936e156acb3353fc44b85995a81c7bb2', + 'aarch64-unknown-linux-gnu': '31e6da56e67838fd2874211ae896a433badf67c13a7b68481f1d5f7dedcc5952', + 'x86_64-apple-darwin': 'ac14b1c7dc330dcb53d8641d74ebf9b32aa8b03b9d650bcb9258030d8b10dbd6' + }, + '1.34.2': { + 'x86_64-unknown-linux-gnu': '2bf6622d980a52832bae141304e96f317c8a1ccd2dfd69a134a14033e6e43c0f', + 'powerpc64le-unknown-linux-gnu': '4ddd55014bbd954b3499859bfa3146bff471de21c1d73fc6e7cccde290fc1918', + 'aarch64-unknown-linux-gnu': '15fc6b7ec121df9d4e42483dd12c677203680bec8c69b6f4f62e5a35a07341a8', + 'x86_64-apple-darwin': '6fdd4bf7fe26dded0cd57b41ab5f0500a5a99b7bc770523a425e9e34f63d0fd8' + }, + '1.34.1': { + 'x86_64-unknown-linux-gnu': '8e2eead11bd5bf61409e29018d007c6fc874bcda2ff54db3d04d1691e779c14e', + 'powerpc64le-unknown-linux-gnu': '94ac92d08afcfa2d77ae207e91b57c00cb48ff7ba08a27ed3deb2493f33e8fb1', + 'aarch64-unknown-linux-gnu': '0565e50dae58759a3a5287abd61b1a49dfc086c4d6acf2ce604fe1053f704e53', + 'x86_64-apple-darwin': 'f4e46b9994ccfab4a84059298d1dc8fd446b1bbb7449462e0459948f7debea0e' + }, + '1.34.0': { + 'x86_64-unknown-linux-gnu': '170647ed41b497dc937a6b2556700210bc4be187b1735029ef9ccf52e2cb5ab8', + 'powerpc64le-unknown-linux-gnu': '3027e87802e161cce6f3a23d961f6d73b9ed6e829b2cd7af5dfccf6e1207e552', + 'aarch64-unknown-linux-gnu': '370c3a8fb9a69df36d645a95e622fb59ac5b513baecddde706cedaf20defa269', + 'x86_64-apple-darwin': 'e6bea8d865cc7341c17fa3b8f25f7989e6b04f53e9da24878addc524f3a32664' + }, + '1.33.0': { + 'x86_64-unknown-linux-gnu': '6623168b9ee9de79deb0d9274c577d741ea92003768660aca184e04fe774393f', + 'powerpc64le-unknown-linux-gnu': 'db885aa4c2c6896c85257be2ade5c9edea660ca6878970683e8d5796618329b5', + 'aarch64-unknown-linux-gnu': 'a308044e4076b62f637313ea803fa0a8f340b0f1b53136856f2c43afcabe5387', + 'x86_64-apple-darwin': '864e7c074a0b88e38883c87c169513d072300bb52e1d320a067bd34cf14f66bd' + }, + '1.32.0': { + 'x86_64-unknown-linux-gnu': 'e024698320d76b74daf0e6e71be3681a1e7923122e3ebd03673fcac3ecc23810', + 'powerpc64le-unknown-linux-gnu': 
'd6d5c9154f4459465d68ebd4fa1e17bad4b6cfe219667dddd9123c3bfb5dd839', + 'aarch64-unknown-linux-gnu': '60def40961728212da4b3a9767d5a2ddb748400e150a5f8a6d5aa0e1b8ba1cee', + 'x86_64-apple-darwin': 'f0dfba507192f9b5c330b5984ba71d57d434475f3d62bd44a39201e36fa76304' + }, + '1.31.1': { + 'x86_64-unknown-linux-gnu': 'a64685535d0c457f49a8712a096a5c21564cd66fd2f7da739487f028192ebe3c', + 'powerpc64le-unknown-linux-gnu': 'a6f61b7a8a06a2b0a785391cc3e6bb8004aa72095eea80db1561039f5bb3e975', + 'aarch64-unknown-linux-gnu': '29a7c6eb536fefd0ca459e48dfaea006aa8bff8a87aa82a9b7d483487033632a', + 'x86_64-apple-darwin': '8398b1b303bdf0e7605d08b87070a514a4f588797c6fb3593718cb9cec233ad6' + }, + '1.30.1': { + 'x86_64-unknown-linux-gnu': 'a01a493ed8946fc1c15f63e74fc53299b26ebf705938b4d04a388a746dfdbf9e', + 'powerpc64le-unknown-linux-gnu': 'a7d4806e6702bdbad5017eeddc62f7ff7eb2438b1b9c39cbc90c2b1207f8e65f', + 'aarch64-unknown-linux-gnu': '6d87d81561285abd6c1987e07b60b2d723936f037c4b46eedcc12e8566fd3874', + 'x86_64-apple-darwin': '3ba1704a7defe3d9a6f0c1f68792c084da83bcba85e936d597bac0c019914b94' + }, + '1.30.0': { + 'x86_64-unknown-linux-gnu': 'f620e3125cc505c842150bd873c0603432b6cee984cdae8b226cf92c8aa1a80f', + 'powerpc64le-unknown-linux-gnu': '0b53e257dc3d9f3d75cd97be569d3bf456d2c0af57ed0bd5e7a437227d8f465a', + 'aarch64-unknown-linux-gnu': '9690c7c50eba5a8461184ee4138b4c284bad31ccc4aa1f2ddeec58b253e6363e', + 'x86_64-apple-darwin': '07008d90932712282bc599f1e9a226e97879c758dc1f935e6e2675e45694cc1b' + }, + '1.29.2': { + 'x86_64-unknown-linux-gnu': 'e9809825c546969a9609ff94b2793c9107d7d9bed67d557ed9969e673137e8d8', + 'powerpc64le-unknown-linux-gnu': '344003b808c20424c4699c9452bd37cdee23857dd4aa125e67d1d6e4bc992091', + 'aarch64-unknown-linux-gnu': 'e11461015ca7106ef8ebf00859842bf4be518ee170226cb8eedaaa666946509f', + 'x86_64-apple-darwin': '63f54e3013406b39fcb5b84bcf5e8ce85860d0b97a1e156700e467bf5fb5d5f2' + }, + '1.29.1': { + 'x86_64-unknown-linux-gnu': 'b36998aea6d58525f25d89f1813b6bfd4cad6ff467e27bd11e761a20dde43745', + 'powerpc64le-unknown-linux-gnu': '26a6d652ade6b6a96e6af18e846701ee28f912233372dfe15432139252f88958', + 'aarch64-unknown-linux-gnu': '2685224f67b2ef951e0e8b48829f786cbfed95e19448ba292ac33af719843dbe', + 'x86_64-apple-darwin': '07b07fbd6fab2390e19550beb8008745a8626cc5e97b72dc659061c1c3b3d008' + }, + '1.29.0': { + 'x86_64-unknown-linux-gnu': '09f99986c17b1b6b1bfbc9dd8785e0e4693007c5feb67915395d115c1a3aea9d', + 'powerpc64le-unknown-linux-gnu': 'd6954f1da53f7b3618fba3284330d99b6142bb25d9febba6dbfedad59ca53329', + 'aarch64-unknown-linux-gnu': '0ed3be0fd9f847afeb4e587fff61f6769ea61b53719d3ea999326284e8975b36', + 'x86_64-apple-darwin': '28a0473637585742f6d80ccd8afd88b6b400e65d623c33cb892412759444da93' + }, + '1.28.0': { + 'x86_64-unknown-linux-gnu': '2a1390340db1d24a9498036884e6b2748e9b4b057fc5219694e298bdaa37b810', + 'powerpc64le-unknown-linux-gnu': '255818156ec1f795ed808a44b4fdb8019187d5ebb7f837ae8f55a1ca40862bb6', + 'aarch64-unknown-linux-gnu': '9b6fbcee73070332c811c0ddff399fa31965bec62ef258656c0c90354f6231c1', + 'x86_64-apple-darwin': '5d7a70ed4701fe9410041c1eea025c95cad97e5b3d8acc46426f9ac4f9f02393' + }, + '1.27.2': { + 'x86_64-unknown-linux-gnu': '5028a18e913ef3eb53e8d8119d2cc0594442725e055a9361012f8e26f754f2bf', + 'powerpc64le-unknown-linux-gnu': '11034d150e811d4903b09fd42f0cb76d467a6365a158101493405fff1054572f', + 'aarch64-unknown-linux-gnu': 'cf84da70269c0e50bb3cc3d248bae1ffcd70ee69dc5a4e3513b54fefc6685fb4', + 'x86_64-apple-darwin': '30c5cc58759caa4efdf2ea7d8438633139c98bee3408beb29ceb26985f3f5f70' + }, + 
'1.27.1': { + 'x86_64-unknown-linux-gnu': '435778a837af764da2a7a7fb4d386b7b78516c7dfc732d892858e9a8a539989b', + 'powerpc64le-unknown-linux-gnu': 'a08e6b6fed3329fcd1220b2ee4cd7a311d99121cf780fb6e1c6353bfeddfb176', + 'aarch64-unknown-linux-gnu': 'd1146b240e6f628224c3a67e3aae2a57e6c25d544115e5ece9ce91861ec92b3a', + 'x86_64-apple-darwin': '475be237962d6aef1038a2faada26fda1e0eaea5d71d6950229a027a9c2bfe08' + }, + '1.27.0': { + 'x86_64-unknown-linux-gnu': '235ad78e220b10a2d0267aea1e2c0f19ef5eaaff53ad6ff8b12c1d4370dec9a3', + 'powerpc64le-unknown-linux-gnu': '847774a751e848568215739d384e3baf4d6ec37d27fb3add7a8789208c213aff', + 'aarch64-unknown-linux-gnu': 'e74ebc33dc3fc19e501a677a87b619746efdba2901949a0319176352f556673a', + 'x86_64-apple-darwin': 'a1d48190992e01aac1a181bce490c80cb2c1421724b4ff0e2fb7e224a958ce0f' + }, + '1.26.2': { + 'x86_64-unknown-linux-gnu': 'd2b4fb0c544874a73c463993bde122f031c34897bb1eeb653d2ba2b336db83e6', + 'powerpc64le-unknown-linux-gnu': 'ea045869074ae3617eeb51207ce183e6915784b9ed615ecb92ce082ddb86ec1f', + 'aarch64-unknown-linux-gnu': '3dfad0dc9c795f7ee54c2099c9b7edf06b942adbbf02e9ed9e5d4b5e3f1f3759', + 'x86_64-apple-darwin': 'f193705d4c0572a358670dbacbf0ffadcd04b3989728b442f4680fa1e065fa72' + }, + '1.26.1': { + 'x86_64-unknown-linux-gnu': 'b7e964bace1286696d511c287b945f3ece476ba77a231f0c31f1867dfa5080e0', + 'powerpc64le-unknown-linux-gnu': 'ad8b2f6dd8c5cca1251d65b75ed2120aae3c5375d2c8ed690259cf4a652d7d3c', + 'aarch64-unknown-linux-gnu': 'd4a369053c2dfd5f457de6853557dab563944579fa4bb55bc919bacf259bff6d', + 'x86_64-apple-darwin': 'ebf898b9fa7e2aafc53682a41f18af5ca6660ebe82dd78f28cd9799fe4dc189a' + }, + '1.26.0': { + 'x86_64-unknown-linux-gnu': '13691d7782577fc9f110924b26603ade1990de0b691a3ce2dc324b4a72a64a68', + 'powerpc64le-unknown-linux-gnu': '3ba3a4905730ec01007ca1096d9fc3780f4e81f71139a619e1f526244301b7f4', + 'aarch64-unknown-linux-gnu': 'e12dc84bdb569cdb382268a5fe6ae6a8e2e53810cb890ec3a7133c20ba8451ac', + 'x86_64-apple-darwin': '38708803c3096b8f101d1919ee2d7e723b0adf1bc1bb986b060973b57d8c7c28' + }, + '1.25.0': { + 'x86_64-unknown-linux-gnu': '06fb45fb871330a2d1b32a27badfe9085847fe824c189ddc5204acbe27664f5e', + 'powerpc64le-unknown-linux-gnu': '79eeb2a7fafa2e0f65f29a1dc360df69daa725347e4b6a533684f1c07308cc6e', + 'aarch64-unknown-linux-gnu': '19a43451439e515a216d0a885d14203f9a92502ee958abf86bf7000a7d73d73d', + 'x86_64-apple-darwin': 'fcd0302b15e857ba4a80873360cf5453275973c64fa82e33bfbed02d88d0ad17' + }, + '1.24.1': { + 'x86_64-unknown-linux-gnu': '4567e7f6e5e0be96e9a5a7f5149b5452828ab6a386099caca7931544f45d5327', + 'powerpc64le-unknown-linux-gnu': '6f6c4bebbd7d6dc9989bf372c512dea55af8f56a1a0cfe97784667f0ac5430ee', + 'aarch64-unknown-linux-gnu': '64bb25a9689b18ddadf025b90d9bdb150b809ebfb74432dc69cc2e46120adbb2', + 'x86_64-apple-darwin': '9d4aacdb5849977ea619d399903c9378163bd9c76ea11dac5ef6eca27849f501' + }, + '1.24.0': { + 'x86_64-unknown-linux-gnu': '336cf7af6c857cdaa110e1425719fa3a1652351098dc73f156e5bf02ed86443c', + 'powerpc64le-unknown-linux-gnu': '25d9b965a63ad2f345897028094d4c7eafa432237b478754ccbcc299f80629c8', + 'aarch64-unknown-linux-gnu': 'a981de306164b47f3d433c1d53936185260642849c79963af7e07d36b063a557', + 'x86_64-apple-darwin': '1aecba7cab4bc1a9e0e931c04aa00849e930b567d243da7b676ede8f527a2992' + }, + '1.23.0': { + 'x86_64-unknown-linux-gnu': '9a34b23a82d7f3c91637e10ceefb424539dcfa327c2dcd292ff10c047b1fdc7e', + 'powerpc64le-unknown-linux-gnu': '60f1a1cc182c516de08c1f42ada01604a3d94383e9dded6b237ae2233999437b', + 'aarch64-unknown-linux-gnu': 
'38379fbd976d2286cb73f21466db40a636a583b9f8a80af5eea73617c7912bc7', + 'x86_64-apple-darwin': '9274e977322bb4b153f092255ac9bd85041142c73eaabf900cb2ef3d3abb2eba' + } + } + + # This dictionary maps Rust target architectures to Spack constraints that + # match that target. + rust_archs = { + 'x86_64-unknown-linux-gnu': [ + {'platform': 'linux', 'target': 'x86_64:'}, + {'platform': 'cray', 'target': 'x86_64:'} + ], + 'powerpc64le-unknown-linux-gnu': [ + {'platform': 'linux', 'target': 'ppc64le:'}, + {'platform': 'cray', 'target': 'ppc64le:'} + ], + 'aarch64-unknown-linux-gnu': [ + {'platform': 'linux', 'target': 'aarch64:'}, + {'platform': 'cray', 'target': 'aarch64:'} + ], + 'x86_64-apple-darwin': [ + {'platform': 'darwin', 'target': 'x86_64:'} + ] + } + + # This loop generates resources for each binary distribution, and maps + # them to the version of the compiler they bootstrap. This is in place + # of listing each resource explicitly, which would be potentially even + # more verbose. + # + # NOTE: This loop should technically specify the architecture to be the + # _host_ architecture, not the target architecture, in order to support + # cross compiling. I'm not sure Spack provides a way to specify a + # distinction in the when clause, though. + for rust_version, rust_targets in iteritems(rust_releases): + for rust_target, rust_sha256 in iteritems(rust_targets): + for rust_arch in rust_archs[rust_target]: + resource( + name='rust-{version}-{target}'.format( + version=rust_version, + target=rust_target + ), + url='https://static.rust-lang.org/dist/rust-{version}-{target}.tar.gz'.format( + version=rust_version, + target=rust_target + ), + sha256=rust_sha256, + destination='spack_bootstrap_stage', + when='@{version} platform={platform} target={target}'\ + .format( + version=rust_version, + platform=rust_arch['platform'], + target=rust_arch['target'] + ) + ) + + # This routine returns the target architecture we intend to build for. + def get_rust_target(self): + if 'platform=linux' in self.spec or 'platform=cray' in self.spec: + if 'target=x86_64:' in self.spec: + return 'x86_64-unknown-linux-gnu' + elif 'target=ppc64le:' in self.spec: + return 'powerpc64le-unknown-linux-gnu' + elif 'target=aarch64:' in self.spec: + return 'aarch64-unknown-linux-gnu' + elif 'platform=darwin target=x86_64:' in self.spec: + return 'x86_64-apple-darwin' + + raise InstallError( + "rust is not supported for '{0}'".format( + self.spec.architecture + )) def configure(self, spec, prefix): - configure_args = [ - '--prefix=%s' % prefix, - '--llvm-root=' + spec['llvm'].prefix, - # Workaround for "FileCheck does not exist" error - '--disable-codegen-tests', - # Includes Cargo in the build - # https://github.com/rust-lang/cargo/issues/3772#issuecomment-283109482 - '--enable-extended', - # Prevent build from writing bash completion into system path - '--sysconfdir=%s' % join_path(prefix, 'etc/') - ] + target = self.get_rust_target() + # See the NOTE above the resource loop - should be host architecture, + # not target architecture if we're to support cross-compiling.
+ bootstrapping_install = Executable( + './spack_bootstrap_stage/rust-{version}-{target}/install.sh' + .format( + version=spec.version, + target=target + ) + ) + # install into the staging area + bootstrapping_install('--prefix={0}'.format( + join_path(self.stage.source_path, 'spack_bootstrap') + )) - configure(*configure_args) + boot_bin = join_path(self.stage.source_path, 'spack_bootstrap/bin') - # Build system defaults to searching in the same path as Spack's - # compiler wrappers which causes the build to fail - filter_file( - '#ar = "ar"', - 'ar = "%s"' % join_path(spec['binutils'].prefix.bin, 'ar'), - 'config.toml') + # Always build rustc and cargo + tools = ['rustc', 'cargo'] + # Only make additional components available in 'rust-bootstrap' + if '+rustfmt' in self.spec: + tools.append('rustfmt') + if '+analysis' in self.spec: + tools.append('analysis') + if '@1.33: +clippy' in self.spec: + tools.append('clippy') + if '+rls' in self.spec: + tools.append('rls') + if '+src' in self.spec: + tools.append('src') + + ar = which('ar', required=True) + + # build.tools was introduced in Rust 1.25 + tools_spec = 'tools={0}'.format(tools) if '@1.25:' in self.spec else '' + # This is a temporary fix due to rust 1.42 breaking self bootstrapping + # See: https://github.com/rust-lang/rust/issues/69953 + # + # In general, this should be safe because bootstrapping typically + # ensures everything but the bootstrapping script is warning free for + # the latest set of warning. + deny_warnings_spec = \ + 'deny-warnings = false' if '@1.42.0' in self.spec else '' + + with open('config.toml', 'w') as out_file: + out_file.write("""\ +[build] +cargo = "{cargo}" +rustc = "{rustc}" +docs = false +vendor = true +extended = true +verbose = 2 +{tools_spec} + +[rust] +channel = "stable" +rpath = true +{deny_warnings_spec} + +[target.{target}] +ar = "{ar}" + +[install] +prefix = "{prefix}" +sysconfdir = "etc" +""".format( + cargo=join_path(boot_bin, 'cargo'), + rustc=join_path(boot_bin, 'rustc'), + prefix=prefix, + target=target, + deny_warnings_spec=deny_warnings_spec, + ar=ar.path, + tools_spec=tools_spec + ) + ) + + def build(self, spec, prefix): + python('./x.py', 'build', extra_env={ + # vendored libgit2 wasn't correctly building (couldn't find the + # vendored libssh2), so let's just have spack build it + 'LIBSSH2_SYS_USE_PKG_CONFIG': '1', + 'LIBGIT2_SYS_USE_PKG_CONFIG': '1' + }) def install(self, spec, prefix): - make() - make("install") + python('./x.py', 'install') diff --git a/var/spack/repos/builtin/packages/scr/package.py b/var/spack/repos/builtin/packages/scr/package.py index 63d0af34eb8..3e0e1456cdd 100644 --- a/var/spack/repos/builtin/packages/scr/package.py +++ b/var/spack/repos/builtin/packages/scr/package.py @@ -17,7 +17,10 @@ class Scr(CMakePackage): url = "https://github.com/LLNL/scr/archive/v1.2.0.tar.gz" git = "https://github.com/llnl/scr.git" - version('master', branch='master') + version('develop', branch='develop') + version('legacy', branch='legacy') + + version('2.0.0', sha256='471978ae0afb56a20847d3989b994fbd680d1dea21e77a5a46a964b6e3deed6b') version('1.2.2', sha256='764a85638a9e8762667ec1f39fa5f7da7496fca78de379a22198607b3e027847') version('1.2.1', sha256='23acab2dc7203e9514455a5168f2fd57bc590affb7a1876912b58201513628fe') version('1.2.0', sha256='e3338ab2fa6e9332d2326c59092b584949a083a876adf5a19d4d5c7a1bbae047') @@ -26,6 +29,14 @@ class Scr(CMakePackage): depends_on('zlib') depends_on('mpi') + # SCR legacy is anything 2.x.x or earlier + # SCR components is anything 3.x.x or later + 
depends_on('er', when="@3:") + depends_on('kvtree', when="@3:") + depends_on('rankstr', when="@3:") + depends_on('filo', when="@3:") + depends_on('spath', when="@3:") + variant('dtcmp', default=True, description="Build with DTCMP. " "Necessary to enable user directory naming at runtime") diff --git a/var/spack/repos/builtin/packages/seacas/package.py b/var/spack/repos/builtin/packages/seacas/package.py index 0e5ac9b72ac..f9790370a07 100644 --- a/var/spack/repos/builtin/packages/seacas/package.py +++ b/var/spack/repos/builtin/packages/seacas/package.py @@ -26,6 +26,8 @@ class Seacas(CMakePackage): # ###################### Versions ########################## version('master', branch='master') + version('2020-03-16', sha256='2eb404f3dcb17c3e7eacf66978372830d40ef3722788207741fcd48417807af6') + version('2020-01-16', sha256='5ae84f61e410a4f3f19153737e0ac0493b144f20feb1bbfe2024f76613d8bff5') version('2019-12-18', sha256='f82cfa276ebc5fe6054852383da16eba7a51c81e6640c73b5f01fc3109487c6f') version('2019-10-14', sha256='ca4cf585cdbc15c25f302140fe1f61ee1a30d72921e032b9a854492b6c61fb91') version('2019-08-20', sha256='a82c1910c2b37427616dc3716ca0b3c1c77410db6723aefb5bea9f47429666e5') diff --git a/var/spack/repos/builtin/packages/sentencepiece/package.py b/var/spack/repos/builtin/packages/sentencepiece/package.py new file mode 100644 index 00000000000..f3b168db66a --- /dev/null +++ b/var/spack/repos/builtin/packages/sentencepiece/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Sentencepiece(CMakePackage): + """Unsupervised text tokenizer for Neural Network-based text generation.
+ + This is the C++ package.""" + + homepage = "https://github.com/google/sentencepiece" + url = "https://github.com/google/sentencepiece/archive/v0.1.85.tar.gz" + + maintainers = ['adamjstewart'] + + version('0.1.85', sha256='dd4956287a1b6af3cbdbbd499b7227a859a4e3f41c9882de5e6bdd929e219ae6') + + depends_on('cmake@3.1:', type='build') + depends_on('gperftools') # optional, 10-40% performance improvement diff --git a/var/spack/repos/builtin/packages/sfcgal/package.py b/var/spack/repos/builtin/packages/sfcgal/package.py index 021ba1bb97b..c1e80174e0c 100644 --- a/var/spack/repos/builtin/packages/sfcgal/package.py +++ b/var/spack/repos/builtin/packages/sfcgal/package.py @@ -20,8 +20,8 @@ class Sfcgal(CMakePackage): version('1.3.7', sha256='30ea1af26cb2f572c628aae08dd1953d80a69d15e1cac225390904d91fce031b') # Ref: http://oslandia.github.io/SFCGAL/installation.html - depends_on('cgal@4.3 +core') - depends_on('boost@1.54.0:1.69.0') + depends_on('cgal@4.3: +core') + depends_on('boost@1.54.0:') depends_on('mpfr@2.2.1:') depends_on('gmp@4.2:') diff --git a/var/spack/repos/builtin/packages/shuffile/package.py b/var/spack/repos/builtin/packages/shuffile/package.py index 1dc23f1a2c3..e7a8da2c0e8 100644 --- a/var/spack/repos/builtin/packages/shuffile/package.py +++ b/var/spack/repos/builtin/packages/shuffile/package.py @@ -9,8 +9,8 @@ class Shuffile(CMakePackage): """Shuffle files between MPI ranks""" - homepage = "https://github.com/ECP-VeloC/shuffile" - url = "https://github.com/ECP-VeloC/shuffile/archive/v0.0.3.zip" + homepage = "https://github.com/ecp-veloc/shuffile" + url = "https://github.com/ecp-veloc/shuffile/archive/v0.0.3.zip" git = "https://github.com/ecp-veloc/shuffile.git" tags = ['ecp'] diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index 9b980ba0ac8..98e8b4be155 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -28,6 +28,7 @@ class Silo(AutotoolsPackage): variant('mpi', default=True, description='Compile with MPI Compatibility') + depends_on('hdf5@:1.10.999', when='@:4.10.2') depends_on('hdf5~mpi', when='~mpi') depends_on('mpi', when='+mpi') depends_on('hdf5+mpi', when='+mpi') diff --git a/var/spack/repos/builtin/packages/singularity/package.py b/var/spack/repos/builtin/packages/singularity/package.py index e97f18ce833..7e808880cf9 100644 --- a/var/spack/repos/builtin/packages/singularity/package.py +++ b/var/spack/repos/builtin/packages/singularity/package.py @@ -27,6 +27,7 @@ class Singularity(MakefilePackage): maintainers = ['ArangoGutierrez', 'alalazo'] version('master', branch='master') + version('3.5.3', sha256='0c76f1e3808bf4c10e92b17150314b2b816be79f8101be448a6e9d7a96c9e486') version('3.5.2', sha256='f9c21e289377a4c40ed7a78a0c95e1ff416dec202ed49a6c616dd2c37700eab8') version('3.4.1', sha256='638fd7cc5ab2a20e779b8768f73baf21909148339d6c4edf6ff61349c53a70c2') version('3.4.0', sha256='eafb27f1ffbed427922ebe2b5b95d1c9c09bfeb897518867444fe230e3e35e41') diff --git a/var/spack/repos/builtin/packages/sirius/package.py b/var/spack/repos/builtin/packages/sirius/package.py index f45aa077819..ff81b120d4d 100644 --- a/var/spack/repos/builtin/packages/sirius/package.py +++ b/var/spack/repos/builtin/packages/sirius/package.py @@ -19,6 +19,8 @@ class Sirius(CMakePackage, CudaPackage): version('develop', branch='develop') version('master', branch='master') + version('6.5.2', sha256='c18adc45b069ebae03f94eeeeed031ee99b3d8171fa6ee73c7c6fb1e42397fe7') + 
version('6.5.1', sha256='599dd0fa25a4e83db2a359257a125e855d4259188cf5b0065b8e7e66378eacf3') version('6.5.0', sha256='5544f3abbb71dcd6aa08d18aceaf53c38373de4cbd0c3af44fbb39c20cfeb7cc') version('6.4.4', sha256='1c5de9565781847658c3cc11edcb404e6e6d1c5a9dfc81e977de7a9a7a162c8a') version('6.4.3', sha256='4d1effeadb84b3e1efd7d9ac88018ef567aa2e0aa72e1112f0abf2e493e2a189') diff --git a/var/spack/repos/builtin/packages/slurm/package.py b/var/spack/repos/builtin/packages/slurm/package.py index bfa6621f077..c0d4cc01a91 100644 --- a/var/spack/repos/builtin/packages/slurm/package.py +++ b/var/spack/repos/builtin/packages/slurm/package.py @@ -22,8 +22,11 @@ class Slurm(AutotoolsPackage): """ homepage = 'https://slurm.schedmd.com' - url = 'https://github.com/SchedMD/slurm/archive/slurm-17-02-6-1.tar.gz' + url = 'https://github.com/SchedMD/slurm/archive/slurm-19-05-6-1.tar.gz' + version('19-05-6-1', sha256='1b83bce4260af06d644253b1f2ec2979b80b4418c631e9c9f48c2729ae2c95ba') + version('19-05-5-1', sha256='e53e67bd0bb4c37a9c481998764a746467a96bc41d6527569080514f36452c07') + version('18-08-9-1', sha256='32eb0b612ca18ade1e35c3c9d3b4d71aba2b857446841606a9e54d0a417c3b03') version('18-08-0-1', sha256='62129d0f2949bc8a68ef86fe6f12e0715cbbf42f05b8da6ef7c3e7e7240b50d9') version('17-11-9-2', sha256='6e34328ed68262e776f524f59cca79ac75bcd18030951d45ea545a7ba4c45906') version('17-02-6-1', sha256='97b3a3639106bd6d44988ed018e2657f3d640a3d5c105413d05b4721bc8ee25e') @@ -35,6 +38,8 @@ class Slurm(AutotoolsPackage): variant('hdf5', default=False, description='Enable hdf5 support') variant('readline', default=True, description='Enable readline support') variant('pmix', default=False, description='Enable PMIx support') + variant('sysconfdir', default='PREFIX/etc', values=any, + description='Set system configuration path (possibly /etc/slurm)') # TODO: add variant for BG/Q and Cray support @@ -94,6 +99,10 @@ def configure_args(self): else: args.append('--without-pmix') + sysconfdir = spec.variants['sysconfdir'].value + if sysconfdir != 'PREFIX/etc': + args.append('--sysconfdir={0}'.format(sysconfdir)) + return args def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/source-highlight/package.py b/var/spack/repos/builtin/packages/source-highlight/package.py new file mode 100644 index 00000000000..eee706518a6 --- /dev/null +++ b/var/spack/repos/builtin/packages/source-highlight/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class SourceHighlight(AutotoolsPackage, GNUMirrorPackage): + """This program, given a source file, produces a document with syntax + highlighting. It also provides a C++ highlight library + (since version 3.0). 
""" + + homepage = "https://www.gnu.org/software/src-highlite/" + gnu_mirror_path = "src-highlite/source-highlight-3.1.8.tar.gz" + + version('3.1.9', sha256='3a7fd28378cb5416f8de2c9e77196ec915145d44e30ff4e0ee8beb3fe6211c91') + version('3.1.8', sha256='01336a7ea1d1ccc374201f7b81ffa94d0aecb33afc7d6903ebf9fbf33a55ada3') + + depends_on('boost') diff --git a/var/spack/repos/builtin/packages/spath/package.py b/var/spack/repos/builtin/packages/spath/package.py new file mode 100644 index 00000000000..5ccd259f485 --- /dev/null +++ b/var/spack/repos/builtin/packages/spath/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Spath(CMakePackage): + """Represent and manipulate file system paths""" + + homepage = "https://github.com/ecp-veloc/spath" + git = "https://github.com/ecp-veloc/spath.git" + + tags = ['ecp'] + + version('master', branch='master') + + variant('mpi', default=True, description="Build with MPI support.") + depends_on('mpi', when='+mpi') + + def cmake_args(self): + args = [] + + if self.spec.satisfies('platform=cray'): + args.append("-DSPATH_LINK_STATIC=ON") + + if "+mpi" in self.spec: + args.append('-DMPI=ON') + args.append("-DMPI_C_COMPILER=%s" % self.spec['mpi'].mpicc) + else: + args.append('-DMPI=OFF') + + return args diff --git a/var/spack/repos/builtin/packages/spdlog/package.py b/var/spack/repos/builtin/packages/spdlog/package.py index e4aad3bf421..15a0cc1d9da 100644 --- a/var/spack/repos/builtin/packages/spdlog/package.py +++ b/var/spack/repos/builtin/packages/spdlog/package.py @@ -12,6 +12,8 @@ class Spdlog(CMakePackage): homepage = "https://github.com/gabime/spdlog" url = "https://github.com/gabime/spdlog/archive/v0.9.0.tar.gz" + version('1.5.0', sha256='b38e0bbef7faac2b82fed550a0c19b0d4e7f6737d5321d4fd8f216b80f8aee8a') + version('1.4.2', sha256='821c85b120ad15d87ca2bc44185fa9091409777c756029125a02f81354072157') version('1.4.1', sha256='3291958eb54ed942d1bd3aef1b4f8ccf70566cbc04d34296ec61eb96ceb73cff') version('1.2.1', sha256='867a4b7cedf9805e6f76d3ca41889679054f7e5a3b67722fe6d0eae41852a767') version('1.2.0', sha256='0ba31b9e7f8e43a7be328ab0236d57810e5d4fc8a1a7842df665ae22d5cbd128') diff --git a/var/spack/repos/builtin/packages/spiral/package.py b/var/spack/repos/builtin/packages/spiral/package.py new file mode 100644 index 00000000000..5011875e131 --- /dev/null +++ b/var/spack/repos/builtin/packages/spiral/package.py @@ -0,0 +1,65 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Spiral(CMakePackage): + """SPIRAL is a program generation system for linear transforms and other + mathematical functions that produces very high performance code for a wide + spectrum of hardware platforms.""" + + homepage = "https://spiral.net" + url = "https://github.com/spiral-software/spiral-software/archive/8.1.2.tar.gz" + + maintainers = ['spiralgen'] + + version('8.1.2', sha256='506f1dbf923aa1c9f19f05444fa947085715eef37c9d2494d133fcaaa1dd50bc') + + extendable = True + + # No dependencies. 
+ + def build(self, spec, prefix): + with working_dir(self.build_directory): + make('all') + make('install/local') + + # For some reason the make install/local doesn't seem to install + # the gap exe...though it does work if run manually + gapfil = join_path(self.build_directory, 'gap/src/gap') + dest = join_path(self.stage.source_path, 'gap/bin') + install(gapfil, dest) + + def install(self, spec, prefix): + mkdirp(prefix.gap.bin) + gapfil = join_path(self.build_directory, 'gap/src/gap') + install(gapfil, prefix.gap.bin) + with working_dir(join_path(self.build_directory, 'gap')): + files = ('spiral', 'spirald', '_spiral.g') + for fil in files: + install(fil, prefix) + set_executable(join_path(prefix, fil)) + + with working_dir(self.stage.source_path): + files = ('LICENSE', 'README.md', 'ReleaseNotes.md') + for fil in files: + install(fil, prefix) + + mkdirp(prefix.gap.lib) + mkdirp(prefix.gap.grp) + mkdirp(prefix.namespaces) + mkdirp(prefix.profiler) + mkdirp(prefix.tests) + + print("self.stage.source_path = " + self.stage.source_path) + with working_dir(self.stage.source_path): + install_tree('namespaces', prefix.namespaces) + install_tree('profiler', prefix.profiler) + install_tree('tests', prefix.tests) + + with working_dir(join_path(self.stage.source_path, 'gap')): + install_tree('lib', prefix.gap.lib) + install_tree('grp', prefix.gap.grp) diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py index f8dd0e49484..4e79463f36b 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -38,7 +38,7 @@ class Sqlite(AutotoolsPackage): '(unsafe for <3.26.0.0 due to Magellan).') variant('rtree', default=False, description='Build with Rtree module') - variant('column_metadata', default=False, description="Build with COLUMN_METADATA") + variant('column_metadata', default=True, description="Build with COLUMN_METADATA") # See https://blade.tencent.com/magellan/index_en.html conflicts('+fts', when='@:3.25.99.99') diff --git a/var/spack/repos/builtin/packages/sshpass/package.py b/var/spack/repos/builtin/packages/sshpass/package.py new file mode 100644 index 00000000000..499761a867d --- /dev/null +++ b/var/spack/repos/builtin/packages/sshpass/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Sshpass(AutotoolsPackage): + """Sshpass is a tool for non-interactively performing password + authentication with SSH's so-called "interactive keyboard + password authentication".
Most user should use SSH's more + secure public key authentiaction instead.""" + + homepage = "https://sourceforge.net/projects/sshpass/" + url = "https://sourceforge.net/projects/sshpass/files/sshpass/1.06/sshpass-1.06.tar.gz" + + version('1.06', sha256='c6324fcee608b99a58f9870157dfa754837f8c48be3df0f5e2f3accf145dee60') + version('1.05', sha256='c3f78752a68a0c3f62efb3332cceea0c8a1f04f7cf6b46e00ec0c3000bc8483e') + version('1.04', sha256='e8abb9a409f25928722251a5855a74854f6d64af3eb136b804a04fd630d70c80') + version('1.03', sha256='5e8082343f5eae43598bb5723fa11bf49d3c9864dc58c7513fe1a90658e52b2f') + version('1.02', sha256='e580d999eefbd847c5cd0b36315cb6cd187315c4e7d1cb182b9f94c12c7c6a86') + version('1.01', sha256='e2adc378d61b72e63b4381fe123de3c63bd4093c9553d3219e83878f379754f4') + version('1.00', sha256='71d4be85a464a8ce2ae308bc04dcb342918f3989b6a81c74217b5df7f11471f8') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 989158356d8..799b413f7d3 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -18,6 +18,7 @@ class SuperluDist(CMakePackage): version('develop', branch='master') version('xsdk-0.2.0', tag='xsdk-0.2.0') + version('6.3.0', sha256='daf3264706caccae2b8fd5a572e40275f1e128fa235cb7c21ee2f8051c11af95') version('6.1.1', sha256='35d25cff592c724439870444ed45e1d1d15ca2c65f02ccd4b83a6d3c9d220bd1') version('6.1.0', sha256='92c6d1424dd830ee2d1e7396a418a5f6645160aea8472e558c4e4bfe006593c4') version('6.0.0', sha256='ff6cdfa0263d595708bbb6d11fb780915d8cfddab438db651e246ea292f37ee4') diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py index ac48fb12464..ebc04dd077d 100644 --- a/var/spack/repos/builtin/packages/swig/package.py +++ b/var/spack/repos/builtin/packages/swig/package.py @@ -23,6 +23,7 @@ class Swig(AutotoolsPackage): url = "http://prdownloads.sourceforge.net/swig/swig-3.0.12.tar.gz" version('master', git='https://github.com/swig/swig.git') + version('4.0.1', sha256='7a00b4d0d53ad97a14316135e2d702091cd5f193bb58bcfcd8bc59d41e7887a9') version('4.0.0', sha256='e8a39cd6437e342cdcbd5af27a9bf11b62dc9efec9248065debcb8276fcbb925') version('3.0.12', sha256='7cf9f447ae7ed1c51722efc45e7f14418d15d7a1e143ac9f09a668999f4fc94d') version('3.0.11', sha256='d9031d531d7418829a54d0d51c4ed9007016b213657ec70be44031951810566e') diff --git a/var/spack/repos/builtin/packages/symengine/package.py b/var/spack/repos/builtin/packages/symengine/package.py index ceb1b9a5ed4..a9c866edb77 100644 --- a/var/spack/repos/builtin/packages/symengine/package.py +++ b/var/spack/repos/builtin/packages/symengine/package.py @@ -14,7 +14,8 @@ class Symengine(CMakePackage): url = "https://github.com/symengine/symengine/archive/v0.2.0.tar.gz" git = "https://github.com/symengine/symengine.git" - version('develop', branch='master') + version('master', branch='master') + version('0.6.0', sha256='4d2caa86c03eaaa8ed004084d02f87b5c51b6229f8ba70d161227e22d6302f0a') version('0.5.0', sha256='5d02002f00d16a0928d1056e6ecb8f34fd59f3bfd8ed0009a55700334dbae29b') version('0.4.0', sha256='dd755901a9e2a49e53ba3bbe3f565f94265af05299e57a7b592186dd35916a1b') version('0.3.0', sha256='591463cb9e741d59f6dfd39a7943e3865d3afe9eac47d1a9cbf5ca74b9c49476') diff --git 
a/var/spack/repos/builtin/packages/tar/package.py b/var/spack/repos/builtin/packages/tar/package.py index de12fd23447..15c2e4bc0ef 100644 --- a/var/spack/repos/builtin/packages/tar/package.py +++ b/var/spack/repos/builtin/packages/tar/package.py @@ -19,7 +19,7 @@ class Tar(AutotoolsPackage, GNUMirrorPackage): version('1.29', sha256='cae466e6e58c7292355e7080248f244db3a4cf755f33f4fa25ca7f9a7ed09af0') version('1.28', sha256='6a6b65bac00a127a508533c604d5bf1a3d40f82707d56f20cefd38a05e8237de') - depends_on('libiconv') + depends_on('iconv') patch('tar-pgi.patch', when='@1.29') patch('config-pgi.patch', when='@:1.29') @@ -29,5 +29,5 @@ class Tar(AutotoolsPackage, GNUMirrorPackage): def configure_args(self): return [ - '--with-libiconv-prefix={0}'.format(self.spec['libiconv'].prefix), + '--with-libiconv-prefix={0}'.format(self.spec['iconv'].prefix), ] diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index e6eaa43bf87..03298410294 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -87,7 +87,7 @@ class Tau(Package): depends_on('libdwarf', when='+libdwarf') depends_on('libelf', when='+libdwarf') # TAU requires the ELF header support, libiberty and demangle. - depends_on('binutils+libiberty+headers~nls', when='+binutils') + depends_on('binutils@:2.33.1+libiberty+headers~nls', when='+binutils') depends_on('python@2.7:', when='+python') depends_on('libunwind', when='+libunwind') depends_on('mpi', when='+mpi', type=('build', 'run', 'link')) @@ -95,6 +95,7 @@ class Tau(Package): depends_on('gasnet', when='+gasnet') depends_on('adios2', when='+adios2') depends_on('sqlite', when='+sqlite') + depends_on('hwloc') # Elf only required from 2.28.1 on conflicts('+libelf', when='@:2.28.0') @@ -104,8 +105,6 @@ class Tau(Package): conflicts('+adios2', when='@:2.29.1') conflicts('+sqlite', when='@:2.29.1') - filter_compiler_wrappers('tau_cc.sh', 'Makefile.tau', relative_root='bin') - def set_compiler_options(self, spec): useropt = ["-O2 -g", self.rpath_args] @@ -271,11 +270,15 @@ def install(self, spec, prefix): compiler_specific_options = self.set_compiler_options(spec) options.extend(compiler_specific_options) configure(*options) + make("install") # Link arch-specific directories into prefix since there is # only one arch per prefix the way spack installs. self.link_tau_arch_dirs() + # TAU may capture Spack's internal compiler wrapper. Replace + # it with the correct compiler. 
+ self.fix_tau_compilers() def link_tau_arch_dirs(self): for subdir in os.listdir(self.prefix): @@ -285,6 +288,22 @@ def link_tau_arch_dirs(self): if os.path.isdir(src) and not os.path.exists(dest): os.symlink(join_path(subdir, d), dest) + def fix_tau_compilers(self): + filter_file('FULL_CC=' + spack_cc, 'FULL_CC=' + self.compiler.cc, + self.prefix + '/include/Makefile', backup=False, + string=True) + filter_file('FULL_CXX=' + spack_cxx, 'FULL_CXX=' + + self.compiler.cxx, self.prefix + '/include/Makefile', + backup=False, string=True) + for makefile in os.listdir(self.prefix.lib): + if makefile.startswith('Makefile.tau'): + filter_file('FULL_CC=' + spack_cc, 'FULL_CC=' + + self.compiler.cc, self.prefix.lib + "/" + + makefile, backup=False, string=True) + filter_file('FULL_CXX=' + spack_cxx, 'FULL_CXX=' + + self.compiler.cxx, self.prefix.lib + + "/" + makefile, backup=False, string=True) + def setup_run_environment(self, env): pattern = join_path(self.prefix.lib, 'Makefile.*') files = glob.glob(pattern) diff --git a/var/spack/repos/builtin/packages/tixi/package.py b/var/spack/repos/builtin/packages/tixi/package.py new file mode 100644 index 00000000000..74a7ed7c656 --- /dev/null +++ b/var/spack/repos/builtin/packages/tixi/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Tixi(CMakePackage): + """TiXI is a fast and simple XML interface library and could be used + from applications written in C, C++, Fortran, JAVA and Python. """ + + homepage = "https://github.com/DLR-SC/tixi" + url = "https://github.com/DLR-SC/tixi/archive/v3.0.3.tar.gz" + git = "https://github.com/DLR-SC/tixi.git" + + version('3.0.3', sha256='3584e0cec6ab811d74fb311a9af0663736b1d7f11b81015fcb378efaf5ad3589') + version('2.2.4', sha256='9080d2a617b7c411b9b4086de23998ce86e261b88075f38c73d3ce25da94b21c') + + depends_on('python', type='build') + depends_on('expat') + depends_on('curl') + depends_on('libxml2') + depends_on('libxslt') diff --git a/var/spack/repos/builtin/packages/triangle/package.py b/var/spack/repos/builtin/packages/triangle/package.py index 882b8b472b3..f8703f33b31 100644 --- a/var/spack/repos/builtin/packages/triangle/package.py +++ b/var/spack/repos/builtin/packages/triangle/package.py @@ -18,6 +18,8 @@ class Triangle(Package): version('1.6', sha256='1766327add038495fa3499e9b7cc642179229750f7201b94f8e1b7bee76f8480') + depends_on('libx11', type='link') + def install(self, spec, prefix): make() mkdirp(prefix.bin) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 66e281f28b3..fd623fe8db3 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -86,18 +86,24 @@ class Trilinos(CMakePackage): description='Compile with Boost') variant('cgns', default=False, description='Enable CGNS') - variant('adios2', default=False, + variant('adios2', default=False, description='Enable ADIOS2') + variant('glm', default=True, + description='Compile with GLM') variant('gtest', default=True, description='Compile with Gtest') variant('hdf5', default=True, description='Compile with HDF5') variant('hypre', default=True, description='Compile with Hypre preconditioner') + variant('matio', default=True, + description='Compile with Matio') variant('metis', default=True, 
description='Compile with METIS and ParMETIS') variant('mumps', default=True, description='Compile with support for MUMPS solvers') + variant('netcdf', default=True, + description='Compile with netcdf') variant('pnetcdf', default=False, description='Compile with parallel-netcdf') variant('suite-sparse', default=True, @@ -312,6 +318,8 @@ class Trilinos(CMakePackage): # ADIOS2 was only added after v12.14.1 conflicts('+adios2', when='@:12.14.1') conflicts('+adios2', when='@xsdk-0.2.0') + conflicts('+pnetcdf', when='~netcdf') + # ###################### Dependencies ########################## # Everything should be compiled position independent (-fpic) @@ -319,17 +327,17 @@ class Trilinos(CMakePackage): depends_on('lapack') depends_on('boost', when='+boost') depends_on('boost', when='+dtk') - depends_on('matio') - depends_on('glm') + depends_on('matio', when='+matio') + depends_on('glm', when='+glm') depends_on('metis@5:', when='+metis') depends_on('suite-sparse', when='+suite-sparse') depends_on('zlib', when="+zlib") # MPI related dependencies depends_on('mpi') - depends_on('netcdf-c+mpi', when="~pnetcdf") - depends_on('netcdf-c+mpi+parallel-netcdf', when="+pnetcdf@master,12.12.1:") - depends_on('parallel-netcdf', when="+pnetcdf@master,12.12.1:") + depends_on('netcdf-c+mpi', when="+netcdf~pnetcdf") + depends_on('netcdf-c+mpi+parallel-netcdf', when="+netcdf+pnetcdf@master,12.12.1:") + depends_on('parallel-netcdf', when="+netcdf+pnetcdf@master,12.12.1:") depends_on('parmetis', when='+metis') depends_on('cgns', when='+cgns') depends_on('adios2', when='+adios2') @@ -537,14 +545,23 @@ def cmake_args(self): '-DTPL_ENABLE_LAPACK=ON', '-DLAPACK_LIBRARY_NAMES=%s' % ';'.join(lapack.names), '-DLAPACK_LIBRARY_DIRS=%s' % ';'.join(lapack.directories), - '-DTPL_ENABLE_Netcdf:BOOL=ON', - '-DNetCDF_ROOT:PATH=%s' % spec['netcdf-c'].prefix, + '-DTPL_ENABLE_GLM:BOOL=%s' % ('ON' if '+glm' in spec else 'OFF'), + '-DTPL_ENABLE_Matio:BOOL=%s' % ( + 'ON' if '+matio' in spec else 'OFF'), '-DTPL_ENABLE_X11:BOOL=%s' % ( 'ON' if '+x11' in spec else 'OFF'), '-DTrilinos_ENABLE_Gtest:BOOL=%s' % ( 'ON' if '+gtest' in spec else 'OFF'), ]) + if '+netcdf' in spec: + options.extend([ + '-DTPL_ENABLE_Netcdf:BOOL=ON', + '-DNetCDF_ROOT:PATH=%s' % spec['netcdf-c'].prefix + ]) + else: + options.extend(['-DTPL_ENABLE_Netcdf:BOOL=OFF']) + if '+hypre' in spec: options.extend([ '-DTPL_ENABLE_HYPRE:BOOL=ON', diff --git a/var/spack/repos/builtin/packages/unixodbc/package.py b/var/spack/repos/builtin/packages/unixodbc/package.py index 2802f87aa5e..b93e7eb166d 100644 --- a/var/spack/repos/builtin/packages/unixodbc/package.py +++ b/var/spack/repos/builtin/packages/unixodbc/package.py @@ -16,5 +16,5 @@ class Unixodbc(AutotoolsPackage): version('2.3.4', sha256='2e1509a96bb18d248bf08ead0d74804957304ff7c6f8b2e5965309c632421e39') - depends_on('libiconv') + depends_on('iconv') depends_on('libtool') diff --git a/var/spack/repos/builtin/packages/unqlite/0001-Removed-the-STATIC-key-word-to-enable-building-a-sha.patch b/var/spack/repos/builtin/packages/unqlite/0001-Removed-the-STATIC-key-word-to-enable-building-a-sha.patch new file mode 100644 index 00000000000..70602845e27 --- /dev/null +++ b/var/spack/repos/builtin/packages/unqlite/0001-Removed-the-STATIC-key-word-to-enable-building-a-sha.patch @@ -0,0 +1,26 @@ +From 7c14b18c4967c04344ceba2da90467cd27ee5678 Mon Sep 17 00:00:00 2001 +From: Matthieu Dorier +Date: Thu, 2 Apr 2020 12:43:19 +0100 +Subject: [PATCH] Removed the STATIC key word to enable building a shared + library + +--- + CMakeLists.txt | 2 
+- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index f2bb3cd..efb63a7 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -18,7 +18,7 @@ SET(SOURCES_UNQLITE + ) + + SET(UNQLITE_STATIC_LIB unqlite) +-ADD_LIBRARY(${UNQLITE_STATIC_LIB} STATIC ${HEADERS_UNQLITE} ${SOURCES_UNQLITE}) ++ADD_LIBRARY(${UNQLITE_STATIC_LIB} ${HEADERS_UNQLITE} ${SOURCES_UNQLITE}) + + INSTALL(TARGETS ${UNQLITE_STATIC_LIB} COMPONENT devel ARCHIVE DESTINATION lib) + INSTALL(FILES ${HEADERS_UNQLITE} COMPONENT devel DESTINATION include) +-- +2.20.1 + diff --git a/var/spack/repos/builtin/packages/unqlite/package.py b/var/spack/repos/builtin/packages/unqlite/package.py new file mode 100644 index 00000000000..ff746cc75a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/unqlite/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Unqlite(CMakePackage): + """UnQLite is a in-process software library which implements a self-contained, + serverless, zero-configuration, transactional NoSQL database engine.""" + + homepage = "https://unqlite.org/" + url = "https://github.com/symisc/unqlite/archive/v1.1.9.tar.gz" + git = 'https://github.com/symisc/unqlite.git' + + version('master', branch='master') + version('1.1.9', sha256='33d5b5e7b2ca223942e77d31112d2e20512bc507808414451c8a98a7be5e15c0') + + # This patch corresponds to https://github.com/symisc/unqlite/pull/99 + patch('0001-Removed-the-STATIC-key-word-to-enable-building-a-sha.patch', when='@1.1.9') + + def cmake_args(self): + args = ["-DBUILD_SHARED_LIBS:BOOL=ON"] + return args diff --git a/var/spack/repos/builtin/packages/upcxx/package.py b/var/spack/repos/builtin/packages/upcxx/package.py index e4806c4c9c5..d5c74bebb97 100644 --- a/var/spack/repos/builtin/packages/upcxx/package.py +++ b/var/spack/repos/builtin/packages/upcxx/package.py @@ -10,7 +10,7 @@ def cross_detect(): if spack.architecture.platform().name == 'cray': if which('srun'): return 'cray-aries-slurm' - if which('alps'): + if which('aprun'): return 'cray-aries-alps' return 'none' @@ -25,9 +25,17 @@ class Upcxx(Package): homepage = "https://upcxx.lbl.gov" maintainers = ['bonachea'] + git = 'https://bonachea@bitbucket.org/berkeleylab/upcxx.git' + version('develop', branch='develop') + version('master', branch='master') + + version('2020.3.0', sha256='01be35bef4c0cfd24e9b3d50c88866521b9cac3ad4cbb5b1fc97aea55078810f') version('2019.9.0', sha256='7d67ccbeeefb59de9f403acc719f52127a30801a2c2b9774a1df03f850f8f1d4') version('2019.3.2', sha256='dcb0b337c05a0feb2ed5386f5da6c60342412b49cab10f282f461e74411018ad') + variant('mpi', default=False, + description='Enables MPI-based spawners and mpi-conduit') + variant('cuda', default=False, description='Builds a CUDA-enabled version of UPC++') @@ -35,10 +43,19 @@ class Upcxx(Package): description="UPC++ cross-compile target (autodetect by default)") conflicts('cross=none', when='platform=cray', - msg='None is unacceptable on Cray.') + msg='cross=none is unacceptable on Cray.' 
+ + 'Please specify an appropriate "cross" value') + depends_on('mpi', when='+mpi') depends_on('cuda', when='+cuda') - depends_on('python@2.7.5:2.999', type=("build", "run")) + # Require Python2 2.7.5+ up to v2019.9.0 + depends_on('python@2.7.5:2.999', + type=("build", "run"), when='@:2019.9.0') + # v2020.3.0 and later also permit Python3 + depends_on('python@2.7.5:', type=("build", "run"), when='@2020.3.0:') + + # All flags should be passed to the build-env in autoconf-like vars + flag_handler = env_flags def url_for_version(self, version): if version > Version('2019.3.2'): @@ -48,8 +65,14 @@ def url_for_version(self, version): return url.format(version) def setup_build_environment(self, env): - if 'platform=cray' in self.spec: - env.set('GASNET_CONFIGURE_ARGS', '--enable-mpi=probe') + # ensure we use the correct python + env.set('UPCXX_PYTHON', self.spec['python'].command.path) + + if '+mpi' in self.spec: + env.set('GASNET_CONFIGURE_ARGS', + '--enable-mpi --enable-mpi-compat') + else: + env.set('GASNET_CONFIGURE_ARGS', '--without-mpicc') if 'cross=none' not in self.spec: env.set('CROSS', self.spec.variants['cross'].value) @@ -59,6 +82,9 @@ def setup_build_environment(self, env): env.set('UPCXX_CUDA_NVCC', self.spec['cuda'].prefix.bin.nvcc) def setup_run_environment(self, env): + # ensure we use the correct python + env.set('UPCXX_PYTHON', self.spec['python'].command.path) + env.set('UPCXX_INSTALL', self.prefix) env.set('UPCXX', self.prefix.bin.upcxx) if 'platform=cray' in self.spec: @@ -76,7 +102,72 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.set('UPCXX_NETWORK', 'aries') def install(self, spec, prefix): - env['CC'] = self.compiler.cc - env['CXX'] = self.compiler.cxx - installsh = Executable("./install") - installsh(prefix) + # UPC++ follows autoconf naming convention for LDLIBS, which is 'LIBS' + if (env.get('LDLIBS')): + env['LIBS'] = env['LDLIBS'] + + if spec.version <= Version('2019.9.0'): + env['CC'] = self.compiler.cc + if '+mpi' in self.spec: + if 'platform=cray' in self.spec: + env['GASNET_CONFIGURE_ARGS'] += \ + " --with-mpicc=" + self.compiler.cc + else: + env['CXX'] = spec['mpi'].mpicxx + else: + env['CXX'] = self.compiler.cxx + installsh = Executable("./install") + installsh(prefix) + else: + if 'platform=cray' in self.spec: + # Spack loads the cray-libsci module incorrectly on ALCF theta, + # breaking the Cray compiler wrappers + # cray-libsci is irrelevant to our build, so disable it + for var in ['PE_PKGCONFIG_PRODUCTS', 'PE_PKGCONFIG_LIBS']: + env[var] = ":".join( + filter(lambda x: "libsci" not in x.lower(), + env[var].split(":"))) + # Undo spack compiler wrappers: + # the C/C++ compilers must work post-install + # hack above no longer works after the fix to UPC++ issue #287 + real_cc = join_path(env['CRAYPE_DIR'], 'bin', 'cc') + real_cxx = join_path(env['CRAYPE_DIR'], 'bin', 'CC') + # workaround a bug in the UPC++ installer: (issue #346) + env['GASNET_CONFIGURE_ARGS'] += \ + " --with-cc=" + real_cc + " --with-cxx=" + real_cxx + if '+mpi' in self.spec: + env['GASNET_CONFIGURE_ARGS'] += " --with-mpicc=" + real_cc + else: + real_cc = self.compiler.cc + real_cxx = self.compiler.cxx + if '+mpi' in self.spec: + real_cxx = spec['mpi'].mpicxx + + env['CC'] = real_cc + env['CXX'] = real_cxx + + installsh = Executable("./configure") + installsh('--prefix=' + prefix) + + make() + + make('install') + + @run_after('install') + @on_package_attributes(run_tests=True) + def test_install(self): + if self.spec.version <= Version('2019.9.0'): + 
spack.main.send_warning_to_tty( + "run_tests not supported in UPC++ version " + + self.spec.version.string + " -- SKIPPED") + else: + # enable testing of unofficial conduits (mpi) + test_networks = 'NETWORKS=$(CONDUITS)' + # build hello world against installed tree in all configurations + make('test_install', test_networks) + make('tests-clean') # cleanup + # build all tests for all networks in debug mode + make('tests', test_networks) + if 'cross=none' in self.spec: + make('run-tests', 'NETWORKS=smp') # runs tests for smp backend + make('tests-clean') # cleanup diff --git a/var/spack/repos/builtin/packages/userspace-rcu/examples.patch b/var/spack/repos/builtin/packages/userspace-rcu/examples.patch new file mode 100644 index 00000000000..ef65ab4b79a --- /dev/null +++ b/var/spack/repos/builtin/packages/userspace-rcu/examples.patch @@ -0,0 +1,13 @@ +diff --git a/doc/examples/Makefile.am b/doc/examples/Makefile.am +index edf00eb..cd491df 100644 +--- a/doc/examples/Makefile.am ++++ b/doc/examples/Makefile.am +@@ -141,7 +141,7 @@ all-local: + else \ + rel_build_subdir="../"; \ + fi; \ +- $(MAKE) -f dist-files/Makefile CC="$(CC)" CPPFLAGS="$(CPPFLAGS)" AM_CPPFLAGS="$(AM_CPPFLAGS) -I"$${rel_src_subdir}/$(top_srcdir)/include/" -I"$${rel_src_subdir}/$(top_srcdir)/src/" -I"$${rel_build_subdir}$(top_builddir)/include/" -I"$${rel_build_subdir}$(top_builddir)/include/src/"" CFLAGS="$(CFLAGS)" AM_CFLAGS="$(AM_CFLAGS)" LDFLAGS="$(LDFLAGS)" AM_LDFLAGS="$(AM_LDFLAGS) -L../../../src/.libs/ -Wl,-rpath "$(PWD)/../../src/.libs/"" AM_V_P="$(AM_V_P)" AM_V_at="$(AM_V_at)" $(AM_MAKEFLAGS) all; ++ $(MAKE) -f dist-files/Makefile CC="$(CC)" CPPFLAGS="$(CPPFLAGS)" AM_CPPFLAGS="$(AM_CPPFLAGS) -I"$${rel_src_subdir}/$(top_srcdir)/include/" -I"$${rel_src_subdir}/$(top_srcdir)/src/" -I"$${rel_build_subdir}$(top_builddir)/include/" -I"$${rel_build_subdir}$(top_builddir)/include/src/"" CFLAGS="$(CFLAGS)" AM_CFLAGS="$(AM_CFLAGS)" LDFLAGS="$(LDFLAGS)" AM_LDFLAGS="$(AM_LDFLAGS) -L../../../src/.libs/ -Wl,-rpath="$(PWD)/../../src/.libs/"" AM_V_P="$(AM_V_P)" AM_V_at="$(AM_V_at)" $(AM_MAKEFLAGS) all; + + clean-local: + $(AM_V_at)$(MAKE) -f dist-files/Makefile $(AM_MAKEFLAGS) clean; \ diff --git a/var/spack/repos/builtin/packages/userspace-rcu/package.py b/var/spack/repos/builtin/packages/userspace-rcu/package.py new file mode 100644 index 00000000000..be03459d343 --- /dev/null +++ b/var/spack/repos/builtin/packages/userspace-rcu/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class UserspaceRcu(AutotoolsPackage): + """liburcu is a LGPLv2.1 userspace RCU (read-copy-update) library. 
This + data synchronization library provides read-side access which scales + linearly with the number of cores.""" + + homepage = "http://liburcu.org/" + url = "https://github.com/urcu/userspace-rcu/archive/v0.11.1.tar.gz" + + version('0.11.1', sha256='a0ed8995edfbeac5f5eb2f152a8f3654040ecfc99a746bfe3da3bccf435b7d5d') + version('0.11.0', sha256='7834e4692565b491b9d2d258095d6c05089c9bae8a1bef280c338d15ba02e9ac') + version('0.10.2', sha256='e117c416fced894e24720cc1b38247074a13020f19d6704b38e554cbcb993d06') + version('0.9.6', sha256='4d9e4ca40c079e0b0e9f912a9092589b97fbaf80eb6537e9ae70d48c09472efa') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + + patch('examples.patch', sha256='49aa8fa99d3a1315c639d2a90014079c34a7d0a6dde110b6cbb7b02f87324742') + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./bootstrap') diff --git a/var/spack/repos/builtin/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py index 49afb16f545..ccd6bba9aa3 100644 --- a/var/spack/repos/builtin/packages/util-linux/package.py +++ b/var/spack/repos/builtin/packages/util-linux/package.py @@ -9,29 +9,41 @@ class UtilLinux(AutotoolsPackage): """Util-linux is a suite of essential utilities for any Linux system.""" - homepage = "http://freecode.com/projects/util-linux" + homepage = "https://github.com/karelzak/util-linux" url = "https://www.kernel.org/pub/linux/utils/util-linux/v2.29/util-linux-2.29.2.tar.gz" list_url = "https://www.kernel.org/pub/linux/utils/util-linux" list_depth = 1 + version('2.35.1', sha256='37ac05d82c6410d89bc05d43cee101fefc8fe6cf6090b3ce7a1409a6f35db606') + version('2.35', sha256='98acab129a8490265052e6c1e033ca96d68758a13bb7fcd232c06bf16cc96238') + version('2.34', sha256='b62c92e5e1629642113cd41cec1ee86d1ee7e36b8ffe8ec3ac89c11797e9ac25') + version('2.33', sha256='952fb0d3498e81bd67b3c48e283c80cb12c719bc2357ec5801e7d420991ad319') version('2.29.2', sha256='29ccdf91d2c3245dc705f0ad3bf729ac41d8adcdbeff914e797c552ecb04a4c7') version('2.29.1', sha256='a6a7adba65a368e6dad9582d9fbedee43126d990df51266eaee089a73c893653') version('2.25', sha256='7e43273a9e2ab99b5a54ac914fddf5d08ba7ab9b114c550e9f03474672bd23a1') depends_on('python@2.7:') depends_on('pkgconfig') + depends_on('gettext', when='+libmount') # Make it possible to disable util-linux's libuuid so that you may # reliably depend_on(`libuuid`). 
variant('libuuid', default=True, description='Build libuuid') + variant('libmount', default=False, description='Build libmount.so with gettext') def url_for_version(self, version): url = "https://www.kernel.org/pub/linux/utils/util-linux/v{0}/util-linux-{1}.tar.gz" return url.format(version.up_to(2), version) + def setup_build_environment(self, env): + if '+libmount' in self.spec: + env.append_flags('LDFLAGS', '-L{0} -lintl'.format( + self.spec['gettext'].prefix.lib)) + def configure_args(self): config_args = [ '--disable-use-tty-group', + '--disable-makeinstall-chown', ] config_args.extend(self.enable_or_disable('libuuid')) return config_args diff --git a/var/spack/repos/builtin/packages/valgrind/package.py b/var/spack/repos/builtin/packages/valgrind/package.py index 684a5bdfa8f..3f759efeac9 100644 --- a/var/spack/repos/builtin/packages/valgrind/package.py +++ b/var/spack/repos/builtin/packages/valgrind/package.py @@ -22,6 +22,9 @@ class Valgrind(AutotoolsPackage): url = "https://sourceware.org/pub/valgrind/valgrind-3.13.0.tar.bz2" git = "git://sourceware.org/git/valgrind.git" + # The server is sometimes a bit slow to respond + fetch_options = {'timeout': 60} + version('develop', branch='master') version('3.15.0', sha256='417c7a9da8f60dd05698b3a7bc6002e4ef996f14c13f0ff96679a16873e78ab1') version('3.14.0', sha256='037c11bfefd477cc6e9ebe8f193bb237fe397f7ce791b4a4ce3fa1c6a520baa5') diff --git a/var/spack/repos/builtin/packages/veccore/package.py b/var/spack/repos/builtin/packages/veccore/package.py new file mode 100644 index 00000000000..a339358a084 --- /dev/null +++ b/var/spack/repos/builtin/packages/veccore/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Veccore(CMakePackage, CudaPackage): + """SIMD Vectorization Library for VecGeom and GeantV""" + + homepage = "https://gitlab.cern.ch/VecGeom/VecCore" + url = "https://gitlab.cern.ch/VecGeom/VecCore/-/archive/v0.6.0/VecCore-v0.6.0.tar.gz" + git = "https://gitlab.cern.ch/VecGeom/VecCore.git" + + maintainers = ['drbenmorgan', 'sethrj'] + + version('master', branch='master') + version('0.6.0', sha256='e7ff874ba2a8201624795cbe11c84634863e4ac7da691a936772d4202ef54413') + version('0.5.2', sha256='0cfaa830b9d10fb9df4ced5208a742623da08520fea5949461fe81637a27db15') + version('0.5.1', sha256='5ef3a8d8692d8f82641aae76b58405b8b3a1539a8f21b23d66a5df8327eeafc4') + version('0.5.0', sha256='aba3e0217c0cd829290c9fe63f1db865838aa25312ae0a09effdcb186f7771be') + version('0.4.2', sha256='4a3bb944bce63dc1dc9757ba53624b822e1aff5ed088d542039a20227ed2b715') + + variant('cxxstd', + default='11', + values=('11', '14', '17'), + multi=False, + description='Use the specified C++ standard when building.') + conflicts('cxxstd=14', when='@:0.5') + conflicts('cxxstd=17', when='@:0.5') + + def cmake_args(self): + define = CMakePackage.define + return [ + define('VC', False), + define('UMESIMD', False), + self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'), + self.define_from_variant('CUDA'), + ] diff --git a/var/spack/repos/builtin/packages/vecgeom/package.py b/var/spack/repos/builtin/packages/vecgeom/package.py index ed6e516c8aa..20352f77123 100644 --- a/var/spack/repos/builtin/packages/vecgeom/package.py +++ b/var/spack/repos/builtin/packages/vecgeom/package.py @@ -5,49 +5,97 @@ from spack import * -import platform -class Vecgeom(CMakePackage): +class Vecgeom(CMakePackage, CudaPackage): """The vectorized geometry library for particle-detector simulation (toolkits).""" homepage = "https://gitlab.cern.ch/VecGeom/VecGeom" - url = "https://gitlab.cern.ch/api/v4/projects/VecGeom%2FVecGeom/repository/archive.tar.gz?sha=v0.3.rc" + url = "https://gitlab.cern.ch/VecGeom/VecGeom/-/archive/v1.1.6/VecGeom-v1.1.6.tar.gz" + git = "https://gitlab.cern.ch/VecGeom/VecGeom.git" - version('01.01.03', git='https://gitlab.cern.ch/VecGeom/VecGeom.git', tag='v01.01.03', preferred=True) - version('01.00.00', git='https://gitlab.cern.ch/VecGeom/VecGeom.git', tag='v01.00.00') - version('00.05.00', git='https://gitlab.cern.ch/VecGeom/VecGeom.git', tag='v00.05.00') + maintainers = ['drbenmorgan', 'sethrj'] + + version('master', branch='master') + version('1.1.6', sha256='c4806a6b67d01b40074b8cc6865d78574a6a1c573be51696f2ecdf98b9cb954a') + version('1.1.5', sha256='da674f3bbc75c30f56c1a2d251fa8930c899f27fa64b03a36569924030d87b95') + version('1.1.3', sha256='ada09e8b6b2fa6c058290302b2cb5a6c2e644192aab1623c31d18c6a2f4c01c8') + version('1.1.0', sha256='e9d1ef83ff591fe4f9ef744a4d3155a3dc7e90ddb6735b24f3afe4c2dc3f7064') + version('1.0.1', sha256='1eae7ac9014c608e8d8db5568058b8c0fea1a1dc7a8f54157a3a1c997b6fd9eb') + version('0.5.2', tag='v00.05.02', + commit='a7e0828c915ff936a79e672d1dd84b087a323b51') version('0.3.rc', sha256='a87a9ea4ab126b59ff9c79182bc0911ead3d76dd197194742e2a35ccd341299d') - variant('cxxstd', - default='17', - values=('11', '14', '17'), - multi=False, - description='Use the specified C++ standard when building.') - variant('vector', - default='native', - values=('sse3', 'sse4.2', 'native'), - multi=False, - description='Specify the instruction set for vectorization.') + _cxxstd_values = ('11', '14', '17') + variant('cxxstd', default='11', 
values=_cxxstd_values, multi=False, + description='Use the specified C++ standard when building') + variant('gdml', default=True, + description='Support native GDML geometry descriptions') + variant('geant4', default=False, + description='Support Geant4 geometry construction') + variant('root', default=False, + description='Support ROOT geometry construction') + variant('shared', default=True, + description='Build shared libraries') - depends_on('cmake@3.5:', type='build') + depends_on('veccore@0.5.2:', type=('build', 'link'), when='@1.1.0:') + depends_on('veccore@0.4.2', type=('build', 'link'), when='@:1.0') + depends_on('veccore+cuda', type=('build', 'link'), when='+cuda') + + conflicts('+cuda', when='@:1.1.5') + + for std in _cxxstd_values: + depends_on('geant4 cxxstd=' + std, when='+geant4 cxxstd=' + std) + depends_on('root cxxstd=' + std, when='+root cxxstd=' + std) + depends_on('veccore cxxstd=' + std, when='cxxstd=' + std) + depends_on('xerces-c cxxstd=' + std, when='+gdml cxxstd=' + std) def cmake_args(self): - options = [ - '-DBACKEND=Scalar', - '-DGEANT4=OFF', - '-DUSOLIDS=ON', - '-DUSOLIDS_VECGEOM=ON', - '-DROOT=OFF', - '-DNO_SPECIALIZATION=ON', - '-DCMAKE_VERBOSE_MAKEFILE=TRUE'] - options.append('-DCMAKE_CXX_STANDARD={0}'. - format(self.spec.variants['cxxstd'].value)) - arch = platform.machine() - if arch == 'x86_64': - options.append('-DVECGEOM_VECTOR={0}'. - format(self.spec.variants['vector'].value)) + # Possible target options are from the main CMakeLists.txt, assuming + # "best" is last + target = self.spec.target + vecgeom_arch = "sse2 sse3 ssse3 sse4.1 sse4.2 avx avx2".split() + for feature in reversed(vecgeom_arch): + if feature.replace('.', '_') in target: + target_instructions = feature + break else: - options.append('-DVECGEOM_VECTOR=' + arch) + # No features available (could be 'generic' arch) + target_instructions = 'empty' + + define = CMakePackage.define + options = [ + define('BACKEND', 'Scalar'), + define('BUILTIN_VECCORE', False), + define('NO_SPECIALIZATION', True), + define('VECGEOM_VECTOR', target_instructions), + self.define_from_variant('BUILD_SHARED_LIBS', 'shared'), + self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'), + self.define_from_variant('CUDA'), + self.define_from_variant('GDML'), + self.define_from_variant('GEANT4'), + self.define_from_variant('ROOT'), + ] + + # Set testing flags + build_tests = self.run_tests + options.extend([ + define('BUILD_TESTING', build_tests), + define('CTEST', build_tests), + define('GDMLTESTING', build_tests and '+gdml' in self.spec), + ]) + + if '+cuda' in self.spec: + arch = self.spec.variants['cuda_arch'].value + if len(arch) != 1 or arch[0] == 'none': + raise InstallError("Exactly one cuda_arch must be specified") + options.append(define('CUDA_ARCH', arch[0])) + + if self.spec.satisfies("@:0.5.2"): + options.extend([ + define('USOLIDS', True), + define('USOLIDS_VECGEOM', True), + ]) + return options diff --git a/var/spack/repos/builtin/packages/verrou/package.py b/var/spack/repos/builtin/packages/verrou/package.py index 265ee2b0963..f92443d1441 100644 --- a/var/spack/repos/builtin/packages/verrou/package.py +++ b/var/spack/repos/builtin/packages/verrou/package.py @@ -32,18 +32,24 @@ class Verrou(AutotoolsPackage): version('2.0.0', sha256='798df6e426ec57646a2a626d756b72f0171647ae5b07c982952dae2d71e26045') version('1.1.0', sha256='b5105f61c65680f31551199cd143b2e15f412c34c821537998a7165e315dde2d') + # The server is sometimes a bit slow to respond + timeout = {'timeout': 60} + resource(name='valgrind-3.15.0', 
url='https://sourceware.org/pub/valgrind/valgrind-3.15.0.tar.bz2', sha256='417c7a9da8f60dd05698b3a7bc6002e4ef996f14c13f0ff96679a16873e78ab1', - when='@2.2.0:') + when='@2.2.0:', + fetch_options=timeout) resource(name='valgrind-3.14.0', url='https://sourceware.org/pub/valgrind/valgrind-3.14.0.tar.bz2', sha256='037c11bfefd477cc6e9ebe8f193bb237fe397f7ce791b4a4ce3fa1c6a520baa5', - when='@2.1.0:2.1.99') + when='@2.1.0:2.1.99', + fetch_options=timeout) resource(name='valgrind-3.13.0', url='https://sourceware.org/pub/valgrind/valgrind-3.13.0.tar.bz2', sha256='d76680ef03f00cd5e970bbdcd4e57fb1f6df7d2e2c071635ef2be74790190c3b', - when='@1.1.0:2.0.99') + when='@1.1.0:2.0.99', + fetch_options=timeout) variant('fma', default=True, description='Activates fused multiply-add support for Verrou') diff --git a/var/spack/repos/builtin/packages/warpx/package.py b/var/spack/repos/builtin/packages/warpx/package.py index 7755beba9c5..55cdf3cb13b 100644 --- a/var/spack/repos/builtin/packages/warpx/package.py +++ b/var/spack/repos/builtin/packages/warpx/package.py @@ -22,21 +22,34 @@ class Warpx(MakefilePackage): version('master', tag='master') - depends_on('mpi') - variant('dims', default='3', - values=('1', '2', '3'), + values=('2', '3', 'rz'), multi=False, description='Number of spatial dimensions') - + variant('backend', + default='openmp', + values=('openmp', 'cuda', 'hip'), + multi=True, + description='Programming model for compute kernels') + variant('mpi', default=True, description='Enable MPI support') variant('psatd', default=False, description='Enable PSATD solver') - variant('do_electrostatic', default=False, description='Include electrostatic solver') variant('debug', default=False, description='Enable debugging features') - variant('tprof', default=False, description='Enable tiny profiling features') - variant('openmp', default=True, description='Enable OpenMP features') + variant('tprof', default=True, description='Enable tiny profiling features') + variant('openpmd', default=True, description='Enable openPMD I/O') + variant('ascent', default=False, description='Enable Ascent in situ vis') + depends_on('cuda', when='backend=cuda') + depends_on('mpi', when='+mpi') depends_on('fftw@3:', when='+psatd') + depends_on('fftw +mpi', when='+psatd +mpi') + depends_on('pkgconfig', type='build', when='+openpmd') + depends_on('python', type='build') # AMReX' build system info + depends_on('openpmd-api@0.11.0:,dev', when='+openpmd') + depends_on('openpmd-api +mpi', when='+openpmd +mpi') + depends_on('ascent', when='+ascent') + depends_on('ascent +cuda', when='+ascent backend=cuda') + depends_on('ascent +mpi ^conduit~hdf5', when='+ascent +mpi') resource(name='amrex', git='https://github.com/AMReX-Codes/amrex.git', @@ -47,43 +60,50 @@ class Warpx(MakefilePackage): git='https://bitbucket.org/berkeleylab/picsar.git', tag='master') - @property - def build_targets(self): - if self.spec.satisfies('%clang'): - return ['CXXFLAGS={0}'.format(self.compiler.cxx11_flag)] - else: - return [] + conflicts('backend=cuda', when='backend=hip', + msg='WarpX can be compiled with either CUDA or HIP backend') + conflicts('backend=hip', msg='WarpX\' HIP backend is not yet implemented') def edit(self, spec, prefix): - comp = 'gcc' - vendors = {'%gcc': 'gcc', '%intel': 'intel'} + vendors = {'%gcc': 'gcc', '%intel': 'intel', '%clang': 'llvm'} for key, value in vendors.items(): if self.spec.satisfies(key): comp = value - def torf(s): - "Returns the string TRUE or FALSE" - return repr(s in spec).upper() + # Returns the string TRUE or FALSE + 
torf = lambda s: repr(s in spec).upper() - makefile = FileFilter('GNUmakefile') - makefile.filter('AMREX_HOME .*', 'AMREX_HOME = amrex') - makefile.filter('PICSAR_HOME .*', 'PICSAR_HOME = picsar') - makefile.filter('COMP .*', 'COMP = {0}'.format(comp)) - makefile.filter('DIM .*', - 'DIM = {0}'.format(int(spec.variants['dims'].value))) - makefile.filter('USE_PSATD .*', - 'USE_PSATD = {0}'.format(torf('+psatd'))) - makefile.filter('DO_ELECTROSTATIC .*', - 'DO_ELECTROSTATIC = %s' % torf('+do_electrostatic')) try: self.compiler.openmp_flag except UnsupportedCompilerFlag: use_omp = 'FALSE' else: - use_omp = torf('+openmp') + use_omp = torf('backend=openmp') + + makefile = FileFilter('GNUmakefile') + makefile.filter('AMREX_HOME .*', 'AMREX_HOME = amrex') + makefile.filter('PICSAR_HOME .*', 'PICSAR_HOME = picsar') + makefile.filter('COMP .*', 'COMP = {0}'.format(comp)) + makefile.filter('USE_MPI .*', + 'USE_MPI = {0}'.format(torf('+mpi'))) + if 'dims=rz' in spec: + makefile.filter('USE_RZ .*', 'USE_RZ = TRUE') + else: + makefile.filter('DIM .*', 'DIM = {0}'.format( + int(spec.variants['dims'].value))) + makefile.filter('USE_PSATD .*', + 'USE_PSATD = {0}'.format(torf('+psatd'))) makefile.filter('USE_OMP .*', 'USE_OMP = {0}'.format(use_omp)) + makefile.filter('USE_GPU .*', + 'USE_GPU = {0}'.format(torf('backend=cuda'))) + makefile.filter('USE_HIP .*', + 'USE_HIP = {0}'.format(torf('backend=hip'))) + makefile.filter('USE_OPENPMD .*', + 'USE_OPENPMD = {0}'.format(torf('+openpmd'))) + makefile.filter('USE_ASCENT_INSITU .*', + 'USE_ASCENT_INSITU = {0}'.format(torf('+ascent'))) makefile.filter('DEBUG .*', 'DEBUG = {0}'.format(torf('+debug'))) makefile.filter('TINY_PROFILE .*', diff --git a/var/spack/repos/builtin/packages/wget/package.py b/var/spack/repos/builtin/packages/wget/package.py index 122b138d106..c9ef5e9dd81 100644 --- a/var/spack/repos/builtin/packages/wget/package.py +++ b/var/spack/repos/builtin/packages/wget/package.py @@ -43,7 +43,7 @@ class Wget(AutotoolsPackage, GNUMirrorPackage): depends_on('perl@5.12.0:', type='build') depends_on('pkgconfig', type='build') - depends_on('libiconv') + depends_on('iconv') depends_on('valgrind', type='test') diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py index 05af3d1a5c9..21caf00e83d 100644 --- a/var/spack/repos/builtin/packages/xerces-c/package.py +++ b/var/spack/repos/builtin/packages/xerces-c/package.py @@ -45,7 +45,7 @@ class XercesC(AutotoolsPackage): multi=False, description='Use the specified transcoder') - depends_on('libiconv', type='link', when='transcoder=gnuiconv') + depends_on('iconv', type='link', when='transcoder=gnuiconv') depends_on('icu4c', type='link', when='transcoder=icu') # Pass flags to configure. This is necessary for CXXFLAGS or else @@ -61,7 +61,7 @@ def flag_handler(self, name, flags): # There is no --with-pkg for gnuiconv. if name == 'ldflags' and 'transcoder=gnuiconv' in spec: - flags.append(spec['libiconv'].libs.ld_flags) + flags.append(spec['iconv'].libs.ld_flags) return (None, None, flags) diff --git a/var/spack/repos/builtin/packages/xios/bld_extern_1.x.patch b/var/spack/repos/builtin/packages/xios/bld_extern_1.x.patch deleted file mode 100644 index fe52da11923..00000000000 --- a/var/spack/repos/builtin/packages/xios/bld_extern_1.x.patch +++ /dev/null @@ -1,27 +0,0 @@ ---- a/bld.cfg 2017-12-23 15:21:01.458603785 +1100 -+++ b/bld.cfg 2017-12-23 15:36:36.667684136 +1100 -@@ -24,9 +24,6 @@ - - search_src true - src::zzz . 
--src::date $PWD/extern/boost/src/date_time --src::blitz $PWD/extern/blitz/src --src::netcdf $PWD/extern/netcdf4 - src::remap $PWD/extern/remap/src - bld::lib xios - bld::target libxios.a -@@ -46,11 +43,11 @@ - bld::tool::fc %FCOMPILER - bld::tool::fpp %FPP - bld::tool::cpp %CPP --bld::tool::cppflags %CBASE_INC -I${PWD}/extern/src_netcdf -I${PWD}/extern/boost/include -I${PWD}/extern/rapidxml/include -I${PWD}/extern/blitz/include --bld::tool::fppflags %BASE_INC -I${PWD}/extern/boost/include -I${PWD}/extern/rapidxml/include -+bld::tool::cppflags %CBASE_INC -I${PWD}/extern/rapidxml/include -+bld::tool::fppflags %BASE_INC -I${PWD}/extern/rapidxml/include - bld::tool::ld %LINKER - bld::tool::ldflags %LD_FLAGS --bld::tool::cflags %CFLAGS %CBASE_INC -I${PWD}/extern/src_netcdf -I${PWD}/extern/boost/include -I${PWD}/extern/rapidxml/include -I${PWD}/extern/blitz/include -+bld::tool::cflags %CFLAGS %CBASE_INC -I${PWD}/extern/rapidxml/include - bld::tool::fflags %FFLAGS %FBASE_INC - bld::tool::cppkeys %CPP_KEY - bld::tool::fppkeys %CPP_KEY diff --git a/var/spack/repos/builtin/packages/xios/package.py b/var/spack/repos/builtin/packages/xios/package.py index 55769026cf1..54eca9fead0 100644 --- a/var/spack/repos/builtin/packages/xios/package.py +++ b/var/spack/repos/builtin/packages/xios/package.py @@ -13,9 +13,13 @@ class Xios(Package): homepage = "https://forge.ipsl.jussieu.fr/ioserver/wiki" + version('develop', svn='http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/trunk') + version('2.5', revision=1860, + svn='http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/branchs/xios-2.5') + version('2.0', revision=1627, + svn='http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/branchs/xios-2.0') version('1.0', revision=910, svn='http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/branchs/xios-1.0') - version('develop', svn='http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/trunk') variant('mode', values=('debug', 'dev', 'prod'), default='dev', description='Build for debugging, development or production') @@ -24,7 +28,6 @@ class Xios(Package): # Use spack versions of blitz and netcdf-c for compatibility # with recent compilers and optimised platform libraries: patch('bld_extern_1.0.patch', when='@:1.0') - patch('bld_extern_1.x.patch', when='@1.1:') # Workaround bug #17782 in llvm, where reading a double # followed by a character is broken (e.g. 
duration '1d'): diff --git a/var/spack/repos/builtin/packages/xrootd/package.py b/var/spack/repos/builtin/packages/xrootd/package.py index 6f0f9268c13..438e5d385b1 100644 --- a/var/spack/repos/builtin/packages/xrootd/package.py +++ b/var/spack/repos/builtin/packages/xrootd/package.py @@ -13,6 +13,7 @@ class Xrootd(CMakePackage): homepage = "http://xrootd.org" url = "http://xrootd.org/download/v4.6.0/xrootd-4.6.0.tar.gz" + version('4.11.2', sha256='4620824db97fcc37dc3dd26110da8e5c3aab1d8302e4921d4f32e83207060603') version('4.10.0', sha256='f07f85e27d72e9e8ff124173c7b53619aed8fcd36f9d6234c33f8f7fd511995b') version('4.8.5', sha256='42e4d2cc6f8b442135f09bcc12c7be38b1a0c623a005cb5e69ff3d27997bdf73') version('4.8.4', sha256='f148d55b16525567c0f893edf9bb2975f7c09f87f0599463e19e1b456a9d95ba') diff --git a/var/spack/repos/builtin/packages/xsbench/package.py b/var/spack/repos/builtin/packages/xsbench/package.py index 3648edc298a..b6040714d42 100644 --- a/var/spack/repos/builtin/packages/xsbench/package.py +++ b/var/spack/repos/builtin/packages/xsbench/package.py @@ -28,7 +28,12 @@ class Xsbench(MakefilePackage): depends_on('mpi', when='+mpi') - build_directory = 'src' + @property + def build_directory(self): + if self.spec.satisfies('@:18'): + return 'src' + else: + return 'openmp-threading' @property def build_targets(self): @@ -50,4 +55,5 @@ def build_targets(self): def install(self, spec, prefix): mkdir(prefix.bin) - install('src/XSBench', prefix.bin) + with working_dir(self.build_directory): + install('XSBench', prefix.bin) diff --git a/var/spack/repos/builtin/packages/xsdk-examples/package.py b/var/spack/repos/builtin/packages/xsdk-examples/package.py new file mode 100644 index 00000000000..418651ec72a --- /dev/null +++ b/var/spack/repos/builtin/packages/xsdk-examples/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class XsdkExamples(CMakePackage): + """xSDK Examples show usage of libraries in the xSDK package.""" + + homepage = 'http://xsdk.info' + url = 'https://github.com/xsdk-project/xsdk-examples/archive/v0.1.0.tar.gz' + + maintainers = ['acfisher', 'balay', 'balos1', 'luszczek'] + + version('0.1.0', sha256='d24cab1db7c0872b6474d69e598df9c8e25d254d09c425fb0a6a8d6469b8018f') + + depends_on('xsdk@0.5.0', when='@0.1.0') + + def cmake_args(self): + spec = self.spec + args = [ + '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, + '-DMPI_DIR=%s' % spec['mpi'].prefix, + '-DSUNDIALS_DIR=%s' % spec['sundials'].prefix, + '-DPETSC_DIR=%s' % spec['petsc'].prefix, + '-DPETSC_INCLUDE_DIR=%s' % spec['petsc'].prefix.include, + '-DPETSC_LIBRARY_DIR=%s' % spec['petsc'].prefix.lib, + '-DSUPERLUDIST_INCLUDE_DIR=%s' % + spec['superlu-dist'].prefix.include, + '-DSUPERLUDIST_LIBRARY_DIR=%s' % spec['superlu-dist'].prefix.lib, + ] + if 'trilinos' in spec: + args.extend([ + '-DTRILINOS_DIR:PATH=%s' % spec['trilinos'].prefix, + ]) + return args diff --git a/var/spack/repos/builtin/packages/xsetpointer/package.py b/var/spack/repos/builtin/packages/xsetpointer/package.py index 57775a6a192..cfd89eb061e 100644 --- a/var/spack/repos/builtin/packages/xsetpointer/package.py +++ b/var/spack/repos/builtin/packages/xsetpointer/package.py @@ -14,9 +14,9 @@ class Xsetpointer(AutotoolsPackage): version('1.0.1', sha256='54be93b20fd6f1deac67246d6e214a60b02dcfbf05295e43751f7a04edb986ac') - depends_on('libxi') - depends_on('libx11') + depends_on('libxi', type='link') + depends_on('libx11', type='link') + depends_on('inputproto@1.4:', type='link') - depends_on('inputproto@1.4:', type='build') depends_on('pkgconfig', type='build') depends_on('util-macros', type='build') diff --git a/var/spack/repos/builtin/packages/xts/package.py b/var/spack/repos/builtin/packages/xts/package.py index 19149f9ec9f..89dcc96bf76 100644 --- a/var/spack/repos/builtin/packages/xts/package.py +++ b/var/spack/repos/builtin/packages/xts/package.py @@ -15,14 +15,17 @@ class Xts(AutotoolsPackage): version('0.99.1', sha256='d04d987b9a9f8b3921dfe8de8577d0c2a0f21d2c4c3196948fc9805838a352e6') - depends_on('libx11') - depends_on('libxext') - depends_on('libxi') - depends_on('libxtst') - depends_on('libxau') - depends_on('libxt') - depends_on('libxmu') - depends_on('libxaw') + depends_on('libx11', type='link') + depends_on('libxext', type='link') + depends_on('libxi', type='link') + depends_on('libxtst', type='link') + depends_on('libxau', type='link') + depends_on('libxt', type='link') + depends_on('libxmu', type='link') + depends_on('libxaw', type='link') + depends_on('inputproto', type='link') + depends_on('recordproto', type='link') + depends_on('fixesproto', type='link') depends_on('xtrans', type='build') depends_on('bdftopcf', type='build') diff --git a/var/spack/repos/builtin/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py index 428993a816f..cbaaaa3ec6f 100644 --- a/var/spack/repos/builtin/packages/xz/package.py +++ b/var/spack/repos/builtin/packages/xz/package.py @@ -13,9 +13,10 @@ class Xz(AutotoolsPackage): to LZMA Utils.""" homepage = "http://tukaani.org/xz/" - url = "http://tukaani.org/xz/xz-5.2.4.tar.bz2" + url = "http://tukaani.org/xz/xz-5.2.5.tar.bz2" list_url = "http://tukaani.org/xz/old.html" + version('5.2.5', sha256='5117f930900b341493827d63aa910ff5e011e0b994197c3b71c08a20228a42df') version('5.2.4', 
sha256='3313fd2a95f43d88e44264e6b015e7d03053e681860b0d5d3f9baca79c57b7bf') version('5.2.3', sha256='fd9ca16de1052aac899ad3495ad20dfa906c27b4a5070102a2ec35ca3a4740c1') version('5.2.2', sha256='6ff5f57a4b9167155e35e6da8b529de69270efb2b4cf3fbabf41a4ee793840b5') diff --git a/var/spack/repos/builtin/packages/yaml-cpp/package.py b/var/spack/repos/builtin/packages/yaml-cpp/package.py index 321792bd2ef..3679b571d83 100644 --- a/var/spack/repos/builtin/packages/yaml-cpp/package.py +++ b/var/spack/repos/builtin/packages/yaml-cpp/package.py @@ -23,6 +23,8 @@ class YamlCpp(CMakePackage): variant('shared', default=True, description='Enable build of shared libraries') + variant('static', default=False, + description='Build with static libraries') variant('pic', default=True, description='Build with position independent code') variant('tests', default=False, @@ -65,6 +67,8 @@ def cmake_args(self): options.extend([ '-DBUILD_SHARED_LIBS:BOOL=%s' % ( 'ON' if '+shared' in spec else 'OFF'), + '-DBUILD_STATIC_LIBS=%s' % ( + 'ON' if '+static' in spec else 'OFF'), '-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=%s' % ( 'ON' if '+pic' in spec else 'OFF'), '-DYAML_CPP_BUILD_TESTS:BOOL=%s' % ( diff --git a/var/spack/repos/builtin/packages/yarn/package.py b/var/spack/repos/builtin/packages/yarn/package.py new file mode 100644 index 00000000000..c9a5299a10b --- /dev/null +++ b/var/spack/repos/builtin/packages/yarn/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Yarn(Package): + """Fast, reliable, and secure dependency management.""" + + homepage = "https://yarnpkg.com" + url = "https://github.com/yarnpkg/yarn/releases/download/v1.22.4/yarn-v1.22.4.tar.gz" + + version('1.22.4', sha256='bc5316aa110b2f564a71a3d6e235be55b98714660870c5b6b2d2d3f12587fb58') + version('1.22.2', sha256='de4cff575ae7151f8189bf1d747f026695d768d0563e2860df407ab79c70693d') + version('1.22.1', sha256='3af905904932078faa8f485d97c928416b30a86dd09dcd76e746a55c7f533b72') + version('1.22.0', sha256='de8871c4e2822cba80d58c2e72366fb78567ec56e873493c9ca0cca76c60f9a5') + version('1.21.1', sha256='d1d9f4a0f16f5ed484e814afeb98f39b82d4728c6c8beaafb5abc99c02db6674') + + def install(self, spec, prefix): + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/z3/package.py b/var/spack/repos/builtin/packages/z3/package.py index bd02c0c0dbd..71eafb0a3b8 100644 --- a/var/spack/repos/builtin/packages/z3/package.py +++ b/var/spack/repos/builtin/packages/z3/package.py @@ -14,6 +14,7 @@ class Z3(MakefilePackage): homepage = "https://github.com/Z3Prover/z3/wiki" url = "https://github.com/Z3Prover/z3/archive/z3-4.5.0.tar.gz" + version('4.8.7', sha256='8c1c49a1eccf5d8b952dadadba3552b0eac67482b8a29eaad62aa7343a0732c3') version('4.5.0', sha256='aeae1d239c5e06ac183be7dd853775b84698db1265cb2258e5918a28372d4a0c') version('4.4.1', sha256='50967cca12c5c6e1612d0ccf8b6ebf5f99840a783d6cf5216336a2b59c37c0ce') version('4.4.0', sha256='65b72f9eb0af50949e504b47080fb3fc95f11c435633041d9a534473f3142cba') diff --git a/var/spack/repos/builtin/packages/zfs/package.py b/var/spack/repos/builtin/packages/zfs/package.py new file mode 100644 index 00000000000..819dc2d7553 --- /dev/null +++ b/var/spack/repos/builtin/packages/zfs/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Zfs(AutotoolsPackage): + """OpenZFS is an advanced file system and volume manager which was + originally developed for Solaris and is now maintained by the OpenZFS + community. This repository contains the code for running OpenZFS on + Linux and FreeBSD.""" + + homepage = "https://zfsonlinux.org/" + url = "https://github.com/openzfs/zfs/releases/download/zfs-0.8.3/zfs-0.8.3.tar.gz" + + version('0.8.3', sha256='545a4897ce30c2d2dd9010a0fdb600a0d3d45805e2387093c473efc03aa9d7fd') + version('0.8.2', sha256='47608e257c8ecebb918014ef1da6172c3a45d990885891af18e80f5cc28beab8') + version('0.8.1', sha256='0af79fde44b7b8ecb94d5166ce2e4fff7409c20ed874c2d759db92909e6c2799') + version('0.8.0', sha256='0fd92e87f4b9df9686f18e2ac707c16b2eeaf00f682d41c20ea519f3a0fe4705') + + depends_on('libuuid') + depends_on('libtirpc') + depends_on('util-linux') + + def setup_build_environment(self, env): + env.prepend_path('CPATH', self.spec['util-linux'].prefix.include)