Merge branch 'develop' into features/shared

This commit is contained in:
Carson Woods
2020-07-15 16:24:52 -04:00
211 changed files with 4672 additions and 439 deletions

View File

@@ -3,13 +3,12 @@ name: linux builds
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
- releases/**
paths-ignore:
# Don't run if we only modified packages in the built-in repository
- 'var/spack/repos/builtin/**'

View File

@@ -60,3 +60,83 @@ jobs:
uses: codecov/codecov-action@v1
with:
flags: unittests,linux
flake8:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git fetch -u origin develop:develop
- name: Run flake8 tests
run: |
share/spack/qa/run-flake8-tests
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get install -y coreutils gfortran gnupg2 mercurial ninja-build patchelf zsh fish
# Needed for kcov
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools codecov coverage
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git fetch -u origin develop:develop
- name: Install kcov for bash script coverage
env:
KCOV_VERSION: 38
run: |
KCOV_ROOT=$(mktemp -d)
wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
mkdir -p ${KCOV_ROOT}/build
cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
- name: Run shell tests
env:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- name: Upload to codecov.io
uses: codecov/codecov-action@v1
with:
flags: shelltests,linux
documentation:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get install -y coreutils ninja-build graphviz
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools
pip install --upgrade -r lib/spack/docs/requirements.txt
- name: Build documentation
run: |
share/spack/qa/run-doc-tests

View File

@@ -32,7 +32,7 @@ jobs:
- name: spack install
run: |
. .github/workflows/install_spack.sh
spack install -v py-jupyter %clang
spack install -v py-jupyter %apple-clang
install_scipy_clang:
name: scipy, mpl, pd
@@ -42,9 +42,9 @@ jobs:
- name: spack install
run: |
. .github/workflows/install_spack.sh
spack install -v py-scipy %clang
spack install -v py-matplotlib %clang
spack install -v py-pandas %clang
spack install -v py-scipy %apple-clang
spack install -v py-matplotlib %apple-clang
spack install -v py-pandas %apple-clang
install_mpi4py_clang:
name: mpi4py, petsc4py
@@ -54,5 +54,5 @@ jobs:
- name: spack install
run: |
. .github/workflows/install_spack.sh
spack install -v py-mpi4py %clang
spack install -v py-petsc4py %clang
spack install -v py-mpi4py %apple-clang
spack install -v py-petsc4py %apple-clang

View File

@@ -3,13 +3,12 @@ name: macos tests
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
- releases/**
jobs:
build:

View File

@@ -3,13 +3,12 @@ name: python version check
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
- releases/**
jobs:
validate:

View File

@@ -1,101 +1,32 @@
#=============================================================================
# Project settings
#=============================================================================
# Only build master and develop on push; do not build every branch.
# Only build releases and develop on push; do not build every branch.
branches:
only:
- master
- develop
- /^releases\/.*$/
#=============================================================================
# Build matrix
#=============================================================================
dist: bionic
jobs:
fast_finish: true
include:
- stage: 'style checks'
python: '3.8'
os: linux
language: python
env: TEST_SUITE=flake8
- stage: 'unit tests + documentation'
python: '2.6'
dist: trusty
os: linux
language: python
addons:
apt:
# Everything but patchelf, that is not available for trusty
packages:
- ccache
- gfortran
- graphviz
- gnupg2
- kcov
- mercurial
- ninja-build
- realpath
- zsh
- fish
env: [ TEST_SUITE=unit, COVERAGE=true ]
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=shell, COVERAGE=true, KCOV_VERSION=38 ]
- python: '3.8'
os: linux
language: python
env: TEST_SUITE=doc
stages:
- 'style checks'
- 'unit tests + documentation'
#=============================================================================
# Environment
#=============================================================================
# Docs need graphviz to build
language: python
python: '2.6'
dist: trusty
os: linux
addons:
# for Linux builds, we use APT
apt:
packages:
- ccache
- coreutils
- gfortran
- graphviz
- gnupg2
- kcov
- mercurial
- ninja-build
- patchelf
- realpath
- zsh
- fish
update: true
# ~/.ccache needs to be cached directly as Travis is not taking care of it
# (possibly because we use 'language: python' and not 'language: c')
cache:
pip: true
ccache: true
directories:
- ~/.ccache
before_install:
- ccache -M 2G && ccache -z
# Install kcov manually, since it's not packaged for bionic beaver
- if [[ "$KCOV_VERSION" ]]; then
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev;
KCOV_ROOT=$(mktemp -d);
wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz;
tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz;
mkdir -p ${KCOV_ROOT}/build;
cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - ;
make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install;
- if [[ "$TRAVIS_DIST" == "trusty" ]]; then
share/spack/qa/install_patchelf.sh;
else
sudo apt-get update;
sudo apt-get -y install patchelf;
fi
# Install various dependencies
@@ -103,12 +34,8 @@ install:
- pip install --upgrade pip
- pip install --upgrade six
- pip install --upgrade setuptools
- pip install --upgrade codecov coverage==4.5.4
- pip install --upgrade flake8
- pip install --upgrade pep8-naming
- if [[ "$TEST_SUITE" == "doc" ]]; then
pip install --upgrade -r lib/spack/docs/requirements.txt;
fi
before_script:
# Need this for the git tests to succeed.
@@ -118,31 +45,12 @@ before_script:
# Need this to be able to compute the list of changed files
- git fetch origin ${TRAVIS_BRANCH}:${TRAVIS_BRANCH}
#=============================================================================
# Building
#=============================================================================
script:
- share/spack/qa/run-$TEST_SUITE-tests
- python bin/spack -h
- python bin/spack help -a
- python bin/spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170
- python bin/spack test -x --verbose
after_success:
- ccache -s
- case "$TEST_SUITE" in
unit)
if [[ "$COVERAGE" == "true" ]]; then
codecov --env PYTHON_VERSION
--required
--flags "${TEST_SUITE}${TRAVIS_OS_NAME}";
fi
;;
shell)
codecov --env PYTHON_VERSION
--required
--flags "${TEST_SUITE}${TRAVIS_OS_NAME}";
esac
#=============================================================================
# Notifications
#=============================================================================
notifications:
email:
recipients:

View File

@@ -78,11 +78,29 @@ these guidelines with [Travis CI](https://travis-ci.org/spack/spack). To
run these tests locally, and for helpful tips on git, see our
[Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html).
Spack uses a rough approximation of the
[Git Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the latest
stable release.
Spack's `develop` branch has the latest contributions. Pull requests
should target `develop`, and users who want the latest package versions,
features, etc. can use `develop`.
Releases
--------
For multi-user site deployments or other use cases that need very stable
software installations, we recommend using Spack's
[stable releases](https://github.com/spack/spack/releases).
Each Spack release series also has a corresponding branch, e.g.
`releases/v0.14` has `0.14.x` versions of Spack, and `releases/v0.13` has
`0.13.x` versions. We backport important bug fixes to these branches but
we do not advance the package versions or make other changes that would
change the way Spack concretizes dependencies within a release branch.
So, you can base your Spack deployment on a release branch and `git pull`
to get fixes, without the package churn that comes with `develop`.
The latest release is always available with the `releases/latest` tag.
See the [docs on releases](https://spack.readthedocs.io/en/latest/developer_guide.html#releases)
for more details.
Code of Conduct
------------------------

View File

@@ -23,8 +23,12 @@ packages:
daal: [intel-daal]
elf: [elfutils]
fftw-api: [fftw]
gl: [mesa+opengl, opengl]
glx: [mesa+glx, opengl]
gl: [libglvnd-fe, mesa+opengl~glvnd, opengl~glvnd]
glx: [libglvnd-fe+glx, mesa+glx~glvnd, opengl+glx~glvnd]
egl: [libglvnd-fe+egl, opengl+egl~glvnd]
libglvnd-be-gl: [mesa+glvnd, opengl+glvnd]
libglvnd-be-glx: [mesa+glx+glvnd, opengl+glx+glvnd]
libglvnd-be-egl: [opengl+egl+glvnd]
glu: [mesa-glu, openglu]
golang: [gcc]
iconv: [libiconv]

View File

@@ -45,7 +45,7 @@ Environments:
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
# Install the software, remove unnecessary deps
RUN cd /opt/spack-environment && spack install && spack gc -y
RUN cd /opt/spack-environment && spack env activate . && spack install && spack gc -y
# Strip all the binaries
RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \
@@ -267,7 +267,7 @@ following ``Dockerfile``:
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
# Install the software, remove unnecessary deps
RUN cd /opt/spack-environment && spack install && spack gc -y
RUN cd /opt/spack-environment && spack env activate . && spack install && spack gc -y
# Strip all the binaries
RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \

View File

@@ -27,11 +27,22 @@ correspond to one feature/bugfix/extension/etc. One can create PRs with
changes relevant to different ideas, however reviewing such PRs becomes tedious
and error prone. If possible, try to follow the **one-PR-one-package/feature** rule.
Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-successful-git-branching-model/>`_
branching model. The develop branch contains the latest contributions, and
master is always tagged and points to the latest stable release. Therefore, when
you send your request, make ``develop`` the destination branch on the
`Spack repository <https://github.com/spack/spack>`_.
--------
Branches
--------
Spack's ``develop`` branch has the latest contributions. Nearly all pull
requests should start from ``develop`` and target ``develop``.
There is a branch for each major release series. Release branches
originate from ``develop`` and have tags for each point release in the
series. For example, ``releases/v0.14`` has tags for ``0.14.0``,
``0.14.1``, ``0.14.2``, etc. versions of Spack. We backport important bug
fixes to these branches, but we do not advance the package versions or
make other changes that would change the way Spack concretizes
dependencies. Currently, the maintainers manage these branches by
cherry-picking from ``develop``. See :ref:`releases` for more
information.
----------------------
Continuous Integration

View File

@@ -495,3 +495,393 @@ The bottom of the output shows the top most time consuming functions,
slowest on top. The profiling support is from Python's built-in tool,
`cProfile
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
.. _releases:
--------
Releases
--------
This section documents Spack's release process. It is intended for
project maintainers, as the tasks described here require maintainer
privileges on the Spack repository. For others, we hope this section at
least provides some insight into how the Spack project works.
.. _release-branches:
^^^^^^^^^^^^^^^^
Release branches
^^^^^^^^^^^^^^^^
There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
diagram of how Spack release branches work::
o branch: develop (latest version)
|
o merge v0.14.1 into develop
|\
| o branch: releases/v0.14, tag: v0.14.1
o | merge v0.14.0 into develop
|\|
| o tag: v0.14.0
|/
o merge v0.13.2 into develop
|\
| o branch: releases/v0.13, tag: v0.13.2
o | merge v0.13.1 into develop
|\|
| o tag: v0.13.1
o | merge v0.13.0 into develop
|\|
| o tag: v0.13.0
o |
| o
|/
o
The ``develop`` branch has the latest contributions, and nearly all pull
requests target ``develop``.
Each Spack release series also has a corresponding branch, e.g.
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
To avoid version churn for users of a release series, minor releases
should **not** make changes that would change the concretization of
packages. They should generally only contain fixes to the Spack core.
Both major and minor releases are tagged. After each release, we merge
the release branch back into ``develop`` so that the version bump and any
other release-specific changes are visible in the mainline. As a
convenience, we also tag the latest release as ``releases/latest``,
so that users can easily check it out to get the latest
stable version. See :ref:`merging-releases` for more details.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We schedule work for releases by creating `GitHub projects
<https://github.com/spack/spack/projects>`_. At any time, there may be
several open release projects. For example, here are two releases (from
some past version of the page linked above):
.. image:: images/projects.png
Here, there's one release in progress for ``0.15.1`` and another for
``0.16.0``. Each of these releases has a project board containing issues
and pull requests. GitHub shows a status bar with completed work in
green, work in progress in purple, and work not started yet in gray, so
it's fairly easy to see progress.
Spack's project boards are not firm commitments, and we move work between
releases frequently. If we need to make a release and some tasks are not
yet done, we will simply move them to the next minor or major release, rather
than delaying the release to complete them.
For more on using GitHub project boards, see `GitHub's documentation
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
.. _major-releases:
^^^^^^^^^^^^^^^^^^^^^
Making Major Releases
^^^^^^^^^^^^^^^^^^^^^
Assuming you've already created a project board and completed the work
for a major release, the steps to make the release are as follows:
#. Create two new project boards:
* One for the next major release
* One for the next point release
#. Move any tasks that aren't done yet to one of the new project boards.
Small bugfixes should go to the next point release. Major features,
refactors, and changes that could affect concretization should go in
the next major release.
#. Create a branch for the release, based on ``develop``:
.. code-block:: console
$ git checkout -b releases/v0.15 develop
For a version ``vX.Y.Z``, the branch's name should be
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
branch if you are preparing the ``X.Y.0`` release.
#. Bump the version in ``lib/spack/spack/__init__.py``. See `this example from 0.13.0
<https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_
#. Update the release version lists in these files to include the new version:
* ``lib/spack/spack/schema/container.py``
* ``lib/spack/spack/container/images.json``
**TODO**: We should get rid of this step in some future release.
#. Update ``CHANGELOG.md`` with major highlights in bullet form. Use
proper markdown formatting, like `this example from 0.15.0
<https://github.com/spack/spack/commit/d4bf70d9882fcfe88507e9cb444331d7dd7ba71c>`_.
#. Push the release branch to GitHub.
#. Make sure CI passes on the release branch, including:
* Regular unit tests
* Build tests
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
If CI is not passing, submit pull requests to ``develop`` as normal
and keep rebasing the release branch on ``develop`` until CI passes.
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`merging-releases`.
#. Follow the steps in :ref:`announcing-releases`.
.. _point-releases:
^^^^^^^^^^^^^^^^^^^^^
Making Point Releases
^^^^^^^^^^^^^^^^^^^^^
This assumes you've already created a project board for a point release
and completed the work to be done for the release. To make a point
release:
#. Create one new project board for the next point release.
#. Move any cards that aren't done yet to the next project board.
#. Check out the release branch (it should already exist). For the
``X.Y.Z`` release, the release branch is called ``releases/vX.Y``. For
``v0.15.1``, you would check out ``releases/v0.15``:
.. code-block:: console
$ git checkout releases/v0.15
#. Cherry-pick each pull request in the ``Done`` column of the release
project onto the release branch.
This is **usually** fairly simple since we squash the commits from the
vast majority of pull requests, which means there is only one commit
per pull request to cherry-pick. For example, `this pull request
<https://github.com/spack/spack/pull/15777>`_ has three commits, but
they were squashed into a single commit on merge. You can see the
commit that was created here:
.. image:: images/pr-commit.png
You can easily cherry pick it like this (assuming you already have the
release branch checked out):
.. code-block:: console
$ git cherry-pick 7e46da7
For pull requests that were rebased, you'll need to cherry-pick each
rebased commit individually. There have not been any rebased PRs like
this in recent point releases.
.. warning::
It is important to cherry-pick commits in the order they happened,
otherwise you can get conflicts while cherry-picking. When
cherry-picking onto a point release, look at the merge date,
**not** the number of the pull request or the date it was opened.
Sometimes you may **still** get merge conflicts even if you have
cherry-picked all the commits in order. This generally means there
is some other intervening pull request that the one you're trying
to pick depends on. In these cases, you'll need to make a judgment
call:
1. If the dependency is small, you might just cherry-pick it, too.
If you do this, add it to the release board.
2. If it is large, then you may decide that this fix is not worth
including in a point release, in which case you should remove it
from the release project.
3. You can always decide to manually back-port the fix to the release
branch if neither of the above options makes sense, but this can
require a lot of work. It's seldom the right choice.
#. Bump the version in ``lib/spack/spack/__init__.py``. See `this example from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
#. Update the release version lists in these files to include the new version:
* ``lib/spack/spack/schema/container.py``
* ``lib/spack/spack/container/images.json``
**TODO**: We should get rid of this step in some future release.
#. Update ``CHANGELOG.md`` with a list of bugfixes. This is typically just a
summary of the commits you cherry-picked onto the release branch. See
`the changelog from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
#. Push the release branch to GitHub.
#. Make sure CI passes on the release branch, including:
* Regular unit tests
* Build tests
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
If CI does not pass, you'll need to figure out why, and make changes
to the release branch until it does. You can make more commits, modify
or remove cherry-picked commits, or cherry-pick **more** from
``develop`` to make this happen.
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`merging-releases`.
#. Follow the steps in :ref:`announcing-releases`.
.. _publishing-releases:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Publishing a release on GitHub
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#. Go to `github.com/spack/spack/releases
<https://github.com/spack/spack/releases>`_ and click ``Draft a new
release``. Set the following:
* ``Tag version`` should start with ``v`` and contain *all three*
parts of the version, e.g. ``v0.15.1``. This is the name of the tag
that will be created.
* ``Target`` should be the ``releases/vX.Y`` branch (e.g., ``releases/v0.15``).
* ``Release title`` should be ``vX.Y.Z`` (To match the tag, e.g., ``v0.15.1``).
* For the text, paste the latest release markdown from your ``CHANGELOG.md``.
You can save the draft and keep coming back to this as you prepare the release.
#. When you are done, click ``Publish release``.
#. Immediately after publishing, go back to
`github.com/spack/spack/releases
<https://github.com/spack/spack/releases>`_ and download the
auto-generated ``.tar.gz`` file for the release. It's the ``Source
code (tar.gz)`` link.
#. Click ``Edit`` on the release you just did and attach the downloaded
release tarball as a binary. This does two things:
#. Makes sure that the hash of our releases doesn't change over time.
GitHub sometimes annoyingly changes the way they generate
tarballs, and then hashes can change if you rely on the
auto-generated tarball links.
#. Gets us download counts on releases visible through the GitHub
API. GitHub tracks downloads of artifacts, but *not* the source
links. See the `releases
page <https://api.github.com/repos/spack/spack/releases>`_ and search
for ``download_count`` to see this.
.. _merging-releases:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating `releases/latest` and `develop`
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.15.3``:
* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
it with ``releases/latest``, as these are higher than ``0.15.3``.
* If you are making a new release of an **older** major version of
Spack, e.g. ``0.14.4``, then you should not tag it as
``releases/latest`` (as there are newer major versions).
To tag ``releases/latest``, do this:
.. code-block:: console
$ git checkout releases/vX.Y # vX.Y is the new release's branch
$ git tag --force releases/latest
$ git push --tags
The ``--force`` argument makes ``git`` overwrite the existing
``releases/latest`` tag with the new one.
We also merge each release that we tag as ``releases/latest`` into ``develop``.
Make sure to do this with a merge commit:
.. code-block:: console
$ git checkout develop
$ git merge --no-ff vX.Y.Z # vX.Y.Z is the new release's tag
$ git push
We merge back to ``develop`` because it:
* updates the version and ``CHANGELOG.md`` on ``develop``.
* ensures that your release tag is reachable from the head of
``develop``
We *must* use a real merge commit (via the ``--no-ff`` option) because it
ensures that the release tag is reachable from the tip of ``develop``.
This is necessary for ``spack -V`` to work properly -- it uses ``git
describe --tags`` to find the last reachable tag in the repository and
reports how far we are from it. For example:
.. code-block:: console
$ spack -V
0.14.2-1486-b80d5e74e5
This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
ahead of the ``0.14.2`` release.
We put this step last in the process because it's best to do it only once
the release is complete and tagged. If you do it before you've tagged the
release and later decide you want to tag some later commit, you'll need
to merge again.
.. _announcing-releases:
^^^^^^^^^^^^^^^^^^^^
Announcing a release
^^^^^^^^^^^^^^^^^^^^
We announce releases in all of the major Spack communication channels.
Publishing the release takes care of GitHub. The remaining channels are
Twitter, Slack, and the mailing list. Here are the steps:
#. Make a tweet to announce the release. It should link to the release's
page on GitHub. You can base it on `this example tweet
<https://twitter.com/spackpm/status/1231761858182307840>`_.
#. Ping ``@channel`` in ``#general`` on Slack (`spackpm.slack.com
<https://spackpm.slack.com>`_) with a link to the tweet. The tweet
will be shown inline so that you do not have to retype your release
announcement.
#. Email the Spack mailing list to let them know about the release. As
with the tweet, you likely want to link to the release's page on
GitHub. It's also helpful to include some information directly in the
email. You can base yours on this `example email
<https://groups.google.com/forum/#!topic/spack/WT4CT9i_X4s>`_.
Once you've announced the release, congratulations, you're done! You've
finished making the release!

View File

@@ -811,6 +811,100 @@ to add the following to ``packages.yaml``:
present in PATH, however it will have lower precedence compared to paths
from other dependencies. This ensures that binaries in Spack dependencies
are preferred over system binaries.
^^^^^^
OpenGL
^^^^^^
To use hardware-accelerated rendering from a system-supplied OpenGL driver,
add something like the following to your ``packages`` configuration:
.. code-block:: yaml
packages:
opengl:
paths:
opengl+glx@4.5: /usr
buildable: False
all:
providers:
gl: [opengl]
glx: [opengl]
For `EGL <https://www.khronos.org/egl>`_ support, or for certain modern drivers,
OpenGL calls are dispatched dynamically at run time to the hardware graphics
implementation. This dynamic dispatch is performed using `libglvnd
<https://github.com/NVIDIA/libglvnd>`_. In this mode, the graphics library
(e.g.: opengl) must be built to work with libglvnd. Applications then link
against libglvnd instead of the underlying implementation. Environment
variables set at run time govern the process by which libglvnd loads the
underlying implementation and dispatches calls to it. See `this
<https://github.com/NVIDIA/libglvnd/issues/177#issuecomment-496562769>`_ comment
for details on loading a specific GLX implementation and `this
<https://github.com/NVIDIA/libglvnd/blob/master/src/EGL/icd_enumeration.md>`_
page for information about EGL ICD enumeration.
This codependency between libglvnd and the underlying implementation is modeled
in Spack with two packages for libglvnd: libglvnd, which provides libglvnd
proper; and libglvnd-fe, a bundle package that depends on libglvnd and an
implementation. Implementations that work through libglvnd are no longer
providers for graphics virtual dependencies, like "gl" or "glx", but instead
provide libglvnd versions of these dependencies ("libglvnd-be-gl",
"libglvnd-be-glx", etc.). The libglvnd-fe package depends on these
"libglvnd-be-..." virtual packages, which provide the actual implementation.
It also depends on libglvnd, itself, and exposes its libraries to downstream
applications. For correct operation, the Spack package for the underlying
implementation has to set the runtime environment to ensure that it is loaded
when an application linked against libglvnd runs. This last detail is
important for users who want to set up an external OpenGL implementation that
requires libglvnd to work. This setup requires modifying the ``modules``
configuration so that modules generated for the external OpenGL implementation
set the necessary environment variables.
.. code-block:: yaml
packages:
opengl:
paths:
opengl@4.5+glx+egl+glvnd: /does/not/exist
buildable: False
variants: +glx+egl+glvnd
libglvnd-fe:
variants: +gl+glx+egl
all:
providers:
glvnd-be-gl: [opengl]
glvnd-be-glx: [opengl]
glvnd-be-egl: [opengl]
gl: [libglvnd-fe]
glx: [libglvnd-fe]
egl: [libglvnd-fe]
.. code-block:: yaml
modules:
tcl:
opengl@4.5+glx+glvnd:
environment:
set:
__GLX_VENDOR_LIBRARY_NAME: nvidia
opengl@4.5+egl+glvnd:
environment:
set:
__EGL_VENDOR_LIBRARY_FILENAMES: /usr/share/glvnd/egl_vendor.d/10_nvidia.json
One final detail about the above example is that it avoids setting the true
root of the external OpenGL implementation, instead opting to set it to a path
that is not expected to exist on the system. This is done for two reasons.
First, Spack would add directories under this root to environment variables
that would affect the process of building and installing other packages, such
as ``PATH`` and ``PKG_CONFIG_PATH``. These additions may potentially prevent
those packages from installing successfully, and this risk is especially great
for paths that house many libraries and applications, like ``/usr``. Second,
providing the true root of the external implementation in the ``packages``
configuration is not necessary because libglvnd needs only the environment
variables set above in the ``modules`` configuration to determine what OpenGL
implementation to dispatch calls to at run time.
^^^
Git
@@ -818,7 +912,7 @@ Git
Some Spack packages use ``git`` to download, which might not work on
some computers. For example, the following error was
encountered on a Macintosh during ``spack install julia-master``:
encountered on a Macintosh during ``spack install julia@master``:
.. code-block:: console

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 KiB

View File

@@ -82,9 +82,9 @@ or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), thou
topics are outside the scope of this document.
Spack's pipelines are now making use of the
`trigger <https://docs.gitlab.com/12.9/ee/ci/yaml/README.html#trigger>` syntax to run
`trigger <https://docs.gitlab.com/12.9/ee/ci/yaml/README.html#trigger>`_ syntax to run
dynamically generated
`child pipelines <https://docs.gitlab.com/12.9/ee/ci/parent_child_pipelines.html>`.
`child pipelines <https://docs.gitlab.com/12.9/ee/ci/parent_child_pipelines.html>`_.
Note that the use of dynamic child pipelines requires running Gitlab version
``>= 12.9``.

View File

@@ -1405,11 +1405,12 @@ The main points that are implemented below:
- export CXXFLAGS="-std=c++11"
install:
- if ! which spack >/dev/null; then
- |
if ! which spack >/dev/null; then
mkdir -p $SPACK_ROOT &&
git clone --depth 50 https://github.com/spack/spack.git $SPACK_ROOT &&
echo -e "config:""\n build_jobs:"" 2" > $SPACK_ROOT/etc/spack/config.yaml **
echo -e "packages:""\n all:""\n target:"" ['x86_64']"
printf "config:\n build_jobs: 2\n" > $SPACK_ROOT/etc/spack/config.yaml &&
printf "packages:\n all:\n target: ['x86_64']\n" \
> $SPACK_ROOT/etc/spack/packages.yaml;
fi
- travis_wait spack install cmake@3.7.2~openssl~ncurses

View File

@@ -31,17 +31,17 @@
class ProcessController(object):
"""Wrapper around some fundamental process control operations.
This allows one process to drive another similar to the way a shell
would, by sending signals and I/O.
This allows one process (the controller) to drive another (the
minion) similar to the way a shell would, by sending signals and I/O.
"""
def __init__(self, pid, master_fd,
def __init__(self, pid, controller_fd,
timeout=1, sleep_time=1e-1, debug=False):
"""Create a controller to manipulate the process with id ``pid``
Args:
pid (int): id of process to control
master_fd (int): master file descriptor attached to pid's stdin
controller_fd (int): controller fd attached to pid's stdin
timeout (int): time in seconds for wait operations to time out
(default 1 second)
sleep_time (int): time to sleep after signals, to control the
@@ -58,7 +58,7 @@ def __init__(self, pid, master_fd,
"""
self.pid = pid
self.pgid = os.getpgid(pid)
self.master_fd = master_fd
self.controller_fd = controller_fd
self.timeout = timeout
self.sleep_time = sleep_time
self.debug = debug
@@ -67,8 +67,8 @@ def __init__(self, pid, master_fd,
self.ps = which("ps", required=True)
def get_canon_echo_attrs(self):
"""Get echo and canon attributes of the terminal of master_fd."""
cfg = termios.tcgetattr(self.master_fd)
"""Get echo and canon attributes of the terminal of controller_fd."""
cfg = termios.tcgetattr(self.controller_fd)
return (
bool(cfg[3] & termios.ICANON),
bool(cfg[3] & termios.ECHO),
@@ -82,7 +82,7 @@ def horizontal_line(self, name):
)
def status(self):
"""Print debug message with status info for the child."""
"""Print debug message with status info for the minion."""
if self.debug:
canon, echo = self.get_canon_echo_attrs()
sys.stderr.write("canon: %s, echo: %s\n" % (
@@ -94,12 +94,12 @@ def status(self):
sys.stderr.write("\n")
def input_on(self):
"""True if keyboard input is enabled on the master_fd pty."""
"""True if keyboard input is enabled on the controller_fd pty."""
return self.get_canon_echo_attrs() == (False, False)
def background(self):
"""True if pgid is in a background pgroup of master_fd's terminal."""
return self.pgid != os.tcgetpgrp(self.master_fd)
"""True if pgid is in a background pgroup of controller_fd's tty."""
return self.pgid != os.tcgetpgrp(self.controller_fd)
def tstp(self):
"""Send SIGTSTP to the controlled process."""
@@ -115,18 +115,18 @@ def cont(self):
def fg(self):
self.horizontal_line("fg")
with log.ignore_signal(signal.SIGTTOU):
os.tcsetpgrp(self.master_fd, os.getpgid(self.pid))
os.tcsetpgrp(self.controller_fd, os.getpgid(self.pid))
time.sleep(self.sleep_time)
def bg(self):
self.horizontal_line("bg")
with log.ignore_signal(signal.SIGTTOU):
os.tcsetpgrp(self.master_fd, os.getpgrp())
os.tcsetpgrp(self.controller_fd, os.getpgrp())
time.sleep(self.sleep_time)
def write(self, byte_string):
self.horizontal_line("write '%s'" % byte_string.decode("utf-8"))
os.write(self.master_fd, byte_string)
os.write(self.controller_fd, byte_string)
def wait(self, condition):
start = time.time()
@@ -156,50 +156,51 @@ def wait_running(self):
class PseudoShell(object):
"""Sets up master and child processes with a PTY.
"""Sets up controller and minion processes with a PTY.
You can create a ``PseudoShell`` if you want to test how some
function responds to terminal input. This is a pseudo-shell from a
job control perspective; ``master_function`` and ``child_function``
are set up with a pseudoterminal (pty) so that the master can drive
the child through process control signals and I/O.
job control perspective; ``controller_function`` and ``minion_function``
are set up with a pseudoterminal (pty) so that the controller can drive
the minion through process control signals and I/O.
The two functions should have signatures like this::
def master_function(proc, ctl, **kwargs)
def child_function(**kwargs)
def controller_function(proc, ctl, **kwargs)
def minion_function(**kwargs)
``master_function`` is spawned in its own process and passed three
``controller_function`` is spawned in its own process and passed three
arguments:
proc
the ``multiprocessing.Process`` object representing the child
the ``multiprocessing.Process`` object representing the minion
ctl
a ``ProcessController`` object tied to the child
a ``ProcessController`` object tied to the minion
kwargs
keyword arguments passed from ``PseudoShell.start()``.
``child_function`` is only passed ``kwargs`` delegated from
``minion_function`` is only passed ``kwargs`` delegated from
``PseudoShell.start()``.
The ``ctl.master_fd`` will have its ``master_fd`` connected to
``sys.stdin`` in the child process. Both processes will share the
The ``ctl.controller_fd`` will have its ``controller_fd`` connected to
``sys.stdin`` in the minion process. Both processes will share the
same ``sys.stdout`` and ``sys.stderr`` as the process instantiating
``PseudoShell``.
Here are the relationships between processes created::
._________________________________________________________.
| Child Process | pid 2
| - runs child_function | pgroup 2
| Minion Process | pid 2
| - runs minion_function | pgroup 2
|_________________________________________________________| session 1
^
| create process with master_fd connected to stdin
| create process with controller_fd connected to stdin
| stdout, stderr are the same as caller
._________________________________________________________.
| Master Process | pid 1
| - runs master_function | pgroup 1
| - uses ProcessController and master_fd to control child | session 1
| Controller Process | pid 1
| - runs controller_function | pgroup 1
| - uses ProcessController and controller_fd to | session 1
| control minion |
|_________________________________________________________|
^
| create process
@@ -207,51 +208,51 @@ def child_function(**kwargs)
._________________________________________________________.
| Caller | pid 0
| - Constructs, starts, joins PseudoShell | pgroup 0
| - provides master_function, child_function | session 0
| - provides controller_function, minion_function | session 0
|_________________________________________________________|
"""
def __init__(self, master_function, child_function):
def __init__(self, controller_function, minion_function):
self.proc = None
self.master_function = master_function
self.child_function = child_function
self.controller_function = controller_function
self.minion_function = minion_function
# these can be optionally set to change defaults
self.controller_timeout = 1
self.sleep_time = 0
def start(self, **kwargs):
"""Start the master and child processes.
"""Start the controller and minion processes.
Arguments:
kwargs (dict): arbitrary keyword arguments that will be
passed to master and child functions
passed to controller and minion functions
The master process will create the child, then call
``master_function``. The child process will call
``child_function``.
The controller process will create the minion, then call
``controller_function``. The minion process will call
``minion_function``.
"""
self.proc = multiprocessing.Process(
target=PseudoShell._set_up_and_run_master_function,
args=(self.master_function, self.child_function,
target=PseudoShell._set_up_and_run_controller_function,
args=(self.controller_function, self.minion_function,
self.controller_timeout, self.sleep_time),
kwargs=kwargs,
)
self.proc.start()
def join(self):
"""Wait for the child process to finish, and return its exit code."""
"""Wait for the minion process to finish, and return its exit code."""
self.proc.join()
return self.proc.exitcode
@staticmethod
def _set_up_and_run_child_function(
tty_name, stdout_fd, stderr_fd, ready, child_function, **kwargs):
"""Child process wrapper for PseudoShell.
def _set_up_and_run_minion_function(
tty_name, stdout_fd, stderr_fd, ready, minion_function, **kwargs):
"""Minion process wrapper for PseudoShell.
Handles the mechanics of setting up a PTY, then calls
``child_function``.
``minion_function``.
"""
# new process group, like a command or pipeline launched by a shell
@@ -266,45 +267,45 @@ def _set_up_and_run_child_function(
if kwargs.get("debug"):
sys.stderr.write(
"child: stdin.isatty(): %s\n" % sys.stdin.isatty())
"minion: stdin.isatty(): %s\n" % sys.stdin.isatty())
# tell the parent that we're really running
if kwargs.get("debug"):
sys.stderr.write("child: ready!\n")
sys.stderr.write("minion: ready!\n")
ready.value = True
try:
child_function(**kwargs)
minion_function(**kwargs)
except BaseException:
traceback.print_exc()
@staticmethod
def _set_up_and_run_master_function(
master_function, child_function, controller_timeout, sleep_time,
**kwargs):
"""Set up a pty, spawn a child process, and execute master_function.
def _set_up_and_run_controller_function(
controller_function, minion_function, controller_timeout,
sleep_time, **kwargs):
"""Set up a pty, spawn a minion process, execute controller_function.
Handles the mechanics of setting up a PTY, then calls
``master_function``.
``controller_function``.
"""
os.setsid() # new session; this process is the controller
master_fd, child_fd = os.openpty()
pty_name = os.ttyname(child_fd)
controller_fd, minion_fd = os.openpty()
pty_name = os.ttyname(minion_fd)
# take controlling terminal
pty_fd = os.open(pty_name, os.O_RDWR)
os.close(pty_fd)
ready = multiprocessing.Value('i', False)
child_process = multiprocessing.Process(
target=PseudoShell._set_up_and_run_child_function,
minion_process = multiprocessing.Process(
target=PseudoShell._set_up_and_run_minion_function,
args=(pty_name, sys.stdout.fileno(), sys.stderr.fileno(),
ready, child_function),
ready, minion_function),
kwargs=kwargs,
)
child_process.start()
minion_process.start()
# wait for subprocess to be running and connected.
while not ready.value:
@@ -315,30 +316,31 @@ def _set_up_and_run_master_function(
sys.stderr.write("pid: %d\n" % os.getpid())
sys.stderr.write("pgid: %d\n" % os.getpgrp())
sys.stderr.write("sid: %d\n" % os.getsid(0))
sys.stderr.write("tcgetpgrp: %d\n" % os.tcgetpgrp(master_fd))
sys.stderr.write("tcgetpgrp: %d\n" % os.tcgetpgrp(controller_fd))
sys.stderr.write("\n")
child_pgid = os.getpgid(child_process.pid)
sys.stderr.write("child pid: %d\n" % child_process.pid)
sys.stderr.write("child pgid: %d\n" % child_pgid)
sys.stderr.write("child sid: %d\n" % os.getsid(child_process.pid))
minion_pgid = os.getpgid(minion_process.pid)
sys.stderr.write("minion pid: %d\n" % minion_process.pid)
sys.stderr.write("minion pgid: %d\n" % minion_pgid)
sys.stderr.write(
"minion sid: %d\n" % os.getsid(minion_process.pid))
sys.stderr.write("\n")
sys.stderr.flush()
# set up master to ignore SIGTSTP, like a shell
# set up controller to ignore SIGTSTP, like a shell
signal.signal(signal.SIGTSTP, signal.SIG_IGN)
# call the master function once the child is ready
# call the controller function once the minion is ready
try:
controller = ProcessController(
child_process.pid, master_fd, debug=kwargs.get("debug"))
minion_process.pid, controller_fd, debug=kwargs.get("debug"))
controller.timeout = controller_timeout
controller.sleep_time = sleep_time
error = master_function(child_process, controller, **kwargs)
error = controller_function(minion_process, controller, **kwargs)
except BaseException:
error = 1
traceback.print_exc()
child_process.join()
minion_process.join()
# return whether either the parent or child failed
return error or child_process.exitcode
# return whether either the parent or minion failed
return error or minion_process.exitcode

View File

@@ -466,7 +466,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
web_util.push_to_url(
specfile_path, remote_specfile_path, keep_original=False)
tty.msg('Buildache for "%s" written to \n %s' %
tty.msg('Buildcache for "%s" written to \n %s' %
(spec, remote_spackfile_path))
try:
@@ -498,6 +498,7 @@ def download_tarball(spec):
# stage the tarball into standard place
stage = Stage(url, name="build_cache", keep=True)
stage.create()
try:
stage.fetch()
return stage.save_filename
@@ -602,15 +603,11 @@ def is_backup_file(file):
if not is_backup_file(text_name):
text_names.append(text_name)
# If we are installing back to the same location don't replace anything
# If we are not installing back to the same install tree do the relocation
if old_layout_root != new_layout_root:
paths_to_relocate = [old_spack_prefix, old_layout_root]
paths_to_relocate.extend(prefix_to_hash.keys())
files_to_relocate = list(filter(
lambda pathname: not relocate.file_is_relocatable(
pathname, paths_to_relocate=paths_to_relocate),
map(lambda filename: os.path.join(workdir, filename),
buildinfo['relocate_binaries'])))
files_to_relocate = [os.path.join(workdir, filename)
for filename in buildinfo.get('relocate_binaries')
]
# If the buildcache was not created with relativized rpaths
# do the relocation of path in binaries
if (spec.architecture.platform == 'darwin' or
@@ -646,6 +643,13 @@ def is_backup_file(file):
new_spack_prefix,
prefix_to_prefix)
paths_to_relocate = [old_prefix, old_layout_root]
paths_to_relocate.extend(prefix_to_hash.keys())
files_to_relocate = list(filter(
lambda pathname: not relocate.file_is_relocatable(
pathname, paths_to_relocate=paths_to_relocate),
map(lambda filename: os.path.join(workdir, filename),
buildinfo['relocate_binaries'])))
# relocate the install prefixes in binary files including dependencies
relocate.relocate_text_bin(files_to_relocate,
old_prefix, new_prefix,
@@ -653,6 +657,17 @@ def is_backup_file(file):
new_spack_prefix,
prefix_to_prefix)
# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
else:
if old_spack_prefix != new_spack_prefix:
relocate.relocate_text(text_names,
old_layout_root, new_layout_root,
old_prefix, new_prefix,
old_spack_prefix,
new_spack_prefix,
prefix_to_prefix)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
force=False):

View File

@@ -174,6 +174,14 @@ def clean_environment():
for v in build_system_vars:
env.unset(v)
# Unset mpi environment vars. These flags should only be set by
# mpi providers for packages with mpi dependencies
mpi_vars = [
'MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90'
]
for v in mpi_vars:
env.unset(v)
build_lang = spack.config.get('config:build_language')
if build_lang:
# Override language-related variables. This can be used to force

View File

@@ -12,8 +12,9 @@
class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
Maintainers: ax3l, svenevs
"""
maintainers = ['ax3l', 'svenevs']
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
# https://developer.nvidia.com/cuda-gpus
@@ -25,6 +26,7 @@ class CudaPackage(PackageBase):
'50', '52', '53',
'60', '61', '62',
'70', '72', '75',
'80',
]
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -48,6 +50,7 @@ def cuda_flags(arch_list):
# CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
depends_on('cuda@:6.0', when='cuda_arch=10')
depends_on('cuda@:6.5', when='cuda_arch=11')
depends_on('cuda@2.1:6.5', when='cuda_arch=12')
@@ -58,8 +61,8 @@ def cuda_flags(arch_list):
depends_on('cuda@5.0:10.2', when='cuda_arch=30')
depends_on('cuda@5.0:10.2', when='cuda_arch=32')
depends_on('cuda@5.0:10.2', when='cuda_arch=35')
depends_on('cuda@6.5:10.2', when='cuda_arch=37')
depends_on('cuda@5.0:', when='cuda_arch=35')
depends_on('cuda@6.5:', when='cuda_arch=37')
depends_on('cuda@6.0:', when='cuda_arch=50')
depends_on('cuda@6.5:', when='cuda_arch=52')
@@ -73,6 +76,8 @@ def cuda_flags(arch_list):
depends_on('cuda@9.0:', when='cuda_arch=72')
depends_on('cuda@10.0:', when='cuda_arch=75')
depends_on('cuda@11.0:', when='cuda_arch=80')
# There are at least three cases to be aware of for compiler conflicts
# 1. Linux x86_64
# 2. Linux ppc64le
@@ -88,12 +93,15 @@ def cuda_flags(arch_list):
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1' + arch_platform)
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89' + arch_platform)
conflicts('%gcc@:4,10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27' + arch_platform)
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5' + arch_platform)
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8' + arch_platform)
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1' + arch_platform)
conflicts('%pgi@:16', when='+cuda ^cuda@9.2.88:10' + arch_platform)
conflicts('%pgi@:17', when='+cuda ^cuda@10.2.89' + arch_platform)
conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10' + arch_platform)
conflicts('%pgi@:17,20:',
when='+cuda ^cuda@10.1.105:10.2.89' + arch_platform)
conflicts('%pgi@:17,20.2:', when='+cuda ^cuda@11.0.2' + arch_platform)
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5' + arch_platform)
conflicts('%clang@:3.7,4:',
when='+cuda ^cuda@8.0:9.0' + arch_platform)
@@ -104,7 +112,8 @@ def cuda_flags(arch_list):
conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105' + arch_platform)
conflicts('%clang@:3.7,8.1:',
when='+cuda ^cuda@10.1.105:10.1.243' + arch_platform)
conflicts('%clang@:3.2,9.0:', when='+cuda ^cuda@10.2.89' + arch_platform)
conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89' + arch_platform)
conflicts('%clang@:5,10:', when='+cuda ^cuda@11.0.2' + arch_platform)
# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
@@ -119,6 +128,8 @@ def cuda_flags(arch_list):
conflicts('%gcc@6:', when='+cuda ^cuda@:9' + arch_platform)
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform)
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
conflicts('%gcc@:4,10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
conflicts('%pgi', when='+cuda ^cuda@:8' + arch_platform)
conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185' + arch_platform)
conflicts('%pgi@:17', when='+cuda ^cuda@:10' + arch_platform)
@@ -128,6 +139,7 @@ def cuda_flags(arch_list):
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130' + arch_platform)
conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105' + arch_platform)
conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89' + arch_platform)
conflicts('%clang@:5,10.0:', when='+cuda ^cuda@11.0.2' + arch_platform)
# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
@@ -141,11 +153,13 @@ def cuda_flags(arch_list):
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
conflicts('%intel@19.2:', when='+cuda ^cuda@:11.0.2')
# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
conflicts('%xl@17:', when='+cuda ^cuda@:10.2.89')
conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.0.2')
# Mac OS X
# platform = ' platform=darwin'
@@ -156,7 +170,7 @@ def cuda_flags(arch_list):
# `clang-apple@x.y.z as a possible fix.
# Compiler conflicts will be eventually taken from here:
# https://docs.nvidia.com/cuda/cuda-installation-guide-mac-os-x/index.html#abstract
conflicts('platform=darwin', when='+cuda ^cuda@11.0:')
conflicts('platform=darwin', when='+cuda ^cuda@11.0.2:')
# Make sure cuda_arch can not be used without +cuda
for value in cuda_arch_values:

View File

@@ -493,7 +493,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
after_script = None
if custom_spack_repo:
if not custom_spack_ref:
custom_spack_ref = 'master'
custom_spack_ref = 'develop'
before_script = [
('git clone "{0}"'.format(custom_spack_repo)),
'pushd ./spack && git checkout "{0}" && popd'.format(

View File

@@ -65,7 +65,7 @@ def checksum(parser, args):
version_lines = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, keep_stage=args.keep_stage,
batch=(args.batch or len(args.versions) > 0),
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
fetch_options=pkg.fetch_options)
print()

View File

@@ -445,6 +445,9 @@ def setup_parser(subparser):
subparser.add_argument(
'--skip-editor', action='store_true',
help="skip the edit session for the package (e.g., automation)")
subparser.add_argument(
'-b', '--batch', action='store_true',
help="don't ask which versions to checksum")
class BuildSystemGuesser:
@@ -511,7 +514,7 @@ def __call__(self, stage, url):
# Determine the build system based on the files contained
# in the archive.
for pattern, bs in clues:
if any(re.search(pattern, l) for l in lines):
if any(re.search(pattern, line) for line in lines):
self.build_system = bs
break
@@ -629,7 +632,8 @@ def get_versions(args, name):
versions = spack.stage.get_checksums_for_versions(
url_dict, name, first_stage_function=guesser,
keep_stage=args.keep_stage, batch=True)
keep_stage=args.keep_stage,
batch=(args.batch or len(url_dict) == 1))
else:
versions = unhashed_versions

View File

@@ -22,6 +22,7 @@
import sys
import textwrap
import time
import traceback
from six import StringIO
from six import string_types
from six import with_metaclass
@@ -1744,7 +1745,23 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
with spack.store.db.prefix_write_lock(spec):
if pkg is not None:
spack.hooks.pre_uninstall(spec)
try:
spack.hooks.pre_uninstall(spec)
except Exception as error:
if force:
error_msg = (
"One or more pre_uninstall hooks have failed"
" for {0}, but Spack is continuing with the"
" uninstall".format(str(spec)))
if isinstance(error, spack.error.SpackError):
error_msg += (
"\n\nError message: {0}".format(str(error)))
tty.warn(error_msg)
# Note that if the uninstall succeeds then we won't be
# seeing this error again and won't have another chance
# to run the hook.
else:
raise
# Uninstalling in Spack only requires removing the prefix.
if not spec.external:
@@ -1765,7 +1782,20 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
spack.store.db.remove(spec)
if pkg is not None:
spack.hooks.post_uninstall(spec)
try:
spack.hooks.post_uninstall(spec)
except Exception:
# If there is a failure here, this is our only chance to do
# something about it: at this point the Spec has been removed
# from the DB and prefix, so the post-uninstallation hooks
# will not have another chance to run.
error_msg = (
"One or more post-uninstallation hooks failed for"
" {0}, but the prefix has been removed (if it is not"
" external).".format(str(spec)))
tb_msg = traceback.format_exc()
error_msg += "\n\nThe error:\n\n{0}".format(tb_msg)
tty.warn(error_msg)
tty.msg("Successfully uninstalled %s" % spec.short_spec)

View File

@@ -804,15 +804,17 @@ def relocate_text(
where they should be relocated
"""
# TODO: reduce the number of arguments (8 seems too much)
sbang_regex = r'#!/bin/bash {0}/bin/sbang'.format(orig_spack)
new_sbang = r'#!/bin/bash {0}/bin/sbang'.format(new_spack)
orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
for file in files:
_replace_prefix_text(file, orig_install_prefix, new_install_prefix)
for orig_dep_prefix, new_dep_prefix in new_prefixes.items():
_replace_prefix_text(file, orig_dep_prefix, new_dep_prefix)
_replace_prefix_text(file, orig_layout_root, new_layout_root)
_replace_prefix_text(file, sbang_regex, new_sbang)
# relocate the sbang location only if the spack directory changed
if orig_spack != new_spack:
_replace_prefix_text(file, orig_sbang, new_sbang)
def relocate_text_bin(

View File

@@ -1143,8 +1143,6 @@ def read():
assert vals['read'] == 1
@pytest.mark.skipif('macos' in os.environ.get('GITHUB_WORKFLOW', ''),
reason="Skip failing test for GA on MacOS")
def test_lock_debug_output(lock_path):
host = socket.getfqdn()

View File

@@ -111,7 +111,7 @@ def test_log_subproc_and_echo_output_capfd(capfd, tmpdir):
# Tests below use a pseudoterminal to test llnl.util.tty.log
#
def simple_logger(**kwargs):
"""Mock logger (child) process for testing log.keyboard_input."""
"""Mock logger (minion) process for testing log.keyboard_input."""
def handler(signum, frame):
running[0] = False
signal.signal(signal.SIGUSR1, handler)
@@ -125,7 +125,7 @@ def handler(signum, frame):
def mock_shell_fg(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_enabled()
@@ -134,7 +134,7 @@ def mock_shell_fg(proc, ctl, **kwargs):
def mock_shell_fg_no_termios(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_disabled_fg()
@@ -143,7 +143,7 @@ def mock_shell_fg_no_termios(proc, ctl, **kwargs):
def mock_shell_bg(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.bg()
ctl.status()
ctl.wait_disabled()
@@ -152,7 +152,7 @@ def mock_shell_bg(proc, ctl, **kwargs):
def mock_shell_tstp_cont(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.tstp()
ctl.wait_stopped()
@@ -163,7 +163,7 @@ def mock_shell_tstp_cont(proc, ctl, **kwargs):
def mock_shell_tstp_tstp_cont(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.tstp()
ctl.wait_stopped()
@@ -177,7 +177,7 @@ def mock_shell_tstp_tstp_cont(proc, ctl, **kwargs):
def mock_shell_tstp_tstp_cont_cont(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.tstp()
ctl.wait_stopped()
@@ -194,7 +194,7 @@ def mock_shell_tstp_tstp_cont_cont(proc, ctl, **kwargs):
def mock_shell_bg_fg(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.bg()
ctl.status()
ctl.wait_disabled()
@@ -207,7 +207,7 @@ def mock_shell_bg_fg(proc, ctl, **kwargs):
def mock_shell_bg_fg_no_termios(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.bg()
ctl.status()
ctl.wait_disabled()
@@ -220,7 +220,7 @@ def mock_shell_bg_fg_no_termios(proc, ctl, **kwargs):
def mock_shell_fg_bg(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_enabled()
@@ -233,7 +233,7 @@ def mock_shell_fg_bg(proc, ctl, **kwargs):
def mock_shell_fg_bg_no_termios(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background."""
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_disabled_fg()
@@ -299,7 +299,7 @@ def test_foreground_background(test_fn, termios_on_or_off, tmpdir):
def synchronized_logger(**kwargs):
"""Mock logger (child) process for testing log.keyboard_input.
"""Mock logger (minion) process for testing log.keyboard_input.
This logger synchronizes with the parent process to test that 'v' can
toggle output. It is used in ``test_foreground_background_output`` below.
@@ -330,7 +330,7 @@ def handler(signum, frame):
def mock_shell_v_v(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background_output."""
"""Controller function for test_foreground_background_output."""
write_lock = kwargs["write_lock"]
v_lock = kwargs["v_lock"]
@@ -357,7 +357,7 @@ def mock_shell_v_v(proc, ctl, **kwargs):
def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
"""PseudoShell master function for test_foreground_background_output."""
"""Controller function for test_foreground_background_output."""
write_lock = kwargs["write_lock"]
v_lock = kwargs["v_lock"]
@@ -395,9 +395,9 @@ def test_foreground_background_output(
shell = PseudoShell(test_fn, synchronized_logger)
log_path = str(tmpdir.join("log.txt"))
# Locks for synchronizing with child
write_lock = multiprocessing.Lock() # must be held by child to write
v_lock = multiprocessing.Lock() # held while master is in v mode
# Locks for synchronizing with minion
write_lock = multiprocessing.Lock() # must be held by minion to write
v_lock = multiprocessing.Lock() # held while controller is in v mode
with termios_on_or_off():
shell.start(
@@ -423,16 +423,16 @@ def test_foreground_background_output(
with open(log_path) as log:
log = log.read().strip().split("\n")
# Master and child process coordinate with locks such that the child
# Controller and minion process coordinate with locks such that the minion
# writes "off" when echo is off, and "on" when echo is on. The
# output should contain mostly "on" lines, but may contain an "off"
# or two. This is because the master toggles echo by sending "v" on
# stdin to the child, but this is not synchronized with our locks.
# or two. This is because the controller toggles echo by sending "v" on
# stdin to the minion, but this is not synchronized with our locks.
# It's good enough for a test, though. We allow at most 2 "off"'s in
# the output to account for the race.
assert (
['forced output', 'on'] == uniq(output) or
output.count("off") <= 2 # if master_fd is a bit slow
output.count("off") <= 2 # if controller_fd is a bit slow
)
# log should be off for a while, then on, then off

View File

@@ -14,7 +14,7 @@
NOTAR_EXTS = ["zip", "tgz", "tbz2", "txz"]
# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
ALLOWED_ARCHIVE_TYPES = [".".join(ext) for ext in product(
PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
@@ -36,7 +36,7 @@ def decompressor_for(path, extension=None):
bunzip2 = which('bunzip2', required=True)
return bunzip2
tar = which('tar', required=True)
tar.add_default_arg('-xf')
tar.add_default_arg('-oxf')
return tar

View File

@@ -133,7 +133,7 @@ def __init__(self, hexdigest, **kwargs):
@property
def hash_name(self):
"""Get the name of the hash function this Checker is using."""
return self.hash_fun().name
return self.hash_fun().name.lower()
def check(self, filename):
"""Read the file with the specified name and check its checksum

View File

@@ -0,0 +1,22 @@
#!/bin/sh
#
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#
# Description:
# Install patchelf for use in buildcache unit tests
#
# Usage:
# install-patchelf.sh
#
set -ex
if [ "$TRAVIS_OS_NAME" = "linux" ]; then
olddir=$PWD
cd /tmp
wget https://github.com/NixOS/patchelf/archive/0.10.tar.gz
tar -xvf 0.10.tar.gz
cd patchelf-0.10 && ./bootstrap.sh && ./configure --prefix=/usr && make && sudo make install && cd $olddir
fi

View File

@@ -18,7 +18,7 @@
ORIGINAL_PATH="$PATH"
. "$(dirname $0)/setup.sh"
check_dependencies $coverage git hg svn
check_dependencies $coverage kcov git hg svn
# Clean the environment by removing Spack from the path and getting rid of
# the spack shell function

View File

@@ -37,11 +37,7 @@ bin/spack -h
bin/spack help -a
# Profile and print top 20 lines for a simple call to spack spec
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
spack -p --lines 20 spec openmpi
else
spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170
fi
spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170
#-----------------------------------------------------------
# Run unit tests with code coverage

View File

@@ -26,14 +26,11 @@ if [[ "$COVERAGE" == "true" ]]; then
coverage=coverage
coverage_run="coverage run"
# bash coverage depends on some other factors -- there are issues with
# kcov for Python 2.6, unit tests, and build tests.
if [[ $TRAVIS_PYTHON_VERSION != 2.6 ]]; then
mkdir -p coverage
cc_script="$SPACK_ROOT/lib/spack/env/cc"
bashcov=$(realpath ${QA_DIR}/bashcov)
sed -i~ "s@#\!/bin/bash@#\!${bashcov}@" "$cc_script"
fi
# bash coverage depends on some other factors
mkdir -p coverage
cc_script="$SPACK_ROOT/lib/spack/env/cc"
bashcov=$(realpath ${QA_DIR}/bashcov)
sed -i~ "s@#\!/bin/bash@#\!${bashcov}@" "$cc_script"
fi
#
@@ -74,6 +71,9 @@ check_dependencies() {
spack_package=mercurial
pip_package=mercurial
;;
kcov)
spack_package=kcov
;;
svn)
spack_package=subversion
;;

View File

@@ -639,7 +639,7 @@ _spack_containerize() {
_spack_create() {
if $list_options
then
SPACK_COMPREPLY="-h --help --keep-stage -n --name -t --template -r --repo -N --namespace -f --force --skip-editor"
SPACK_COMPREPLY="-h --help --keep-stage -n --name -t --template -r --repo -N --namespace -f --force --skip-editor -b --batch"
else
SPACK_COMPREPLY=""
fi

View File

@@ -7,7 +7,7 @@ RUN mkdir {{ paths.environment }} \
{{ manifest }} > {{ paths.environment }}/spack.yaml
# Install the software, remove unnecessary deps
RUN cd {{ paths.environment }} && spack env activate . && spack install && spack gc -y
RUN cd {{ paths.environment }} && spack env activate . && spack install --fail-fast && spack gc -y
{% if strip %}
# Strip all the binaries

View File

@@ -12,7 +12,7 @@ EOF
# Install all the required software
. /opt/spack/share/spack/setup-env.sh
spack env activate .
spack install
spack install --fail-fast
spack gc -y
spack env deactivate
spack env activate --sh -d . >> {{ paths.environment }}/environment_modifications.sh

View File

@@ -0,0 +1,155 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Corge(Package):
"""A toy package to test dependencies"""
homepage = "https://www.example.com"
url = "https://github.com/gartung/corge/archive/v3.0.0.tar.gz"
version('3.0.0',
sha256='5058861c3b887511387c725971984cec665a8307d660158915a04d7786fed6bc')
depends_on('quux')
def install(self, spec, prefix):
corge_cc = '''#include <iostream>
#include <stdexcept>
#include "corge.h"
#include "corge_version.h"
#include "quux/quux.h"
const int Corge::version_major = corge_version_major;
const int Corge::version_minor = corge_version_minor;
Corge::Corge()
{
}
int
Corge::get_version() const
{
return 10 * version_major + version_minor;
}
int
Corge::corgegate() const
{
int corge_version = get_version();
std::cout << "Corge::corgegate version " << corge_version
<< " invoked" << std::endl;
std::cout << "Corge config directory = %s" <<std::endl;
Quux quux;
int quux_version = quux.quuxify();
if(quux_version != corge_version) {
throw std::runtime_error(
"Corge found an incompatible version of Garply.");
}
return corge_version;
}
'''
corge_h = '''#ifndef CORGE_H_
class Corge
{
private:
static const int version_major;
static const int version_minor;
public:
Corge();
int get_version() const;
int corgegate() const;
};
#endif // CORGE_H_
'''
corge_version_h = '''
const int corge_version_major = %s;
const int corge_version_minor = %s;
'''
corgegator_cc = '''
#include <iostream>
#include "corge.h"
int
main(int argc, char* argv[])
{
std::cout << "corgerator called with ";
if (argc == 0) {
std::cout << "no command-line arguments" << std::endl;
} else {
std::cout << "command-line arguments:";
for (int i = 0; i < argc; ++i) {
std::cout << " \"" << argv[i] << "\"";
}
std::cout << std::endl;
}
std::cout << "corgegating.."<<std::endl;
Corge corge;
corge.corgegate();
std::cout << "done."<<std::endl;
return 0;
}
'''
mkdirp(prefix.lib64)
mkdirp('%s/corge' % prefix.include)
mkdirp('%s/corge' % self.stage.source_path)
with open('%s/corge_version.h' % self.stage.source_path, 'w') as f:
f.write(corge_version_h % (self.version[0], self.version[1:]))
with open('%s/corge/corge.cc' % self.stage.source_path, 'w') as f:
f.write(corge_cc % prefix.config)
with open('%s/corge/corge.h' % self.stage.source_path, 'w') as f:
f.write(corge_h)
with open('%s/corge/corgegator.cc' % self.stage.source_path, 'w') as f:
f.write(corgegator_cc)
gpp = which('/usr/bin/g++')
gpp('-Dcorge_EXPORTS',
'-I%s' % self.stage.source_path,
'-I%s' % spec['quux'].prefix.include,
'-I%s' % spec['garply'].prefix.include,
'-O2', '-g', '-DNDEBUG', '-fPIC',
'-o', 'corge.cc.o',
'-c', 'corge/corge.cc')
gpp('-Dcorge_EXPORTS',
'-I%s' % self.stage.source_path,
'-I%s' % spec['quux'].prefix.include,
'-I%s' % spec['garply'].prefix.include,
'-O2', '-g', '-DNDEBUG', '-fPIC',
'-o', 'corgegator.cc.o',
'-c', 'corge/corgegator.cc')
gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
'-Wl,-soname,libcorge.so', '-o', 'libcorge.so', 'corge.cc.o',
'-Wl,-rpath,%s:%s::::' %
(spec['quux'].prefix.lib64, spec['garply'].prefix.lib64),
'%s/libquux.so' % spec['quux'].prefix.lib64,
'%s/libgarply.so' % spec['garply'].prefix.lib64)
gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
'corgegator.cc.o', '-o', 'corgegator',
'-Wl,-rpath,%s:%s:%s:::' % (prefix.lib64,
spec['quux'].prefix.lib64,
spec['garply'].prefix.lib64),
'libcorge.so',
'%s/libquux.so' % spec['quux'].prefix.lib64,
'%s/libgarply.so' % spec['garply'].prefix.lib64)
copy('corgegator', '%s/corgegator' % prefix.lib64)
copy('libcorge.so', '%s/libcorge.so' % prefix.lib64)
copy('%s/corge/corge.h' % self.stage.source_path,
'%s/corge/corge.h' % prefix.include)
mkdirp(prefix.bin)
copy('corge_version.h', '%s/corge_version.h' % prefix.bin)
os.symlink('%s/corgegator' % prefix.lib64,
'%s/corgegator' % prefix.bin)
os.symlink('%s/quuxifier' % spec['quux'].prefix.lib64,
'%s/quuxifier' % prefix.bin)
os.symlink('%s/garplinator' % spec['garply'].prefix.lib64,
'%s/garplinator' % prefix.bin)

View File

@@ -0,0 +1,112 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Garply(Package):
"""Toy package for testing dependencies"""
homepage = "https://www.example.com"
url = "https://github.com/gartung/garply/archive/v3.0.0.tar.gz"
version('3.0.0',
sha256='534ac8ba7a6fed7e8bbb543bd43ca04999e65337445a531bd296939f5ac2f33d')
def install(self, spec, prefix):
garply_h = '''#ifndef GARPLY_H_
class Garply
{
private:
static const int version_major;
static const int version_minor;
public:
Garply();
int get_version() const;
int garplinate() const;
};
#endif // GARPLY_H_
'''
garply_cc = '''#include "garply.h"
#include "garply_version.h"
#include <iostream>
const int Garply::version_major = garply_version_major;
const int Garply::version_minor = garply_version_minor;
Garply::Garply() {}
int
Garply::get_version() const
{
return 10 * version_major + version_minor;
}
int
Garply::garplinate() const
{
std::cout << "Garply::garplinate version " << get_version()
<< " invoked" << std::endl;
std::cout << "Garply config dir = %s" << std::endl;
return get_version();
}
'''
garplinator_cc = '''#include "garply.h"
#include <iostream>
int
main()
{
Garply garply;
garply.garplinate();
return 0;
}
'''
garply_version_h = '''const int garply_version_major = %s;
const int garply_version_minor = %s;
'''
mkdirp(prefix.lib64)
mkdirp('%s/garply' % prefix.include)
mkdirp('%s/garply' % self.stage.source_path)
with open('%s/garply_version.h' % self.stage.source_path, 'w') as f:
f.write(garply_version_h % (self.version[0], self.version[1:]))
with open('%s/garply/garply.h' % self.stage.source_path, 'w') as f:
f.write(garply_h)
with open('%s/garply/garply.cc' % self.stage.source_path, 'w') as f:
f.write(garply_cc % prefix.config)
with open('%s/garply/garplinator.cc' %
self.stage.source_path, 'w') as f:
f.write(garplinator_cc)
gpp = which('/usr/bin/g++')
gpp('-Dgarply_EXPORTS',
'-I%s' % self.stage.source_path,
'-O2', '-g', '-DNDEBUG', '-fPIC',
'-o', 'garply.cc.o',
'-c', '%s/garply/garply.cc' % self.stage.source_path)
gpp('-Dgarply_EXPORTS',
'-I%s' % self.stage.source_path,
'-O2', '-g', '-DNDEBUG', '-fPIC',
'-o', 'garplinator.cc.o',
'-c', '%s/garply/garplinator.cc' % self.stage.source_path)
gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
'-Wl,-soname,libgarply.so', '-o', 'libgarply.so', 'garply.cc.o')
gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
'garplinator.cc.o', '-o', 'garplinator',
'-Wl,-rpath,%s' % prefix.lib64,
'libgarply.so')
copy('libgarply.so', '%s/libgarply.so' % prefix.lib64)
copy('garplinator', '%s/garplinator' % prefix.lib64)
copy('%s/garply/garply.h' % self.stage.source_path,
'%s/garply/garply.h' % prefix.include)
mkdirp(prefix.bin)
copy('garply_version.h', '%s/garply_version.h' % prefix.bin)
os.symlink('%s/garplinator' % prefix.lib64,
'%s/garplinator' % prefix.bin)

View File

@@ -7,16 +7,17 @@
class Patchelf(AutotoolsPackage):
"""
PatchELF is a small utility to modify the
dynamic linker and RPATH of ELF executables.
"""
"""PatchELF is a small utility to modify the dynamic linker and RPATH of
ELF executables."""
homepage = "https://nixos.org/patchelf.html"
url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"
list_url = "http://nixos.org/releases/patchelf/"
url = "https://nixos.org/releases/patchelf/patchelf-0.10/patchelf-0.10.tar.gz"
list_url = "https://nixos.org/releases/patchelf/"
list_depth = 1
version('0.9', '3c265508526760f233620f35d79c79fc')
version('0.8', '407b229e6a681ffb0e2cdd5915cb2d01')
version('0.10', sha256='b2deabce05c34ce98558c0efb965f209de592197b2c88e930298d740ead09019')
version('0.9', sha256='f2aa40a6148cb3b0ca807a1bf836b081793e55ec9e5540a5356d800132be7e0a')
version('0.8', sha256='14af06a2da688d577d64ff8dac065bb8903bbffbe01d30c62df7af9bf4ce72fe')
def install(self, spec, prefix):
install_tree(self.stage.source_path, prefix)

View File

@@ -0,0 +1,132 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Quux(Package):
    """Toy package for testing dependencies"""

    homepage = "https://www.example.com"
    url = "https://github.com/gartung/quux/archive/v3.0.0.tar.gz"

    version('3.0.0',
            sha256='b91bc96fb746495786bddac2c527039177499f2f76d3fa9dcf0b393859e68484')

    depends_on('garply')

    def install(self, spec, prefix):
        """Generate, compile and install the toy quux library/binary.

        Writes the C++ sources below into the stage, builds a shared
        library plus a driver executable with the system g++, then
        copies/symlinks the artifacts into the prefix.
        """
        quux_cc = '''#include "quux.h"
#include "garply/garply.h"
#include "quux_version.h"
#include <iostream>
#include <stdexcept>

const int Quux::version_major = quux_version_major;
const int Quux::version_minor = quux_version_minor;

Quux::Quux() {}

int
Quux::get_version() const
{
    return 10 * version_major + version_minor;
}

int
Quux::quuxify() const
{
    int quux_version = get_version();
    std::cout << "Quux::quuxify version " << quux_version
              << " invoked" <<std::endl;
    std::cout << "Quux config directory is %s" <<std::endl;
    Garply garply;
    int garply_version = garply.garplinate();

    if (garply_version != quux_version) {
        throw std::runtime_error(
            "Quux found an incompatible version of Garply.");
    }

    return quux_version;
}
'''
        quux_h = '''#ifndef QUUX_H_

class Quux
{
private:
    static const int version_major;
    static const int version_minor;

public:
    Quux();
    int get_version() const;
    int quuxify() const;
};

#endif // QUUX_H_
'''
        quuxifier_cc = '''
#include "quux.h"
#include <iostream>

int
main()
{
    Quux quux;
    quux.quuxify();

    return 0;
}
'''
        quux_version_h = '''const int quux_version_major = %s;
const int quux_version_minor = %s;
'''
        mkdirp(prefix.lib64)
        mkdirp('%s/quux' % prefix.include)
        # FIX: the sources below are written into <stage>/quux, but that
        # directory was never created (the sibling Corge and Garply toy
        # packages both mkdirp the equivalent directory first).  Create
        # it so the open() calls cannot fail with ENOENT.
        mkdirp('%s/quux' % self.stage.source_path)
        with open('%s/quux_version.h' % self.stage.source_path, 'w') as f:
            # version[0] -> major, version[1:] -> minor component(s)
            f.write(quux_version_h % (self.version[0], self.version[1:]))
        with open('%s/quux/quux.cc' % self.stage.source_path, 'w') as f:
            f.write(quux_cc % (prefix.config))
        with open('%s/quux/quux.h' % self.stage.source_path, 'w') as f:
            f.write(quux_h)
        with open('%s/quux/quuxifier.cc' % self.stage.source_path, 'w') as f:
            f.write(quuxifier_cc)
        # Deliberately uses the system compiler, not Spack's wrappers.
        gpp = which('/usr/bin/g++')
        gpp('-Dquux_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-I%s' % spec['garply'].prefix.include,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'quux.cc.o',
            '-c', 'quux/quux.cc')
        gpp('-Dquux_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-I%s' % spec['garply'].prefix.include,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'quuxifier.cc.o',
            '-c', 'quux/quuxifier.cc')
        gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
            '-Wl,-soname,libquux.so', '-o', 'libquux.so', 'quux.cc.o',
            '-Wl,-rpath,%s:%s::::' % (prefix.lib64,
                                      spec['garply'].prefix.lib64),
            '%s/libgarply.so' % spec['garply'].prefix.lib64)
        gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
            'quuxifier.cc.o', '-o', 'quuxifier',
            '-Wl,-rpath,%s:%s::::' % (prefix.lib64,
                                      spec['garply'].prefix.lib64),
            'libquux.so',
            '%s/libgarply.so' % spec['garply'].prefix.lib64)
        copy('libquux.so', '%s/libquux.so' % prefix.lib64)
        copy('quuxifier', '%s/quuxifier' % prefix.lib64)
        copy('%s/quux/quux.h' % self.stage.source_path,
             '%s/quux/quux.h' % prefix.include)
        mkdirp(prefix.bin)
        copy('quux_version.h', '%s/quux_version.h' % prefix.bin)
        os.symlink('%s/quuxifier' % prefix.lib64, '%s/quuxifier' % prefix.bin)
        os.symlink('%s/garplinator' % spec['garply'].prefix.lib64,
                   '%s/garplinator' % prefix.bin)

View File

@@ -10,10 +10,11 @@ class AbseilCpp(CMakePackage):
"""Abseil Common Libraries (C++) """
homepage = "https://abseil.io/"
url = "https://github.com/abseil/abseil-cpp/archive/20180600.tar.gz"
url = "https://github.com/abseil/abseil-cpp/archive/20200225.2.tar.gz"
maintainers = ['jcftang']
version('20200225.2', sha256='f41868f7a938605c92936230081175d1eae87f6ea2c248f41077c8f88316f111')
version('20200225.1', sha256='0db0d26f43ba6806a8a3338da3e646bb581f0ca5359b3a201d8fb8e4752fd5f8')
version('20190808', sha256='8100085dada279bf3ee00cd064d43b5f55e5d913be0dfe2906f06f8f28d5b37e')
version('20181200', sha256='e2b53bfb685f5d4130b84c4f3050c81bf48c497614dc85d91dbd3ed9129bce6d')

View File

@@ -24,6 +24,10 @@ class Acl(AutotoolsPackage):
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('attr')
depends_on('gettext')
def setup_build_environment(self, env):
env.append_flags('LDFLAGS', '-lintl')
def autoreconf(self, spec, prefix):
bash = which('bash')

View File

@@ -6,7 +6,7 @@
from spack import *
class Acts(CMakePackage):
class Acts(CMakePackage, CudaPackage):
"""
A Common Tracking Software (Acts)
@@ -35,6 +35,9 @@ class Acts(CMakePackage):
# Supported Acts versions
version('master', branch='master')
version('0.25.2', commit='76bf1f3e4be51d4d27126b473a2caa8d8a72b320')
version('0.25.1', commit='6e8a1ea6d2c7385a78e3e190efb2a8a0c1fa957f')
version('0.25.0', commit='0aca171951a214299e8ff573682b1c5ecec63d42')
version('0.24.0', commit='ef4699c8500bfea59a5fe88bed67fde2f00f0adf')
version('0.23.0', commit='dc443dd7e663bc4d7fb3c1e3f1f75aaf57ffd4e4')
version('0.22.1', commit='ca1b8b1645db6b552f44c48d2ff34c8c29618f3a')
@@ -92,8 +95,8 @@ class Acts(CMakePackage):
depends_on('boost @1.62:1.69.99 +program_options +test', when='@:0.10.3')
depends_on('boost @1.69: +filesystem +program_options +test', when='@0.10.4:')
depends_on('cmake @3.11:', type='build')
depends_on('dd4hep @1.10: +xercesc', when='+dd4hep')
depends_on('dd4hep @1.10: +geant4 +xercesc', when='+dd4hep +geant4')
depends_on('dd4hep @1.10:', when='+dd4hep')
depends_on('dd4hep @1.10: +geant4', when='+dd4hep +geant4')
depends_on('eigen @3.2.9:', type='build')
depends_on('geant4', when='+geant4')
depends_on('hepmc3@3.1:', when='+hepmc3')
@@ -141,6 +144,7 @@ def example_cmake_variant(cmake_label, spack_variant):
args = [
cmake_variant("BENCHMARKS", "benchmarks"),
cmake_variant("CUDA_PLUGIN", "cuda"),
cmake_variant("DD4HEP_PLUGIN", "dd4hep"),
cmake_variant("DIGITIZATION_PLUGIN", "digitization"),
cmake_variant("EXAMPLES", "examples"),
@@ -157,6 +161,10 @@ def example_cmake_variant(cmake_label, spack_variant):
cmake_variant("TGEO_PLUGIN", "tgeo")
]
cuda_arch = spec.variants['cuda_arch'].value
if cuda_arch != 'none':
args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0]))
if 'root' in spec:
cxxstd = spec['root'].variants['cxxstd'].value
args.append("-DCMAKE_CXX_STANDARD={0}".format(cxxstd))

View File

@@ -9,28 +9,41 @@
class Amber(Package, CudaPackage):
"""Amber is a suite of biomolecular simulation programs.
"""Amber is a suite of biomolecular simulation programs together
with Amber tools.
Note: A manual download is required for Amber.
Spack will search your current directory for the download file.
Alternatively, add this file to a mirror so that Spack can find it.
Note: The version number is composed of the Amber version (major)
and the tools version (minor). A manual download is required for
both Amber and Amber tools.
Spack will search your current directory for the download files.
Alternatively, add the files to a mirror so that Spack can find them.
For instructions on how to set up a mirror, see
http://spack.readthedocs.io/en/latest/mirrors.html"""
homepage = "http://ambermd.org/"
url = "file://{0}/Amber18.tar.bz2".format(os.getcwd())
url = "file://{0}/Amber18.tar.bz2".format(os.getcwd())
maintainers = ['hseara']
version('18', sha256='2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277')
version('16', sha256='3b7ef281fd3c46282a51b6a6deed9ed174a1f6d468002649d84bfc8a2577ae5d')
def url_for_version(self, version):
url = "file://{0}/Amber{1}.tar.bz2".format(
os.getcwd(), version.up_to(1))
return url
version(
'18.20', sha256='2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277')
version(
'18.19', sha256='2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277')
version(
'16.16', sha256='3b7ef281fd3c46282a51b6a6deed9ed174a1f6d468002649d84bfc8a2577ae5d')
resources = [
# [version amber, version ambertools , sha256sum]
('18', '20', 'b1e1f8f277c54e88abc9f590e788bbb2f7a49bcff5e8d8a6eacfaf332a4890f9'),
('18', '19', '0c86937904854b64e4831e047851f504ec45b42e593db4ded92c1bee5973e699'),
('16', '16', '7b876afe566e9dd7eb6a5aa952a955649044360f15c1f5d4d91ba7f41f3105fa'),
]
for ver, ambertools_ver, checksum in resources:
resource(when='@{0}'.format(ver),
resource(when='@{0}.{1}'.format(ver, ambertools_ver),
name='AmberTools',
url='file://{0}/AmberTools{1}.tar.bz2'.format(os.getcwd(),
ambertools_ver),
@@ -100,10 +113,14 @@ class Amber(Package, CudaPackage):
depends_on('cuda@7.5.18', when='@:16+cuda')
# conflicts
conflicts('+x11', when='platform=cray', msg='x11 amber applications not available for cray')
conflicts('+openmp', when='%clang', msg='OpenMP optimizations not available for the clang compiler')
conflicts('+openmp', when='%apple-clang', msg='OpenMP optimizations not available for the Apple clang compiler')
conflicts('+openmp', when='%pgi', msg='OpenMP optimizations not available for the pgi compiler')
conflicts('+x11', when='platform=cray',
msg='x11 amber applications not available for cray')
conflicts('+openmp', when='%clang',
msg='OpenMP not available for the clang compiler')
conflicts('+openmp', when='%apple-clang',
msg='OpenMP not available for the Apple clang compiler')
conflicts('+openmp', when='%pgi',
msg='OpenMP not available for the pgi compiler')
def setup_build_environment(self, env):
amber_src = self.stage.source_path

View File

@@ -20,4 +20,5 @@ class Amdblis(BlisBase):
url = "https://github.com/amd/blis/archive/2.1.tar.gz"
git = "https://github.com/amd/blis.git"
version('2.2', sha256='e1feb60ac919cf6d233c43c424f6a8a11eab2c62c2c6e3f2652c15ee9063c0c9')
version('2.1', sha256='3b1d611d46f0f13b3c0917e27012e0f789b23dbefdddcf877b20327552d72fb3')

View File

@@ -18,6 +18,7 @@ class Amrex(CMakePackage):
maintainers = ['mic84', 'asalmgren']
version('develop', branch='development')
version('20.07', sha256='c386f566f4c57ee56b5630f79ce2c6117d5a612a4aab69b7b26e48d577251165')
version('20.06', sha256='be2f2a5107111fcb8b3928b76024b370c7cb01a9e5dd79484cf7fcf59d0b4858')
version('20.05', sha256='97d753bb75e845a0a959ec1a044a48e6adb86dd008b5e29ce7a01d49ed276338')
version('20.04', sha256='a7ece54d5d89cc00fd555551902a0d4d0fb50db15d2600f441353eed0dddd83b')
@@ -57,6 +58,12 @@ class Amrex(CMakePackage):
values=('Debug', 'Release'))
variant('sundials', default=False,
description='Build AMReX with SUNDIALS support')
variant('hdf5', default=False,
description='Enable HDF5-based I/O')
variant('hypre', default=False,
description='Enable Hypre interfaces')
variant('petsc', default=False,
description='Enable PETSc interfaces')
# Build dependencies
depends_on('mpi', when='+mpi')
@@ -68,6 +75,24 @@ class Amrex(CMakePackage):
conflicts('%apple-clang')
conflicts('%clang')
# Check options compatibility
conflicts('+sundials', when='~fortran',
msg='AMReX SUNDIALS support needs AMReX Fortran API (+fortran)')
conflicts('+hdf5', when='@:20.06',
msg='AMReX HDF5 support needs AMReX newer than version 20.06')
conflicts('+hypre', when='@:20.06',
msg='AMReX Hypre support needs AMReX newer than version 20.06')
conflicts('+hypre', when='~fortran',
msg='AMReX Hypre support needs AMReX Fortran API (+fortran)')
conflicts('+hypre', when='~linear_solvers',
msg='AMReX Hypre support needs variant +linear_solvers')
conflicts('+petsc', when='@:20.06',
msg='AMReX PETSc support needs AMReX newer than version 20.06')
conflicts('+petsc', when='~fortran',
msg='AMReX PETSc support needs AMReX Fortran API (+fortran)')
conflicts('+petsc', when='~linear_solvers',
msg='AMReX PETSc support needs variant +linear_solvers')
def url_for_version(self, version):
if version >= Version('20.05'):
url = "https://github.com/AMReX-Codes/amrex/releases/download/{0}/amrex-{0}.tar.gz"
@@ -89,11 +114,16 @@ def cmake_args(self):
self.spec.variants['precision'].value.upper(),
'-DENABLE_EB:BOOL=%s' % self.cmake_is_on('+eb'),
'-DXSDK_ENABLE_Fortran:BOOL=%s' % self.cmake_is_on('+fortran'),
'-DENABLE_FORTRAN_INTERFACES:BOOL=%s'
% self.cmake_is_on('+fortran'),
'-DENABLE_LINEAR_SOLVERS:BOOL=%s' %
self.cmake_is_on('+linear_solvers'),
'-DENABLE_AMRDATA:BOOL=%s' % self.cmake_is_on('+amrdata'),
'-DENABLE_PARTICLES:BOOL=%s' % self.cmake_is_on('+particles'),
'-DENABLE_SUNDIALS:BOOL=%s' % self.cmake_is_on('+sundials')
'-DENABLE_SUNDIALS:BOOL=%s' % self.cmake_is_on('+sundials'),
'-DENABLE_HDF5:BOOL=%s' % self.cmake_is_on('+hdf5'),
'-DENABLE_HYPRE:BOOL=%s' % self.cmake_is_on('+hypre'),
'-DENABLE_PETSC:BOOL=%s' % self.cmake_is_on('+petsc'),
]
if self.spec.satisfies('%fj'):
args.append('-DCMAKE_Fortran_MODDIR_FLAG=-M')

View File

@@ -7,7 +7,7 @@
class Aspect(CMakePackage):
"""Parallel, extendible finite element code to simulate convection in the
"""Parallel and extensible Finite Element code to simulate convection in the
Earth's mantle and elsewhere."""
homepage = "https://aspect.geodynamics.org"
@@ -17,6 +17,7 @@ class Aspect(CMakePackage):
maintainers = ['tjhei']
version('develop', branch='master')
version('2.2.0', sha256='6dc31c4b991c8a96495ba0e9a3c92e57f9305ba94b8dbed3c8c5cfbab91ec5c1')
version('2.1.0', sha256='bd574d60ed9df1f4b98e68cd526a074d0527c0792763187c9851912327d861a3')
version('2.0.1', sha256='0bf5600c42afce9d39c1d285b0654ecfdeb0f30e9f3421651c95f54ca01ac165')
version('2.0.0', sha256='d485c07f54248e824bdfa35f3eec8971b65e8b7114552ffa2c771bc0dede8cc0')
@@ -26,9 +27,11 @@ class Aspect(CMakePackage):
values=('Debug', 'Release'))
variant('gui', default=False, description='Enable the deal.II parameter GUI')
variant('fpe', default=False, description='Enable floating point exception checks')
variant('opendap', default=False, description='Enable OPeNDAP support for remote file access')
depends_on('dealii+p4est+trilinos+mpi')
depends_on('dealii-parameter-gui', when='+gui')
depends_on('libdap4', when='+opendap')
def cmake_args(self):
return [

View File

@@ -0,0 +1,23 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Atf(AutotoolsPackage):
    """ATF, or Automated Testing Framework, is a collection of libraries
    to write test programs in C, C++ and POSIX shell."""

    homepage = "https://github.com/jmmv/atf"
    url = "https://github.com/jmmv/atf/archive/atf-0.21.tar.gz"

    version('0.21', sha256='da6b02d6e7242f768a7aaa7b7e52378680456e4bd9a913b6636187079c98f3cd')
    version('0.20', sha256='3677cf957d7f574835b8bdd385984ba928d5695b3ff28f958e4227f810483ab7')
    version('0.19', sha256='f9b1d76dad7c34ae61a75638edc517fc05b10fa4c8f97b1d13d739bffee79b16')

    # The tarballs are raw repository archives, so the whole autotools
    # chain is required to generate the build system.
    for _tool in ('m4', 'autoconf', 'automake', 'libtool'):
        depends_on(_tool, type='build')

View File

@@ -0,0 +1,31 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Babelflow(CMakePackage):
    """BabelFlow is an Embedded Domain Specific Language to describe
    algorithms using a task graph abstraction which allows them to be
    executed on top of one of several available runtime systems."""

    homepage = "https://github.com/sci-visus/BabelFlow"
    git = 'https://github.com/sci-visus/BabelFlow.git'

    maintainers = ['spetruzza']

    version('develop',
            branch='ascent',
            submodules=True)

    depends_on('mpi')

    variant("shared", default=True, description="Build Babelflow as shared libs")

    def cmake_args(self):
        """Return the CMake arguments derived from the active variants.

        Bug fix: the original referenced the bare name ``spec``, which is
        not defined inside this method (only ``self.spec`` is) and would
        raise NameError at concretization/build time.
        """
        args = [
            '-DBUILD_SHARED_LIBS:BOOL={0}'.format(
                'ON' if '+shared' in self.spec else 'OFF')]
        return args

View File

@@ -9,13 +9,14 @@
class Bbcp(Package):
"""Securely and quickly copy data from source to target"""
homepage = "http://www.slac.stanford.edu/~abh/bbcp/"
git = "http://www.slac.stanford.edu/~abh/bbcp/bbcp.git"
homepage = "https://www.slac.stanford.edu/~abh/bbcp/"
git = "https://www.slac.stanford.edu/~abh/bbcp/bbcp.git"
version('git', branch='master')
version('master', branch='master')
depends_on('zlib')
depends_on('openssl')
depends_on('libnsl')
def install(self, spec, prefix):
cd("src")

View File

@@ -14,8 +14,17 @@ class Bison(AutotoolsPackage, GNUMirrorPackage):
generalized LR (GLR) parser employing LALR(1) parser tables."""
homepage = "https://www.gnu.org/software/bison/"
gnu_mirror_path = "bison/bison-3.4.2.tar.gz"
gnu_mirror_path = "bison/bison-3.6.4.tar.gz"
version('3.6.4', sha256='8183de64b5383f3634942c7b151bf2577f74273b2731574cdda8a8f3a0ab13e9')
version('3.6.3', sha256='4b4c4943931e811f1073006ce3d8ee022a02b11b501e9cbf4def3613b24a3e63')
version('3.6.2', sha256='e28ed3aad934de2d1df68be209ac0b454f7b6d3c3d6d01126e5cd2cbadba089a')
version('3.6.1', sha256='1120f8bfe2cc13e5e1e3f671dc41b1a535ca5a75a70d5b349c19da9d4389f74d')
version('3.6', sha256='f630645e330bde5847266cc5c8194f0135ced75cced150358d9abe572b95f81c')
version('3.5.3', sha256='34e201d963156618a0ea5bc87220f660a1e08403dd3c7c7903d4f38db3f40039')
version('3.5.2', sha256='b4dbb6dd080f4db7f344f16506502973ca2b15f15c7dbbd1c1c278a456d094fa')
version('3.5.1', sha256='4cef2322d96751be1c0d04f3e57adbb30e7fea83af9c00f98efa6e7509296f25')
version('3.5', sha256='0b36200b9868ee289b78cefd1199496b02b76899bbb7e84ff1c0733a991313d1')
version('3.4.2', sha256='ff3922af377d514eca302a6662d470e857bd1a591e96a2050500df5a9d59facf')
version('3.4.1', sha256='7007fc89c216fbfaff5525359b02a7e5b612694df5168c74673f67055f015095')
version('3.3.2', sha256='0fda1d034185397430eb7b0c9e140fb37e02fbfc53b90252fa5575e382b6dbd1')

View File

@@ -21,5 +21,6 @@ def setup_build_environment(self, env):
env.set('MACHTYPE', 'x86_64')
def install(self, spec, prefix):
filter_file('CC=.*', 'CC={0}'.format(spack_cc), 'inc/common.mk')
mkdirp(prefix.bin)
make("BINDIR=%s" % prefix.bin)

View File

@@ -24,6 +24,7 @@ class Bliss(Package):
patch("Makefile.spack.patch")
def install(self, spec, prefix):
filter_file('__DATE__', ' __DATE__ ', 'bliss.cc')
# The Makefile isn't portable; use our own instead
makeargs = ["-f", "Makefile.spack",
"PREFIX=%s" % prefix, "GMP_PREFIX=%s" % spec["gmp"].prefix]

View File

@@ -27,5 +27,12 @@ class Blktrace(MakefilePackage):
depends_on('libaio')
def edit(self, spec, prefix):
makefiles = ['Makefile', 'btreplay/Makefile',
'btt/Makefile', 'iowatcher/Makefile']
for m in makefiles:
makefile = FileFilter(m)
makefile.filter('CC.*=.*', 'CC = {0}'.format(spack_cc))
def install(self, spec, prefix):
install_tree('.', prefix)

View File

@@ -22,5 +22,6 @@ class Brpc(CMakePackage):
depends_on('gflags')
depends_on('protobuf')
depends_on('leveldb')
depends_on('openssl')
patch('narrow.patch', sha256='d7393029443853ddda6c09e3d2185ac2f60920a36a8b685eb83b6b80c1535539', when='@:0.9.7')

View File

@@ -0,0 +1,13 @@
diff --git a/ksw.c b/ksw.c
index 9793e5e..2eecef4 100644
--- a/ksw.c
+++ b/ksw.c
@@ -26,7 +26,7 @@
#include <stdlib.h>
#include <stdint.h>
#include <assert.h>
-#include <emmintrin.h>
+#include <SSE2NEON.h>
#include "ksw.h"
#ifdef USE_MALLOC_WRAPPERS

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import platform
class Bwa(Package):
@@ -19,10 +20,20 @@ class Bwa(Package):
url='https://github.com/lh3/bwa/archive/0.7.12.tar.gz')
depends_on('zlib')
depends_on('sse2neon', when='target=aarch64:')
patch('bwa_for_aarch64.patch', sha256='b77213b16cf8760f01e32f9a0b2cd8988cf7bac48a11267100f703cbd55c4bfd', when='target=aarch64:')
def install(self, spec, prefix):
filter_file(r'^INCLUDES=',
"INCLUDES=-I%s" % spec['zlib'].prefix.include, 'Makefile')
zlib_inc_path = spec['zlib'].prefix.include
if platform.machine() == 'aarch64':
sse2neon_inc_path = spec['sse2neon'].prefix.include
filter_file(r'^INCLUDES=', "INCLUDES=-I%s -I%s" %
(zlib_inc_path, sse2neon_inc_path),
'Makefile')
else:
filter_file(r'^INCLUDES=', "INCLUDES=-I%s" %
zlib_inc_path, 'Makefile')
filter_file(r'^LIBS=', "LIBS=-L%s " % spec['zlib'].prefix.lib,
'Makefile')
make()

View File

@@ -30,7 +30,7 @@ class Cantera(SConsPackage):
depends_on('googletest+gmock', when='@2.3.0:')
depends_on('eigen', when='@2.3.0:')
depends_on('boost')
depends_on('sundials@:3.1.2', when='+sundials') # must be compiled with -fPIC
depends_on('sundials@:3.1.2+lapack', when='+sundials') # must be compiled with -fPIC
depends_on('blas')
depends_on('lapack')

View File

@@ -0,0 +1,13 @@
diff --git a/configure b/configure
index 04f1a59..602c6cc 100755
--- a/configure
+++ b/configure
@@ -2434,7 +2434,7 @@ if test $($CHARMC -V | awk '{print $3}') -lt $MINIMUM_CHARM_VERSION; then
fi
CHARM_PATH=${CHARMC%/bin/charmc}
-CONV_CONFIG=${CHARM_PATH}/tmp/conv-config.sh
+CONV_CONFIG=${CHARM_PATH}/include/conv-config.sh
CHARMINC=${CHARM_PATH}/include
. ${CONV_CONFIG}

View File

@@ -0,0 +1,46 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Changa(AutotoolsPackage):
    """ChaNGa (Charm N-body GrAvity solver) is a code to perform collisionless
    N-body simulations. It can perform cosmological simulations with periodic
    boundary conditions in comoving coordinates or simulations of isolated
    stellar systems. It also can include hydrodynamics using the Smooth
    Particle Hydrodynamics (SPH) technique. It uses a Barnes-Hut tree to
    calculate gravity, with hexadecapole expansion of nodes and
    Ewald summation for periodic forces. Timestepping is done with a leapfrog
    integrator with individual timesteps for each particle."""

    homepage = "http://faculty.washington.edu/trq/hpcc/tools/changa.html"
    url = "https://github.com/N-BodyShop/changa/archive/v3.4.tar.gz"
    git = "https://github.com/N-BodyShop/changa.git"

    version('master', branch='master')
    version('3.4', sha256='c2bceb6ac00025dfd704bb6960bc17c6df7c746872185845d1e75f47e6ce2a94')

    patch("fix_configure_path.patch")

    # Companion "structures" library, unpacked next to the ChaNGa sources.
    resource(
        name="utility",
        url="https://github.com/N-BodyShop/utility/archive/v3.4.tar.gz",
        sha256="19f9f09023ce9d642e848a36948788fb29cd7deb8e9346cdaac4c945f1416667",
        placement="utility"
    )

    depends_on("charmpp build-target=ChaNGa")

    def configure_args(self):
        # Point the build at the bundled "utility" resource checkout.
        return ["STRUCT_DIR={0}/utility/structures".format(
            self.stage.source_path)]

    def install(self, spec, prefix):
        with working_dir(self.build_directory):
            mkdirp(prefix.bin)
            for exe in ('ChaNGa', 'charmrun'):
                install(exe, prefix.bin)

View File

@@ -52,7 +52,7 @@ class Charmpp(Package):
"build-target",
default="LIBS",
# AMPI also builds charm++, LIBS also builds AMPI and charm++
values=("charm++", "AMPI", "LIBS"),
values=("charm++", "AMPI", "LIBS", "ChaNGa"),
description="Specify the target to build"
)
@@ -217,7 +217,7 @@ def install(self, spec, prefix):
present on the system")
target = spec.variants["build-target"].value
builddir = prefix + "/" + str(self.charmarch)
builddir = prefix
# We assume that Spack's compiler wrappers make this work. If
# not, then we need to query the compiler vendor from Spack

View File

@@ -11,6 +11,7 @@ class Cppcheck(MakefilePackage):
homepage = "http://cppcheck.sourceforge.net/"
url = "https://downloads.sourceforge.net/project/cppcheck/cppcheck/1.78/cppcheck-1.78.tar.bz2"
version('2.1', sha256='ab26eeef039e5b58aac01efb8cb664f2cc16bf9879c61bc93cd00c95be89a5f7')
version('1.87', sha256='e3b0a46747822471df275417d4b74b56ecac88367433e7428f39288a32c581ca')
version('1.81', sha256='bb694f37ae0b5fed48c6cdc2fb5e528daf32cefc64e16b1a520c5411323cf27e')
version('1.78', sha256='e42696f7d6321b98cb479ad9728d051effe543b26aca8102428f60b9850786b1')

View File

@@ -15,13 +15,22 @@ class Cpprestsdk(CMakePackage):
homepage = "https://github.com/Microsoft/cpprestsdk"
url = "https://github.com/Microsoft/cpprestsdk/archive/v2.9.1.tar.gz"
version('2.10.16', git='https://github.com/Microsoft/cpprestsdk', branch='v2.10.16', submodules=True)
version('2.9.1', sha256='537358760acd782f4d2ed3a85d92247b4fc423aff9c85347dc31dbb0ab9bab16')
depends_on('boost@:1.69.0')
depends_on('openssl')
# Ref: https://github.com/microsoft/cpprestsdk/commit/f9f518e4ad84577eb684ad8235181e4495299af4
# Ref: https://github.com/Microsoft/cpprestsdk/commit/6b2e0480018530b616f61d5cdc786c92ba148bb7
# Ref: https://github.com/microsoft/cpprestsdk/commit/70c1b14f39f5d47984fdd8a31fc357ebb5a37851
patch('Release.patch')
patch('Release.patch', when='@2.9.1')
root_cmakelists_dir = 'Release'
def cmake_args(self):
args = [
'-DWERROR:BOOL=Off'
]
return args

View File

@@ -22,6 +22,9 @@
# format returned by platform.system() and 'arch' by platform.machine()
_versions = {
'11.0.2': {
'Linux-x86_64': ('48247ada0e3f106051029ae8f70fbd0c238040f58b0880e55026374a959a69c1', 'http://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux.run'),
'Linux-ppc64le': ('db06d0f3fbf6f7aa1f106fc921ad1c86162210a26e8cb65b171c5240a3bf75da', 'http://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux_ppc64le.run')},
'10.2.89': {
'Linux-x86_64': ('560d07fdcf4a46717f2242948cd4f92c5f9b6fc7eae10dd996614da913d5ca11', 'http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_440.33.01_linux.run'),
'Linux-ppc64le': ('5227774fcb8b10bd2d8714f0a716a75d7a2df240a9f2a49beb76710b1c0fc619', 'http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_440.33.01_linux_ppc64le.run')},

View File

@@ -15,6 +15,7 @@ class Curl(AutotoolsPackage):
# URL must remain http:// so Spack can bootstrap curl
url = "http://curl.haxx.se/download/curl-7.60.0.tar.bz2"
version('7.71.0', sha256='600f00ac2481a89548a4141ddf983fd9386165e1960bac91d0a1c81dca5dd341')
version('7.68.0', sha256='207f54917dd6a2dc733065ccf18d61bb5bebeaceb5df49cd9445483e8623eeb9')
version('7.63.0', sha256='9bab7ed4ecff77020a312d84cc5fb7eb02d58419d218f267477a724a17fd8dd8')
version('7.60.0', sha256='897dfb2204bd99be328279f88f55b7c61592216b0542fcbe995c60aa92871e9b')
@@ -51,6 +52,7 @@ class Curl(AutotoolsPackage):
conflicts('platform=linux', when='+darwinssl')
depends_on('openssl', when='~darwinssl')
depends_on('libidn2')
depends_on('zlib')
depends_on('nghttp2', when='+nghttp2')
depends_on('libssh2', when='+libssh2')
@@ -61,6 +63,8 @@ def configure_args(self):
spec = self.spec
args = ['--with-zlib={0}'.format(spec['zlib'].prefix)]
args.append('--with-libidn2={0}'.format(spec['libidn2'].prefix))
if spec.satisfies('+darwinssl'):
args.append('--with-darwinssl')
else:

View File

@@ -22,6 +22,7 @@ class Dd4hep(CMakePackage):
maintainers = ['vvolkl', 'drbenmorgan']
version('master', branch='master')
version('1.13.0', sha256='0b1f9d902ebe21a9178c1e41204c066b29f68c8836fd1d03a9ce979811ddb295')
version('1.12.1', sha256='85e8c775ec03c499ce10911e228342e757c81ce9ef2a9195cb253b85175a2e93')
version('1.12.0', sha256='133a1fb8ce0466d2482f3ebb03e60b3bebb9b2d3e33d14ba15c8fbb91706b398')
version('1.11.2', sha256='96a53dd26cb8df11c6dae54669fbc9cc3c90dd47c67e07b24be9a1341c95abc4')

View File

@@ -0,0 +1,147 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Dftbplus(MakefilePackage):
    """DFTB+ is an implementation of the
    Density Functional based Tight Binding (DFTB) method,
    containing many extensions to the original method."""

    homepage = "https://www.dftbplus.org"
    url = "https://github.com/dftbplus/dftbplus/archive/19.1.tar.gz"

    version('19.1', sha256='4d07f5c6102f06999d8cfdb1d17f5b59f9f2b804697f14b3bc562e3ea094b8a8')

    # Slater-Koster parameter files used by the test suite; only fetched
    # for versions with matching upstream test parameters.
    resource(name='slakos',
             url='https://github.com/dftbplus/testparams/archive/dftbplus-18.2.tar.gz',
             sha256='bd191b3d240c1a81a8754a365e53a78b581fc92eb074dd5beb8b56a669a8d3d1',
             destination='external/slakos',
             when='@18.2:')

    variant('mpi', default=True,
            description="Build an MPI-paralelised version of the code.")

    variant('gpu', default=False,
            description="Use the MAGMA library "
            "for GPU accelerated computation")

    variant('elsi', default=False,
            description="Use the ELSI library for large scale systems. "
            "Only has any effect if you build with '+mpi'")

    variant('sockets', default=False,
            description="Whether the socket library "
            "(external control) should be linked")

    variant('arpack', default=False,
            description="Use ARPACK for excited state DFTB functionality")

    variant('transport', default=False,
            description="Whether transport via libNEGF should be included. "
            "Only affects parallel build. "
            "(serial version is built without libNEGF/transport)")

    variant('dftd3', default=False,
            description="Use DftD3 dispersion library "
            "(if you need this dispersion model)")

    depends_on('lapack')
    depends_on('blas')
    depends_on('scalapack', when="+mpi")
    depends_on('mpi', when="+mpi")
    depends_on('elsi', when="+elsi")
    depends_on('magma', when="+gpu")
    depends_on('arpack-ng', when="+arpack")
    depends_on('dftd3-lib@0.9.2', when="+dftd3")

    def edit(self, spec, prefix):
        """
        First, change the ROOT variable, because, for some reason,
        the Makefile and the spack install script run in different directories

        Then, if using GCC, rename the file 'sys/make.x86_64-linux-gnu'
        to make.arch.

        After that, edit the make.arch to point to the dependencies

        And the last thing we do here is to set the installdir
        """
        # The makefile computes all paths relative to ROOT; anchor it to
        # the directory this phase actually runs in.
        dircwd = os.getcwd()
        makefile = FileFilter("makefile")
        makefile.filter("ROOT := .*", "ROOT := {0}".format(dircwd))

        # Start from the generic x86_64 template as the architecture
        # makefile.  NOTE(review): assumes an x86_64 Linux host — confirm.
        archmake = join_path(".", "sys", "make.x86_64-linux-gnu")
        copy(archmake, join_path(dircwd, "make.arch"))

        # make.arch carries compiler/library settings; make.config carries
        # the WITH_* feature switches.
        march = FileFilter(join_path(dircwd, 'make.arch'))
        mconfig = FileFilter(join_path(dircwd, 'make.config'))

        # Install straight into the Spack prefix.
        mconfig.filter('INSTALLDIR := .*', 'INSTALLDIR := {0}'.format(prefix))

        if '+gpu' in self.spec:
            march.filter('MAGMADIR = .*',
                         'MAGMADIR = {0}'.format(spec['magma'].prefix))
            mconfig.filter('WITH_GPU := .*', 'WITH_GPU := 1')

        if '+mpi' in self.spec:
            march.filter('SCALAPACKDIR = .*',
                         'SCALAPACKDIR = {0}'.format(spec['scalapack'].prefix))
            march.filter('LIB_LAPACK = -l.*',
                         'LIB_LAPACK = {0}'.format(spec['blas'].libs.ld_flags))
            # Compile with the MPI Fortran wrapper chosen by Spack.
            march.filter('mpifort', '{0}'.format(spec['mpi'].mpifc))
            mconfig.filter('WITH_MPI := .*', 'WITH_MPI := 1')

            # ELSI only makes sense on top of the MPI build (see variant
            # description above).
            if '+elsi' in self.spec:
                mconfig.filter('WITH_ELSI := .*', 'WITH_ELSI := 1')
                # Enable PEXSI only if the ELSI dependency was built
                # with it.
                has_pexsi = '+enable_pexsi' in spec['elsi']
                mconfig.filter('WITH_PEXSI := .*', 'WITH_PEXSI := {0}'.format(
                    '1' if has_pexsi is True else '0'
                ))
                march.filter("ELSIINCDIR .*", "ELSIINCDIR = {0}".format(
                    spec['elsi'].prefix.include
                ))
                march.filter("ELSIDIR .*",
                             "ELSIDIR = {0}".format(spec['elsi'].prefix))
        else:
            # Serial build: the template appends LAPACK/BLAS with '+=',
            # so substitute that form instead.
            march.filter('LIB_LAPACK += -l.*', 'LIB_LAPACK += {0}'.format(
                spec['blas'].libs.ld_flags))

        if '+sockets' in self.spec:
            mconfig.filter('WITH_SOCKETS := .*', 'WITH_SOCKETS := 1')

        if '+transport' in self.spec:
            mconfig.filter('WITH_TRANSPORT := .*', 'WITH_TRANSPORT := 1')

        if '+arpack' in self.spec:
            march.filter('ARPACK_LIBS = .*', 'ARPACK_LIBS = {0}'.format(
                spec['arpack-ng'].libs.ld_flags
            ))
            mconfig.filter('WITH_ARPACK := .*', 'WITH_ARPACK := 1')

        if '+dftd3' in self.spec:
            # Use the external dftd3-lib dependency; COMPILE_DFTD3 = 0
            # presumably disables building the bundled copy — verify.
            march.filter('COMPILE_DFTD3 = .*', 'COMPILE_DFTD3 = 0')
            march.filter('DFTD3_INCS = .*', 'DFTD3_INCS = -I{0}'.format(
                spec['dftd3-lib'].prefix.include
            ))
            march.filter('DFTD3_LIBS = .*',
                         'DFTD3_LIBS = -L{0} -ldftd3'.format(
                             spec['dftd3-lib'].prefix))
            mconfig.filter('WITH_DFTD3 := .*', 'WITH_DFTD3 := 1')

View File

@@ -0,0 +1,38 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Dftd3Lib(MakefilePackage):
    """A dispersion correction for density functionals,
    Hartree-Fock and semi-empirical quantum chemical methods"""

    homepage = "https://www.chemie.uni-bonn.de/pctc/mulliken-center/software/dft-d3/dft-d3"
    url = "https://github.com/dftbplus/dftd3-lib/archive/0.9.2.tar.gz"

    version('0.9.2', sha256='4178f3cf2f3e7e982a7084ec66bac92b4fdf164537d9fc0ada840a11b784f0e0')

    # This fixes a concurrency bug, where make would try to start compiling
    # the dftd3 target before the lib target ended.
    # Since the library is small, disabling causes not much harm
    parallel = False

    def edit(self, spec, prefix):
        """Let Spack's compiler wrapper pick the Fortran compiler."""
        makefile = FileFilter('make.arch')
        # Drop the hard-coded compiler; the wrapper-provided FC is used.
        makefile.filter("FC = gfortran", "")
        # Link with the same compiler that compiled.
        makefile.filter("LN = gfortran", "LN = $(FC)")

    def install(self, spec, prefix):
        """Install the static library, the executable and the Fortran
        module files (needed by dependents such as dftb+)."""
        # mkdirp rather than mkdir: idempotent if the directory already
        # exists, and consistent with the other Makefile-based packages.
        mkdirp(prefix.lib)
        mkdirp(prefix.bin)
        mkdirp(prefix.include)
        install("lib/libdftd3.a", prefix.lib)
        install("prg/dftd3", prefix.bin)
        # All generated .mod files follow the dftd3_<name>.mod pattern.
        for mod in ("api", "common", "core", "pars", "sizes"):
            install("lib/dftd3_{0}.mod".format(mod), prefix.include)

View File

@@ -14,9 +14,10 @@ class Elpa(AutotoolsPackage):
homepage = 'http://elpa.mpcdf.mpg.de/'
url = 'http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
version('2020.05.001', sha256='66ff1cf332ce1c82075dc7b5587ae72511d2bcb3a45322c94af6b01996439ce5')
version('2019.11.001', sha256='10374a8f042e23c7e1094230f7e2993b6f3580908a213dbdf089792d05aff357')
version('2019.05.002', sha256='d2eab5e5d74f53601220b00d18185670da8c00c13e1c1559ecfb0cd7cb2c4e8d')
version('2018.11.001',
sha256='cc27fe8ba46ce6e6faa8aea02c8c9983052f8e73a00cfea38abf7613cb1e1b16')
version('2018.11.001', sha256='cc27fe8ba46ce6e6faa8aea02c8c9983052f8e73a00cfea38abf7613cb1e1b16')
version('2018.05.001.rc1', sha256='598c01da20600a4514ea4d503b93e977ac0367e797cab7a7c1b0e0e3e86490db')
version('2017.11.001', sha256='59f99c3abe2190fac0db8a301d0b9581ee134f438669dbc92551a54f6f861820')
version('2017.05.003', sha256='bccd49ce35a323bd734b17642aed8f2588fea4cc78ee8133d88554753bc3bf1b')

View File

@@ -17,6 +17,8 @@ class Energyplus(Package):
homepage = "https://energyplus.net"
# versions require explicit URLs as they contain hashes
version('9.3.0', sha256='c939dc4f867224e110485a8e0712ce4cfb1e06f8462bc630b54f83a18c93876c',
url="https://github.com/NREL/EnergyPlus/releases/download/v9.3.0/EnergyPlus-9.3.0-baff08990c-Linux-x86_64.tar.gz")
version('8.9.0', sha256='13a5192b25815eb37b3ffd019ce3b99fd9f854935f8cc4362814f41c56e9ca98',
url="https://github.com/NREL/EnergyPlus/releases/download/v8.9.0-WithIDDFixes/EnergyPlus-8.9.0-eba93e8e1b-Linux-x86_64.tar.gz")
@@ -25,13 +27,14 @@ def install(self, spec, prefix):
# and then symlink the appropriate targets
# there is only one folder with a semi-predictable name so we glob it
install_tree(glob.glob('EnergyPlus*')[0],
join_path(prefix.lib, 'energyplus'))
source_dir = '.'
if spec.satisfies('@:8.9.9'):
source_dir = glob.glob('EnergyPlus*')[0]
install_tree(source_dir, prefix.lib.energyplus)
mkdirp(prefix.bin)
os.symlink(join_path(prefix.lib, 'energyplus/energyplus'),
join_path(prefix.bin, 'energyplus'))
os.symlink(join_path(prefix.lib, 'energyplus/EPMacro'),
join_path(prefix.bin, 'EPMacro'))
os.symlink(join_path(prefix.lib, 'energyplus/ExpandObjects'),
join_path(prefix.bin, 'ExpandObjects'))
for b in ['energyplus', 'EPMacro', 'ExpandObjects']:
os.symlink(join_path(prefix.lib.energyplus, b),
join_path(prefix.bin, b))

View File

@@ -169,7 +169,7 @@ def edit(self, spec, prefix):
os.environ['ESMF_CXXLINKLIBS'] = '-lmpifort'
elif '^openmpi' in spec:
os.environ['ESMF_COMM'] = 'openmpi'
elif '^intel-parallel-studio+mpi' in spec:
elif '^intel-parallel-studio+mpi' in spec or '^intel-mpi' in spec:
os.environ['ESMF_COMM'] = 'intelmpi'
else:
# Force use of the single-processor MPI-bypass library.

View File

@@ -0,0 +1,50 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Examl(MakefilePackage):
    """Exascale Maximum Likelihood (ExaML) code for phylogenetic inference
    using MPI.

    This code implements the popular RAxML search algorithm for maximum
    likelihood based inference of phylogenetic trees.
    """

    homepage = "https://github.com/stamatak/ExaML"
    url = "https://github.com/stamatak/ExaML/archive/v3.0.22.tar.gz"
    maintainers = ['robqiao']

    version('3.0.22', sha256='802e673b0c2ea83fdbe6b060048d83f22b6978933a04be64fb9b4334fe318ca3')
    version('3.0.21', sha256='6c7e6c5d7bf4ab5cfbac5cc0d577885272a803c142e06b531693a6a589102e2e')
    version('3.0.20', sha256='023681248bbc7f19821b509948d79301e46bbf275aa90bf12e9f4879639a023b')
    version('3.0.19', sha256='3814230bf7578b8396731dc87ce665d0b1a671d8effd571f924c5b7936ae1c9e')
    version('3.0.18', sha256='1bacb5124d943d921e7beae52b7062626d0ce3cf2f83e3aa3acf6ea26cf9cd87')
    version('3.0.17', sha256='90a859e0b8fff697722352253e748f03c57b78ec5fbc1ae72f7e702d299dac67')
    version('3.0.16', sha256='abc922994332d40892e30f077e4644db08cd59662da8e2a9197d1bd8bcb9aa5f')
    version('3.0.15', sha256='da5e66a63d6fa34b640535c359d8daf67f23bd2fcc958ac604551082567906b0')
    version('3.0.14', sha256='698b538996946ae23a2d6fa1e230c210832e59080da33679ff7d6b342a9e6180')
    version('3.0.13', sha256='893aecb5545798235a17975aa07268693d3526d0aee0ed59a2d6e791248791ed')

    variant('mpi', default=True, description='Enable MPI parallel support')

    depends_on('mpi', when='+mpi')

    def build(self, spec, prefix):
        # The solver (examl/) and the alignment parser (parser/) each ship
        # their own makefile; build both with the SSE3/gcc variant.
        for subdir in ('examl', 'parser'):
            with working_dir(subdir):
                make('-f', 'Makefile.SSE3.gcc')

    def install(self, spec, prefix):
        # Executables go into bin/; the manual and the example data sets
        # are copied wholesale into the prefix.
        mkdirp(prefix.bin)
        for binary in ("examl/examl", "parser/parse-examl"):
            install(binary, prefix.bin)
        for tree in ("manual", "testData"):
            install_tree(tree, join_path(prefix, tree))

View File

@@ -37,6 +37,9 @@ def cmake_args(self):
spec = self.spec
args = []
# allow flatcc to be built with more compilers
args.append('-DFLATCC_ALLOW_WERROR=OFF')
if '+shared' in spec:
args.append('-DBUILD_SHARED_LIBS=ON')
args.append('-DFLATCC_INSTALL=ON')

View File

@@ -15,6 +15,7 @@ class Frontistr(CMakePackage):
git = "https://gitlab.com/FrontISTR-Commons/FrontISTR.git"
maintainers = ['hiroshi.okuda', 'kgoto', 'morita', 'inagaki', 'michioga']
version('5.1', tag='v5.1')
version('5.0', tag='v5.0')
version('master', tag='master')

View File

@@ -18,7 +18,7 @@ class Gdal(AutotoolsPackage):
"""
homepage = "https://www.gdal.org/"
url = "https://download.osgeo.org/gdal/3.1.1/gdal-3.1.1.tar.xz"
url = "https://download.osgeo.org/gdal/3.1.2/gdal-3.1.2.tar.xz"
list_url = "https://download.osgeo.org/gdal/"
list_depth = 1
@@ -29,6 +29,7 @@ class Gdal(AutotoolsPackage):
'osgeo.gdal_array', 'osgeo.gdalconst'
]
version('3.1.2', sha256='767c8d0dfa20ba3283de05d23a1d1c03a7e805d0ce2936beaff0bb7d11450641')
version('3.1.1', sha256='97154a606339a6c1d87c80fb354d7456fe49828b2ef9a3bc9ed91771a03d2a04')
version('3.1.0', sha256='e754a22242ccbec731aacdb2333b567d4c95b9b02d3ba1ea12f70508d244fcda')
version('3.0.4', sha256='5569a4daa1abcbba47a9d535172fc335194d9214fdb96cd0f139bb57329ae277')

View File

@@ -15,6 +15,7 @@ class Gdb(AutotoolsPackage, GNUMirrorPackage):
homepage = "https://www.gnu.org/software/gdb"
gnu_mirror_path = "gdb/gdb-7.10.tar.gz"
version('9.2', sha256='38ef247d41ba7cc3f6f93a612a78bab9484de9accecbe3b0150a3c0391a3faf0')
version('9.1', sha256='fcda54d4f35bc53fb24b50009a71ca98410d71ff2620942e3c829a7f5d614252')
version('8.3.1', sha256='26ce655216cd03f4611518a7a1c31d80ec8e884c16715e9ba8b436822e51434b')
version('8.3', sha256='b2266ec592440d0eec18ee1790f8558b3b8a2845b76cc83a872e39b501ce8a28')

View File

@@ -14,13 +14,17 @@ class GdkPixbuf(Package):
preparation for the change to GTK+ 3."""
homepage = "https://developer.gnome.org/gdk-pixbuf/"
url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/2.38/gdk-pixbuf-2.38.0.tar.xz"
url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/2.40/gdk-pixbuf-2.40.0.tar.xz"
list_url = "https://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
list_depth = 1
version('2.40.0', sha256='1582595099537ca8ff3b99c6804350b4c058bb8ad67411bbaae024ee7cead4e6')
version('2.38.2', sha256='73fa651ec0d89d73dd3070b129ce2203a66171dfc0bd2caa3570a9c93d2d0781')
version('2.38.0', sha256='dd50973c7757bcde15de6bcd3a6d462a445efd552604ae6435a0532fbbadae47')
version('2.31.2', sha256='9e467ed09894c802499fb2399cd9a89ed21c81700ce8f27f970a833efb1e47aa')
variant('x11', default=False, description="Enable X11 support")
depends_on('meson@0.46.0:', type='build', when='@2.37.92:')
depends_on('meson@0.45.0:', type='build', when='@2.37.0:')
depends_on('ninja', type='build', when='@2.37.0:')
@@ -37,6 +41,7 @@ class GdkPixbuf(Package):
depends_on('zlib')
depends_on('libtiff')
depends_on('gobject-introspection')
depends_on('libx11', when='+x11')
# Replace the docbook stylesheet URL with the one that our
# docbook-xsl package uses/recognizes.
@@ -54,7 +59,9 @@ def setup_dependent_run_environment(self, env, dependent_spec):
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
meson('..', *std_meson_args)
meson_args = std_meson_args
meson_args += ['-Dx11={0}'.format('+x11' in spec)]
meson('..', *meson_args)
ninja('-v')
if self.run_tests:
ninja('test')

View File

@@ -0,0 +1,10 @@
diff --git a/third_party/CMakeLists.txt b/third_party/CMakeLists.txt
index 884e50bf6..40618311a 100644
--- a/third_party/CMakeLists.txt
+++ b/third_party/CMakeLists.txt
@@ -1,4 +1,4 @@
-if(GINKGO_BUILD_CUDA)
+if(GINKGO_BUILD_CUDA OR (GINKGO_BUILD_HIP AND GINKGO_HIP_PLATFORM STREQUAL "nvcc"))
enable_language(CUDA)
if (GINKGO_USE_EXTERNAL_CAS)
include(CudaArchitectureSelector RESULT_VARIABLE GINKGO_CAS_FILE)

View File

@@ -18,6 +18,8 @@ class Ginkgo(CMakePackage, CudaPackage):
version('develop', branch='develop')
version('master', branch='master')
version('1.2.0', commit='b4be2be961fd5db45c3d02b5e004d73550722e31') # v1.2.0
version('1.1.1', commit='08d2c5200d3c78015ac8a4fd488bafe1e4240cf5') # v1.1.1
version('1.1.0', commit='b9bec8225442b3eb2a85a870efa112ab767a17fb') # v1.1.0
version('1.0.0', commit='45244641e0c2b19ba33aecd25153c0bddbcbe1a0') # v1.0.0
@@ -44,6 +46,8 @@ def cmake_args(self):
'ON' if '+full_optimizations' in spec else 'OFF'),
'-DGINKGO_DEVEL_TOOLS=%s' % (
'ON' if '+develtools' in spec else 'OFF'),
# Drop HIP support for now
'-DGINKGO_BUILD_HIP=OFF',
# As we are not exposing benchmarks, examples, tests nor doc
# as part of the installation, disable building them altogether.
'-DGINKGO_BUILD_BENCHMARKS=OFF',

View File

@@ -16,6 +16,8 @@ class Glew(Package):
depends_on("cmake", type='build')
depends_on("gl")
depends_on('libsm')
depends_on('libice')
def install(self, spec, prefix):
options = []

View File

@@ -13,6 +13,7 @@ class Global(Package):
homepage = "http://www.gnu.org/software/global"
url = "http://tamacom.com/global/global-6.5.tar.gz"
version('6.6.4', sha256='987e8cb956c53f8ebe4453b778a8fde2037b982613aba7f3e8e74bcd05312594')
version('6.5', sha256='4afd12db1aa600277b39113cc2d61dc59bd6c6b4ee8033da8bb6dd0c39a4c6a9')
depends_on('exuberant-ctags', type=('build', 'run'))

View File

@@ -34,6 +34,7 @@ class Glusterfs(AutotoolsPackage):
depends_on('libuuid')
depends_on('libtirpc')
depends_on('userspace-rcu')
depends_on('pkgconfig', type='build')
def url_for_version(self, version):
url = 'https://download.gluster.org/pub/gluster/glusterfs/{0}/{1}/glusterfs-{1}.tar.gz'

View File

@@ -15,12 +15,13 @@ class Gmt(Package):
"""
homepage = "https://www.generic-mapping-tools.org/"
url = "https://github.com/GenericMappingTools/gmt/archive/6.0.0.tar.gz"
url = "https://github.com/GenericMappingTools/gmt/archive/6.1.0.tar.gz"
git = "https://github.com/GenericMappingTools/gmt.git"
maintainers = ['adamjstewart']
version('master', branch='master')
version('6.1.0', sha256='f76ad7f444d407dfd7e5762644eec3a719c6aeb06d877bf746fe51abd79b1a9e')
version('6.0.0', sha256='7a733e670f01d99f8fc0da51a4337320d764c06a68746621f83ccf2e3453bcb7')
version('5.4.4', sha256='b593dfb101e6507c467619f3d2190a9f78b09d49fe2c27799750b8c4c0cd2da0')
version('4.5.9', sha256='9b13be96ccf4bbd38c14359c05dfa7eeeb4b5f06d6f4be9c33d6c3ea276afc86',
@@ -41,7 +42,7 @@ class Gmt(Package):
# https://github.com/GenericMappingTools/gmt/blob/master/MAINTENANCE.md
# Required dependencies
depends_on('cmake@2.8.7:', type='build', when='@5:')
depends_on('cmake@2.8.12:', type='build', when='@5:')
depends_on('netcdf-c@4:')
depends_on('curl', when='@5.4:')
@@ -59,6 +60,8 @@ class Gmt(Package):
depends_on('graphicsmagick', type='test')
# https://github.com/GenericMappingTools/gmt/pull/3603
patch('regexp.patch', when='@6.1.0')
patch('type.patch', when='@4.5.9')
@when('@5:')

View File

@@ -0,0 +1,11 @@
--- a/src/gmt_regexp.c 2020-07-04 15:13:40.000000000 -0500
+++ b/src/gmt_regexp.c 2020-07-05 18:35:02.000000000 -0500
@@ -217,7 +217,7 @@
/* this is when errors have been encountered */
regerror(status, &re, err_msg, MAX_ERR_LENGTH);
GMT_Report (GMT->parent, GMT_MSG_ERROR, "gmtlib_regexp_match: POSIX ERE matching error: %s\n", err_msg); /* Report error. */
- return (-GMT_RUNTIME_ERROR;)
+ return (-GMT_RUNTIME_ERROR);
}
return (0); /* No match */

View File

@@ -17,6 +17,7 @@ class Gnutls(AutotoolsPackage):
homepage = "http://www.gnutls.org"
url = "https://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.19.tar.xz"
version('3.6.14', sha256='5630751adec7025b8ef955af4d141d00d252a985769f51b4059e5affa3d39d63')
version('3.6.8', sha256='aa81944e5635de981171772857e72be231a7e0f559ae0292d2737de475383e83')
version('3.6.7.1', sha256='881b26409ecd8ea4c514fd3fbdb6fae5fab422ca7b71116260e263940a4bbbad')
version('3.5.19', sha256='1936eb64f03aaefd6eb16cef0567457777618573826b94d03376bb6a4afadc44')

View File

@@ -36,11 +36,13 @@ class Go(Package):
extendable = True
version('1.14.5', sha256='ca4c080c90735e56152ac52cd77ae57fe573d1debb1a58e03da9cc362440315c')
version('1.14.4', sha256='7011af3bbc2ac108d1b82ea8abb87b2e63f78844f0259be20cde4d42c5c40584')
version('1.14.3', sha256='93023778d4d1797b7bc6a53e86c3a9b150c923953225f8a48a2d5fabc971af56')
version('1.14.2', sha256='98de84e69726a66da7b4e58eac41b99cbe274d7e8906eeb8a5b7eb0aadee7f7c')
version('1.14.1', sha256='2ad2572115b0d1b4cb4c138e6b3a31cee6294cb48af75ee86bec3dca04507676')
version('1.14', sha256='6d643e46ad565058c7a39dac01144172ef9bd476521f42148be59249e4b74389')
version('1.13.13', sha256='ab7e44461e734ce1fd5f4f82c74c6d236e947194d868514d48a2b1ea73d25137')
version('1.13.12', sha256='17ba2c4de4d78793a21cc659d9907f4356cd9c8de8b7d0899cdedcef712eba34')
version('1.13.11', sha256='89ed1abce25ad003521c125d6583c93c1280de200ad221f961085200a6c00679')
version('1.13.10', sha256='eb9ccc8bf59ed068e7eff73e154e4f5ee7eec0a47a610fb864e3332a2fdc8b8c')

View File

@@ -27,6 +27,7 @@ class Gpdb(AutotoolsPackage):
version('5.23.0', sha256='b06a797eb941362d5473b84d5def349b5ce12ce87ab116bea7c74ad193738ae9')
depends_on('zstd')
depends_on('py-setuptools@:44.99.99')
depends_on('apr')
depends_on('libevent')
depends_on('curl')
@@ -35,7 +36,17 @@ class Gpdb(AutotoolsPackage):
depends_on('libxml2')
depends_on('flex')
depends_on('readline')
depends_on('py-subprocess32', type=('build', 'run'))
depends_on('python@:2.8.0', type=('build', 'run'))
depends_on('py-lockfile', type=('build', 'run'))
depends_on('py-psutil', type=('build', 'run'))
depends_on('py-utils@:1.0.0', type=('build', 'run'))
def configure_args(self):
args = ['--disable-orca']
args = ['--with-python', '--disable-orca', '--enable-depend',
'--with-libxml']
return args
def setup_run_environment(self, env):
env.append_path('GPHOME', self.prefix)
env.append_path('PYTHONPATH', self.prefix.lib.python)

View File

@@ -0,0 +1,65 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Grads(AutotoolsPackage):
    """The Grid Analysis and Display System (GrADS) is an interactive
    desktop tool that is used for easy access, manipulation, and visualization
    of earth science data. GrADS has two data models for handling gridded and
    station data. GrADS supports many data file formats, including
    binary (stream or sequential), GRIB (version 1 and 2), NetCDF,
    HDF (version 4 and 5), and BUFR (for station data)."""

    homepage = "http://cola.gmu.edu/grads/grads.php"
    url = "ftp://cola.gmu.edu/grads/2.2/grads-2.2.1-src.tar.gz"

    version('2.2.1', sha256='695e2066d7d131720d598bac0beb61ac3ae5578240a5437401dc0ffbbe516206')

    variant('geotiff', default=True, description="Enable GeoTIFF support")
    variant('shapefile', default=True, description="Enable Shapefile support")

    # The block below is deliberately disabled (kept as a bare string, per
    # the original author) until the gauser.c build failure is resolved.
    """
    # FIXME: Fails with undeclared functions (tdefi, tdef, ...) in gauser.c
    variant('hdf5', default=False, description="Enable HDF5 support")
    variant('hdf4', default=False, description="Enable HDF4 support")
    variant('netcdf', default=False, description="Enable NetCDF support")
    depends_on('hdf5', when='+hdf5')
    depends_on('hdf', when='+hdf4')
    depends_on('netcdf-c', when='+netcdf')
    """

    depends_on('libgeotiff', when='+geotiff')
    depends_on('shapelib', when='+shapefile')
    depends_on('udunits')
    depends_on('libgd')
    depends_on('libxmu')
    depends_on('cairo +X +pdf +fc +ft')
    depends_on('readline')
    depends_on('pkgconfig', type='build')

    def setup_build_environment(self, env):
        # configure reads SUPPLIBS as the root of the "supplementary
        # libraries" tree; '/' presumably lets the Spack compiler wrappers
        # resolve the actual paths — TODO confirm against configure.
        env.set('SUPPLIBS', '/')

    def setup_run_environment(self, env):
        # GrADS locates its runtime data files (fonts, maps, udpt)
        # through GADDIR.
        env.set('GADDIR', self.prefix.data)

    @run_after('install')
    def copy_data(self):
        # Ship the runtime data directory alongside the binaries and drop
        # in our own 'udpt' plugin table (see the package directory).
        with working_dir(self.build_directory):
            install_tree('data', self.prefix.data)
        with working_dir(self.package_dir):
            install('udpt', self.prefix.data)
            # Substitute the literal '{lib}' placeholder in the installed
            # udpt file with the real library directory.
            filter_file(
                r'({lib})',
                self.prefix.lib,
                self.prefix.data.udpt
            )

    def configure_args(self):
        # NOTE(review): only 'geotiff' is forwarded to configure; the
        # 'shapefile' variant is not wired up here — verify upstream flags.
        args = []
        args.extend(self.with_or_without('geotiff'))
        return args

View File

@@ -0,0 +1,2 @@
gxdisplay Cairo {lib}/libgxdCairo.so
gxprint Cairo {lib}/libgxpCairo.so

View File

@@ -11,8 +11,14 @@ class Grpc(CMakePackage):
maintainers = ['nazavode']
homepage = "https://grpc.io"
url = "https://github.com/grpc/grpc/archive/v1.27.0.tar.gz"
url = "https://github.com/grpc/grpc/archive/v1.30.0.tar.gz"
version('1.30.0', sha256='419dba362eaf8f1d36849ceee17c3e2ff8ff12ac666b42d3ff02a164ebe090e9')
version('1.29.1', sha256='0343e6dbde66e9a31c691f2f61e98d79f3584e03a11511fad3f10e3667832a45')
version('1.29.0', sha256='c0a6b40a222e51bea5c53090e9e65de46aee2d84c7fa7638f09cb68c3331b983')
version('1.28.2', sha256='4bec3edf82556b539f7e9f3d3801cba540e272af87293a3f4178504239bd111e')
version('1.28.1', sha256='4cbce7f708917b6e58b631c24c59fe720acc8fef5f959df9a58cdf9558d0a79b')
version('1.28.0', sha256='d6277f77e0bb922d3f6f56c0f93292bb4cfabfc3c92b31ee5ccea0e100303612')
version('1.27.0', sha256='3ccc4e5ae8c1ce844456e39cc11f1c991a7da74396faabe83d779836ef449bce')
version('1.26.0', sha256='2fcb7f1ab160d6fd3aaade64520be3e5446fc4c6fa7ba6581afdc4e26094bd81')
version('1.25.0', sha256='ffbe61269160ea745e487f79b0fd06b6edd3d50c6d9123f053b5634737cf2f69')
@@ -29,7 +35,7 @@ class Grpc(CMakePackage):
depends_on('openssl')
depends_on('zlib')
depends_on('c-ares')
depends_on('abseil-cpp', when='@1.27.0:')
depends_on('abseil-cpp', when='@1.27:')
def cmake_args(self):
args = [

View File

@@ -12,7 +12,10 @@ class Gunrock(CMakePackage, CudaPackage):
homepage = "https://gunrock.github.io/docs/"
git = "https://github.com/gunrock/gunrock.git"
version('master', submodules=True)
# tagged versions are broken. See
# https://github.com/gunrock/gunrock/issues/777
# Hence, prefer master version.
version('master', submodules=True, preferred=True)
version('1.1', submodules=True, tag='v1.1')
version('1.0', submodules=True, tag='v1.0')
version('0.5.1', submodules=True, tag='v0.5.1')
@@ -23,9 +26,100 @@ class Gunrock(CMakePackage, CudaPackage):
version('0.2', submodules=True, tag='v0.2')
version('0.1', submodules=True, tag='v0.1')
depends_on('cuda')
variant('cuda', default=True, description="Build with Cuda support")
variant('lib', default=True, description='Build main gunrock library')
variant('shared_libs', default=True, description='Turn off to build for static libraries')
variant('tests', default=True, description='Build functional tests / examples')
variant('mgpu_tests', default=False, description='Builds Gunrock applications and enables the ctest framework for single GPU implementations')
variant('cuda_verbose_ptxas', default=False, description='Enable verbose output from the PTXAS assembler')
variant('google_tests', default=False, description='Build unit tests using googletest')
variant('code_coverage', default=False, description="run code coverage on Gunrock's source code")
# apps
msg = 'select either all or individual applications'
variant(
"applications",
values=disjoint_sets(
('all',), ('bc', 'bfs', 'cc', 'pr', 'sssp', 'dobfs', 'hits',
'salsa', 'mst', 'wtf', 'topk')
).allow_empty_set().with_default('all').with_error(msg),
description="Application to be built"
)
variant('boost', default=True, description='Build with Boost')
variant('metis', default=False, description='Build with Metis support')
depends_on('googletest', when='+google_tests')
depends_on('lcov', when='+code_coverage')
depends_on('boost@1.58.0:', when='+boost')
depends_on('metis', when='+metis')
conflicts('cuda_arch=none', when='+cuda',
msg='Must specify CUDA compute capabilities of your GPU. \
See "spack info gunrock"')
def cmake_args(self):
    """Map Spack variants onto Gunrock's CMake cache options."""
    spec = self.spec

    def onoff(enabled):
        # CMake boolean spelling used by all Gunrock switches.
        return 'ON' if enabled else 'OFF'

    # Boolean variants translate one-to-one into CMake switches.
    switch_table = [
        ('GUNROCK_BUILD_LIB', '+lib'),
        ('GUNROCK_BUILD_SHARED_LIBS', '+shared_libs'),
        ('GUNROCK_BUILD_TESTS', '+tests'),
        ('GUNROCK_MGPU_TESTS', '+mgpu_tests'),
        ('CUDA_VERBOSE_PTXAS', '+cuda_verbose_ptxas'),
        ('GUNROCK_GOOGLE_TESTS', '+google_tests'),
        ('GUNROCK_CODE_COVERAGE', '+code_coverage'),
    ]
    args = ['-D{0}={1}'.format(option, onoff(variant in spec))
            for option, variant in switch_table]

    # Turn off auto detection, which would undo the explicit cuda_arch
    # selection that follows.
    args.append('-DCUDA_AUTODETECT_GENCODE=OFF')
    cuda_arch_list = self.spec.variants['cuda_arch'].value
    if cuda_arch_list[0] != 'none':
        args.extend('-DGUNROCK_GENCODE_SM' + arch + '=ON'
                    for arch in cuda_arch_list)

    app_list = self.spec.variants['applications'].value
    if app_list[0] != 'none':
        # 'all' flips the umbrella switch; each application additionally
        # has its own dedicated option.
        args.append('-DGUNROCK_BUILD_APPLICATIONS={0}'.format(
            onoff(spec.satisfies('applications=all'))))
        applications = ('bc', 'bfs', 'cc', 'pr', 'sssp', 'dobfs',
                        'hits', 'salsa', 'mst', 'wtf', 'topk')
        args.extend('-DGUNROCK_APP_{0}={1}'.format(
            app.upper(), onoff(spec.satisfies('applications=' + app)))
            for app in applications)
    return args
def install(self, spec, prefix):
    """Copy the build products into the installation prefix.

    Gunrock's CMake build has no install target, so the built 'bin'
    and 'lib' trees are copied out of the build directory by hand.
    """
    with working_dir(self.build_directory):
        install_tree('bin', prefix.bin)
        install_tree('lib', prefix.lib)
        # bin dir is created only if tests/examples are built
        # NOTE(review): 'bin' is already installed unconditionally above;
        # this looks like both sides of a merge/diff were retained.
        # Verify which of the two copies is intended.
        if '+tests' in spec:
            install_tree('bin', prefix.bin)

View File

@@ -7,21 +7,24 @@
class Hbase(Package):
"""
Apache HBase is an open-source, distributed, versioned, column-oriented
"""Apache HBase is an open-source, distributed, versioned, column-oriented
store modeled after Google's Bigtable: A Distributed Storage System for
Structured Data by Chang et al. Just as Bigtable leverages the distributed
data storage provided by the Google File System, HBase provides
Bigtable-like capabilities on top of Apache Hadoop.
"""
Bigtable-like capabilities on top of Apache Hadoop."""
homepage = "https://github.com/apache/hbase"
url = "https://github.com/apache/hbase/archive/rel/2.2.2.tar.gz"
homepage = "https://archive.apache.org/"
url = "https://archive.apache.org/dist/hbase/2.2.4/hbase-2.2.4-bin.tar.gz"
list_url = "https://archive.apache.org/dist/hbase"
list_depth = 1
version('2.2.2', sha256='e9a58946e9adff1cac23a0b261ecf32da32f8d2ced0706af1d04e8a67d582926')
version('2.1.8', sha256='121cea4c554879c8401f676c8eb49e39bd35d41c358e919379ad4a318844c8de')
version('2.2.5', sha256='25d08f8f038d9de5beb43dfb0392e8a8b34eae7e0f2670d6c2c172abc3855194')
version('2.2.4', sha256='ec91b628352931e22a091a206be93061b6bf5364044a28fb9e82f0023aca3ca4')
version('2.2.3', sha256='ea8fa72aa6220e038e30bd7c439d181b10bd7225383f7f2d224ebb5f5397310a')
version('2.2.2', sha256='97dcca3a031925a379a0ee6bbfb6007533fb4fdb982c23345e5fc04d6c52bebc')
version('2.1.8', sha256='d8296e8405b1c39c73f0dd03fc6b4d2af754035724168fd56e8f2a0ff175ad90')
depends_on('java@7:', type='run')
depends_on('java@8', type='run')
def install(self, spec, prefix):
install_tree('.', prefix)

View File

@@ -0,0 +1,36 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Heputils(MakefilePackage):
    """Generic tools for high energy physics, e.g. vectors, event records,
    math and other util functions."""

    # Upstream project pages (hosted on Bitbucket).
    homepage = "https://bitbucket.org/andybuckley/heputils/src/default/"
    url = "https://bitbucket.org/andybuckley/heputils/get/heputils-1.3.2.tar.gz"

    version('1.3.2', sha256='be43586979ab1a81a55348d795c2f63a5da19fc6367d5f66f354217c76c809c0')
    version('1.3.1', sha256='7f33ef44364a3d3a39cc65005fb6aa9dfd06bd1b18b41151c0e5e3d28d6ba15b')
    version('1.3.0', sha256='1ec9d9d71d409ce6b2e668e4927b1090ddf2ee9acf25457f767925cf89b24852')
    version('1.2.1', sha256='99f0b27cddffb98977d37418d53f3386e5defda547aeb4c4fda00ab6fcf2cc31')
    version('1.2.0', sha256='0f9f96bd7589f9aec8f1271524b8622291216fe2294ffed772b84d010759eaef')
    version('1.1.0', sha256='671374641cdb6dc093327b69da2d2854df805b6eb8e90f0efefb0788ee4a2edd')
    version('1.0.8', sha256='9b9a45ebff1367cd2ab1ec4ee8c0e124a9b7ed66c75d8961412163ade1962d91')
    version('1.0.7', sha256='481a26755d4e2836563d1f8fcdad663bfa7e21b9878c01bd8a73a67876726b81')
    version('1.0.6', sha256='1ecd8597ef7921a63606b21136900a05a818c9342da7994a42aae768ecca507f')
    version('1.0.5', sha256='efff3d7d6973822f1dced903017f86661e2d054ff3f0d4fe926de2347160e329')
    version('1.0.4', sha256='aeca00c1012bce469c6fe6393edbf4f33043ab671c97a8283a21861caee8b1b4')
    version('1.0.3', sha256='8e7ebe0ad5e87a97cbbff7097092ed8afe5a2d1ecae0f4d4f9a7bf694e221d40')
    version('1.0.2', sha256='83ba7876d884406463cc8ae42214038b7d6c40ead77a1532d64bc96887173f75')
    version('1.0.1', sha256='4bfccc4f4380becb776343e546deb2474deeae79f053ba8ca22287827b8bd4b1')
    version('1.0.0', sha256='4f71c2bee6736ed87d0151e62546d2fc9ff639db58172c26dcf033e5bb1ea04c')

    def build(self, spec, prefix):
        # Nothing to compile (header-style utilities): the build phase is
        # a deliberate no-op; only the 'install' make target is used.
        return

    def install(self, spec, prefix):
        # Delegate to the project's Makefile; PREFIX selects the
        # installation destination inside the Spack prefix.
        make('install', 'PREFIX={0}'.format(prefix))

View File

@@ -0,0 +1,66 @@
--- Utilities/Histogram.h.orig 2018-11-07 14:46:18.967689784 +0100
+++ Utilities/Histogram.h 2018-11-07 14:46:32.755601195 +0100
@@ -111,7 +111,7 @@
* Function to add a weighted point to the histogram
*/
void addWeighted(double input, double weight) {
- if(isnan(input)) return;
+ if(std::isnan(input)) return;
unsigned int ibin;
for(ibin=1; ibin<_bins.size(); ++ibin) {
if(input<_bins[ibin].limit)
--- Contrib/AlpGen/BasicLesHouchesFileReader.cc.orig 2018-11-07 15:38:46.819306762 +0100
+++ Contrib/AlpGen/BasicLesHouchesFileReader.cc 2018-11-07 15:39:04.927190111 +0100
@@ -383,9 +383,9 @@
>> hepeup.PUP[i][3] >> hepeup.PUP[i][4]
>> hepeup.VTIMUP[i] >> hepeup.SPINUP[i] ) )
return false;
- if(isnan(hepeup.PUP[i][0])||isnan(hepeup.PUP[i][1])||
- isnan(hepeup.PUP[i][2])||isnan(hepeup.PUP[i][3])||
- isnan(hepeup.PUP[i][4]))
+ if(std::isnan(hepeup.PUP[i][0])||std::isnan(hepeup.PUP[i][1])||
+ std::isnan(hepeup.PUP[i][2])||std::isnan(hepeup.PUP[i][3])||
+ std::isnan(hepeup.PUP[i][4]))
throw Exception()
<< "nan's as momenta in Les Houches file "
<< Exception::eventerror;
--- DipoleShower/Base/DipoleChain.cc.orig 2018-12-14 16:51:47.597597788 +0100
+++ DipoleShower/Base/DipoleChain.cc 2018-12-14 16:52:09.450796951 +0100
@@ -15,6 +15,7 @@
#include "Herwig++/DipoleShower/Utility/DipolePartonSplitter.h"
#include <boost/utility.hpp>
+#include <boost/next_prior.hpp>
using namespace Herwig;
--- Exsample2/exsample/config.h.orig 2018-12-14 16:56:31.729186139 +0100
+++ Exsample2/exsample/config.h 2018-12-14 16:56:55.969406851 +0100
@@ -25,6 +25,7 @@
#include <limits>
#include <boost/utility.hpp>
+#include <boost/next_prior.hpp>
#include <boost/scoped_array.hpp>
#include <boost/scoped_ptr.hpp>
--- DipoleShower/Base/DipoleEventRecord.cc.orig 2018-12-14 16:58:33.878298134 +0100
+++ DipoleShower/Base/DipoleEventRecord.cc 2018-12-14 16:58:47.983426512 +0100
@@ -19,6 +19,7 @@
#include "ThePEG/PDF/PartonExtractor.h"
#include <boost/utility.hpp>
+#include <boost/next_prior.hpp>
#include <algorithm>
--- ./Exsample2/BinnedStatistics.h.orig 2018-12-14 17:11:37.396421246 +0100
+++ ./Exsample2/BinnedStatistics.h 2018-12-14 17:12:00.762633435 +0100
@@ -16,6 +16,7 @@
#include "ThePEG/Repository/UseRandom.h"
#include <boost/utility.hpp>
+#include <boost/next_prior.hpp>
namespace Herwig {

View File

@@ -0,0 +1,52 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Herwigpp(AutotoolsPackage):
    """Herwig is a multi-purpose particle physics event generator.

    This package provides the old Herwig++ 2 generator."""

    homepage = "https://herwig.hepforge.org/"
    url = "http://lcgpackages.web.cern.ch/lcgpackages/tarFiles/sources/MCGeneratorsTarFiles/Herwig++-2.7.1.tar.bz2"

    # Use the explicit ``sha256=`` keyword for the 64-character digest;
    # a bare positional checksum is ambiguous (historically read as MD5)
    # and every other package in this repository names the hash type.
    version('2.7.1', sha256='80a189376bb65f5ec4e64f42e76c00ea9102d8224010563a424fc11e619a6ad6')

    # Fixes std::isnan usage and missing boost/next_prior.hpp includes.
    patch('herwig++-2.7.1.patch', when='@2.7.1', level=0)

    depends_on('gsl')
    depends_on('boost')
    depends_on('fastjet')
    depends_on('thepeg@1.9.2', when='@2.7.1')

    def configure_args(self):
        """Point configure at the Spack-provided dependency prefixes."""
        args = ['--with-gsl=' + self.spec['gsl'].prefix,
                '--with-thepeg=' + self.spec['thepeg'].prefix,
                '--with-fastjet=' + self.spec['fastjet'].prefix,
                '--with-boost=' + self.spec['boost'].prefix]
        return args

    def build(self, spec, prefix):
        """Build the core generator, then the optional Contrib modules.

        The AlpGen plugins are built explicitly because they need the
        Herwig++ headers from the staged source tree.
        """
        make()
        with working_dir('Contrib'):
            make()
        with working_dir('Contrib/AlpGen'):
            make('BasicLesHouchesFileReader.so',
                 "HERWIGINCLUDE=-I{0}/include".format(self.stage.source_path))
            make('AlpGenHandler.so',
                 "HERWIGINCLUDE=-I{0}/include".format(self.stage.source_path))

    def install(self, spec, prefix):
        """Install Herwig++ plus the AlpGen plugins.

        'make install' does not cover the Contrib plugins, so the two
        shared objects are copied into the plugin directory by hand.
        """
        make('install')
        install(
            join_path(self.stage.source_path,
                      'Contrib', 'AlpGen', 'AlpGenHandler.so'),
            join_path(prefix.lib, 'Herwig++', 'AlpGenHandler.so'))
        install(
            join_path(self.stage.source_path,
                      'Contrib', 'AlpGen', 'BasicLesHouchesFileReader.so'),
            join_path(prefix.lib, 'Herwig++', 'BasicLesHouchesFileReader.so'))

View File

@@ -0,0 +1,78 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
import platform
class Hpcg(AutotoolsPackage):
    """HPCG is a software package that performs a fixed number of multigrid
    preconditioned (using a symmetric Gauss-Seidel smoother) conjugate
    gradient (PCG) iterations using double precision (64 bit) floating
    point values."""

    homepage = "https://www.hpcg-benchmark.org"
    url = "http://www.hpcg-benchmark.org/downloads/hpcg-3.1.tar.gz"
    git = "https://github.com/hpcg-benchmark/hpcg.git"

    version('develop', branch='master')
    version('3.1', sha256='33a434e716b79e59e745f77ff72639c32623e7f928eeb7977655ffcaade0f4a4')

    variant('openmp', default=True, description='Enable OpenMP support')

    # Upstream fix needed to build release 3.1 with GCC 9 and newer.
    patch('https://github.com/hpcg-benchmark/hpcg/commit/e9e0b7e6cae23e1f30dd983c2ce2d3bd34d56f75.patch',
          sha256='23b9de83042eb7a8207fdddcfa79ae2cc1a17e8e623e2224c7751d7c328ee482',
          when='%gcc@9:')

    depends_on('mpi@1.1:')

    # Platform identifier used to name the generated Make.<arch> file
    # and passed to make as 'arch=<arch>'.
    arch = '-'.join([platform.system(), platform.processor()])
    build_targets = ['arch={0}'.format(arch)]

    def configure(self, spec, prefix):
        """Generate setup/Make.<arch> consumed by HPCG's makefile."""
        # Optimization flags; the trailing space keeps concatenation with
        # the optional OpenMP flag well-formed.
        cxxflags = ' '.join(['-O3', '-ffast-math',
                             '-ftree-vectorize',
                             '-ftree-vectorizer-verbose=0']) + ' '
        if '+openmp' in self.spec:
            cxxflags += self.compiler.openmp_flag

        settings = [
            # Shell utilities used by the generated makefile
            'SHELL = /bin/sh',
            'CD = cd',
            'CP = cp',
            'LN_S = ln -fs',
            'MKDIR = mkdir -p',
            'RM = /bin/rm -f',
            'TOUCH = touch',
            # Platform identifier
            'ARCH = {0}'.format(self.arch),
            # HPCG Directory Structure / HPCG library
            'TOPdir = {0}'.format(os.getcwd()),
            'SRCdir = $(TOPdir)/src',
            'INCdir = $(TOPdir)/src',
            'BINdir = $(TOPdir)/bin',
            # Message Passing library (MPI)
            'MPinc = -I{0}'.format(spec['mpi'].prefix.include),
            'MPlib = -L{0}'.format(spec['mpi'].prefix.lib),
            # HPCG includes / libraries / specifics
            'HPCG_INCLUDES = -I$(INCdir) -I$(INCdir)/$(arch) $(MPinc)',
            'HPCG_LIBS =',
            'HPCG_OPTS =',
            'HPCG_DEFS = $(HPCG_OPTS) $(HPCG_INCLUDES)',
            # Compilers / linkers - Optimization flags
            'CXX = {0}'.format(spec['mpi'].mpicxx),
            'CXXFLAGS = $(HPCG_DEFS) {0}'.format(cxxflags),
            'LINKER = $(CXX)',
            'LINKFLAGS = $(CXXFLAGS)',
            'ARCHIVER = ar',
            'ARFLAGS = r',
            'RANLIB = echo',
        ]

        # Emit the configuration, one 'VAR = value' assignment per line.
        with open('setup/Make.{0}'.format(self.arch), 'w') as cfg:
            cfg.write('\n'.join(settings) + '\n')

    def install(self, spec, prefix):
        # HPCG's makefile has no install target; copy the binaries by hand.
        install_tree('bin', prefix.bin)

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os.path
class Jmol(Package):
@@ -11,15 +12,26 @@ class Jmol(Package):
with features for chemicals, crystals, materials and biomolecules."""
homepage = "http://jmol.sourceforge.net/"
url = "https://sourceforge.net/projects/jmol/files/Jmol/Version%2014.8/Jmol%2014.8.0/Jmol-14.8.0-binary.tar.gz"
url = "https://sourceforge.net/projects/jmol/files/Jmol/Version%2014.8/Jmol%2014.8.0/Jmol-14.8.0-binary.tar.gz"
version('14.31.0', sha256='eee0703773607c8bd6d51751d0d062c3e10ce44c11e1d7828e4ea3d5f710e892')
version('14.8.0', sha256='8ec45e8d289aa0762194ca71848edc7d736121ddc72276031a253a3651e6d588')
def url_for_version(self, version):
url = 'https://sourceforge.net/projects/jmol/files/Jmol/Version%20{0}/Jmol%20{1}/Jmol-{1}-binary.tar.gz'
return url.format(version.up_to(2), version)
depends_on('java', type='run')
def install(self, spec, prefix):
install_tree('jmol-{0}'.format(self.version), prefix)
if os.path.exists('jmol-{0}'.format(self.version)):
# tar ball contains subdir with different versions
install_tree('jmol-{0}'.format(self.version), prefix)
else:
# no subdirs - tarball was unpacked in spack-src
install_tree('./', prefix)
def setup_run_environment(self, env):
env.prepend_path('PATH', self.prefix)
env.set('JMOL_HOME', self.prefix)
env.prepend_path('PATH', self.spec['java'].prefix.bin)

View File

@@ -14,6 +14,8 @@ class Jube(PythonPackage):
homepage = "https://www.fz-juelich.de/jsc/jube/"
url = "https://apps.fz-juelich.de/jsc/jube/jube2/download.php?version=2.2.2"
version('2.4.0', sha256='87c02555f3d1a8ecaff139cf8e7a7167cabd1049c8cc77f1bd8f4484e210d524', extension='tar.gz')
version('2.3.0', sha256='6051d45af2ff35031ccc460185fbfa61f7f36ea14f17a0d51a9e62cd7af3709a', extension="tar.gz")
version('2.2.2', sha256='135bc03cf07c4624ef2cf581ba5ec52eb44ca1dac15cffb83637e86170980477', extension="tar.gz")
version('2.2.1', sha256='68751bf2e17766650ccddb7a5321dd1ac8b34ffa3585db392befbe9ff180ddd9', extension="tar.gz")
version('2.2.0', sha256='bc825884fc8506d0fb7b3b5cbb5ad4c7e82b1fe1d7ec861ca33699adfc8100f1', extension="tar.gz")

View File

@@ -25,4 +25,4 @@ class Keepalived(AutotoolsPackage):
version('2.0.12', sha256='fd50e433d784cfd948de5726752cf89ab7001f587fe10a5110c6c7cbda4b7b5e')
version('2.0.11', sha256='a298b0c02a20959cfc365b62c14f45abd50d5e0595b2869f5bce10ec2392fa48')
depends_on('openssl', type='build')
depends_on('openssl')

View File

@@ -13,6 +13,7 @@ class Lhapdf(AutotoolsPackage):
homepage = "https://lhapdf.hepforge.org/"
url = "https://lhapdf.hepforge.org/downloads/?f=LHAPDF-6.2.3.tar.gz"
version('6.3.0', sha256='ed4d8772b7e6be26d1a7682a13c87338d67821847aa1640d78d67d2cef8b9b5d')
version('6.2.3', sha256='d6e63addc56c57b6286dc43ffc56d901516f4779a93a0f1547e14b32cfd82dd1')
depends_on('autoconf', type='build')

Some files were not shown because too many files have changed in this diff Show More