diff --git a/.codecov.yml b/.codecov.yml index a70b19c39c1..cc4954132f5 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,8 +4,7 @@ coverage: range: 60...90 status: project: - default: - threshold: 0.3% + default: yes ignore: - lib/spack/spack/test/.* diff --git a/.github/workflows/install_spack.sh b/.github/workflows/install_spack.sh new file mode 100755 index 00000000000..5efae461a53 --- /dev/null +++ b/.github/workflows/install_spack.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env sh +git clone https://github.com/spack/spack.git +echo -e "config:\n build_jobs: 2" > spack/etc/spack/config.yaml +. spack/share/spack/setup-env.sh +spack compilers diff --git a/.github/workflows/linux_unit_tests.yaml b/.github/workflows/linux_unit_tests.yaml new file mode 100644 index 00000000000..0fe20200ad3 --- /dev/null +++ b/.github/workflows/linux_unit_tests.yaml @@ -0,0 +1,61 @@ +name: linux tests + +on: + push: + branches: + - master + - develop + pull_request: + branches: + - master + - develop +jobs: + unittests: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.5, 3.6, 3.7, 3.8] + + steps: + - uses: actions/checkout@v2 + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install System packages + run: | + sudo apt-get -y update + sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial ninja-build patchelf + # Needed for kcov + sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev + - name: Install Python packages + run: | + pip install --upgrade pip six setuptools codecov coverage + - name: Setup git configuration + run: | + # Need this for the git tests to succeed. 
+ git --version + git config --global user.email "spack@example.com" + git config --global user.name "Test User" + git fetch -u origin develop:develop + - name: Install kcov for bash script coverage + env: + KCOV_VERSION: 34 + run: | + KCOV_ROOT=$(mktemp -d) + wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz + tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz + mkdir -p ${KCOV_ROOT}/build + cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - + make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install + - name: Run unit tests + env: + COVERAGE: true + run: | + share/spack/qa/run-unit-tests + coverage combine + coverage xml + - name: Upload to codecov.io + uses: codecov/codecov-action@v1 + with: + flags: unittests,linux diff --git a/.github/workflows/macos_python.yml b/.github/workflows/macos_python.yml new file mode 100644 index 00000000000..e136102a466 --- /dev/null +++ b/.github/workflows/macos_python.yml @@ -0,0 +1,58 @@ +# These are nightly package tests for macOS +# focus areas: +# - initial user experience +# - scientific python stack +name: macOS builds nightly + +on: + schedule: + # nightly at 1 AM + - cron: '0 1 * * *' + +# GitHub Action Limits +# https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions + +jobs: + install_gcc: + name: gcc with clang + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - name: spack install + run: | + . .github/workflows/install_spack.sh + spack install -v gcc + + install_jupyter_clang: + name: jupyter + runs-on: macos-latest + timeout-minutes: 700 + steps: + - uses: actions/checkout@v2 + - name: spack install + run: | + . .github/workflows/install_spack.sh + spack install -v py-jupyter %clang + + install_scipy_clang: + name: scipy, mpl, pd + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - name: spack install + run: | + . 
.github/workflows/install_spack.sh + spack install -v py-scipy %clang + spack install -v py-matplotlib %clang + spack install -v py-pandas %clang + + install_mpi4py_clang: + name: mpi4py, petsc4py + runs-on: macos-latest + steps: + - uses: actions/checkout@v2 + - name: spack install + run: | + . .github/workflows/install_spack.sh + spack install -v py-mpi4py %clang + spack install -v py-petsc4py %clang diff --git a/.github/workflows/macos_unit_tests.yaml b/.github/workflows/macos_unit_tests.yaml new file mode 100644 index 00000000000..71e59a7b143 --- /dev/null +++ b/.github/workflows/macos_unit_tests.yaml @@ -0,0 +1,48 @@ +name: macos tests + +on: + push: + branches: + - master + - develop + pull_request: + branches: + - master + - develop +jobs: + build: + + runs-on: macos-latest + strategy: + matrix: + python-version: [3.7] + + steps: + - uses: actions/checkout@v2 + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install Python packages + run: | + pip install --upgrade pip six setuptools + pip install --upgrade codecov coverage + pip install --upgrade flake8 pep8-naming + - name: Setup Homebrew packages + run: | + brew update + brew upgrade + brew install gcc gnupg2 dash kcov + - name: Run unit tests + run: | + git --version + git fetch -u origin develop:develop + . 
share/spack/setup-env.sh + coverage run $(which spack) test + coverage combine + coverage xml + - name: Upload to codecov.io + uses: codecov/codecov-action@v1 + with: + file: ./coverage.xml + flags: unittests,macos diff --git a/.gitignore b/.gitignore index da05b2bcf04..e2f485ab924 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ /var/spack/environments /var/spack/repos/*/index.yaml /var/spack/repos/*/lock +__pycache__/ *.pyc /opt *~ @@ -33,3 +34,7 @@ lib/spack/spack/test/.cache .project .cproject .pydevproject + +# VSCode files +.vscode +.devcontainer diff --git a/.travis.yml b/.travis.yml index 86b9f4d3dbb..b59d1666d8b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ branches: # Build matrix #============================================================================= -dist: xenial +dist: bionic jobs: fast_finish: true @@ -22,45 +22,33 @@ jobs: os: linux language: python env: TEST_SUITE=flake8 -# Shell integration with module files - - python: '3.8' - os: linux - language: python - env: [ TEST_SUITE=bootstrap ] - stage: 'unit tests + documentation' python: '2.6' dist: trusty os: linux language: python + addons: + apt: + # Everything but patchelf, that is not available for trusty + packages: + - ccache + - gfortran + - graphviz + - gnupg2 + - kcov + - mercurial + - ninja-build + - realpath + - zsh env: [ TEST_SUITE=unit, COVERAGE=true ] - - python: '2.7' - os: linux - language: python - env: [ TEST_SUITE=unit, COVERAGE=true ] - - python: '3.5' - os: linux - language: python - env: TEST_SUITE=unit - - python: '3.6' - os: linux - language: python - env: TEST_SUITE=unit - - python: '3.7' - os: linux - language: python - env: TEST_SUITE=unit - python: '3.8' os: linux language: python - env: [ TEST_SUITE=unit, COVERAGE=true ] + env: [ TEST_SUITE=shell, COVERAGE=true, KCOV_VERSION=38 ] - python: '3.8' os: linux language: python env: TEST_SUITE=doc - - os: osx - language: generic - env: [ TEST_SUITE=unit, PYTHON_VERSION=2.7, COVERAGE=true ] - if: 
type != pull_request stages: - 'style checks' @@ -77,29 +65,14 @@ addons: apt: packages: - ccache - - cmake + - coreutils - gfortran - graphviz - gnupg2 - - kcov - mercurial - ninja-build - - perl - - perl-base - - realpath - - r-base - - r-base-core - - r-base-dev + - patchelf - zsh - # for Mac builds, we use Homebrew - homebrew: - packages: - - python@2 - - gcc - - gnupg2 - - ccache - - dash - - kcov update: true # ~/.ccache needs to be cached directly as Travis is not taking care of it @@ -110,15 +83,18 @@ cache: directories: - ~/.ccache -# Work around Travis's lack of support for Python on OSX before_install: - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then - pip2 install --upgrade pip; - pip2 install virtualenv; - virtualenv venv; - source venv/bin/activate; - fi - ccache -M 2G && ccache -z + # Install kcov manually, since it's not packaged for bionic beaver + - if [[ "$KCOV_VERSION" ]]; then + sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev; + KCOV_ROOT=$(mktemp -d); + wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz; + tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz; + mkdir -p ${KCOV_ROOT}/build; + cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - ; + make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install; + fi # Install various dependencies install: @@ -156,6 +132,10 @@ after_success: --flags "${TEST_SUITE}${TRAVIS_OS_NAME}"; fi ;; + shell) + codecov --env PYTHON_VERSION + --required + --flags "${TEST_SUITE}${TRAVIS_OS_NAME}"; esac #============================================================================= @@ -163,6 +143,8 @@ after_success: #============================================================================= notifications: email: - recipients: tgamblin@llnl.gov + recipients: + - tgamblin@llnl.gov + - massimiliano.culpo@gmail.com on_success: change 
on_failure: always diff --git a/README.md index a97d8fef57f..d278378ce7a 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,10 @@ # Spack Spack -[![Build Status](https://travis-ci.org/spack/spack.svg?branch=develop)](https://travis-ci.org/spack/spack) +[![MacOS Tests](https://github.com/spack/spack/workflows/macos%20tests/badge.svg)](https://github.com/spack/spack/actions) +[![Linux Tests](https://github.com/spack/spack/workflows/linux%20tests/badge.svg)](https://github.com/spack/spack/actions) [![Linux Builds](https://github.com/spack/spack/workflows/linux%20builds/badge.svg)](https://github.com/spack/spack/actions) +[![macOS Builds (nightly)](https://github.com/spack/spack/workflows/macOS%20builds%20nightly/badge.svg?branch=develop)](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22) +[![Build Status](https://travis-ci.com/spack/spack.svg?branch=develop)](https://travis-ci.com/spack/spack) [![codecov](https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg)](https://codecov.io/gh/spack/spack) [![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io) [![Slack](https://spackpm.herokuapp.com/badge.svg)](https://spackpm.herokuapp.com) @@ -120,4 +123,4 @@ See [LICENSE-MIT](https://github.com/spack/spack/blob/develop/LICENSE-MIT), SPDX-License-Identifier: (Apache-2.0 OR MIT) -LLNL-CODE-647188 +LLNL-CODE-811652 diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml index 2d8672a11d7..f4427b1312f 100644 --- a/etc/spack/defaults/config.yaml +++ b/etc/spack/defaults/config.yaml @@ -157,3 +157,7 @@ config: # Has no effect on macOS. DO NOT MIX these within the same install tree. # See the Spack documentation for details. shared_linking: 'rpath' + + # Set to 'false' to allow installation on filesystems that don't allow setgid bit # manipulation by unprivileged user (e.g. 
AFS) + allow_sgid: true diff --git a/etc/spack/defaults/packages.yaml b/etc/spack/defaults/packages.yaml index 2c91b75e45d..dcfbf76b51b 100644 --- a/etc/spack/defaults/packages.yaml +++ b/etc/spack/defaults/packages.yaml @@ -40,6 +40,7 @@ packages: opencl: [pocl] pil: [py-pillow] pkgconfig: [pkgconf, pkg-config] + rpc: [libtirpc] scalapack: [netlib-scalapack] sycl: [hipsycl] szip: [libszip, libaec] diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 614ad2864c8..6e8c91c9431 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -57,6 +57,12 @@ Build caches are installed via: $ spack buildcache install +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +List of popular build caches +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* `Extreme-scale Scientific Software Stack (E4S) `_: `build cache `_ + ---------- Relocation diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index cfd850af28a..9f67d8c14f5 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -158,6 +158,45 @@ Spack can then use any of the listed external implementations of MPI to satisfy a dependency, and will choose depending on the compiler and architecture. +.. _cmd-spack-external-find: + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Automatically Find External Packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You can run the :ref:`spack external find ` command +to search for system-provided packages and add them to ``packages.yaml``. +After running this command your ``packages.yaml`` may include new entries: + +.. code-block:: yaml + + packages: + cmake: + paths: + cmake@3.17.2: /usr + +Generally this is useful for detecting a small set of commonly-used packages; +for now this is generally limited to finding build-only dependencies. +Specific limitations include: + +* Packages are not discoverable by default: For a package to be + discoverable with ``spack external find``, it needs to add special + logic. 
See :ref:`here ` for more details. +* The current implementation only collects and examines executable files, + so it is typically only useful for build/run dependencies (in some cases + if a library package also provides an executable, it may be possible to + extract a meaningful Spec by running the executable - for example the + compiler wrappers in MPI implementations). +* The logic does not search through module files, it can only detect + packages with executables defined in ``PATH``; you can help Spack locate + externals which use module files by loading any associated modules for + packages that you want Spack to know about before running + ``spack external find``. +* Spack does not overwrite existing entries in the package configuration: + If there is an external defined for a spec at any configuration scope, + then Spack will not add a new external entry (``spack config blame packages`` + can help locate all external entries). + .. _concretization-preferences: -------------------------- diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst index a2baedd576a..a4c15e51557 100644 --- a/lib/spack/docs/build_systems/autotoolspackage.rst +++ b/lib/spack/docs/build_systems/autotoolspackage.rst @@ -233,7 +233,124 @@ You may have noticed that most of the Autotools flags are of the form ``--without-baz``. Since these flags are so common, Spack provides a couple of helper functions to make your life easier. -TODO: document ``with_or_without`` and ``enable_or_disable``. +""""""""""""""""" +enable_or_disable +""""""""""""""""" + +Autotools flags for simple boolean variants can be automatically +generated by calling the ``enable_or_disable`` method. This is +typically used to enable or disable some feature within the package. + +.. 
code-block:: python + + variant( + 'memchecker', + default=False, + description='Memchecker support for debugging [degrades performance]' + ) + config_args.extend(self.enable_or_disable('memchecker')) + +In this example, specifying the variant ``+memchecker`` will generate +the following configuration options: + +.. code-block:: console + + --enable-memchecker + +""""""""""""""" +with_or_without +""""""""""""""" + +Autotools flags for more complex variants, including boolean variants +and multi-valued variants, can be automatically generated by calling +the ``with_or_without`` method. + +.. code-block:: python + + variant( + 'schedulers', + values=disjoint_sets( + ('auto',), ('alps', 'lsf', 'tm', 'slurm', 'sge', 'loadleveler') + ).with_non_feature_values('auto', 'none'), + description="List of schedulers for which support is enabled; " + "'auto' lets openmpi determine", + ) + if 'schedulers=auto' not in spec: + config_args.extend(self.with_or_without('schedulers')) + +In this example, specifying the variant ``schedulers=slurm,sge`` will +generate the following configuration options: + +.. code-block:: console + + --with-slurm --with-sge + +``enable_or_disable`` is actually functionally equivalent with +``with_or_without``, and accepts the same arguments and variant types; +but idiomatic autotools packages often follow these naming +conventions. + +"""""""""""""""" +activation_value +"""""""""""""""" + +Autotools parameters that require an option can still be automatically +generated, using the ``activation_value`` argument to +``with_or_without`` (or, rarely, ``enable_or_disable``). + +.. 
code-block:: python + + variant( + 'fabrics', + values=disjoint_sets( + ('auto',), ('psm', 'psm2', 'verbs', 'mxm', 'ucx', 'libfabric') + ).with_non_feature_values('auto', 'none'), + description="List of fabrics that are enabled; " + "'auto' lets openmpi determine", + ) + if 'fabrics=auto' not in spec: + config_args.extend(self.with_or_without('fabrics', + activation_value='prefix')) + +``activation_value`` accepts a callable that generates the configure +parameter value given the variant value; but the special value +``prefix`` tells Spack to automatically use the dependency's +installation prefix, which is the most common use for such +parameters. In this example, specifying the variant +``fabrics=libfabric`` will generate the following configuration +options: + +.. code-block:: console + + --with-libfabric= + +"""""""""""""""""""" +activation overrides +"""""""""""""""""""" + +Finally, the behavior of either ``with_or_without`` or +``enable_or_disable`` can be overridden for specific variant +values. This is most useful for multi-valued variants where some of +the variant values require atypical behavior. + +.. code-block:: python + + def with_or_without_verbs(self, activated): + # Up through version 1.6, this option was named --with-openib. + # In version 1.7, it was renamed to be --with-verbs. + opt = 'verbs' if self.spec.satisfies('@1.7:') else 'openib' + if not activated: + return '--without-{0}'.format(opt) + return '--with-{0}={1}'.format(opt, self.spec['rdma-core'].prefix) + +Defining ``with_or_without_verbs`` overrides the behavior of a +``fabrics=verbs`` variant, changing the configure-time option to +``--with-openib`` for older versions of the package and specifying an +alternative dependency name: + +.. 
code-block:: + + --with-openib= ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Configure script in a sub-directory diff --git a/lib/spack/docs/build_systems/cudapackage.rst b/lib/spack/docs/build_systems/cudapackage.rst index b4561c69ed8..15150599d37 100644 --- a/lib/spack/docs/build_systems/cudapackage.rst +++ b/lib/spack/docs/build_systems/cudapackage.rst @@ -37,7 +37,7 @@ In order to use it, just add another base class to your package, for example: if '+cuda' in spec: options.append('-DWITH_CUDA=ON') cuda_arch = spec.variants['cuda_arch'].value - if cuda_arch is not None: + if cuda_arch != 'none': options.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0])) else: options.append('-DWITH_CUDA=OFF') diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst index 153b41bdc75..66f473cbf82 100644 --- a/lib/spack/docs/build_systems/intelpackage.rst +++ b/lib/spack/docs/build_systems/intelpackage.rst @@ -1055,6 +1055,6 @@ Footnotes 2. Set the hash length in ``install-path-scheme``, also in ``config.yaml`` (:ref:`q.v. `). 3. You will want to set the *same* hash length for - :ref:`tcl module files ` - if you have Spack produce them for you, under ``naming_scheme`` in - ``modules.yaml``. Other module dialects cannot be altered in this manner. + :ref:`module files ` + if you have Spack produce them for you, under ``projections`` in + ``modules.yaml``. diff --git a/lib/spack/docs/build_systems/wafpackage.rst b/lib/spack/docs/build_systems/wafpackage.rst index 1916630f4ce..36fc21a7722 100644 --- a/lib/spack/docs/build_systems/wafpackage.rst +++ b/lib/spack/docs/build_systems/wafpackage.rst @@ -47,8 +47,9 @@ Each phase provides a ```` function that runs: where ```` is the number of parallel jobs to build with. Each phase also has a ```` function that can pass arguments to this call. -All of these functions are empty except for the ``configure_args`` -function, which passes ``--prefix=/path/to/installation/prefix``. 
+All of these functions are empty. The ``configure`` phase +automatically adds ``--prefix=/path/to/installation/prefix``, so you +don't need to add that in the ``configure_args``. ^^^^^^^ Testing diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index a51c3012fd7..b80e36b4b2b 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -281,7 +281,7 @@ in the lockfile, nor does it install the spec. The ``spack add`` command is environment aware. It adds to the currently active environment. All environment aware commands can also -be called using the ``spack -E`` flag to specify the environment. +be called using the ``spack -e`` flag to specify the environment. .. code-block:: console @@ -292,7 +292,7 @@ or .. code-block:: console - $ spack -E myenv add python + $ spack -e myenv add python .. _environments_concretization: @@ -602,7 +602,7 @@ files are identical. spack: definitions: - first: [libelf, libdwarf] - - compilers: ['%gcc', '^intel'] + - compilers: ['%gcc', '%intel'] - second: - $first - matrix: @@ -676,6 +676,40 @@ The valid variables for a ``when`` clause are: #. ``hostname``. The hostname of the system (if ``hostname`` is an executable in the user's PATH). +"""""""""""""""""""""""" +SpecLists as Constraints +"""""""""""""""""""""""" + +Dependencies and compilers in Spack can be both packages in an +environment and constraints on other packages. References to SpecLists +allow a shorthand to treat packages in a list as either a compiler or +a dependency using the ``$%`` or ``$^`` syntax respectively. + +For example, the following environment has three root packages: +``gcc@8.1.0``, ``mvapich2@2.3.1 %gcc@8.1.0``, and ``hdf5+mpi +%gcc@8.1.0 ^mvapich2@2.3.1``. + +.. 
code-block:: yaml + + spack: + definitions: + - compilers: [gcc@8.1.0] + - mpis: [mvapich2@2.3.1] + - packages: [hdf5+mpi] + + specs: + - $compilers + - matrix: + - [$mpis] + - [$%compilers] + - matrix: + - [$packages] + - [$^mpis] + - [$%compilers] + +This allows for a much-needed reduction in redundancy between packages +and constraints. + ^^^^^^^^^^^^^^^^^^^^^^^^^ Environment-managed Views ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index 8de9d276623..1fbddbd4a98 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -71,10 +71,6 @@ This automatically adds Spack to your ``PATH`` and allows the ``spack`` command to be used to execute spack :ref:`commands ` and :ref:`useful packaging commands `. -If :ref:`environment-modules ` is -installed and available, the ``spack`` command can also load and unload -:ref:`modules `. - ^^^^^^^^^^^^^^^^^ Clean Environment ^^^^^^^^^^^^^^^^^ @@ -900,9 +896,8 @@ Core Spack Utilities ^^^^^^^^^^^^^^^^^^^^ Core Spack uses the following packages, mainly to download and unpack -source code, and to load generated environment modules: ``curl``, -``env``, ``git``, ``go``, ``hg``, ``svn``, ``tar``, ``unzip``, -``patch``, ``environment-modules``. +source code: ``curl``, ``env``, ``git``, ``go``, ``hg``, ``svn``, +``tar``, ``unzip``, ``patch`` As long as the user's environment is set up to successfully run these programs from outside of Spack, they should work inside of Spack as @@ -910,10 +905,6 @@ well. They can generally be activated as in the ``curl`` example above; or some systems might already have an appropriate hand-built environment module that may be loaded. Either way works. -If you find that you are missing some of these programs, ``spack`` can -build some of them for you with ``spack bootstrap``. Currently supported -programs are ``environment-modules``. 
- A few notes on specific programs in this list: """""""""""""""""""""""""" @@ -941,45 +932,6 @@ other programs will also not work, because they also rely on OpenSSL. Once ``curl`` has been installed, you can similarly install the others. -.. _InstallEnvironmentModules: - -""""""""""""""""""" -Environment Modules -""""""""""""""""""" - -In order to use Spack's generated module files, you must have -installed ``environment-modules`` or ``lmod``. The simplest way -to get the latest version of either of these tools is installing -it as part of Spack's bootstrap procedure: - -.. code-block:: console - - $ spack bootstrap - -.. warning:: - At the moment ``spack bootstrap`` is only able to install ``environment-modules``. - Extending its capabilities to prefer ``lmod`` where possible is in the roadmap, - and likely to happen before the next release. - -Alternatively, on many Linux distributions, you can install a pre-built binary -from the vendor's repository. On Fedora/RHEL/CentOS, for example, this can be -done with the command: - -.. code-block:: console - - $ yum install environment-modules - -Once you have the tool installed and available in your path, you can source -Spack's setup file: - -.. code-block:: console - - $ source share/spack/setup-env.sh - -This activates :ref:`shell support ` and makes commands like -``spack load`` available for use. - - ^^^^^^^^^^^^^^^^^ Package Utilities ^^^^^^^^^^^^^^^^^ diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst index 01a13cdf3e4..73047a7754b 100644 --- a/lib/spack/docs/module_file_support.rst +++ b/lib/spack/docs/module_file_support.rst @@ -17,22 +17,16 @@ Spack integrates with `Environment Modules `_ by providing post-install hooks that generate module files and commands to manipulate them. -.. note:: - - If your machine does not already have a module system installed, - we advise you to use either Environment Modules or LMod. 
See :ref:`InstallEnvironmentModules` - for more details. - .. _shell-support: ---------------------------- Using module files via Spack ---------------------------- -If you have installed a supported module system either manually or through -``spack bootstrap``, you should be able to run either ``module avail`` or -``use -l spack`` to see what module files have been installed. Here is -sample output of those programs, showing lots of installed packages: +If you have installed a supported module system you should be able to +run either ``module avail`` or ``use -l spack`` to see what module +files have been installed. Here is sample output of those programs, +showing lots of installed packages: .. code-block:: console @@ -93,9 +87,7 @@ Note that in the latter case it is necessary to explicitly set ``SPACK_ROOT`` before sourcing the setup file (you will get a meaningful error message if you don't). -When ``bash`` and ``ksh`` users update their environment with ``setup-env.sh``, it will check for spack-installed environment modules and add the ``module`` command to their environment; This only occurs if the module command is not already available. You can install ``environment-modules`` with ``spack bootstrap`` as described in :ref:`InstallEnvironmentModules`. - -Finally, if you want to have Spack's shell support available on the command line at +If you want to have Spack's shell support available on the command line at any login you can put this source line in one of the files that are sourced at startup (like ``.profile``, ``.bashrc`` or ``.cshrc``). Be aware though that the startup time may be slightly increased because of that. @@ -467,14 +459,14 @@ is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc`` or any ``llvm`` installation. -.. _modules-naming-scheme: +.. 
_modules-projections: -""""""""""""""""""""""""""" -Customize the naming scheme -""""""""""""""""""""""""""" +""""""""""""""""""""""""""""""" +Customize the naming of modules +""""""""""""""""""""""""""""""" The names of environment modules generated by spack are not always easy to -fully comprehend due to the long hash in the name. There are two module +fully comprehend due to the long hash in the name. There are three module configuration options to help with that. The first is a global setting to adjust the hash length. It can be set anywhere from 0 to 32 and has a default length of 7. This is the representation of the hash in the module file name and @@ -508,20 +500,46 @@ version of python a set of python extensions is associated with. Likewise, the ``openblas`` string is attached to any program that has openblas in the spec, most likely via the ``+blas`` variant specification. +The most heavyweight solution to module naming is to change the entire +naming convention for module files. This uses the projections format +covered in :ref:`adding_projections_to_views`. + +.. code-block:: yaml + + modules: + tcl: + projections: + all: '{name}/{version}-{compiler.name}-{compiler.version}-module' + ^mpi: '{name}/{version}-{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}-module' + +will create module files that are nested in directories by package +name, contain the version and compiler name and version, and have the +word ``module`` before the hash for all specs that do not depend on +mpi, and will have the same information plus the MPI implementation +name and version for all packages that depend on mpi. + +When specifying module names by projection for Lmod modules, we +recommend NOT including names of dependencies (e.g., MPI, compilers) +that are already in the LMod hierarchy. + + + .. note:: - TCL module files - A modification that is specific to ``tcl`` module files is the possibility - to change the naming scheme of modules. 
+ TCL modules + TCL modules also allow for explicit conflicts between modulefiles. .. code-block:: yaml - modules: - tcl: - naming_scheme: '{name}/{version}-{compiler.name}-{compiler.version}' - all: - conflict: - - '{name}' - - 'intel/14.0.1' + modules: + enable: + - tcl + tcl: + projections: + all: '{name}/{version}-{compiler.name}-{compiler.version}' + all: + conflict: + - '{name}' + - 'intel/14.0.1' will create module files that will conflict with ``intel/14.0.1`` and with the base directory of the same module, effectively preventing the possibility to @@ -546,6 +564,8 @@ most likely via the ``+blas`` variant specification. lmod: core_compilers: - 'gcc@4.8' + core_specs: + - 'python' hierarchy: - 'mpi' - 'lapack' @@ -555,6 +575,15 @@ most likely via the ``+blas`` variant specification. implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the other. + All packages built with a compiler in ``core_compilers`` and all + packages that satisfy a spec in ``core_specs`` will be put in the + ``Core`` hierarchy of the lua modules. + +.. warning:: + Consistency of Core packages + The user is responsible for maintaining consistency among core packages, as ``core_specs`` + bypasses the hierarchy that allows LMod to safely switch between coherent software stacks. + .. warning:: Deep hierarchies and ``lmod spider`` For hierarchies that are deeper than three layers ``lmod spider`` may have some issues. diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 26f843aa0e8..840c29454b9 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -2169,13 +2169,17 @@ Adding the following to a package: .. 
code-block:: python - conflicts('%intel', when='@1.2') + conflicts('%intel', when='@:1.2', + msg=' <= v1.2 cannot be built with Intel ICC, ' + 'please use a newer release.') we express the fact that the current package *cannot be built* with the Intel -compiler when we are trying to install version "1.2". The ``when`` argument can -be omitted, in which case the conflict will always be active. +compiler when we are trying to install a version "<=1.2". The ``when`` argument +can be omitted, in which case the conflict will always be active. Conflicts are always evaluated after the concretization step has been performed, and if any match is found a detailed error message is shown to the user. +You can add an additional message via the ``msg=`` parameter to a conflict that +provides more specific instructions for users. .. _packaging_extensions: @@ -4044,6 +4048,70 @@ File functions :py:func:`touch(path) ` Create an empty file at ``path``. +.. _make-package-findable: + +---------------------------------------------------------- +Making a package discoverable with ``spack external find`` +---------------------------------------------------------- + +To make a package discoverable with +:ref:`spack external find ` you must +define one or more executables associated with the package and must +implement a method to generate a Spec when given an executable. + +The executables are specified as a package level ``executables`` +attribute which is a list of strings (see example below); each string +is treated as a regular expression (e.g. 'gcc' would match 'gcc', 'gcc-8.3', +'my-weird-gcc', etc.). + +The method ``determine_spec_details`` has the following signature: + +.. code-block:: python + + def determine_spec_details(prefix, exes_in_prefix): + # exes_in_prefix = a set of paths, each path is an executable + # prefix = a prefix that is common to each path in exes_in_prefix + + # return None or [] if none of the exes represent an instance of + # the package. 
Return one or more Specs for each instance of the + # package which is thought to be installed in the provided prefix + +``determine_spec_details`` takes as parameters a set of discovered +executables (which match those specified by the user) as well as a +common prefix shared by all of those executables. The function must +return one or more Specs associated with the executables (it can also +return ``None`` to indicate that no provided executables are associated +with the package). + +Say for example we have a package called ``foo-package`` which +builds an executable called ``foo``. ``FooPackage`` would appear as +follows: + +.. code-block:: python + + class FooPackage(Package): + homepage = "..." + url = "..." + + version(...) + + # Each string provided here is treated as a regular expression, and + # would match for example 'foo', 'foobar', and 'bazfoo'. + executables = ['foo'] + + @classmethod + def determine_spec_details(cls, prefix, exes_in_prefix): + candidates = list(x for x in exes_in_prefix + if os.path.basename(x) == 'foo') + if not candidates: + return + # This implementation is lazy and only checks the first candidate + exe_path = candidates[0] + exe = spack.util.executable.Executable(exe_path) + output = exe('--version') + version_str = ... # parse output for version string + return Spec('foo-package@{0}'.format(version_str)) + .. _package-lifecycle: ----------------------------- diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst index 0f092d5c5cb..ee3b4e8ad62 100644 --- a/lib/spack/docs/pipelines.rst +++ b/lib/spack/docs/pipelines.rst @@ -32,30 +32,46 @@ for setting up a build pipeline are as follows: #. Create a repository on your gitlab instance #. Add a ``spack.yaml`` at the root containing your pipeline environment (see below for details) -#. Add a ``.gitlab-ci.yml`` at the root containing a single job, similar to +#. 
Add a ``.gitlab-ci.yml`` at the root containing two jobs (one to generate + the pipeline dynamically, and one to run the generated jobs), similar to this one: .. code-block:: yaml - pipeline-job: + stages: [generate, build] + + generate-pipeline: + stage: generate tags: - - ... script: - - spack ci start + - spack ci generate + --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml" + artifacts: + paths: + - "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml" + + build-jobs: + stage: build + trigger: + include: + - artifact: "jobs_scratch_dir/pipeline.yml" + job: generate-pipeline + strategy: depend + #. Add any secrets required by the CI process to environment variables using the CI web ui #. Push a commit containing the ``spack.yaml`` and ``.gitlab-ci.yml`` mentioned above to the gitlab repository -The ````, above, is used to pick one of your configured runners, -while the use of the ``spack ci start`` command implies that runner has an -appropriate version of spack installed and configured for use. Of course, there -are myriad ways to customize the process. You can configure CDash reporting -on the progress of your builds, set up S3 buckets to mirror binaries built by -the pipeline, clone a custom spack repository/ref for use by the pipeline, and -more. +The ````, above, is used to pick one of your configured runners to +run the pipeline generation phase (this is implemented in the ``spack ci generate`` +command, which assumes the runner has an appropriate version of spack installed +and configured for use). Of course, there are many ways to customize the process. +You can configure CDash reporting on the progress of your builds, set up S3 buckets +to mirror binaries built by the pipeline, clone a custom spack repository/ref for +use by the pipeline, and more. While it is possible to set up pipelines on gitlab.com, the builds there are limited to 60 minutes and generic hardware. 
It is also possible to @@ -64,21 +80,30 @@ Gitlab to Google Kubernetes Engine (`GKE `_), though those topics are outside the scope of this document. +Spack's pipelines are now making use of the +`trigger ` syntax to run +dynamically generated +`child pipelines `. +Note that the use of dynamic child pipelines requires running Gitlab version +``>= 12.9``. + ----------------------------------- Spack commands supporting pipelines ----------------------------------- -Spack provides a command `ci` with sub-commands for doing various things related -to automated build pipelines. All of the ``spack ci ...`` commands must be run -from within a environment, as each one makes use of the environment for different -purposes. Additionally, some options to the commands (or conditions present in -the spack environment file) may require particular environment variables to be +Spack provides a command ``ci`` with two sub-commands: ``spack ci generate`` generates +a pipeline (a .gitlab-ci.yml file) from a spack environment, and ``spack ci rebuild`` +checks a spec against a remote mirror and possibly rebuilds it from source and updates +the binary mirror with the latest built package. Both ``spack ci ...`` commands must +be run from within the same environment, as each one makes use of the environment for +different purposes. Additionally, some options to the commands (or conditions present +in the spack environment file) may require particular environment variables to be set in order to function properly. Examples of these are typically secrets needed for pipeline operation that should not be visible in a spack environment file. These environment variables are described in more detail :ref:`ci_environment_variables`. -.. _cmd_spack_ci: +.. _cmd-spack-ci: ^^^^^^^^^^^^^^^^^^ ``spack ci`` @@ -87,16 +112,7 @@ file. These environment variables are described in more detail Super-command for functionality related to generating pipelines and executing pipeline jobs. -.. 
_cmd_spack_ci_start: - -^^^^^^^^^^^^^^^^^^ -``spack ci start`` -^^^^^^^^^^^^^^^^^^ - -Currently this command is a short-cut to first run ``spack ci generate``, followed -by ``spack ci pushyaml``. - -.. _cmd_spack_ci_generate: +.. _cmd-spack-ci-generate: ^^^^^^^^^^^^^^^^^^^^^ ``spack ci generate`` @@ -105,40 +121,7 @@ by ``spack ci pushyaml``. Concretizes the specs in the active environment, stages them (as described in :ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk. -.. _cmd_spack_ci_pushyaml: - -^^^^^^^^^^^^^^^^^^^^^ -``spack ci pushyaml`` -^^^^^^^^^^^^^^^^^^^^^ - -Generates a commit containing the generated ``.gitlab-ci.yml`` and pushes it to a -``DOWNSTREAM_CI_REPO``, which is frequently the same repository. The branch -created has the same name as the current branch being tested, but has ``multi-ci-`` -prepended to the branch name. Once Gitlab CI has full support for dynamically -defined workloads, this command will be deprecated. - -Until this command is no longer needed and can be deprecated, there are -a few gotchas to note. While you can embed your username and password in the -`DOWNSTREAM_CI_REPO` url, you may not be able to have Gitlab mask the value, as -it will likely contain characters that Gitlab cannot currently mask. Another -option is to set up an SSH token, but for this to work, the associated SSH -key must be passphrase-less so that it can be provided in an automated manner. - -If you attempt to set up an SSH token that does require a passphrase, you may -see a log message similar to: - -``` -fatal: https:////:/info/refs not valid: is this a git repository? -``` - -In this case, you can try a passphrase-less SSH key, or else embed your gitlab -username and password in the `DOWNSTREAM_CI_REPO` as in the following example: - -``` -https://:@//.git -``` - -.. _cmd_spack_ci_rebuild: +.. 
_cmd-spack-ci-rebuild: ^^^^^^^^^^^^^^^^^^^^ ``spack ci rebuild`` @@ -179,14 +162,14 @@ sections describing a build pipeline: - os=ubuntu18.04 runner-attributes: tags: - - spack-k8s - image: spack/spack_builder_ubuntu_18.04 + - spack-kube + image: spack/ubuntu-bionic - match: - os=centos7 runner-attributes: tags: - - spack-k8s - image: spack/spack_builder_centos_7 + - spack-kube + image: spack/centos7 cdash: build-group: Release Testing url: https://cdash.spack.io @@ -389,22 +372,29 @@ containing the url and branch/tag you want to clone (calling them, for example, ``SPACK_REPO`` and ``SPACK_REF``), use them to clone spack in your pre-ci ``before_script``, and finally pass those same values along to the workload generation process via the ``spack-repo`` and ``spack-ref`` cli args. Here's -an example: +the ``generate-pipeline`` job from the top of this document, updated to clone +a custom spack and make sure the generated rebuild jobs will clone it too: .. code-block:: yaml - pipeline-job: + generate-pipeline: tags: - before_script: - git clone ${SPACK_REPO} --branch ${SPACK_REF} - . ./spack/share/spack/setup-env.sh script: - - spack ci start --spack-repo ${SPACK_REPO} --spack-ref ${SPACK_REF} <...args> + - spack ci generate + --spack-repo ${SPACK_REPO} --spack-ref ${SPACK_REF} + --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml" after_script: - rm -rf ./spack + artifacts: + paths: + - "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml" -If the ``spack ci start`` command receives those extra command line arguments, + +If the ``spack ci generate`` command receives those extra command line arguments, then it adds similar ``before_script`` and ``after_script`` sections for each of the ``spack ci rebuild`` jobs it generates (cloning and sourcing a custom spack in the ``before_script`` and removing it again in the ``after_script``). @@ -451,11 +441,3 @@ SPACK_SIGNING_KEY ^^^^^^^^^^^^^^^^^ Needed to sign/verify binary packages from the remote binary mirror. 
- -^^^^^^^^^^^^^^^^^^ -DOWNSTREAM_CI_REPO -^^^^^^^^^^^^^^^^^^ - -Needed until Gitlab CI supports dynamic job generation. Can contain connection -credentials embedded in the url, and could be the same repository or a different -one. diff --git a/lib/spack/docs/workflows.rst b/lib/spack/docs/workflows.rst index 239ee55a6ac..17ca6950823 100644 --- a/lib/spack/docs/workflows.rst +++ b/lib/spack/docs/workflows.rst @@ -1358,6 +1358,14 @@ The main points that are implemented below: the spack builds in the config. (The Travis yaml parser is a bit buggy on the echo command.) +#. Without control for the user, Travis jobs will run on various + ``x86_64`` microarchitectures. If you plan to cache build results, + e.g. to accelerate dependency builds, consider building for the + generic ``x86_64`` target only. + Limiting the microarchitecture will also find more packages when + working with the + `E4S Spack build cache `_. + #. Builds over 10 minutes need to be prefixed with ``travis_wait``. Alternatively, generate output once with ``spack install -v``. @@ -1400,7 +1408,9 @@ The main points that are implemented below: - if ! which spack >/dev/null; then mkdir -p $SPACK_ROOT && git clone --depth 50 https://github.com/spack/spack.git $SPACK_ROOT && - echo -e "config:""\n build_jobs:"" 2" > $SPACK_ROOT/etc/spack/config.yaml; + echo -e "config:""\n build_jobs:"" 2" > $SPACK_ROOT/etc/spack/config.yaml && + echo -e "packages:""\n all:""\n target:"" ['x86_64']" + > $SPACK_ROOT/etc/spack/packages.yaml; fi - travis_wait spack install cmake@3.7.2~openssl~ncurses - travis_wait spack install boost@1.62.0~graph~iostream~locale~log~wave @@ -1432,8 +1442,7 @@ The following functionality is prepared: #. Base image: the example starts from a minimal ubuntu. -#. Pre-install the spack dependencies, including modules from the packages. - This avoids needing to build those from scratch via ``spack bootstrap``. +#. Pre-install the spack dependencies. 
Package installs are followed by a clean-up of the system package index, to avoid outdated information and it saves space. diff --git a/lib/spack/env/cc b/lib/spack/env/cc index f2b8bf577f5..b5913c5f10b 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -15,9 +15,9 @@ # 1. It allows Spack to swap compilers into and out of builds easily. # 2. It adds several options to the compile line so that spack # packages can find their dependencies at build time and run time: -# -I arguments for dependency /include directories. -# -L arguments for dependency /lib directories. -# -Wl,-rpath arguments for dependency /lib directories. +# -I and/or -isystem arguments for dependency /include directories. +# -L arguments for dependency /lib directories. +# -Wl,-rpath arguments for dependency /lib directories. # # This is an array of environment variables that need to be set before @@ -251,10 +251,11 @@ input_command="$*" # # Parse the command line arguments. # -# We extract -L, -I, and -Wl,-rpath arguments from the command line and -# recombine them with Spack arguments later. We parse these out so that -# we can make sure that system paths come last, that package arguments -# come first, and that Spack arguments are injected properly. +# We extract -L, -I, -isystem and -Wl,-rpath arguments from the +# command line and recombine them with Spack arguments later. We +# parse these out so that we can make sure that system paths come +# last, that package arguments come first, and that Spack arguments +# are injected properly. # # All other arguments, including -l arguments, are treated as # 'other_args' and left in their original order. This ensures that @@ -273,12 +274,24 @@ system_libdirs=() system_rpaths=() libs=() other_args=() +isystem_system_includes=() +isystem_includes=() while [ -n "$1" ]; do # an RPATH to be added after the case statement. 
rp="" case "$1" in + -isystem*) + arg="${1#-isystem}" + isystem_was_used=true + if [ -z "$arg" ]; then shift; arg="$1"; fi + if system_dir "$arg"; then + isystem_system_includes+=("$arg") + else + isystem_includes+=("$arg") + fi + ;; -I*) arg="${1#-I}" if [ -z "$arg" ]; then shift; arg="$1"; fi @@ -425,12 +438,6 @@ then esac fi -# Prepend include directories -IFS=':' read -ra include_dirs <<< "$SPACK_INCLUDE_DIRS" -if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then - includes=("${includes[@]}" "${include_dirs[@]}") -fi - IFS=':' read -ra rpath_dirs <<< "$SPACK_RPATH_DIRS" if [[ $mode == ccld || $mode == ld ]]; then @@ -481,9 +488,22 @@ args=() # flags assembled earlier args+=("${flags[@]}") -# include directory search paths +# Insert include directories just prior to any system include directories + for dir in "${includes[@]}"; do args+=("-I$dir"); done +for dir in "${isystem_includes[@]}"; do args+=("-isystem$dir"); done + +IFS=':' read -ra spack_include_dirs <<< "$SPACK_INCLUDE_DIRS" +if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then + if [[ "$isystem_was_used" == "true" ]] ; then + for dir in "${spack_include_dirs[@]}"; do args+=("-isystem$dir"); done + else + for dir in "${spack_include_dirs[@]}"; do args+=("-I$dir"); done + fi +fi + for dir in "${system_includes[@]}"; do args+=("-I$dir"); done +for dir in "${isystem_system_includes[@]}"; do args+=("-isystem$dir"); done # Library search paths for dir in "${libdirs[@]}"; do args+=("-L$dir"); done diff --git a/lib/spack/llnl/util/argparsewriter.py b/lib/spack/llnl/util/argparsewriter.py index f43595145e4..8ecf6acc88e 100644 --- a/lib/spack/llnl/util/argparsewriter.py +++ b/lib/spack/llnl/util/argparsewriter.py @@ -45,18 +45,18 @@ def __init__(self, prog, description, usage, class ArgparseWriter(argparse.HelpFormatter): """Analyzes an argparse ArgumentParser for easy generation of help.""" - def __init__(self, prog, out=sys.stdout, aliases=False): + def 
__init__(self, prog, out=None, aliases=False): """Initializes a new ArgparseWriter instance. Parameters: prog (str): the program name - out (file object): the file to write to + out (file object): the file to write to (default sys.stdout) aliases (bool): whether or not to include subparsers for aliases """ super(ArgparseWriter, self).__init__(prog) self.level = 0 self.prog = prog - self.out = out + self.out = sys.stdout if out is None else out self.aliases = aliases def parse(self, parser, prog): @@ -167,7 +167,7 @@ def write(self, parser): class ArgparseRstWriter(ArgparseWriter): """Write argparse output as rst sections.""" - def __init__(self, prog, out=sys.stdout, aliases=False, + def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels): """Create a new ArgparseRstWriter. @@ -178,6 +178,7 @@ def __init__(self, prog, out=sys.stdout, aliases=False, rst_levels (list of str): list of characters for rst section headings """ + out = sys.stdout if out is None else out super(ArgparseRstWriter, self).__init__(prog, out, aliases) self.rst_levels = rst_levels diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py index 79f62c00403..b6f5ec782f9 100644 --- a/lib/spack/llnl/util/tty/color.py +++ b/lib/spack/llnl/util/tty/color.py @@ -215,20 +215,22 @@ def cextra(string): return len(''.join(re.findall(r'\033[^m]*m', string))) -def cwrite(string, stream=sys.stdout, color=None): +def cwrite(string, stream=None, color=None): """Replace all color expressions in string with ANSI control codes and write the result to the stream. If color is False, this will write plain text with no color. If True, then it will always write colored output. If not supplied, then it will be set based on stream.isatty(). 
""" + stream = sys.stdout if stream is None else stream if color is None: color = get_color_when() stream.write(colorize(string, color=color)) -def cprint(string, stream=sys.stdout, color=None): +def cprint(string, stream=None, color=None): """Same as cwrite, but writes a trailing newline to the stream.""" + stream = sys.stdout if stream is None else stream cwrite(string + "\n", stream, color) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 38ed5baa7be..963fecd3751 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -209,14 +209,15 @@ def optimization_flags(self, compiler): compiler_version = compiler.version version_number, suffix = cpu.version_components(compiler.version) if not version_number or suffix not in ('', 'apple'): - # Try to deduce the correct version. Depending on where this - # function is called we might get either a CompilerSpec or a - # fully fledged compiler object + # Try to deduce the underlying version of the compiler, regardless + # of its name in compilers.yaml. Depending on where this function + # is called we might get either a CompilerSpec or a fully fledged + # compiler object. 
import spack.spec if isinstance(compiler, spack.spec.CompilerSpec): compiler = spack.compilers.compilers_for_spec(compiler).pop() try: - compiler_version = compiler.cc_version(compiler.cc) + compiler_version = compiler.get_real_version() except spack.util.executable.ProcessError as e: # log this and just return compiler.version instead tty.debug(str(e)) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 0fb6feae028..926f4679a10 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -514,7 +514,7 @@ def make_package_relative(workdir, spec, allow_root): platform.system().lower() == 'linux'): relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root) - relocate.check_files_relocatable(cur_path_names, allow_root) + relocate.raise_if_not_relocatable(cur_path_names, allow_root) orig_path_names = list() cur_path_names = list() for linkname in buildinfo.get('relocate_links', []): @@ -532,7 +532,7 @@ def check_package_relocatable(workdir, spec, allow_root): cur_path_names = list() for filename in buildinfo['relocate_binaries']: cur_path_names.append(os.path.join(workdir, filename)) - relocate.check_files_relocatable(cur_path_names, allow_root) + relocate.raise_if_not_relocatable(cur_path_names, allow_root) def relocate_package(spec, allow_root): @@ -615,17 +615,13 @@ def is_backup_file(file): prefix_to_prefix, rel, old_prefix, new_prefix) - # Relocate links to the new install prefix - link_names = [linkname - for linkname in buildinfo.get('relocate_links', [])] - relocate.relocate_links(link_names, - old_layout_root, - new_layout_root, - old_prefix, - new_prefix, - prefix_to_prefix) + # Relocate links to the new install prefix + links = [link for link in buildinfo.get('relocate_links', [])] + relocate.relocate_links( + links, old_layout_root, old_prefix, new_prefix + ) - # For all buildcaches + # For all buildcaches # relocate the install prefixes in text 
files including dependencies relocate.relocate_text(text_names, old_layout_root, new_layout_root, @@ -636,7 +632,6 @@ def is_backup_file(file): # relocate the install prefixes in binary files including dependencies relocate.relocate_text_bin(files_to_relocate, - old_layout_root, new_layout_root, old_prefix, new_prefix, old_spack_prefix, new_spack_prefix, diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 21ab6895ec1..f997110b4c9 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -32,6 +32,7 @@ Skimming this module is a nice way to get acquainted with the types of calls you can make from within the install() function. """ +import re import inspect import multiprocessing import os @@ -53,6 +54,7 @@ import spack.paths import spack.schema.environment import spack.store +import spack.architecture as arch from spack.util.string import plural from spack.util.environment import ( env_flag, filter_system_paths, get_path, is_system_path, @@ -60,7 +62,7 @@ from spack.util.environment import system_dirs from spack.error import NoLibrariesError, NoHeadersError from spack.util.executable import Executable -from spack.util.module_cmd import load_module, get_path_from_module +from spack.util.module_cmd import load_module, get_path_from_module, module from spack.util.log_parse import parse_log_events, make_log_context @@ -147,6 +149,19 @@ def clean_environment(): env.unset('DYLD_LIBRARY_PATH') env.unset('DYLD_FALLBACK_LIBRARY_PATH') + # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid + # interference with Spack dependencies. + # CNL requires these variables to be set (or at least some of them, + # depending on the CNL version). 
+ hostarch = arch.Arch(arch.platform(), 'default_os', 'default_target') + on_cray = str(hostarch.platform) == 'cray' + using_cnl = re.match(r'cnl\d+', str(hostarch.os)) + if on_cray and not using_cnl: + env.unset('CRAY_LD_LIBRARY_PATH') + for varname in os.environ.keys(): + if 'PKGCONF' in varname: + env.unset(varname) + build_lang = spack.config.get('config:build_language') if build_lang: # Override language-related variables. This can be used to force @@ -349,10 +364,6 @@ def set_build_environment_variables(pkg, env, dirty): extra_rpaths = ':'.join(compiler.extra_rpaths) env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths) - implicit_rpaths = compiler.implicit_rpaths() - if implicit_rpaths: - env.set('SPACK_COMPILER_IMPLICIT_RPATHS', ':'.join(implicit_rpaths)) - # Add bin directories from dependencies to the PATH for the build. for prefix in build_prefixes: for dirname in ['bin', 'bin64']: @@ -415,7 +426,7 @@ def _set_variables_for_single_module(pkg, module): if getattr(module, marker, False): return - jobs = spack.config.get('config:build_jobs') if pkg.parallel else 1 + jobs = spack.config.get('config:build_jobs', 16) if pkg.parallel else 1 jobs = min(jobs, multiprocessing.cpu_count()) assert jobs is not None, "no default set for config:build_jobs" @@ -532,7 +543,7 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, # TODO: Compiler arguments should not be hardcoded but provided by # the different compiler classes. - if 'linux' in arch: + if 'linux' in arch or 'cray' in arch: soname = os.path.basename(shared_lib) if compat_version: @@ -609,7 +620,7 @@ def get_rpaths(pkg): # module show output. 
if pkg.compiler.modules and len(pkg.compiler.modules) > 1: rpaths.append(get_path_from_module(pkg.compiler.modules[1])) - return rpaths + return list(dedupe(filter_system_paths(rpaths))) def get_std_cmake_args(pkg): @@ -717,11 +728,21 @@ def setup_package(pkg, dirty): load_module("cce") load_module(mod) + # kludge to handle cray libsci being automatically loaded by PrgEnv + # modules on cray platform. Module unload does no damage when + # unnecessary + module('unload', 'cray-libsci') + if pkg.architecture.target.module_name: load_module(pkg.architecture.target.module_name) load_external_modules(pkg) + implicit_rpaths = pkg.compiler.implicit_rpaths() + if implicit_rpaths: + build_env.set('SPACK_COMPILER_IMPLICIT_RPATHS', + ':'.join(implicit_rpaths)) + # Make sure nothing's strange about the Spack environment. validate(build_env, tty.warn) build_env.apply_modifications() @@ -801,12 +822,11 @@ def child_process(child_pipe, input_stream): setup_package(pkg, dirty=dirty) return_value = function() child_pipe.send(return_value) - except StopIteration as e: - # StopIteration is used to stop installations - # before the final stage, mainly for debug purposes - tty.msg(e) - child_pipe.send(None) + except StopPhase as e: + # Do not create a full ChildError from this, it's not an error + # it's a control statement. + child_pipe.send(e) except BaseException: # catch ANYTHING that goes wrong in the child process exc_type, exc, tb = sys.exc_info() @@ -858,15 +878,20 @@ def child_process(child_pipe, input_stream): child_result = parent_pipe.recv() p.join() + # If returns a StopPhase, raise it + if isinstance(child_result, StopPhase): + # do not print + raise child_result + # let the caller know which package went wrong. if isinstance(child_result, InstallError): child_result.pkg = pkg - # If the child process raised an error, print its output here rather - # than waiting until the call to SpackError.die() in main(). 
This - # allows exception handling output to be logged from within Spack. - # see spack.main.SpackCommand. if isinstance(child_result, ChildError): + # If the child process raised an error, print its output here rather + # than waiting until the call to SpackError.die() in main(). This + # allows exception handling output to be logged from within Spack. + # see spack.main.SpackCommand. child_result.print_context() raise child_result @@ -1055,3 +1080,13 @@ def __reduce__(self): def _make_child_error(msg, module, name, traceback, build_log, context): """Used by __reduce__ in ChildError to reconstruct pickled errors.""" return ChildError(msg, module, name, traceback, build_log, context) + + +class StopPhase(spack.error.SpackError): + """Pickle-able exception to control stopped builds.""" + def __reduce__(self): + return _make_stop_phase, (self.message, self.long_message) + + +def _make_stop_phase(msg, long_msg): + return StopPhase(msg, long_msg) diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py index 6fc77bee6ac..72a2c0afbb0 100644 --- a/lib/spack/spack/build_systems/autotools.py +++ b/lib/spack/spack/build_systems/autotools.py @@ -11,6 +11,7 @@ import shutil import stat import sys +import re from subprocess import PIPE from subprocess import check_call @@ -56,8 +57,9 @@ class AutotoolsPackage(PackageBase): #: This attribute is used in UI queries that need to know the build #: system base class build_system_class = 'AutotoolsPackage' - #: Whether or not to update ``config.guess`` on old architectures - patch_config_guess = True + #: Whether or not to update ``config.guess`` and ``config.sub`` on old + #: architectures + patch_config_files = True #: Whether or not to update ``libtool`` #: (currently only for Arm/Clang/Fujitsu compilers) patch_libtool = True @@ -86,79 +88,99 @@ def archive_files(self): return [os.path.join(self.build_directory, 'config.log')] @run_after('autoreconf') - def _do_patch_config_guess(self): 
- """Some packages ship with an older config.guess and need to have - this updated when installed on a newer architecture. In particular, - config.guess fails for PPC64LE for version prior to a 2013-06-10 - build date (automake 1.13.4) and for ARM (aarch64).""" + def _do_patch_config_files(self): + """Some packages ship with older config.guess/config.sub files and + need to have these updated when installed on a newer architecture. + In particular, config.guess fails for PPC64LE for version prior + to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64).""" - if not self.patch_config_guess or ( + if not self.patch_config_files or ( not self.spec.satisfies('target=ppc64le:') and not self.spec.satisfies('target=aarch64:') ): return - my_config_guess = None - config_guess = None - if os.path.exists('config.guess'): - # First search the top-level source directory - my_config_guess = 'config.guess' + + # TODO: Expand this to select the 'config.sub'-compatible architecture + # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but + # does accept 'ppc64le'). + if self.spec.satisfies('target=ppc64le:'): + config_arch = 'ppc64le' + elif self.spec.satisfies('target=aarch64:'): + config_arch = 'aarch64' else: - # Then search in all sub directories. - # We would like to use AC_CONFIG_AUX_DIR, but not all packages - # ship with their configure.in or configure.ac. - d = '.' 
- dirs = [os.path.join(d, o) for o in os.listdir(d) - if os.path.isdir(os.path.join(d, o))] - for dirname in dirs: - path = os.path.join(dirname, 'config.guess') + config_arch = 'local' + + my_config_files = {'guess': None, 'sub': None} + config_files = {'guess': None, 'sub': None} + config_args = {'guess': [], 'sub': [config_arch]} + + for config_name in config_files.keys(): + config_file = 'config.{0}'.format(config_name) + if os.path.exists(config_file): + # First search the top-level source directory + my_config_files[config_name] = config_file + else: + # Then search in all sub directories recursively. + # We would like to use AC_CONFIG_AUX_DIR, but not all packages + # ship with their configure.in or configure.ac. + config_path = next((os.path.join(r, f) + for r, ds, fs in os.walk('.') for f in fs + if f == config_file), None) + my_config_files[config_name] = config_path + + if my_config_files[config_name] is not None: + try: + config_path = my_config_files[config_name] + check_call([config_path] + config_args[config_name], + stdout=PIPE, stderr=PIPE) + # The package's config file already runs OK, so just use it + continue + except Exception as e: + tty.debug(e) + else: + continue + + # Look for a spack-installed automake package + if 'automake' in self.spec: + automake_dir = 'automake-' + str(self.spec['automake'].version) + automake_path = os.path.join(self.spec['automake'].prefix, + 'share', automake_dir) + path = os.path.join(automake_path, config_file) if os.path.exists(path): - my_config_guess = path + config_files[config_name] = path + # Look for the system's config.guess + if (config_files[config_name] is None and + os.path.exists('/usr/share')): + automake_dir = [s for s in os.listdir('/usr/share') if + "automake" in s] + if automake_dir: + automake_path = os.path.join('/usr/share', automake_dir[0]) + path = os.path.join(automake_path, config_file) + if os.path.exists(path): + config_files[config_name] = path + if config_files[config_name] is not 
None: + try: + config_path = config_files[config_name] + my_config_path = my_config_files[config_name] - if my_config_guess is not None: - try: - check_call([my_config_guess], stdout=PIPE, stderr=PIPE) - # The package's config.guess already runs OK, so just use it - return - except Exception as e: - tty.debug(e) - else: - return + check_call([config_path] + config_args[config_name], + stdout=PIPE, stderr=PIPE) - # Look for a spack-installed automake package - if 'automake' in self.spec: - automake_path = os.path.join(self.spec['automake'].prefix, 'share', - 'automake-' + - str(self.spec['automake'].version)) - path = os.path.join(automake_path, 'config.guess') - if os.path.exists(path): - config_guess = path - # Look for the system's config.guess - if config_guess is None and os.path.exists('/usr/share'): - automake_dir = [s for s in os.listdir('/usr/share') if - "automake" in s] - if automake_dir: - automake_path = os.path.join('/usr/share', automake_dir[0]) - path = os.path.join(automake_path, 'config.guess') - if os.path.exists(path): - config_guess = path - if config_guess is not None: - try: - check_call([config_guess], stdout=PIPE, stderr=PIPE) - mod = os.stat(my_config_guess).st_mode & 0o777 | stat.S_IWUSR - os.chmod(my_config_guess, mod) - shutil.copyfile(config_guess, my_config_guess) - return - except Exception as e: - tty.debug(e) + m = os.stat(my_config_path).st_mode & 0o777 | stat.S_IWUSR + os.chmod(my_config_path, m) + shutil.copyfile(config_path, my_config_path) + continue + except Exception as e: + tty.debug(e) - raise RuntimeError('Failed to find suitable config.guess') + raise RuntimeError('Failed to find suitable ' + config_file) @run_before('configure') - def _set_autotools_environment_varoables(self): + def _set_autotools_environment_variables(self): """Many autotools builds use a version of mknod.m4 that fails when running as root unless FORCE_UNSAFE_CONFIGURE is set to 1. 
- We set this to 1 and expect the user to take responsibiltiy if + We set this to 1 and expect the user to take responsibility if they are running as root. They have to anyway, as this variable doesn't actually prevent configure from doing bad things as root. Without it, configure just fails halfway through, but it can @@ -185,6 +207,8 @@ def _do_patch_libtool(self): if line == 'pic_flag=""\n': line = 'pic_flag="{0}"\n'\ .format(self.compiler.cc_pic_flag) + if self.spec.satisfies('%fj') and 'fjhpctag.o' in line: + line = re.sub(r'/\S*/fjhpctag.o', '', line) sys.stdout.write(line) @property diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py index d7da957a9d6..dfa2c9e47aa 100644 --- a/lib/spack/spack/build_systems/cmake.py +++ b/lib/spack/spack/build_systems/cmake.py @@ -331,9 +331,9 @@ def cmake_args(self): def cmake(self, spec, prefix): """Runs ``cmake`` in the build directory""" - options = [os.path.abspath(self.root_cmakelists_dir)] - options += self.std_cmake_args + options = self.std_cmake_args options += self.cmake_args() + options.append(os.path.abspath(self.root_cmakelists_dir)) with working_dir(self.build_directory, create=True): inspect.getmodule(self).cmake(*options) diff --git a/lib/spack/spack/build_systems/intel.py b/lib/spack/spack/build_systems/intel.py index 4017f2e526c..a74d5e96137 100644 --- a/lib/spack/spack/build_systems/intel.py +++ b/lib/spack/spack/build_systems/intel.py @@ -1073,6 +1073,15 @@ def _setup_dependent_env_callback( # which performs dizzyingly similar but necessarily different # actions, and (b) function code leaves a bit more breathing # room within the suffocating corset of flake8 line length. 
+ + # Intel MPI since 2019 depends on libfabric which is not in the + # lib directory but in a directory of its own which should be + # included in the rpath + if self.version >= ver('2019'): + d = ancestor(self.component_lib_dir('mpi')) + libfabrics_path = os.path.join(d, 'libfabric', 'lib') + env.append_path('SPACK_COMPILER_EXTRA_RPATHS', + libfabrics_path) else: raise InstallError('compilers_of_client arg required for MPI') diff --git a/lib/spack/spack/build_systems/waf.py b/lib/spack/spack/build_systems/waf.py index 6bf9a432e03..a1581660f22 100644 --- a/lib/spack/spack/build_systems/waf.py +++ b/lib/spack/spack/build_systems/waf.py @@ -75,13 +75,14 @@ def waf(self, *args, **kwargs): def configure(self, spec, prefix): """Configures the project.""" - args = self.configure_args() + args = ['--prefix={0}'.format(self.prefix)] + args += self.configure_args() self.waf('configure', *args) def configure_args(self): """Arguments to pass to configure.""" - return ['--prefix={0}'.format(self.prefix)] + return [] def build(self, spec, prefix): """Executes the build.""" diff --git a/lib/spack/spack/build_systems/xorg.py b/lib/spack/spack/build_systems/xorg.py index 815bb7c26b5..ae28b30929b 100644 --- a/lib/spack/spack/build_systems/xorg.py +++ b/lib/spack/spack/build_systems/xorg.py @@ -14,9 +14,14 @@ class XorgPackage(spack.package.PackageBase): xorg_mirror_path = None #: List of x.org mirrors used by Spack + # Note: x.org mirrors are a bit tricky, since many are out-of-sync or off. + # A good package to test with is `util-macros`, which had a "recent" + # release. 
base_mirrors = [ 'https://www.x.org/archive/individual/', 'https://mirrors.ircam.fr/pub/x.org/individual/', + 'https://mirror.transip.net/xorg/individual/', + 'ftp://ftp.freedesktop.org/pub/xorg/individual/', 'http://xorg.mirrors.pair.com/individual/' ] diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index cbdfccb8bfc..ce74abf29cc 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -24,7 +24,6 @@ import spack.compilers as compilers import spack.config as cfg import spack.environment as ev -from spack.dependency import all_deptypes from spack.error import SpackError import spack.hash_types as ht from spack.main import SpackCommand @@ -34,6 +33,10 @@ import spack.util.web as web_util +JOB_RETRY_CONDITIONS = [ + 'always', +] + spack_gpg = SpackCommand('gpg') spack_compiler = SpackCommand('compiler') @@ -360,7 +363,6 @@ def compute_spec_deps(spec_list): } """ - deptype = all_deptypes spec_labels = {} specs = [] @@ -380,7 +382,7 @@ def append_dep(s, d): rkey, rlabel = spec_deps_key_label(spec) - for s in spec.traverse(deptype=deptype): + for s in spec.traverse(deptype=all): if s.external: tty.msg('Will not stage external pkg: {0}'.format(s)) continue @@ -392,7 +394,7 @@ def append_dep(s, d): } append_dep(rlabel, slabel) - for d in s.dependencies(deptype=deptype): + for d in s.dependencies(deptype=all): dkey, dlabel = spec_deps_key_label(d) if d.external: tty.msg('Will not stage external dep: {0}'.format(d)) @@ -400,11 +402,11 @@ def append_dep(s, d): append_dep(slabel, dlabel) - for l, d in spec_labels.items(): + for spec_label, spec_holder in spec_labels.items(): specs.append({ - 'label': l, - 'spec': d['spec'], - 'root_spec': d['root'], + 'label': spec_label, + 'spec': spec_holder['spec'], + 'root_spec': spec_holder['root'], }) deps_json_obj = { @@ -431,8 +433,24 @@ def pkg_name_from_spec_label(spec_label): return spec_label[:spec_label.index('/')] +def format_job_needs(phase_name, strip_compilers, dep_jobs, + osname, build_group, 
enable_artifacts_buildcache): + needs_list = [] + for dep_job in dep_jobs: + needs_list.append({ + 'job': get_job_name(phase_name, + strip_compilers, + dep_job, + osname, + build_group), + 'artifacts': enable_artifacts_buildcache, + }) + return needs_list + + def generate_gitlab_ci_yaml(env, print_summary, output_file, - custom_spack_repo=None, custom_spack_ref=None): + custom_spack_repo=None, custom_spack_ref=None, + run_optimizer=False): # FIXME: What's the difference between one that opens with 'spack' # and one that opens with 'env'? This will only handle the former. with spack.concretize.disable_compiler_existence_check(): @@ -466,6 +484,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, tty.verbose("Using CDash auth token from environment") cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN') + is_pr_pipeline = ( + os.environ.get('SPACK_IS_PR_PIPELINE', '').lower() == 'true' + ) + # Make sure we use a custom spack if necessary before_script = None after_script = None @@ -473,10 +495,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, if not custom_spack_ref: custom_spack_ref = 'master' before_script = [ - ('git clone "{0}" --branch "{1}" --depth 1 ' - '--single-branch'.format(custom_spack_repo, custom_spack_ref)), - # Next line just shows spack version in pipeline output - 'pushd ./spack && git rev-parse HEAD && popd', + ('git clone "{0}"'.format(custom_spack_repo)), + 'pushd ./spack && git checkout "{0}" && popd'.format( + custom_spack_ref), '. 
 "./spack/share/spack/setup-env.sh"', ] after_script = [ @@ -538,6 +559,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, stage_names = [] + max_length_needs = 0 + max_needs_job = '' + for phase in phases: phase_name = phase['name'] strip_compilers = phase['strip-compilers'] @@ -601,25 +625,35 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, root_spec, main_phase, strip_compilers), 'SPACK_JOB_SPEC_PKG_NAME': release_spec.name, 'SPACK_COMPILER_ACTION': compiler_action, + 'SPACK_IS_PR_PIPELINE': str(is_pr_pipeline), } job_dependencies = [] if spec_label in dependencies: - for dep_label in dependencies[spec_label]: - dep_pkg = pkg_name_from_spec_label(dep_label) - dep_spec = spec_labels[dep_label]['rootSpec'][dep_pkg] - dep_job_name = get_job_name( - phase_name, strip_compilers, dep_spec, osname, - build_group) - job_dependencies.append(dep_job_name) + if enable_artifacts_buildcache: + dep_jobs = [ + d for d in release_spec.traverse(deptype=all, + root=False) + ] + else: + dep_jobs = [] + for dep_label in dependencies[spec_label]: + dep_pkg = pkg_name_from_spec_label(dep_label) + dep_root = spec_labels[dep_label]['rootSpec'] + dep_jobs.append(dep_root[dep_pkg]) + + job_dependencies.extend( + format_job_needs(phase_name, strip_compilers, dep_jobs, + osname, build_group, + enable_artifacts_buildcache)) # This next section helps gitlab make sure the right # bootstrapped compiler exists in the artifacts buildcache by # creating an artificial dependency between this spec and its # compiler. So, if we are in the main phase, and if the # compiler we are supposed to use is listed in any of the - # bootstrap spec lists, then we will add one more dependency to - # "job_dependencies" (that compiler). + # bootstrap spec lists, then we will add more dependencies to + # the job (that compiler and maybe its dependencies as well). 
 if is_main_phase(phase_name): compiler_pkg_spec = compilers.pkg_spec_for_compiler( release_spec.compiler) @@ -627,12 +661,25 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, bs_arch = bs['spec'].architecture if (bs['spec'].satisfies(compiler_pkg_spec) and bs_arch == release_spec.architecture): - c_job_name = get_job_name(bs['phase-name'], - bs['strip-compilers'], - bs['spec'], - str(bs_arch), - build_group) - job_dependencies.append(c_job_name) + # We found the bootstrap compiler this release spec + # should be built with, so for DAG scheduling + # purposes, we will at least add the compiler spec + # to the jobs "needs". But if artifact buildcache + # is enabled, we'll have to add all transitive deps + # of the compiler as well. + dep_jobs = [bs['spec']] + if enable_artifacts_buildcache: + dep_jobs = [ + d for d in bs['spec'].traverse(deptype=all) + ] + + job_dependencies.extend( + format_job_needs(bs['phase-name'], + bs['strip-compilers'], + dep_jobs, + str(bs_arch), + build_group, + enable_artifacts_buildcache)) if enable_cdash_reporting: cdash_build_name = get_cdash_build_name( @@ -647,7 +694,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join( - related_builds) + sorted(related_builds)) variables.update(job_vars) @@ -657,7 +704,12 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, ] if enable_artifacts_buildcache: - artifact_paths.append('local_mirror/build_cache') + bc_root = 'local_mirror/build_cache' + artifact_paths.extend([os.path.join(bc_root, p) for p in [ + bindist.tarball_name(release_spec, '.spec.yaml'), + bindist.tarball_name(release_spec, '.cdashid'), + bindist.tarball_directory_name(release_spec), + ]]) job_object = { 'stage': stage_name, @@ -668,9 +720,18 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, 'paths': artifact_paths, 'when': 'always', }, - 'dependencies': job_dependencies, + 
'needs': sorted(job_dependencies, key=lambda d: d['job']), + 'retry': { + 'max': 2, + 'when': JOB_RETRY_CONDITIONS, + } } + length_needs = len(job_dependencies) + if length_needs > max_length_needs: + max_length_needs = length_needs + max_needs_job = job_name + if before_script: job_object['before_script'] = before_script @@ -691,6 +752,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, tty.debug('{0} build jobs generated in {1} stages'.format( job_id, stage_id)) + tty.debug('The max_needs_job is {0}, with {1} needs'.format( + max_needs_job, max_length_needs)) + # Use "all_job_names" to populate the build group for this set if enable_cdash_reporting and cdash_auth_token: try: @@ -701,7 +765,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, else: tty.warn('Unable to populate buildgroup without CDash credentials') - if final_job_config: + if final_job_config and not is_pr_pipeline: # Add an extra, final job to regenerate the index final_stage = 'stage-rebuild-index' final_job = { @@ -721,8 +785,17 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, output_object['stages'] = stage_names + sorted_output = {} + for output_key, output_value in sorted(output_object.items()): + sorted_output[output_key] = output_value + + # TODO(opadron): remove this or refactor + if run_optimizer: + import spack.ci_optimization as ci_opt + sorted_output = ci_opt.optimizer(sorted_output) + with open(output_file, 'w') as outf: - outf.write(syaml.dump_config(output_object, default_flow_style=True)) + outf.write(syaml.dump_config(sorted_output, default_flow_style=True)) def url_encode_string(input_string): diff --git a/lib/spack/spack/ci_optimization.py b/lib/spack/spack/ci_optimization.py new file mode 100644 index 00000000000..693802d06d5 --- /dev/null +++ b/lib/spack/spack/ci_optimization.py @@ -0,0 +1,377 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import collections + +try: + # dynamically import to keep vermin from complaining + collections_abc = __import__('collections.abc') +except ImportError: + collections_abc = collections + +import copy +import hashlib + +import spack.util.spack_yaml as syaml + + +def matches(obj, proto): + """Returns True if the test object "obj" matches the prototype object + "proto". + + If obj and proto are mappings, obj matches proto if (key in obj) and + (obj[key] matches proto[key]) for every key in proto. + + If obj and proto are sequences, obj matches proto if they are of the same + length and (a matches b) for every (a,b) in zip(obj, proto). + + Otherwise, obj matches proto if obj == proto. + + Precondition: proto must not have any reference cycles + """ + if isinstance(obj, collections_abc.Mapping): + if not isinstance(proto, collections_abc.Mapping): + return False + + return all( + (key in obj and matches(obj[key], val)) + for key, val in proto.items() + ) + + if (isinstance(obj, collections_abc.Sequence) and + not isinstance(obj, str)): + + if not (isinstance(proto, collections_abc.Sequence) and + not isinstance(proto, str)): + return False + + if len(obj) != len(proto): + return False + + return all( + matches(obj[index], val) + for index, val in enumerate(proto) + ) + + return obj == proto + + +def subkeys(obj, proto): + """Returns the test mapping "obj" after factoring out the items it has in + common with the prototype mapping "proto". 
+ + Consider a recursive merge operation, merge(a, b) on mappings a and b, that + returns a mapping, m, whose keys are the union of the keys of a and b, and + for every such key, "k", its corresponding value is: + + - merge(a[key], b[key]) if a[key] and b[key] are mappings, or + - b[key] if (key in b) and not matches(a[key], b[key]), + or + - a[key] otherwise + + + If obj and proto are mappings, the returned object is the smallest object, + "a", such that merge(a, proto) matches obj. + + Otherwise, obj is returned. + """ + if not (isinstance(obj, collections_abc.Mapping) and + isinstance(proto, collections_abc.Mapping)): + return obj + + new_obj = {} + for key, value in obj.items(): + if key not in proto: + new_obj[key] = value + continue + + if (matches(value, proto[key]) and + matches(proto[key], value)): + continue + + if isinstance(value, collections_abc.Mapping): + new_obj[key] = subkeys(value, proto[key]) + continue + + new_obj[key] = value + + return new_obj + + +def add_extends(yaml, key): + """Modifies the given object "yaml" so that it includes an "extends" key + whose value features "key". + + If "extends" is not in yaml, then yaml is modified such that + yaml["extends"] == key. + + If yaml["extends"] is a str, then yaml is modified such that + yaml["extends"] == [yaml["extends"], key] + + If yaml["extends"] is a list that does not include key, then key is + appended to the list. + + Otherwise, yaml is left unchanged. + """ + + has_key = ('extends' in yaml) + extends = yaml.get('extends') + + if has_key and not isinstance(extends, (str, collections_abc.Sequence)): + return + + if extends is None: + yaml['extends'] = key + return + + if isinstance(extends, str): + if extends != key: + yaml['extends'] = [extends, key] + return + + if key not in extends: + extends.append(key) + + +def common_subobject(yaml, sub): + """Factor prototype object "sub" out of the values of mapping "yaml". 
+ + Consider a modified copy of yaml, "new", where for each key, "key" in yaml: + + - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub). + - Otherwise, new[key] = yaml[key]. + + If the above match criteria is not satisfied for any such key, then (yaml, + None) is returned. The yaml object is returned unchanged. + + Otherwise, each matching value in new is modified as in + add_extends(new[key], common_key), and then new[common_key] is set to sub. + The common_key value is chosen such that it does not match any preexisting + key in new. In this case, (new, common_key) is returned. + """ + match_list = set(k for k, v in yaml.items() if matches(v, sub)) + + if not match_list: + return yaml, None + + common_prefix = '.c' + common_index = 0 + + while True: + common_key = ''.join((common_prefix, str(common_index))) + if common_key not in yaml: + break + common_index += 1 + + new_yaml = {} + + for key, val in yaml.items(): + new_yaml[key] = copy.deepcopy(val) + + if not matches(val, sub): + continue + + new_yaml[key] = subkeys(new_yaml[key], sub) + add_extends(new_yaml[key], common_key) + + new_yaml[common_key] = sub + + return new_yaml, common_key + + +def print_delta(name, old, new, applied=None): + delta = new - old + reldelta = (1000 * delta) // old + reldelta = (reldelta // 10, reldelta % 10) + + if applied is None: + applied = (new <= old) + + print('\n'.join(( + '{} {}:', + ' before: {: 10d}', + ' after : {: 10d}', + ' delta : {:+10d} ({:=+3d}.{}%)', + )).format( + name, + ('+' if applied else 'x'), + old, + new, + delta, + reldelta[0], + reldelta[1] + )) + + +def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs): + """Try applying an optimization pass and return information about the + result + + "name" is a string describing the nature of the pass. If it is a non-empty + string, summary statistics are also printed to stdout. + + "yaml" is the object to apply the pass to. 
 + + "optimization_pass" is the function implementing the pass to be applied. + + "args" and "kwargs" are the additional arguments to pass to optimization + pass. The pass is applied as + + >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs) + + The pass's results are greedily rejected if it does not modify the original + yaml document, or if it produces a yaml document that serializes to a + larger string. + + Returns (new_yaml, yaml, applied, other_results) if applied, or + (yaml, new_yaml, applied, other_results) otherwise. + """ + result = optimization_pass(yaml, *args, **kwargs) + new_yaml, other_results = result[0], result[1:] + + if new_yaml is yaml: + # pass was not applied + return (yaml, new_yaml, False, other_results) + + pre_size = len(syaml.dump_config(yaml, default_flow_style=True)) + post_size = len(syaml.dump_config(new_yaml, default_flow_style=True)) + + # pass makes the size worse: not applying + applied = (post_size <= pre_size) + if applied: + yaml, new_yaml = new_yaml, yaml + + if name: + print_delta(name, pre_size, post_size, applied) + + return (yaml, new_yaml, applied, other_results) + + +def build_histogram(iterator, key): + """Builds a histogram of values given an iterable of mappings and a key. + + For each mapping "m" with key "key" in iterator, the value m[key] is + considered. + + Returns a list of tuples (hash, count, proportion, value), where + + - "hash" is a sha1sum hash of the value. + - "count" is the number of occurrences of values that hash to "hash". + - "proportion" is the proportion of all values considered above that + hash to "hash". + - "value" is one of the values considered above that hash to "hash". + Which value is chosen when multiple values hash to the same "hash" is + undefined. + + The list is sorted in descending order by count, yielding the most + frequently occurring hashes first. 
+ """ + buckets = collections.defaultdict(int) + values = {} + + num_objects = 0 + for obj in iterator: + num_objects += 1 + + try: + val = obj[key] + except (KeyError, TypeError): + continue + + value_hash = hashlib.sha1() + value_hash.update(syaml.dump_config(val).encode()) + value_hash = value_hash.hexdigest() + + buckets[value_hash] += 1 + values[value_hash] = val + + return [(h, buckets[h], float(buckets[h]) / num_objects, values[h]) + for h in sorted(buckets.keys(), key=lambda k: -buckets[k])] + + +def optimizer(yaml): + original_size = len(syaml.dump_config(yaml, default_flow_style=True)) + + # try factoring out commonly repeated portions + common_job = { + 'variables': { + 'SPACK_COMPILER_ACTION': 'NONE', + 'SPACK_RELATED_BUILDS_CDASH': '' + }, + + 'after_script': ['rm -rf "./spack"'], + + 'artifacts': { + 'paths': ['jobs_scratch_dir', 'cdash_report'], + 'when': 'always' + }, + } + + # look for a list of tags that appear frequently + _, count, proportion, tags = next(iter( + build_histogram(yaml.values(), 'tags')), + (None,) * 4) + + # If a list of tags is found, and there are more than one job that uses it, + # *and* the jobs that do use it represent at least 70% of all jobs, then + # add the list to the prototype object. 
+ if tags and count > 1 and proportion >= 0.70: + common_job['tags'] = tags + + # apply common object factorization + yaml, other, applied, rest = try_optimization_pass( + 'general common object factorization', + yaml, common_subobject, common_job) + + # look for a common script, and try factoring that out + _, count, proportion, script = next(iter( + build_histogram(yaml.values(), 'script')), + (None,) * 4) + + if script and count > 1 and proportion >= 0.70: + yaml, other, applied, rest = try_optimization_pass( + 'script factorization', + yaml, common_subobject, {'script': script}) + + # look for a common before_script, and try factoring that out + _, count, proportion, script = next(iter( + build_histogram(yaml.values(), 'before_script')), + (None,) * 4) + + if script and count > 1 and proportion >= 0.70: + yaml, other, applied, rest = try_optimization_pass( + 'before_script factorization', + yaml, common_subobject, {'before_script': script}) + + # Look specifically for the SPACK_ROOT_SPEC environment variables. + # Try to factor them out. + h = build_histogram(( + getattr(val, 'get', lambda *args: {})('variables') + for val in yaml.values()), 'SPACK_ROOT_SPEC') + + # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC + # environment variable; not just the one that appears with the greatest + # frequency. We only require that more than 1 job uses a given instance's + # value, because we expect the value to be very large, and so expect even + # few-to-one factorizations to yield large space savings. 
+ counter = 0 + for _, count, proportion, spec in h: + if count <= 1: + continue + + counter += 1 + + yaml, other, applied, rest = try_optimization_pass( + 'SPACK_ROOT_SPEC factorization ({count})'.format(count=counter), + yaml, + common_subobject, + {'variables': {'SPACK_ROOT_SPEC': spec}}) + + new_size = len(syaml.dump_config(yaml, default_flow_style=True)) + + print('\n') + print_delta('overall summary', original_size, new_size) + print('\n') + return yaml diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 83e12004a19..5172bdee078 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -9,6 +9,7 @@ import re import sys import argparse +import ruamel.yaml as yaml import six @@ -16,7 +17,7 @@ from llnl.util.lang import attr_setdefault, index_by from llnl.util.tty.colify import colify from llnl.util.tty.color import colorize -from llnl.util.filesystem import working_dir +from llnl.util.filesystem import join_path import spack.config import spack.error @@ -26,6 +27,7 @@ import spack.store import spack.util.spack_json as sjson import spack.util.string +from ruamel.yaml.error import MarkedYAMLError # cmd has a submodule called "list" so preserve the python list module @@ -43,11 +45,28 @@ def python_name(cmd_name): return cmd_name.replace("-", "_") +def require_python_name(pname): + """Require that the provided name is a valid python name (per + python_name()). Useful for checking parameters for function + prerequisites.""" + if python_name(pname) != pname: + raise PythonNameError(pname) + + def cmd_name(python_name): """Convert module name (with ``_``) to command name (with ``-``).""" return python_name.replace('_', '-') +def require_cmd_name(cname): + """Require that the provided name is a valid command name (per + cmd_name()). Useful for checking parameters for function + prerequisites. 
+ """ + if cmd_name(cname) != cname: + raise CommandNameError(cname) + + #: global, cached list of all commands -- access through all_commands() _all_commands = None @@ -91,6 +110,7 @@ def get_module(cmd_name): cmd_name (str): name of the command for which to get a module (contains ``-``, not ``_``). """ + require_cmd_name(cmd_name) pname = python_name(cmd_name) try: @@ -102,8 +122,6 @@ def get_module(cmd_name): tty.debug('Imported {0} from built-in commands'.format(pname)) except ImportError: module = spack.extensions.get_module(cmd_name) - if not module: - raise attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op attr_setdefault(module, DESCRIPTION, "") @@ -116,14 +134,16 @@ def get_module(cmd_name): def get_command(cmd_name): - """Imports the command's function from a module and returns it. + """Imports the command function associated with cmd_name. + + The function's name is derived from cmd_name using python_name(). Args: - cmd_name (str): name of the command for which to get a module - (contains ``-``, not ``_``). + cmd_name (str): name of the command (contains ``-``, not ``_``). 
""" + require_cmd_name(cmd_name) pname = python_name(cmd_name) - return getattr(get_module(pname), pname) + return getattr(get_module(cmd_name), pname) def parse_specs(args, **kwargs): @@ -415,8 +435,39 @@ def format_list(specs): def spack_is_git_repo(): """Ensure that this instance of Spack is a git clone.""" - with working_dir(spack.paths.prefix): - return os.path.isdir('.git') + return is_git_repo(spack.paths.prefix) + + +def is_git_repo(path): + dotgit_path = join_path(path, '.git') + if os.path.isdir(dotgit_path): + # we are in a regular git repo + return True + if os.path.isfile(dotgit_path): + # we might be in a git worktree + try: + with open(dotgit_path, "rb") as f: + dotgit_content = yaml.load(f) + return os.path.isdir(dotgit_content.get("gitdir", dotgit_path)) + except MarkedYAMLError: + pass + return False + + +class PythonNameError(spack.error.SpackError): + """Exception class thrown for impermissible python names""" + def __init__(self, name): + self.name = name + super(PythonNameError, self).__init__( + '{0} is not a permissible Python name.'.format(name)) + + +class CommandNameError(spack.error.SpackError): + """Exception class thrown for impermissible command names""" + def __init__(self, name): + self.name = name + super(CommandNameError, self).__init__( + '{0} is not a permissible Spack command name.'.format(name)) ######################################## diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py deleted file mode 100644 index 53c284ab6d8..00000000000 --- a/lib/spack/spack/cmd/bootstrap.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import llnl.util.cpu -import llnl.util.tty as tty - -import spack.repo -import spack.spec -import spack.cmd.common.arguments as arguments - -description = "Bootstrap packages needed for spack to run smoothly" -section = "admin" -level = "long" - - -def setup_parser(subparser): - arguments.add_common_arguments(subparser, ['jobs']) - subparser.add_argument( - '--keep-prefix', action='store_true', dest='keep_prefix', - help="don't remove the install prefix if installation fails") - subparser.add_argument( - '--keep-stage', action='store_true', dest='keep_stage', - help="don't remove the build stage if installation succeeds") - arguments.add_common_arguments(subparser, ['no_checksum']) - subparser.add_argument( - '-v', '--verbose', action='store_true', dest='verbose', - help="display verbose build output while installing") - - cache_group = subparser.add_mutually_exclusive_group() - cache_group.add_argument( - '--use-cache', action='store_true', dest='use_cache', default=True, - help="check for pre-built Spack packages in mirrors (default)") - cache_group.add_argument( - '--no-cache', action='store_false', dest='use_cache', default=True, - help="do not check for pre-built Spack packages in mirrors") - cache_group.add_argument( - '--cache-only', action='store_true', dest='cache_only', default=False, - help="only install package from binary mirrors") - - cd_group = subparser.add_mutually_exclusive_group() - arguments.add_common_arguments(cd_group, ['clean', 'dirty']) - - -def bootstrap(parser, args, **kwargs): - kwargs.update({ - 'keep_prefix': args.keep_prefix, - 'keep_stage': args.keep_stage, - 'install_deps': 'dependencies', - 'verbose': args.verbose, - 'dirty': args.dirty, - 'use_cache': args.use_cache, - 'cache_only': args.cache_only - }) - - # Define requirement dictionary defining general specs which need - # to be satisfied, and the specs to install when the general spec - # isn't satisfied. 
- requirement_dict = { - # Install environment-modules with generic optimizations - 'environment-modules': 'environment-modules~X target={0}'.format( - llnl.util.cpu.host().family - ) - } - - for requirement in requirement_dict: - installed_specs = spack.store.db.query(requirement) - if(len(installed_specs) > 0): - tty.msg("Requirement %s is satisfied with installed " - "package %s" % (requirement, installed_specs[0])) - else: - # Install requirement - spec_to_install = spack.spec.Spec(requirement_dict[requirement]) - spec_to_install.concretize() - tty.msg("Installing %s to satisfy requirement for %s" % - (spec_to_install, requirement)) - kwargs['explicit'] = True - package = spack.repo.get(spec_to_install) - package.do_install(**kwargs) diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index eaeaf5337f9..97e7833af0d 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -26,6 +26,9 @@ def setup_parser(subparser): subparser.add_argument( '--keep-stage', action='store_true', help="don't clean up staging area when command completes") + subparser.add_argument( + '-b', '--batch', action='store_true', + help="don't ask which versions to checksum") arguments.add_common_arguments(subparser, ['package']) subparser.add_argument( 'versions', nargs=argparse.REMAINDER, @@ -33,6 +36,11 @@ def setup_parser(subparser): def checksum(parser, args): + # Did the user pass 'package@version' string? 
+ if len(args.versions) == 0 and '@' in args.package: + args.versions = [args.package.split('@')[1]] + args.package = args.package.split('@')[0] + # Make sure the user provided a package and not a URL if not valid_fully_qualified_module_name(args.package): tty.die("`spack checksum` accepts package names, not URLs.") @@ -57,6 +65,7 @@ def checksum(parser, args): version_lines = spack.stage.get_checksums_for_versions( url_dict, pkg.name, keep_stage=args.keep_stage, + batch=(args.batch or len(args.versions) > 0), fetch_options=pkg.fetch_options) print() diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py index e87da8c8af4..3e57c6656af 100644 --- a/lib/spack/spack/cmd/ci.py +++ b/lib/spack/spack/cmd/ci.py @@ -34,37 +34,6 @@ def setup_parser(subparser): setup_parser.parser = subparser subparsers = subparser.add_subparsers(help='CI sub-commands') - start = subparsers.add_parser('start', help=ci_start.__doc__) - start.add_argument( - '--output-file', default=None, - help="Absolute path to file where generated jobs file should be " + - "written. The default is .gitlab-ci.yml in the root of the " + - "repository.") - start.add_argument( - '--copy-to', default=None, - help="Absolute path of additional location where generated jobs " + - "yaml file should be copied. Default is not to copy.") - start.add_argument( - '--spack-repo', default=None, - help="Provide a url for this argument if a custom spack repo " + - "should be cloned as a step in each generated job.") - start.add_argument( - '--spack-ref', default=None, - help="Provide a git branch or tag if a custom spack branch " + - "should be checked out as a step in each generated job. 
" + - "This argument is ignored if no --spack-repo is provided.") - start.add_argument( - '--downstream-repo', default=None, - help="Url to repository where commit containing jobs yaml file " + - "should be pushed.") - start.add_argument( - '--branch-name', default='default-branch', - help="Name of current branch, used in generation of pushed commit.") - start.add_argument( - '--commit-sha', default='none', - help="SHA of current commit, used in generation of pushed commit.") - start.set_defaults(func=ci_start) - # Dynamic generation of the jobs yaml from a spack environment generate = subparsers.add_parser('generate', help=ci_generate.__doc__) generate.add_argument( @@ -85,22 +54,13 @@ def setup_parser(subparser): help="Provide a git branch or tag if a custom spack branch " + "should be checked out as a step in each generated job. " + "This argument is ignored if no --spack-repo is provided.") + generate.add_argument( + '--optimize', action='store_true', + help="(Experimental) run the generated document through a series of " + "optimization passes designed to reduce the size of the " + "generated file.") generate.set_defaults(func=ci_generate) - # Commit and push jobs yaml to a downstream CI repo - pushyaml = subparsers.add_parser('pushyaml', help=ci_pushyaml.__doc__) - pushyaml.add_argument( - '--downstream-repo', default=None, - help="Url to repository where commit containing jobs yaml file " + - "should be pushed.") - pushyaml.add_argument( - '--branch-name', default='default-branch', - help="Name of current branch, used in generation of pushed commit.") - pushyaml.add_argument( - '--commit-sha', default='none', - help="SHA of current commit, used in generation of pushed commit.") - pushyaml.set_defaults(func=ci_pushyaml) - # Check a spec against mirror. Rebuild, create buildcache and push to # mirror (if necessary). 
rebuild = subparsers.add_parser('rebuild', help=ci_rebuild.__doc__) @@ -120,6 +80,7 @@ def ci_generate(args): copy_yaml_to = args.copy_to spack_repo = args.spack_repo spack_ref = args.spack_ref + run_optimizer = args.optimize if not output_file: gen_ci_dir = os.getcwd() @@ -131,7 +92,8 @@ def ci_generate(args): # Generate the jobs spack_ci.generate_gitlab_ci_yaml( - env, True, output_file, spack_repo, spack_ref) + env, True, output_file, spack_repo, spack_ref, + run_optimizer=run_optimizer) if copy_yaml_to: copy_to_dir = os.path.dirname(copy_yaml_to) @@ -140,64 +102,6 @@ def ci_generate(args): shutil.copyfile(output_file, copy_yaml_to) -def ci_pushyaml(args): - """Push the generated jobs yaml file to a remote repository. The file - (.gitlab-ci.yaml) is expected to be in the current directory, which - should be the root of the repository.""" - downstream_repo = args.downstream_repo - branch_name = args.branch_name - commit_sha = args.commit_sha - - if not downstream_repo: - tty.die('No downstream repo to push to, exiting') - - working_dir = os.getcwd() - jobs_yaml = os.path.join(working_dir, '.gitlab-ci.yml') - git_dir = os.path.join(working_dir, '.git') - - if not os.path.exists(jobs_yaml): - tty.die('.gitlab-ci.yml must exist in current directory') - - if not os.path.exists(git_dir): - tty.die('.git directory must exist in current directory') - - # Create a temporary working directory - with spack_ci.TemporaryDirectory() as temp_dir: - git = exe.which('git', required=True) - - # Push a commit with the generated file to the downstream ci repo - saved_git_dir = os.path.join(temp_dir, 'original-git-dir') - - shutil.move('.git', saved_git_dir) - - git('init', '.') - - git('config', 'user.email', 'robot@spack.io') - git('config', 'user.name', 'Spack Build Bot') - - git('add', '.') - - # If the environment contains a spack directory, do not commit - # or push it with any other generated products - if os.path.exists('./spack') and os.path.isdir('./spack'): - git('rm', 
'-rf', '--cached', 'spack') - - tty.msg('git commit') - commit_message = '{0} {1} ({2})'.format( - 'Auto-generated commit testing', branch_name, commit_sha) - - git('commit', '-m', '{0}'.format(commit_message)) - - tty.msg('git push') - git('remote', 'add', 'downstream', downstream_repo) - push_to_branch = 'master:multi-ci-{0}'.format(branch_name) - git('push', '--force', 'downstream', push_to_branch) - - shutil.rmtree('.git') - shutil.move(saved_git_dir, '.git') - git('reset', '--hard', 'HEAD') - - def ci_rebuild(args): """This command represents a gitlab-ci job, corresponding to a single release spec. As such it must first decide whether or not the spec it @@ -239,6 +143,7 @@ def ci_rebuild(args): compiler_action = get_env_var('SPACK_COMPILER_ACTION') cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME') related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH') + pr_env_var = get_env_var('SPACK_IS_PR_PIPELINE') gitlab_ci = None if 'gitlab-ci' in yaml_root: @@ -291,11 +196,18 @@ def ci_rebuild(args): local_mirror_dir = os.path.join(ci_artifact_dir, 'local_mirror') build_cache_dir = os.path.join(local_mirror_dir, 'build_cache') + spack_is_pr_pipeline = True if pr_env_var == 'True' else False + enable_artifacts_mirror = False artifact_mirror_url = None if 'enable-artifacts-buildcache' in gitlab_ci: enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache'] - if enable_artifacts_mirror: + if enable_artifacts_mirror or spack_is_pr_pipeline: + # If this is a PR pipeline, we will override the setting to + # make sure that artifacts buildcache is enabled. 
Otherwise + # jobs will not have binary deps available since we do not + # allow pushing binaries to remote mirror during PR pipelines + enable_artifacts_mirror = True artifact_mirror_url = 'file://' + local_mirror_dir mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format( artifact_mirror_url) @@ -441,9 +353,12 @@ def ci_rebuild(args): spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir) - # 4) create buildcache on remote mirror - spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path, - remote_mirror_url, cdash_build_id) + # 4) create buildcache on remote mirror, but not if this is + # running to test a spack PR + if not spack_is_pr_pipeline: + spack_ci.push_mirror_contents( + env, job_spec, job_spec_yaml_path, remote_mirror_url, + cdash_build_id) # 5) create another copy of that buildcache on "local artifact # mirror" (only done if cash reporting is enabled) @@ -468,13 +383,6 @@ def ci_rebuild(args): job_spec, build_cache_dir, True, remote_mirror_url) -def ci_start(args): - """Kicks of the CI process (currently just calls ci_generate() then - ci_push())""" - ci_generate(args) - ci_pushyaml(args) - - def ci(parser, args): if args.func: args.func(args) diff --git a/lib/spack/spack/cmd/commands.py b/lib/spack/spack/cmd/commands.py index 3664fce477e..be934e0048b 100644 --- a/lib/spack/spack/cmd/commands.py +++ b/lib/spack/spack/cmd/commands.py @@ -78,9 +78,10 @@ def setup_parser(subparser): class SpackArgparseRstWriter(ArgparseRstWriter): """RST writer tailored for spack documentation.""" - def __init__(self, prog, out=sys.stdout, aliases=False, + def __init__(self, prog, out=None, aliases=False, documented_commands=[], rst_levels=['-', '-', '^', '~', ':', '`']): + out = sys.stdout if out is None else out super(SpackArgparseRstWriter, self).__init__( prog, out, aliases, rst_levels) self.documented = documented_commands diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py index 
b93f265c7ab..e5945bda9c3 100644 --- a/lib/spack/spack/cmd/common/arguments.py +++ b/lib/spack/spack/cmd/common/arguments.py @@ -111,7 +111,7 @@ def __call__(self, parser, namespace, jobs, option_string): def default(self): # This default is coded as a property so that look-up # of this value is done only on demand - return min(spack.config.get('config:build_jobs'), + return min(spack.config.get('config:build_jobs', 16), multiprocessing.cpu_count()) @default.setter diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 625466f8c7f..c8f52af04d5 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -159,7 +159,19 @@ def compiler_list(args): tty.msg("Available compilers") index = index_by(spack.compilers.all_compilers(scope=args.scope), lambda c: (c.spec.name, c.operating_system, c.target)) - ordered_sections = sorted(index.items(), key=lambda item: item[0]) + + # For a container, take each element which does not evaluate to false and + # convert it to a string. For elements which evaluate to False (e.g. None) + # convert them to '' (in which case it still evaluates to False but is a + # string type). 
Tuples produced by this are guaranteed to be comparable in + # Python 3 + convert_str = ( + lambda tuple_container: + tuple(str(x) if x else '' for x in tuple_container)) + + index_str_keys = list( + (convert_str(x), y) for x, y in index.items()) + ordered_sections = sorted(index_str_keys, key=lambda item: item[0]) for i, (key, compilers) in enumerate(ordered_sections): if i >= 1: print() diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 304b531b492..7d12dc98a7d 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -629,7 +629,7 @@ def get_versions(args, name): versions = spack.stage.get_checksums_for_versions( url_dict, name, first_stage_function=guesser, - keep_stage=args.keep_stage) + keep_stage=args.keep_stage, batch=True) else: versions = unhashed_versions diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py index 7f390341ef4..bbccbe23ee9 100644 --- a/lib/spack/spack/cmd/dependencies.py +++ b/lib/spack/spack/cmd/dependencies.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + import llnl.util.tty as tty from llnl.util.tty.colify import colify @@ -43,7 +45,9 @@ def dependencies(parser, args): spec = spack.cmd.disambiguate_spec(specs[0], env) format_string = '{name}{@version}{%compiler}{/hash:7}' - tty.msg("Dependencies of %s" % spec.format(format_string, color=True)) + if sys.stdout.isatty(): + tty.msg( + "Dependencies of %s" % spec.format(format_string, color=True)) deps = spack.store.db.installed_relatives( spec, 'children', args.transitive, deptype=args.deptype) if deps: diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py index 89fd15ffdab..5563701801a 100644 --- a/lib/spack/spack/cmd/dependents.py +++ b/lib/spack/spack/cmd/dependents.py @@ -3,6 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import sys + import llnl.util.tty as tty from llnl.util.tty.colify import colify @@ -84,7 +86,8 @@ def 
dependents(parser, args): spec = spack.cmd.disambiguate_spec(specs[0], env) format_string = '{name}{@version}{%compiler}{/hash:7}' - tty.msg("Dependents of %s" % spec.cformat(format_string)) + if sys.stdout.isatty(): + tty.msg("Dependents of %s" % spec.cformat(format_string)) deps = spack.store.db.installed_relatives( spec, 'parents', args.transitive) if deps: diff --git a/lib/spack/spack/cmd/dev_build.py b/lib/spack/spack/cmd/dev_build.py index c1004f24b38..d9ad0fb8916 100644 --- a/lib/spack/spack/cmd/dev_build.py +++ b/lib/spack/spack/cmd/dev_build.py @@ -38,9 +38,17 @@ def setup_parser(subparser): '-q', '--quiet', action='store_true', dest='quiet', help="do not display verbose build output while installing") subparser.add_argument( + '--drop-in', type=str, dest='shell', default=None, + help="drop into a build environment in a new shell, e.g. bash, zsh") + arguments.add_common_arguments(subparser, ['spec']) + + stop_group = subparser.add_mutually_exclusive_group() + stop_group.add_argument( + '-b', '--before', type=str, dest='before', default=None, + help="phase to stop before when installing (default None)") + stop_group.add_argument( '-u', '--until', type=str, dest='until', default=None, help="phase to stop after when installing (default None)") - arguments.add_common_arguments(subparser, ['spec']) cd_group = subparser.add_mutually_exclusive_group() arguments.add_common_arguments(cd_group, ['clean', 'dirty']) @@ -91,4 +99,10 @@ def dev_build(self, args): verbose=not args.quiet, keep_stage=True, # don't remove source dir for dev build. dirty=args.dirty, + stop_before=args.before, stop_at=args.until) + + # drop into the build environment of the package? 
+ if args.shell is not None: + spack.build_environment.setup_package(package, dirty=False) + os.execvp(args.shell, [args.shell]) diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py index a8bc1e5bbea..43c125e8f24 100644 --- a/lib/spack/spack/cmd/env.py +++ b/lib/spack/spack/cmd/env.py @@ -208,10 +208,14 @@ def _env_create(name_or_path, init_file=None, dir=False, with_view=None): env = ev.Environment(name_or_path, init_file, with_view) env.write() tty.msg("Created environment in %s" % env.path) + tty.msg("You can activate this environment with:") + tty.msg(" spack env activate %s" % env.path) else: env = ev.create(name_or_path, init_file, with_view) env.write() tty.msg("Created environment '%s' in %s" % (name_or_path, env.path)) + tty.msg("You can activate this environment with:") + tty.msg(" spack env activate %s" % (name_or_path)) return env diff --git a/lib/spack/spack/cmd/external.py b/lib/spack/spack/cmd/external.py new file mode 100644 index 00000000000..f93deaba037 --- /dev/null +++ b/lib/spack/spack/cmd/external.py @@ -0,0 +1,271 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from __future__ import print_function +from collections import defaultdict, namedtuple +import argparse +import os +import re +import six + +import spack +import spack.error +import llnl.util.tty as tty +import spack.util.spack_yaml as syaml +import spack.util.environment +import llnl.util.filesystem + +description = "add external packages to Spack configuration" +section = "config" +level = "short" + + +def setup_parser(subparser): + sp = subparser.add_subparsers( + metavar='SUBCOMMAND', dest='external_command') + + find_parser = sp.add_parser('find', help=external_find.__doc__) + find_parser.add_argument( + '--not-buildable', action='store_true', default=False, + help="packages with detected externals won't be built with Spack") + find_parser.add_argument('packages', nargs=argparse.REMAINDER) + + +def is_executable(path): + return os.path.isfile(path) and os.access(path, os.X_OK) + + +def _get_system_executables(): + """Get the paths of all executables available from the current PATH. + + For convenience, this is constructed as a dictionary where the keys are + the executable paths and the values are the names of the executables + (i.e. the basename of the executable path). + + There may be multiple paths with the same basename. In this case it is + assumed there are two different instances of the executable. 
+ """ + path_hints = spack.util.environment.get_path('PATH') + search_paths = llnl.util.filesystem.search_paths_for_executables( + *path_hints) + + path_to_exe = {} + # Reverse order of search directories so that an exe in the first PATH + # entry overrides later entries + for search_path in reversed(search_paths): + for exe in os.listdir(search_path): + exe_path = os.path.join(search_path, exe) + if is_executable(exe_path): + path_to_exe[exe_path] = exe + return path_to_exe + + +ExternalPackageEntry = namedtuple( + 'ExternalPackageEntry', + ['spec', 'base_dir']) + + +def _generate_pkg_config(external_pkg_entries): + """Generate config according to the packages.yaml schema for a single + package. + + This does not generate the entire packages.yaml. For example, given some + external entries for the CMake package, this could return:: + + { 'paths': { + 'cmake@3.17.1': '/opt/cmake-3.17.1/', + 'cmake@3.16.5': '/opt/cmake-3.16.5/' + } + } + """ + paths_dict = syaml.syaml_dict() + for e in external_pkg_entries: + if not _spec_is_valid(e.spec): + continue + paths_dict[str(e.spec)] = e.base_dir + pkg_dict = syaml.syaml_dict() + pkg_dict['paths'] = paths_dict + + return pkg_dict + + +def _spec_is_valid(spec): + try: + str(spec) + except spack.error.SpackError: + # It is assumed here that we can at least extract the package name from + # the spec so we can look up the implementation of + # determine_spec_details + tty.warn('Constructed spec for {0} does not have a string' + ' representation'.format(spec.name)) + return False + + try: + spack.spec.Spec(str(spec)) + except spack.error.SpackError: + tty.warn('Constructed spec has a string representation but the string' + ' representation does not evaluate to a valid spec: {0}' + .format(str(spec))) + return False + + return True + + +def external_find(args): + if args.packages: + packages_to_check = list(spack.repo.get(pkg) for pkg in args.packages) + else: + packages_to_check = spack.repo.path.all_packages() + + 
pkg_to_entries = _get_external_packages(packages_to_check) + _update_pkg_config(pkg_to_entries, args.not_buildable) + + +def _group_by_prefix(paths): + groups = defaultdict(set) + for p in paths: + groups[os.path.dirname(p)].add(p) + return groups.items() + + +def _convert_to_iterable(single_val_or_multiple): + x = single_val_or_multiple + if x is None: + return [] + elif isinstance(x, six.string_types): + return [x] + elif isinstance(x, spack.spec.Spec): + # Specs are iterable, but a single spec should be converted to a list + return [x] + + try: + iter(x) + return x + except TypeError: + return [x] + + +def _determine_base_dir(prefix): + # Given a prefix where an executable is found, assuming that prefix ends + # with /bin/, strip off the 'bin' directory to get a Spack-compatible + # prefix + assert os.path.isdir(prefix) + if os.path.basename(prefix) == 'bin': + return os.path.dirname(prefix) + + +def _get_predefined_externals(): + # Pull from all scopes when looking for preexisting external package + # entries + pkg_config = spack.config.get('packages') + already_defined_specs = set() + for pkg_name, per_pkg_cfg in pkg_config.items(): + paths = per_pkg_cfg.get('paths', {}) + already_defined_specs.update(spack.spec.Spec(k) for k in paths) + modules = per_pkg_cfg.get('modules', {}) + already_defined_specs.update(spack.spec.Spec(k) for k in modules) + return already_defined_specs + + +def _update_pkg_config(pkg_to_entries, not_buildable): + predefined_external_specs = _get_predefined_externals() + + pkg_to_cfg = {} + for pkg_name, ext_pkg_entries in pkg_to_entries.items(): + new_entries = list( + e for e in ext_pkg_entries + if (e.spec not in predefined_external_specs)) + + pkg_config = _generate_pkg_config(new_entries) + if not_buildable: + pkg_config['buildable'] = False + pkg_to_cfg[pkg_name] = pkg_config + + cfg_scope = spack.config.default_modify_scope() + pkgs_cfg = spack.config.get('packages', scope=cfg_scope) + + spack.config._merge_yaml(pkgs_cfg, 
pkg_to_cfg) + spack.config.set('packages', pkgs_cfg, scope=cfg_scope) + + +def _get_external_packages(packages_to_check, system_path_to_exe=None): + if not system_path_to_exe: + system_path_to_exe = _get_system_executables() + + exe_pattern_to_pkgs = defaultdict(list) + for pkg in packages_to_check: + if hasattr(pkg, 'executables'): + for exe in pkg.executables: + exe_pattern_to_pkgs[exe].append(pkg) + + pkg_to_found_exes = defaultdict(set) + for exe_pattern, pkgs in exe_pattern_to_pkgs.items(): + compiled_re = re.compile(exe_pattern) + for path, exe in system_path_to_exe.items(): + if compiled_re.search(exe): + for pkg in pkgs: + pkg_to_found_exes[pkg].add(path) + + pkg_to_entries = defaultdict(list) + resolved_specs = {} # spec -> exe found for the spec + + for pkg, exes in pkg_to_found_exes.items(): + if not hasattr(pkg, 'determine_spec_details'): + tty.warn("{0} must define 'determine_spec_details' in order" + " for Spack to detect externally-provided instances" + " of the package.".format(pkg.name)) + continue + + # TODO: iterate through this in a predetermined order (e.g. by package + # name) to get repeatable results when there are conflicts. Note that + # if we take the prefixes returned by _group_by_prefix, then consider + # them in the order that they appear in PATH, this should be sufficient + # to get repeatable results. + for prefix, exes_in_prefix in _group_by_prefix(exes): + # TODO: multiple instances of a package can live in the same + # prefix, and a package implementation can return multiple specs + # for one prefix, but without additional details (e.g. about the + # naming scheme which differentiates them), the spec won't be + # usable. 
+ specs = _convert_to_iterable( + pkg.determine_spec_details(prefix, exes_in_prefix)) + + if not specs: + tty.debug( + 'The following executables in {0} were decidedly not' + 'part of the package {1}: {2}' + .format(prefix, pkg.name, ', '.join(exes_in_prefix)) + ) + + for spec in specs: + pkg_prefix = _determine_base_dir(prefix) + + if not pkg_prefix: + tty.debug("{0} does not end with a 'bin/' directory: it" + " cannot be added as a Spack package" + .format(prefix)) + continue + + if spec in resolved_specs: + prior_prefix = ', '.join(resolved_specs[spec]) + + tty.debug( + "Executables in {0} and {1} are both associated" + " with the same spec {2}" + .format(prefix, prior_prefix, str(spec))) + continue + else: + resolved_specs[spec] = prefix + + pkg_to_entries[pkg.name].append( + ExternalPackageEntry(spec=spec, base_dir=pkg_prefix)) + + return pkg_to_entries + + +def external(parser, args): + action = {'find': external_find} + + action[args.external_command](args) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index fa36e1cd267..db229aca7e3 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -7,6 +7,7 @@ import copy import os +import sys import llnl.util.tty as tty import llnl.util.tty.color as color @@ -236,7 +237,7 @@ def find(parser, args): else: if env: display_env(env, args, decorator) - if args.groups: + if sys.stdout.isatty() and args.groups: tty.msg("%s" % plural(len(results), 'installed package')) cmd.display_specs( results, args, decorator=decorator, all_headers=True) diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 81a68dae966..fa674317ab7 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -8,6 +8,7 @@ import textwrap from six.moves import zip_longest +import llnl.util.tty as tty import llnl.util.tty.color as color from llnl.util.tty.colify import colify @@ -53,11 +54,9 @@ def variant(s): class VariantFormatter(object): - def __init__(self, variants, 
max_widths=(30, 20, 30)): + def __init__(self, variants): self.variants = variants self.headers = ('Name [Default]', 'Allowed values', 'Description') - # Set max headers lengths - self.max_column_widths = max_widths # Formats fmt_name = '{0} [{1}]' @@ -67,7 +66,7 @@ def __init__(self, variants, max_widths=(30, 20, 30)): # than that self.column_widths = [len(x) for x in self.headers] - # Update according to line lengths + # Expand columns based on max line lengths for k, v in variants.items(): candidate_max_widths = ( len(fmt_name.format(k, self.default(v))), # Name [Default] @@ -81,12 +80,18 @@ def __init__(self, variants, max_widths=(30, 20, 30)): max(self.column_widths[2], candidate_max_widths[2]) ) - # Reduce to at most the maximum allowed - self.column_widths = ( - min(self.column_widths[0], self.max_column_widths[0]), - min(self.column_widths[1], self.max_column_widths[1]), - min(self.column_widths[2], self.max_column_widths[2]) + # Don't let name or possible values be less than max widths + _, cols = tty.terminal_size() + max_name = min(self.column_widths[0], 30) + max_vals = min(self.column_widths[1], 20) + + # allow the description column to extend as wide as the terminal. 
+ max_description = min( + self.column_widths[2], + # min width 70 cols, 14 cols of margins and column spacing + max(cols, 70) - max_name - max_vals - 14, ) + self.column_widths = (max_name, max_vals, max_description) # Compute the format self.fmt = "%%-%ss%%-%ss%%s" % ( @@ -114,10 +119,8 @@ def lines(self): '{0} [{1}]'.format(k, self.default(v)), width=self.column_widths[0] ) - allowed = textwrap.wrap( - v.allowed_values, - width=self.column_widths[1] - ) + allowed = v.allowed_values.replace('True, False', 'on, off') + allowed = textwrap.wrap(allowed, width=self.column_widths[1]) description = textwrap.wrap( v.description, width=self.column_widths[2] diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 1473550a560..2d338204d3b 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -45,6 +45,15 @@ def setup_parser(subparser): " (this requires significant time and space)") create_parser.add_argument( '-f', '--file', help="file with specs of packages to put in mirror") + create_parser.add_argument( + '--exclude-file', + help="specs which Spack should not try to add to a mirror" + " (listed in a file, one per line)") + create_parser.add_argument( + '--exclude-specs', + help="specs which Spack should not try to add to a mirror" + " (specified on command line)") + create_parser.add_argument( '--skip-unstable-versions', action='store_true', help="don't cache versions unless they identify a stable (unchanging)" @@ -232,9 +241,7 @@ def _read_specs_from_file(filename): return specs -def mirror_create(args): - """Create a directory to be used as a spack mirror, and fill it with - package archives.""" +def _determine_specs_to_mirror(args): if args.specs and args.all: raise SpackError("Cannot specify specs on command line if you" " chose to mirror all specs with '--all'") @@ -264,6 +271,7 @@ def mirror_create(args): tty.die("Cannot pass specs on the command line with --file.") specs = _read_specs_from_file(args.file) + 
env_specs = None if not specs: # If nothing is passed, use environment or all if no active env if not args.all: @@ -273,12 +281,9 @@ def mirror_create(args): env = ev.get_env(args, 'mirror') if env: - mirror_specs = env.specs_by_hash.values() + env_specs = env.all_specs() else: specs = [Spec(n) for n in spack.repo.all_package_names()] - mirror_specs = spack.mirror.get_all_versions(specs) - mirror_specs.sort( - key=lambda s: (s.name, s.version)) else: # If the user asked for dependencies, traverse spec DAG get them. if args.dependencies: @@ -297,11 +302,38 @@ def mirror_create(args): msg = 'Skipping {0} as it is an external spec.' tty.msg(msg.format(spec.cshort_spec)) + if env_specs: + if args.versions_per_spec: + tty.warn("Ignoring '--versions-per-spec' for mirroring specs" + " in environment.") + mirror_specs = env_specs + else: if num_versions == 'all': mirror_specs = spack.mirror.get_all_versions(specs) else: mirror_specs = spack.mirror.get_matching_versions( specs, num_versions=num_versions) + mirror_specs.sort( + key=lambda s: (s.name, s.version)) + + exclude_specs = [] + if args.exclude_file: + exclude_specs.extend(_read_specs_from_file(args.exclude_file)) + if args.exclude_specs: + exclude_specs.extend( + spack.cmd.parse_specs(str(args.exclude_specs).split())) + if exclude_specs: + mirror_specs = list( + x for x in mirror_specs + if not any(x.satisfies(y, strict=True) for y in exclude_specs)) + + return mirror_specs + + +def mirror_create(args): + """Create a directory to be used as a spack mirror, and fill it with + package archives.""" + mirror_specs = _determine_specs_to_mirror(args) mirror = spack.mirror.Mirror( args.directory or spack.config.get('config:source_cache')) diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py index 6555b8c8a7e..7d51224e146 100644 --- a/lib/spack/spack/cmd/modules/__init__.py +++ b/lib/spack/spack/cmd/modules/__init__.py @@ -119,8 +119,9 @@ def one_spec_or_raise(specs): " this command 
with debug output enabled for more details.") -def loads(module_type, specs, args, out=sys.stdout): +def loads(module_type, specs, args, out=None): """Prompt the list of modules associated with a list of specs""" + out = sys.stdout if out is None else out # Get a comprehensive list of specs if args.recurse_dependencies: diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 22b554178f4..f7b47a599e4 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -29,7 +29,8 @@ error_message = """You can either: a) use a more specific spec, or - b) use `spack uninstall --all` to uninstall ALL matching specs. + b) specify the spec by its hash (e.g. `spack uninstall /hash`), or + c) use `spack uninstall --all` to uninstall ALL matching specs. """ # Arguments for display_specs when we find ambiguity @@ -42,6 +43,18 @@ def setup_parser(subparser): + epilog_msg = ("Specs to be uninstalled are specified using the spec syntax" + " (`spack help --spec`) and can be identified by their " + "hashes. To remove packages that are needed only at build " + "time and were not explicitly installed see `spack gc -h`." + "\n\nWhen using the --all option ALL packages matching the " + "supplied specs will be uninstalled. For instance, " + "`spack uninstall --all libelf` uninstalls all the versions " + "of `libelf` currently present in Spack's store. If no spec " + "is supplied, all installed packages will be uninstalled. " + "If used in an environment, all packages in the environment " + "will be uninstalled.") + subparser.epilog = epilog_msg subparser.add_argument( '-f', '--force', action='store_true', dest='force', help="remove regardless of whether other packages or environments " @@ -50,12 +63,8 @@ def setup_parser(subparser): subparser, ['recurse_dependents', 'yes_to_all', 'installed_specs']) subparser.add_argument( '-a', '--all', action='store_true', dest='all', - help="USE CAREFULLY. 
Remove ALL installed packages that match each " - "supplied spec. i.e., if you `uninstall --all libelf`," - " ALL versions of `libelf` are uninstalled. If no spec is " - "supplied, all installed packages will be uninstalled. " - "If used in an environment, all packages in the environment " - "will be uninstalled.") + help="remove ALL installed packages that match each supplied spec" + ) subparser.add_argument( 'packages', diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py index 723f89ce08e..366307f0b2c 100644 --- a/lib/spack/spack/cmd/versions.py +++ b/lib/spack/spack/cmd/versions.py @@ -21,6 +21,10 @@ def setup_parser(subparser): subparser.add_argument('-s', '--safe-only', action='store_true', help='only list safe versions of the package') + subparser.add_argument( + '-c', '--concurrency', default=32, type=int, + help='number of concurrent requests' + ) arguments.add_common_arguments(subparser, ['package']) @@ -45,7 +49,7 @@ def versions(parser, args): if sys.stdout.isatty(): tty.msg('Remote versions (not yet checksummed):') - fetched_versions = pkg.fetch_remote_versions() + fetched_versions = pkg.fetch_remote_versions(args.concurrency) remote_versions = set(fetched_versions).difference(safe_versions) if not remote_versions: diff --git a/lib/spack/spack/cmd/view.py b/lib/spack/spack/cmd/view.py index 18c836736e6..151f6c15640 100644 --- a/lib/spack/spack/cmd/view.py +++ b/lib/spack/spack/cmd/view.py @@ -33,8 +33,6 @@ YamlFilesystemView. ''' -import os - import llnl.util.tty as tty from llnl.util.link_tree import MergeConflictError from llnl.util.tty.color import colorize @@ -45,13 +43,15 @@ import spack.schema.projections from spack.config import validate from spack.filesystem_view import YamlFilesystemView +from spack.filesystem_view import view_symlink, view_hardlink, view_copy from spack.util import spack_yaml as s_yaml description = "project packages to a compact naming scheme on the filesystem." 
section = "environments" level = "short" -actions_link = ["symlink", "add", "soft", "hardlink", "hard"] +actions_link = ["symlink", "add", "soft", "hardlink", "hard", "copy", + "relocate"] actions_remove = ["remove", "rm"] actions_status = ["statlink", "status", "check"] @@ -111,7 +111,10 @@ def setup_parser(sp): help='add package files to a filesystem view via symbolic links'), "hardlink": ssp.add_parser( 'hardlink', aliases=['hard'], - help='add packages files to a filesystem via via hard links'), + help='add packages files to a filesystem view via hard links'), + "copy": ssp.add_parser( + 'copy', aliases=['relocate'], + help='add package files to a filesystem view via copy/relocate'), "remove": ssp.add_parser( 'remove', aliases=['rm'], help='remove packages from a filesystem view'), @@ -125,7 +128,7 @@ def setup_parser(sp): act.add_argument('path', nargs=1, help="path to file system view directory") - if cmd in ("symlink", "hardlink"): + if cmd in ("symlink", "hardlink", "copy"): # invalid for remove/statlink, for those commands the view needs to # already know its own projections. help_msg = "Initialize view using projections from file." 
@@ -157,7 +160,7 @@ def setup_parser(sp): so["nargs"] = "+" act.add_argument('specs', **so) - for cmd in ["symlink", "hardlink"]: + for cmd in ["symlink", "hardlink", "copy"]: act = file_system_view_actions[cmd] act.add_argument("-i", "--ignore-conflicts", action='store_true') @@ -179,11 +182,19 @@ def view(parser, args): else: ordered_projections = {} + # What method are we using for this view + if args.action in ("hardlink", "hard"): + link_fn = view_hardlink + elif args.action in ("copy", "relocate"): + link_fn = view_copy + else: + link_fn = view_symlink + view = YamlFilesystemView( path, spack.store.layout, projections=ordered_projections, ignore_conflicts=getattr(args, "ignore_conflicts", False), - link=os.link if args.action in ["hardlink", "hard"] else os.symlink, + link=link_fn, verbose=args.verbose) # Process common args and specs diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py index 8afbe48c0c6..784fd5544ea 100644 --- a/lib/spack/spack/compiler.py +++ b/lib/spack/spack/compiler.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import contextlib import os import platform import re @@ -19,6 +20,7 @@ import spack.spec import spack.architecture import spack.util.executable +import spack.util.module_cmd import spack.compilers from spack.util.environment import filter_system_paths @@ -244,6 +246,14 @@ def enable_new_dtags(self): return '' return '--enable-new-dtags' + @property + def debug_flags(self): + return ['-g'] + + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3'] + # Cray PrgEnv name that can be used to load this compiler PrgEnv = None # Name of module used to switch versions of this compiler @@ -296,8 +306,10 @@ def implicit_rpaths(self): if self.enable_implicit_rpaths is False: return [] + # Put CXX first since it has the most linking issues + # And because it has flags that affect linking exe_paths = [ - x for x in [self.cc, self.cxx, self.fc, self.f77] if x] + x for x in 
[self.cxx, self.cc, self.fc, self.f77] if x] link_dirs = self._get_compiler_link_paths(exe_paths) all_required_libs = ( @@ -312,16 +324,24 @@ def required_libs(self): # By default every compiler returns the empty list return [] - @classmethod - def _get_compiler_link_paths(cls, paths): + def _get_compiler_link_paths(self, paths): first_compiler = next((c for c in paths if c), None) if not first_compiler: return [] - if not cls.verbose_flag(): + if not self.verbose_flag: # In this case there is no mechanism to learn what link directories # are used by the compiler return [] + # What flag types apply to first_compiler, in what order + flags = ['cppflags', 'ldflags'] + if first_compiler == self.cc: + flags = ['cflags'] + flags + elif first_compiler == self.cxx: + flags = ['cxxflags'] + flags + else: + flags.append('fflags') + try: tmpdir = tempfile.mkdtemp(prefix='spack-implicit-link-info') fout = os.path.join(tmpdir, 'output') @@ -332,10 +352,14 @@ def _get_compiler_link_paths(cls, paths): 'int main(int argc, char* argv[]) { ' '(void)argc; (void)argv; return 0; }\n') compiler_exe = spack.util.executable.Executable(first_compiler) - output = str(compiler_exe(cls.verbose_flag(), fin, '-o', fout, - output=str, error=str)) # str for py2 - - return _parse_non_system_link_dirs(output) + for flag_type in flags: + for flag in self.flags.get(flag_type, []): + compiler_exe.add_default_arg(flag) + with self._compiler_environment(): + output = str(compiler_exe( + self.verbose_flag, fin, '-o', fout, + output=str, error=str)) # str for py2 + return _parse_non_system_link_dirs(output) except spack.util.executable.ProcessError as pe: tty.debug('ProcessError: Command exited with non-zero status: ' + pe.long_message) @@ -343,8 +367,8 @@ def _get_compiler_link_paths(cls, paths): finally: shutil.rmtree(tmpdir, ignore_errors=True) - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): """ This property should be overridden in the compiler subclass if a verbose flag 
is available. @@ -434,6 +458,25 @@ def fc_pic_flag(self): Position Independent Code (PIC).""" return '-fPIC' + # Note: This is not a class method. The class methods are used to detect + # compilers on PATH based systems, and do not set up the run environment of + # the compiler. This method can be called on `module` based systems as well + def get_real_version(self): + """Query the compiler for its version. + + This is the "real" compiler version, regardless of what is in the + compilers.yaml file, which the user can change to name their compiler. + + Use the runtime environment of the compiler (modules and environment + modifications) to enable the compiler to run properly on any platform. + """ + cc = spack.util.executable.Executable(self.cc) + with self._compiler_environment(): + output = cc(self.version_argument, + output=str, error=str, + ignore_errors=tuple(self.ignore_version_errors)) + return self.extract_version_from_output(output) + # # Compiler classes have methods for querying the version of # specific compiler executables. This is used when discovering compilers. 
@@ -501,6 +544,30 @@ def __str__(self): self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system))))) + @contextlib.contextmanager + def _compiler_environment(self): + # store environment to replace later + backup_env = os.environ.copy() + + # load modules and set env variables + for module in self.modules: + # On cray, mic-knl module cannot be loaded without cce module + # See: https://github.com/spack/spack/issues/3153 + if os.environ.get("CRAY_CPU_TARGET") == 'mic-knl': + spack.util.module_cmd.load_module('cce') + spack.util.module_cmd.load_module(module) + + # apply other compiler environment changes + env = spack.util.environment.EnvironmentModifications() + env.extend(spack.schema.environment.parse(self.environment)) + env.apply_modifications() + + yield + + # Restore environment + os.environ.clear() + os.environ.update(backup_env) + class CompilerAccessError(spack.error.SpackError): diff --git a/lib/spack/spack/compilers/arm.py b/lib/spack/spack/compilers/arm.py index 2f6e1950065..59eb1714b81 100644 --- a/lib/spack/spack/compilers/arm.py +++ b/lib/spack/spack/compilers/arm.py @@ -51,10 +51,14 @@ def extract_version_from_output(cls, output): temp = match.group(1) + "." 
+ match.group(2) return temp - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast'] + @property def openmp_flag(self): return "-fopenmp" diff --git a/lib/spack/spack/compilers/cce.py b/lib/spack/spack/compilers/cce.py index 7aedb55a5d0..47c0263cfd7 100644 --- a/lib/spack/spack/compilers/cce.py +++ b/lib/spack/spack/compilers/cce.py @@ -32,26 +32,41 @@ class Cce(Compiler): 'f77': 'cce/ftn', 'fc': 'cce/ftn'} - version_argument = '-V' + @property + def version_argument(self): + if self.version >= ver('9.0'): + return '--version' + return '-V' + version_regex = r'[Vv]ersion.*?(\d+(\.\d+)+)' - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" + @property + def debug_flags(self): + return ['-g', '-G0', '-G1', '-G2', '-Gfast'] + @property def openmp_flag(self): + if self.version >= ver('9.0'): + return '-fopenmp' return "-h omp" @property def cxx11_flag(self): + if self.version >= ver('9.0'): + return '-std=c++11' return "-h std=c++11" @property def c99_flag(self): - if self.version >= ver('8.4'): - return '-h stc=c99,noconform,gnu' - if self.version >= ver('8.1'): + if self.version >= ver('9.0'): + return '-std=c99' + elif self.version >= ver('8.4'): + return '-h std=c99,noconform,gnu' + elif self.version >= ver('8.1'): return '-h c99,noconform,gnu' raise UnsupportedCompilerFlag(self, 'the C99 standard', @@ -60,7 +75,9 @@ def c99_flag(self): @property def c11_flag(self): - if self.version >= ver('8.5'): + if self.version >= ver('9.0'): + return '-std=c11' + elif self.version >= ver('8.5'): return '-h std=c11,noconform,gnu' raise UnsupportedCompilerFlag(self, 'the C11 standard', diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py index b14eaa1278b..846c3609dc8 100644 --- a/lib/spack/spack/compilers/clang.py +++ b/lib/spack/spack/compilers/clang.py @@ -50,6 +50,18 @@ class 
Clang(Compiler): # Subclasses use possible names of Fortran 90 compiler fc_names = ['flang', 'gfortran', 'xlf90_r'] + version_argument = '--version' + + @property + def debug_flags(self): + return ['-gcodeview', '-gdwarf-2', '-gdwarf-3', '-gdwarf-4', + '-gdwarf-5', '-gline-tables-only', '-gmodules', '-gz', '-g'] + + @property + def opt_flags(self): + return ['-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os', '-Oz', '-Og', + '-O', '-O4'] + # Clang has support for using different fortran compilers with the # clang executable. @property @@ -81,8 +93,8 @@ def is_apple(self): ver_string = str(self.version) return ver_string.endswith('-apple') - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" @property @@ -191,26 +203,6 @@ def fc_pic_flag(self): required_libs = ['libclang'] - @classmethod - @llnl.util.lang.memoized - def default_version(cls, comp): - """The ``--version`` option works for clang compilers. - On most platforms, output looks like this:: - - clang version 3.1 (trunk 149096) - Target: x86_64-unknown-linux-gnu - Thread model: posix - - On macOS, it looks like this:: - - Apple LLVM version 7.0.2 (clang-700.1.81) - Target: x86_64-apple-darwin15.2.0 - Thread model: posix - """ - compiler = Executable(comp) - output = compiler('--version', output=str, error=str) - return cls.extract_version_from_output(output) - @classmethod @llnl.util.lang.memoized def extract_version_from_output(cls, output): diff --git a/lib/spack/spack/compilers/fj.py b/lib/spack/spack/compilers/fj.py index 54d9308c4a0..3747d49d9bf 100644 --- a/lib/spack/spack/compilers/fj.py +++ b/lib/spack/spack/compilers/fj.py @@ -30,10 +30,14 @@ class Fj(spack.compiler.Compiler): required_libs = ['libfj90i', 'libfj90f', 'libfjsrcinfo'] - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'] + @property def openmp_flag(self): return "-Kopenmp" diff --git 
a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py index fc9074a67aa..98809eea25b 100644 --- a/lib/spack/spack/compilers/gcc.py +++ b/lib/spack/spack/compilers/gcc.py @@ -27,7 +27,7 @@ class Gcc(Compiler): # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes. # Homebrew and Linuxbrew may build gcc with -X, -X.Y suffixes. # Old compatibility versions may contain XY suffixes. - suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d', r'\d\d'] + suffixes = [r'-mp-\d+\.\d+', r'-\d+\.\d+', r'-\d+', r'\d\d'] # Named wrapper links within build_env_path link_paths = {'cc': 'gcc/gcc', @@ -38,10 +38,18 @@ class Gcc(Compiler): PrgEnv = 'PrgEnv-gnu' PrgEnv_compiler = 'gcc' - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" + @property + def debug_flags(self): + return ['-g', '-gstabs+', '-gstabs', '-gxcoff+', '-gxcoff', '-gvms'] + + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-Os', '-Ofast', '-Og'] + @property def openmp_flag(self): return "-fopenmp" diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py index c7d853e3fa9..b44ed73d6fc 100644 --- a/lib/spack/spack/compilers/intel.py +++ b/lib/spack/spack/compilers/intel.py @@ -32,12 +32,20 @@ class Intel(Compiler): version_argument = '--version' version_regex = r'\((?:IFORT|ICC)\) ([^ ]+)' - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng'] + @property + def debug_flags(self): + return ['-debug', '-g', '-g0', '-g1', '-g2', '-g3'] + + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os'] + @property def openmp_flag(self): if self.version < ver('16.0'): diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py index e5dffa4a67b..503a31e404b 100644 --- a/lib/spack/spack/compilers/nag.py +++ b/lib/spack/spack/compilers/nag.py @@ -30,10 +30,48 @@ 
class Nag(spack.compiler.Compiler): version_argument = '-V' version_regex = r'NAG Fortran Compiler Release ([0-9.]+)' + @property + def verbose_flag(self): + # NAG does not support a flag that would enable verbose output and + # compilation/linking at the same time (with either '-#' or '-dryrun' + # the compiler only prints the commands but does not run them). + # Therefore, the only thing we can do is to pass the '-v' argument to + # the underlying GCC. In order to get verbose output from the latter + # at both compile and linking stages, we need to call NAG with two + # additional flags: '-Wc,-v' and '-Wl,-v'. However, we return only + # '-Wl,-v' for the following reasons: + # 1) the interface of this method does not support multiple flags in + # the return value and, at least currently, verbose output at the + # linking stage has a higher priority for us; + # 2) NAG is usually mixed with GCC compiler, which also accepts + # '-Wl,-v' and produces meaningful result with it: '-v' is passed + # to the linker and the latter produces verbose output for the + # linking stage ('-Wc,-v', however, would break the compilation + # with a message from GCC that the flag is not recognized). + # + # This way, we at least enable the implicit rpath detection, which is + # based on compilation of a C file (see method + # spack.compiler._get_compiler_link_paths): in the case of a mixed + # NAG/GCC toolchain, the flag will be passed to g++ (e.g. + # 'g++ -Wl,-v ./main.c'), otherwise, the flag will be passed to nagfor + # (e.g. 'nagfor -Wl,-v ./main.c' - note that nagfor recognizes '.c' + # extension and treats the file accordingly). The list of detected + # rpaths will contain only GCC-related directories and rpaths to + # NAG-related directories are injected by nagfor anyway. 
+ return "-Wl,-v" + @property def openmp_flag(self): return "-openmp" + @property + def debug_flags(self): + return ['-g', '-gline', '-g90'] + + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'] + @property def cxx11_flag(self): # NAG does not have a C++ compiler diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py index 13d8f69ec51..f782e1fc86f 100644 --- a/lib/spack/spack/compilers/pgi.py +++ b/lib/spack/spack/compilers/pgi.py @@ -33,10 +33,18 @@ class Pgi(Compiler): ignore_version_errors = [2] # `pgcc -V` on PowerPC annoyingly returns 2 version_regex = r'pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on ' - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-v" + @property + def debug_flags(self): + return ['-g', '-gopt'] + + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'] + @property def openmp_flag(self): return "-mp" diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py index 46a5002e25d..ce74ec47c1f 100644 --- a/lib/spack/spack/compilers/xl.py +++ b/lib/spack/spack/compilers/xl.py @@ -29,10 +29,18 @@ class Xl(Compiler): version_argument = '-qversion' version_regex = r'([0-9]?[0-9]\.[0-9])' - @classmethod - def verbose_flag(cls): + @property + def verbose_flag(self): return "-V" + @property + def debug_flags(self): + return ['-g', '-g0', '-g1', '-g2', '-g8', '-g9'] + + @property + def opt_flags(self): + return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4', '-O5', '-Ofast'] + @property def openmp_flag(self): return "-qsmp=omp" diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index e460f456002..7424b8a90dc 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -727,7 +727,9 @@ def __init__(self, compiler_spec, arch=None): ) super(UnavailableCompilerVersionError, self).__init__( - err_msg, "Run 'spack compiler find' to add compilers.") + err_msg, "Run 'spack 
compiler find' to add compilers or " + "'spack compilers' to see which compilers are already recognized" + " by spack.") class NoValidVersionError(spack.error.SpackError): diff --git a/lib/spack/spack/container/images.json b/lib/spack/spack/container/images.json index ecd911815d4..0c047c7b8b0 100644 --- a/lib/spack/spack/container/images.json +++ b/lib/spack/spack/container/images.json @@ -8,7 +8,10 @@ "build_tags": { "develop": "latest", "0.14": "0.14", - "0.14.0": "0.14.0" + "0.14.0": "0.14.0", + "0.14.1": "0.14.1", + "0.14.2": "0.14.2", + "0.14.3": "0.14.3" } }, "ubuntu:16.04": { @@ -20,7 +23,10 @@ "build_tags": { "develop": "latest", "0.14": "0.14", - "0.14.0": "0.14.0" + "0.14.0": "0.14.0", + "0.14.1": "0.14.1", + "0.14.2": "0.14.2", + "0.14.3": "0.14.3" } }, "centos:7": { @@ -32,7 +38,10 @@ "build_tags": { "develop": "latest", "0.14": "0.14", - "0.14.0": "0.14.0" + "0.14.0": "0.14.0", + "0.14.1": "0.14.1", + "0.14.2": "0.14.2", + "0.14.3": "0.14.3" } }, "centos:6": { @@ -44,7 +53,10 @@ "build_tags": { "develop": "latest", "0.14": "0.14", - "0.14.0": "0.14.0" + "0.14.0": "0.14.0", + "0.14.1": "0.14.1", + "0.14.2": "0.14.2", + "0.14.3": "0.14.3" } } } \ No newline at end of file diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index a2e871ee1a3..0f4c9663bae 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -190,6 +190,7 @@ def __init__(self, root, **kwargs): "{architecture}/" "{compiler.name}-{compiler.version}/" "{name}-{version}-{hash}") + self.path_scheme = self.path_scheme.lower() if self.hash_len is not None: if re.search(r'{hash:\d+}', self.path_scheme): raise InvalidDirectoryLayoutParametersError( diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index fff1485e3c3..1b9df358f47 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -37,6 +37,7 @@ from spack.spec_list import SpecList, InvalidSpecConstraintError from 
spack.variant import UnknownVariantError import spack.util.lock as lk +from spack.util.path import substitute_path_variables #: environment variable used to indicate the active environment spack_env_var = 'SPACK_ENV' @@ -406,8 +407,12 @@ def validate(data, filename=None): try: spack.schema.Validator(spack.schema.env.schema).validate(validate_data) except jsonschema.ValidationError as e: + if hasattr(e.instance, 'lc'): + line_number = e.instance.lc.line + 1 + else: + line_number = None raise spack.config.ConfigFormatError( - e, data, filename, e.instance.lc.line + 1) + e, data, filename, line_number) return validate_data @@ -776,8 +781,8 @@ def included_config_scopes(self): # highest-precedence scopes are last. includes = config_dict(self.yaml).get('include', []) for i, config_path in enumerate(reversed(includes)): - # allow paths to contain environment variables - config_path = config_path.format(**os.environ) + # allow paths to contain spack config/environment variables, etc. + config_path = substitute_path_variables(config_path) # treat relative paths as relative to the environment if not os.path.isabs(config_path): @@ -1091,6 +1096,25 @@ def regenerate_views(self): for view in self.views.values(): view.regenerate(specs, self.roots()) + def _env_modifications_for_default_view(self, reverse=False): + all_mods = spack.util.environment.EnvironmentModifications() + + errors = [] + for _, spec in self.concretized_specs(): + if spec in self.default_view and spec.package.installed: + try: + mods = uenv.environment_modifications_for_spec( + spec, self.default_view) + except Exception as e: + msg = ("couldn't get environment settings for %s" + % spec.format("{name}@{version} /{hash:7}")) + errors.append((msg, str(e))) + continue + + all_mods.extend(mods.reversed() if reverse else mods) + + return all_mods, errors + def add_default_view_to_shell(self, shell): env_mod = spack.util.environment.EnvironmentModifications() @@ -1101,10 +1125,11 @@ def 
add_default_view_to_shell(self, shell): env_mod.extend(uenv.unconditional_environment_modifications( self.default_view)) - for _, spec in self.concretized_specs(): - if spec in self.default_view and spec.package.installed: - env_mod.extend(uenv.environment_modifications_for_spec( - spec, self.default_view)) + mods, errors = self._env_modifications_for_default_view() + env_mod.extend(mods) + if errors: + for err in errors: + tty.warn(*err) # deduplicate paths from specs mapped to the same location for env_var in env_mod.group_by_name(): @@ -1122,11 +1147,9 @@ def rm_default_view_from_shell(self, shell): env_mod.extend(uenv.unconditional_environment_modifications( self.default_view).reversed()) - for _, spec in self.concretized_specs(): - if spec in self.default_view and spec.package.installed: - env_mod.extend( - uenv.environment_modifications_for_spec( - spec, self.default_view).reversed()) + mods, _ = self._env_modifications_for_default_view(reverse=True) + env_mod.extend(mods) + return env_mod.shell_modifications(shell) def _add_concrete_spec(self, spec, concrete, new=True): @@ -1210,26 +1233,19 @@ def install_all(self, args=None): self._install(spec, **kwargs) - def all_specs_by_hash(self): - """Map of hashes to spec for all specs in this environment.""" - # Note this uses dag-hashes calculated without build deps as keys, - # whereas the environment tracks specs based on dag-hashes calculated - # with all dependencies. 
This function should not be used by an - # Environment object for management of its own data structures - hashes = {} - for h in self.concretized_order: - specs = self.specs_by_hash[h].traverse(deptype=('link', 'run')) - for spec in specs: - hashes[spec.dag_hash()] = spec - return hashes - def all_specs(self): """Return all specs, even those a user spec would shadow.""" - return sorted(self.all_specs_by_hash().values()) + all_specs = set() + for h in self.concretized_order: + all_specs.update(self.specs_by_hash[h].traverse()) + + return sorted(all_specs) def all_hashes(self): - """Return all specs, even those a user spec would shadow.""" - return list(self.all_specs_by_hash().keys()) + """Return hashes of all specs. + + Note these hashes exclude build dependencies.""" + return list(set(s.dag_hash() for s in self.all_specs())) def roots(self): """Specs explicitly requested by the user *in this environment*. @@ -1427,9 +1443,9 @@ def write(self, regenerate_views=True): # The primary list is handled differently continue - active_yaml_lists = [l for l in yaml_dict.get('definitions', []) - if name in l and - _eval_conditional(l.get('when', 'True'))] + active_yaml_lists = [x for x in yaml_dict.get('definitions', []) + if name in x and + _eval_conditional(x.get('when', 'True'))] # Remove any specs in yaml that are not in internal representation for ayl in active_yaml_lists: diff --git a/lib/spack/spack/extensions.py b/lib/spack/spack/extensions.py index b8fde7d7fa3..4358dcd52f4 100644 --- a/lib/spack/spack/extensions.py +++ b/lib/spack/spack/extensions.py @@ -11,10 +11,17 @@ import types import llnl.util.lang -import llnl.util.tty as tty import spack.config +import spack.error -extension_regexp = re.compile(r'spack-([\w]*)') +_extension_regexp = re.compile(r'spack-(\w[-\w]*)$') + + +# TODO: For consistency we should use spack.cmd.python_name(), but +# currently this would create a circular relationship between +# spack.cmd and spack.extensions. 
+def _python_name(cmd_name): + return cmd_name.replace('-', '_') def extension_name(path): @@ -24,15 +31,16 @@ def extension_name(path): path (str): path where the extension resides Returns: - The extension name or None if path doesn't match the format - for Spack's extension. + The extension name. + + Raises: + ExtensionNamingError: if path does not match the expected format + for a Spack command extension. """ - regexp_match = re.search(extension_regexp, os.path.basename(path)) + regexp_match = re.search(_extension_regexp, + os.path.basename(os.path.normpath(path))) if not regexp_match: - msg = "[FOLDER NAMING]" - msg += " {0} doesn't match the format for Spack's extensions" - tty.warn(msg.format(path)) - return None + raise ExtensionNamingError(path) return regexp_match.group(1) @@ -40,23 +48,30 @@ def load_command_extension(command, path): """Loads a command extension from the path passed as argument. Args: - command (str): name of the command + command (str): name of the command (contains ``-``, not ``_``). path (str): base path of the command extension Returns: - A valid module object if the command is found or None + A valid module if found and loadable; None if not found. Module + loading exceptions are passed through. 
""" - extension = extension_name(path) - if not extension: - return None + extension = _python_name(extension_name(path)) # Compute the name of the module we search, exit early if already imported cmd_package = '{0}.{1}.cmd'.format(__name__, extension) - python_name = command.replace('-', '_') + python_name = _python_name(command) module_name = '{0}.{1}'.format(cmd_package, python_name) if module_name in sys.modules: return sys.modules[module_name] + # Compute the absolute path of the file to be loaded, along with the + # name of the python module where it will be stored + cmd_path = os.path.join(path, extension, 'cmd', python_name + '.py') + + # Short circuit if the command source file does not exist + if not os.path.exists(cmd_path): + return None + def ensure_package_creation(name): package_name = '{0}.{1}'.format(__name__, name) if package_name in sys.modules: @@ -82,17 +97,10 @@ def ensure_package_creation(name): ensure_package_creation(extension) ensure_package_creation(extension + '.cmd') - # Compute the absolute path of the file to be loaded, along with the - # name of the python module where it will be stored - cmd_path = os.path.join(path, extension, 'cmd', command + '.py') - - try: - # TODO: Upon removal of support for Python 2.6 substitute the call - # TODO: below with importlib.import_module(module_name) - module = llnl.util.lang.load_module_from_file(module_name, cmd_path) - sys.modules[module_name] = module - except (ImportError, IOError): - module = None + # TODO: Upon removal of support for Python 2.6 substitute the call + # TODO: below with importlib.import_module(module_name) + module = llnl.util.lang.load_module_from_file(module_name, cmd_path) + sys.modules[module_name] = module return module @@ -103,9 +111,8 @@ def get_command_paths(): extension_paths = spack.config.get('config:extensions') or [] for path in extension_paths: - extension = extension_name(path) - if extension: - command_paths.append(os.path.join(path, extension, 'cmd')) + 
extension = _python_name(extension_name(path)) + command_paths.append(os.path.join(path, extension, 'cmd')) return command_paths @@ -144,7 +151,7 @@ def get_module(cmd_name): if module: return module else: - return None + raise CommandNotFoundError(cmd_name) def get_template_dirs(): @@ -154,3 +161,23 @@ def get_template_dirs(): extension_dirs = spack.config.get('config:extensions') or [] extensions = [os.path.join(x, 'templates') for x in extension_dirs] return extensions + + +class CommandNotFoundError(spack.error.SpackError): + """Exception class thrown when a requested command is not recognized as + such. + """ + def __init__(self, cmd_name): + super(CommandNotFoundError, self).__init__( + '{0} is not a recognized Spack command or extension command;' + ' check with `spack commands`.'.format(cmd_name)) + + +class ExtensionNamingError(spack.error.SpackError): + """Exception class thrown when a configured extension does not follow + the expected naming convention. + """ + def __init__(self, path): + super(ExtensionNamingError, self).__init__( + '{0} does not match the format for a Spack extension path.' + .format(path)) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index d7613ae58af..5f0cc4db5d7 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -292,6 +292,7 @@ def fetch(self): tty.msg("Already downloaded %s" % self.archive_file) return + url = None for url in self.candidate_urls: try: partial_file, save_file = self._fetch_from_url(url) @@ -303,7 +304,7 @@ def fetch(self): pass if not self.archive_file: - raise FailedDownloadError(self.url) + raise FailedDownloadError(url) def _fetch_from_url(self, url): save_file = None @@ -336,7 +337,7 @@ def _fetch_from_url(self, url): else: curl_args.append('-sS') # just errors when not. 
- connect_timeout = spack.config.get('config:connect_timeout') + connect_timeout = spack.config.get('config:connect_timeout', 10) if self.extra_options: cookie = self.extra_options.get('cookie') @@ -369,12 +370,12 @@ def _fetch_from_url(self, url): if curl.returncode == 22: # This is a 404. Curl will print the error. raise FailedDownloadError( - self.url, "URL %s was not found!" % self.url) + url, "URL %s was not found!" % url) elif curl.returncode == 60: # This is a certificate error. Suggest spack -k raise FailedDownloadError( - self.url, + url, "Curl was unable to fetch due to invalid certificate. " "This is either an attack, or your cluster's SSL " "configuration is bad. If you believe your SSL " @@ -386,7 +387,7 @@ def _fetch_from_url(self, url): # This is some other curl error. Curl will print the # error, but print a spack message too raise FailedDownloadError( - self.url, + url, "Curl failed with error %d" % curl.returncode) # Check if we somehow got an HTML file rather than the archive we diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py index babe1b9c0ea..0f21e4d975d 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import filecmp import functools as ft import os import re @@ -18,11 +17,14 @@ mkdirp, remove_dead_links, remove_empty_directories) import spack.util.spack_yaml as s_yaml +import spack.util.spack_json as s_json import spack.spec import spack.store import spack.schema.projections +import spack.projections import spack.config +import spack.relocate from spack.error import SpackError from spack.directory_layout import ExtensionAlreadyInstalledError from spack.directory_layout import YamlViewExtensionsLayout @@ -40,6 +42,58 @@ _projections_path = '.spack/projections.yaml' +def view_symlink(src, dst, **kwargs): + # keyword arguments are irrelevant + # here to fit required call signature + os.symlink(src, dst) + 
+ +def view_hardlink(src, dst, **kwargs): + # keyword arguments are irrelevant + # here to fit required call signature + os.link(src, dst) + + +def view_copy(src, dst, view, spec=None): + """ + Copy a file from src to dst. + + Use spec and view to generate relocations + """ + shutil.copyfile(src, dst) + if spec: + # Not metadata, we have to relocate it + + # Get information on where to relocate from/to + prefix_to_projection = dict( + (dep.prefix, view.get_projection_for_spec(dep)) + for dep in spec.traverse() + ) + + if spack.relocate.is_binary(dst): + # relocate binaries + spack.relocate.relocate_text_bin( + binaries=[dst], + orig_install_prefix=spec.prefix, + new_install_prefix=view.get_projection_for_spec(spec), + orig_spack=spack.paths.spack_root, + new_spack=view._root, + new_prefixes=prefix_to_projection + ) + else: + # relocate text + spack.relocate.relocate_text( + files=[dst], + orig_layout_root=spack.store.layout.root, + new_layout_root=view._root, + orig_install_prefix=spec.prefix, + new_install_prefix=view.get_projection_for_spec(spec), + orig_spack=spack.paths.spack_root, + new_spack=view._root, + new_prefixes=prefix_to_projection + ) + + class FilesystemView(object): """ Governs a filesystem view that is located at certain root-directory. @@ -66,9 +120,12 @@ def __init__(self, root, layout, **kwargs): self.projections = kwargs.get('projections', {}) self.ignore_conflicts = kwargs.get("ignore_conflicts", False) - self.link = kwargs.get("link", os.symlink) self.verbose = kwargs.get("verbose", False) + # Setup link function to include view + link_func = kwargs.get("link", view_symlink) + self.link = ft.partial(link_func, view=self) + def add_specs(self, *specs, **kwargs): """ Add given specs to view. @@ -354,12 +411,34 @@ def remove_file(self, src, dest): if not os.path.lexists(dest): tty.warn("Tried to remove %s which does not exist" % dest) return - if not os.path.islink(dest): - raise ValueError("%s is not a link tree!" 
% dest) - # remove if dest is a hardlink/symlink to src; this will only - be false if two packages are merged into a prefix and have a - conflicting file - if filecmp.cmp(src, dest, shallow=True): + + def needs_file(spec, file): + # convert the file we want to remove to a source in this spec + projection = self.get_projection_for_spec(spec) + relative_path = os.path.relpath(file, projection) + test_path = os.path.join(spec.prefix, relative_path) + + # check if this spec owns a file of that name (through the + # manifest in the metadata dir, which we have in the view). + manifest_file = os.path.join(self.get_path_meta_folder(spec), + spack.store.layout.manifest_file_name) + try: + with open(manifest_file, 'r') as f: + manifest = s_json.load(f) + except (OSError, IOError): + # if we can't load it, assume it doesn't know about the file. + manifest = {} + return test_path in manifest + + # remove if dest is not owned by any other package in the view + # This will only be false if two packages are merged into a prefix + # and have a conflicting file + + # check all specs for whether they own the file. That includes the spec + # we are currently removing, as we remove files before unlinking the + # metadata directory. 
+ if len([s for s in self.get_all_specs() + if needs_file(s, dest)]) <= 1: os.remove(dest) def check_added(self, spec): @@ -470,14 +549,9 @@ def get_projection_for_spec(self, spec): if spec.package.extendee_spec: locator_spec = spec.package.extendee_spec - all_fmt_str = None - for spec_like, fmt_str in self.projections.items(): - if locator_spec.satisfies(spec_like, strict=True): - return os.path.join(self._root, locator_spec.format(fmt_str)) - elif spec_like == 'all': - all_fmt_str = fmt_str - if all_fmt_str: - return os.path.join(self._root, locator_spec.format(all_fmt_str)) + proj = spack.projections.get_projection(self.projections, locator_spec) + if proj: + return os.path.join(self._root, locator_spec.format(proj)) return self._root def get_all_specs(self): diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py index 213a85f0dc7..0eeec020bc1 100644 --- a/lib/spack/spack/installer.py +++ b/lib/spack/spack/installer.py @@ -562,6 +562,8 @@ def package_id(pkg): even with exceptions. restage (bool): Force spack to restage the package source. skip_patch (bool): Skip patch stage of build if True. + stop_before (InstallPhase): stop execution before this + installation phase (or None) stop_at (InstallPhase): last installation phase to be executed (or None) tests (bool or list or set): False to run no tests, True to test @@ -779,17 +781,32 @@ def _check_last_phase(self, **kwargs): Ensures the package being installed has a valid last phase before proceeding with the installation. - The ``stop_at`` argument is removed from the installation arguments. + The ``stop_before`` or ``stop_at`` arguments are removed from the + installation arguments. 
+ + The last phase is also set to None if it is the last phase of the + package already Args: kwargs: + ``stop_before``': stop before execution of this phase (or None) ``stop_at``': last installation phase to be executed (or None) """ + self.pkg.stop_before_phase = kwargs.pop('stop_before', None) + if self.pkg.stop_before_phase is not None and \ + self.pkg.stop_before_phase not in self.pkg.phases: + tty.die('\'{0}\' is not an allowed phase for package {1}' + .format(self.pkg.stop_before_phase, self.pkg.name)) + self.pkg.last_phase = kwargs.pop('stop_at', None) if self.pkg.last_phase is not None and \ self.pkg.last_phase not in self.pkg.phases: tty.die('\'{0}\' is not an allowed phase for package {1}' .format(self.pkg.last_phase, self.pkg.name)) + # If we got a last_phase, make sure it's not already last + if self.pkg.last_phase and \ + self.pkg.last_phase == self.pkg.phases[-1]: + self.pkg.last_phase = None def _cleanup_all_tasks(self): """Cleanup all build tasks to include releasing their locks.""" @@ -1154,13 +1171,12 @@ def build_process(): if task.compiler: spack.compilers.add_compilers_to_config( spack.compilers.find_compilers([pkg.spec.prefix])) - - except StopIteration as e: - # A StopIteration exception means that do_install was asked to - # stop early from clients. 
- tty.msg('{0} {1}'.format(self.pid, str(e))) - tty.msg('Package stage directory : {0}' - .format(pkg.stage.source_path)) + except spack.build_environment.StopPhase as e: + # A StopPhase exception means that do_install was asked to + # stop early from clients, and is not an error at this point + tty.debug('{0} {1}'.format(self.pid, str(e))) + tty.debug('Package stage directory : {0}' + .format(pkg.stage.source_path)) _install_task.__doc__ += install_args_docstring @@ -1466,6 +1482,12 @@ def install(self, **kwargs): if lock is not None: self._update_installed(task) _print_installed_pkg(pkg.prefix) + + # It's an already installed compiler, add it to the config + if task.compiler: + spack.compilers.add_compilers_to_config( + spack.compilers.find_compilers([pkg.spec.prefix])) + else: # At this point we've failed to get a write or a read # lock, which means another process has taken a write @@ -1498,8 +1520,10 @@ def install(self, **kwargs): self._update_installed(task) # If we installed then we should keep the prefix + stop_before_phase = getattr(pkg, 'stop_before_phase', None) last_phase = getattr(pkg, 'last_phase', None) - keep_prefix = last_phase is None or keep_prefix + keep_prefix = keep_prefix or \ + (stop_before_phase is None and last_phase is None) except spack.directory_layout.InstallDirectoryAlreadyExistsError: tty.debug("Keeping existing install prefix in place.") diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index f42fbebbf5f..833048bab18 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -92,6 +92,7 @@ #: Recorded directory where spack command was originally invoked spack_working_dir = None +spack_ld_library_path = os.environ.get('LD_LIBRARY_PATH', '') def set_working_dir(): @@ -737,17 +738,11 @@ def main(argv=None): # ensure options on spack command come before everything setup_main_options(args) - # Try to load the particular command the caller asked for. If there - # is no module for it, just die. 
+ # Try to load the particular command the caller asked for. cmd_name = args.command[0] cmd_name = aliases.get(cmd_name, cmd_name) - try: - command = parser.add_command(cmd_name) - except ImportError: - if spack.config.get('config:debug'): - raise - tty.die("Unknown command: %s" % args.command[0]) + command = parser.add_command(cmd_name) # Re-parse with the proper sub-parser added. args, unknown = parser.parse_known_args() diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py index fe9340402e3..44aff24c44f 100644 --- a/lib/spack/spack/modules/common.py +++ b/lib/spack/spack/modules/common.py @@ -41,6 +41,7 @@ import spack.error import spack.paths import spack.schema.environment +import spack.projections as proj import spack.tengine as tengine import spack.util.environment import spack.util.file_permissions as fp @@ -383,6 +384,9 @@ class BaseConfiguration(object): querying easier. It needs to be sub-classed for specific module types. """ + default_projections = { + 'all': '{name}-{version}-{compiler.name}-{compiler.version}'} + def __init__(self, spec): # Module where type(self) is defined self.module = inspect.getmodule(self) @@ -393,19 +397,23 @@ def __init__(self, spec): self.conf = merge_config_rules(self.module.configuration(), self.spec) @property - def naming_scheme(self): - """Naming scheme suitable for non-hierarchical layouts""" - scheme = self.module.configuration().get( - 'naming_scheme', - '{name}-{version}-{compiler.name}-{compiler.version}' - ) + def projections(self): + """Projection from specs to module names""" + # backwards compatibility for naming_scheme key + conf = self.module.configuration() + if 'naming_scheme' in conf: + default = {'all': conf['naming_scheme']} + else: + default = self.default_projections + projections = conf.get('projections', default) # Ensure the named tokens we are expanding are allowed, see # issue #2884 for reference msg = 'some tokens cannot be part of the module naming scheme' - 
_check_tokens_are_valid(scheme, message=msg) + for projection in projections.values(): + _check_tokens_are_valid(projection, message=msg) - return scheme + return projections @property def template(self): @@ -553,7 +561,11 @@ def use_name(self): to console to use it. This implementation fits the needs of most non-hierarchical layouts. """ - name = self.spec.format(self.conf.naming_scheme) + projection = proj.get_projection(self.conf.projections, self.spec) + if not projection: + projection = self.conf.default_projections['all'] + + name = self.spec.format(projection) # Not everybody is working on linux... parts = name.split('/') name = os.path.join(*parts) diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py index a9ebb0bba96..018edb35adf 100644 --- a/lib/spack/spack/modules/lmod.py +++ b/lib/spack/spack/modules/lmod.py @@ -91,6 +91,7 @@ def guess_core_compilers(store=False): class LmodConfiguration(BaseConfiguration): """Configuration class for lmod module files.""" + default_projections = {'all': os.path.join('{name}', '{version}')} @property def core_compilers(self): @@ -110,6 +111,11 @@ def core_compilers(self): raise CoreCompilersNotFoundError(msg) return value + @property + def core_specs(self): + """Returns the list of "Core" specs""" + return configuration().get('core_specs', []) + @property def hierarchy_tokens(self): """Returns the list of tokens that are part of the modulefile @@ -140,6 +146,11 @@ def requires(self): to the actual provider. 'compiler' is always present among the requirements. 
""" + # If it's a core_spec, lie and say it requires a core compiler + if any(self.spec.satisfies(core_spec) + for core_spec in self.core_specs): + return {'compiler': self.core_compilers[0]} + # Keep track of the requirements that this package has in terms # of virtual packages that participate in the hierarchical structure requirements = {'compiler': self.spec.compiler} @@ -233,18 +244,6 @@ def filename(self): ) return fullname - @property - def use_name(self): - """Returns the 'use' name of the module i.e. the name you have to type - to console to use it. - """ - # Package name and version - base = os.path.join("{name}", "{version}") - name_parts = [self.spec.format(base)] - # The remaining elements are filename suffixes - name_parts.extend(self.conf.suffixes) - return '-'.join(name_parts) - def token_to_path(self, name, value): """Transforms a hierarchy token into the corresponding path part. diff --git a/lib/spack/spack/modules/tcl.py b/lib/spack/spack/modules/tcl.py index ec8032a0e18..0efc6332fef 100644 --- a/lib/spack/spack/modules/tcl.py +++ b/lib/spack/spack/modules/tcl.py @@ -12,6 +12,7 @@ import llnl.util.tty as tty import spack.config +import spack.projections as proj import spack.tengine as tengine from .common import BaseConfiguration, BaseFileLayout from .common import BaseContext, BaseModuleFileWriter @@ -72,12 +73,12 @@ def prerequisites(self): def conflicts(self): """List of conflicts for the tcl module file.""" fmts = [] - naming_scheme = self.conf.naming_scheme + projection = proj.get_projection(self.conf.projections, self.spec) f = string.Formatter() for item in self.conf.conflicts: if len([x for x in f.parse(item)]) > 1: for naming_dir, conflict_dir in zip( - naming_scheme.split('/'), item.split('/') + projection.split('/'), item.split('/') ): if naming_dir != conflict_dir: message = 'conflict scheme does not match naming ' @@ -87,7 +88,7 @@ def conflicts(self): message += '** You may want to check your ' message += '`modules.yaml` 
configuration file **\n' tty.error(message.format(spec=self.spec, - nformat=naming_scheme, + nformat=projection, cformat=item)) raise SystemExit('Module generation aborted.') item = self.spec.format(item) diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cray_backend.py similarity index 81% rename from lib/spack/spack/operating_systems/cnl.py rename to lib/spack/spack/operating_systems/cray_backend.py index 3d4036cb470..91c0e6ae985 100644 --- a/lib/spack/spack/operating_systems/cnl.py +++ b/lib/spack/spack/operating_systems/cray_backend.py @@ -10,7 +10,7 @@ import spack.error import spack.version -from spack.architecture import OperatingSystem +from spack.operating_systems.linux_distro import LinuxDistro from spack.util.module_cmd import module #: Possible locations of the Cray CLE release file, @@ -68,7 +68,7 @@ def read_clerelease_file(): return line.strip() -class Cnl(OperatingSystem): +class CrayBackend(LinuxDistro): """Compute Node Linux (CNL) is the operating system used for the Cray XC series super computers. It is a very stripped down version of GNU/Linux. Any compilers found through this operating system will be used with @@ -79,7 +79,15 @@ class Cnl(OperatingSystem): def __init__(self): name = 'cnl' version = self._detect_crayos_version() - super(Cnl, self).__init__(name, version) + if version: + # If we found a CrayOS version, we do not want the information + # from LinuxDistro. 
In order to skip the logic from + # external.distro.linux_distribution, while still calling __init__ + # methods further up the MRO, we skip LinuxDistro in the MRO and + # call the OperatingSystem superclass __init__ method + super(LinuxDistro, self).__init__(name, version) + else: + super(CrayBackend, self).__init__() self.modulecmd = module def __str__(self): @@ -95,8 +103,15 @@ def _detect_crayos_version(cls): v = read_clerelease_file() return spack.version.Version(v)[0] else: - raise spack.error.UnsupportedPlatformError( - 'Unable to detect Cray OS version') + # Not all Cray systems run CNL on the backend. + # Systems running in what Cray calls "cluster" mode run other + # linux OSs under the Cray PE. + # So if we don't detect any Cray OS version on the system, + # we return None. We can't ever be sure we will get a Cray OS + # version. + # Returning None allows the calling code to test for the value + # being "True-ish" rather than requiring a try/except block. + return None def arguments_to_detect_version_fn(self, paths): import spack.compilers diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b8ded0364b3..dc32effbec9 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -115,13 +115,18 @@ def phase_wrapper(spec, prefix): return phase_wrapper def _on_phase_start(self, instance): - pass + # If a phase has a matching stop_before_phase attribute, + # stop the installation process raising a StopPhase + if getattr(instance, 'stop_before_phase', None) == self.name: + from spack.build_environment import StopPhase + raise StopPhase('Stopping before \'{0}\' phase'.format(self.name)) def _on_phase_exit(self, instance): # If a phase has a matching last_phase attribute, - # stop the installation process raising a StopIteration + # stop the installation process raising a StopPhase if getattr(instance, 'last_phase', None) == self.name: - raise StopIteration('Stopping at \'{0}\' phase'.format(self.name)) + from 
spack.build_environment import StopPhase + raise StopPhase('Stopping at \'{0}\' phase'.format(self.name)) def copy(self): try: @@ -328,7 +333,7 @@ def add_files_to_view(self, view, merge_map): """ for src, dst in merge_map.items(): if not os.path.exists(dst): - view.link(src, dst) + view.link(src, dst, spec=self.spec) def remove_files_from_view(self, view, merge_map): """Given a map of package files to files currently linked in the view, @@ -2007,12 +2012,16 @@ def all_urls(self): if hasattr(self, 'url') and self.url: urls.append(self.url) + # fetch from first entry in urls to save time + if hasattr(self, 'urls') and self.urls: + urls.append(self.urls[0]) + for args in self.versions.values(): if 'url' in args: urls.append(args['url']) return urls - def fetch_remote_versions(self): + def fetch_remote_versions(self, concurrency=128): """Find remote versions of this package. Uses ``list_url`` and any other URLs listed in the package file. @@ -2025,7 +2034,8 @@ def fetch_remote_versions(self): try: return spack.util.web.find_versions_of_archive( - self.all_urls, self.list_url, self.list_depth) + self.all_urls, self.list_url, self.list_depth, concurrency + ) except spack.util.web.NoNetworkConnectionError as e: tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message) diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py index 0158c7063a6..67325fc7ae1 100644 --- a/lib/spack/spack/package_prefs.py +++ b/lib/spack/spack/package_prefs.py @@ -190,11 +190,17 @@ def spec_externals(spec): def is_spec_buildable(spec): """Return true if the spec pkgspec is configured as buildable""" + allpkgs = spack.config.get('packages') - do_not_build = [name for name, entry in allpkgs.items() - if not entry.get('buildable', True)] - return not (spec.name in do_not_build or - any(spec.package.provides(name) for name in do_not_build)) + all_buildable = allpkgs.get('all', {}).get('buildable', True) + + # Get the list of names for which all_buildable is 
overridden + reverse = [name for name, entry in allpkgs.items() + if entry.get('buildable', all_buildable) != all_buildable] + # Does this spec override all_buildable + spec_reversed = (spec.name in reverse or + any(spec.package.provides(name) for name in reverse)) + return not all_buildable if spec_reversed else all_buildable def get_package_dir_permissions(spec): @@ -204,7 +210,7 @@ def get_package_dir_permissions(spec): attribute sticky for the directory. Package-specific settings take precedent over settings for ``all``""" perms = get_package_permissions(spec) - if perms & stat.S_IRWXG: + if perms & stat.S_IRWXG and spack.config.get('config:allow_sgid', True): perms |= stat.S_ISGID return perms diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py index 6e8c79ef0cd..9c8770c3680 100644 --- a/lib/spack/spack/platforms/cray.py +++ b/lib/spack/spack/platforms/cray.py @@ -4,30 +4,32 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import os.path import re +import platform +import llnl.util.cpu as cpu import llnl.util.tty as tty from spack.paths import build_env_path from spack.util.executable import Executable from spack.architecture import Platform, Target, NoPlatformError from spack.operating_systems.cray_frontend import CrayFrontend -from spack.operating_systems.cnl import Cnl +from spack.operating_systems.cray_backend import CrayBackend from spack.util.module_cmd import module -def _get_modules_in_modulecmd_output(output): - '''Return list of valid modules parsed from modulecmd output string.''' - return [i for i in output.splitlines() - if len(i.split()) == 1] +_craype_name_to_target_name = { + 'x86-cascadelake': 'cascadelake', + 'x86-naples': 'zen', + 'x86-rome': 'zen', # Cheating because we have the wrong modules on rzcrayz + 'x86-skylake': 'skylake_avx512', + 'mic-knl': 'mic_knl', + 'interlagos': 'bulldozer', + 'abudhabi': 'piledriver', +} -def _fill_craype_targets_from_modules(targets, modules): - '''Extend CrayPE 
CPU targets list with those found in list of modules.''' - # Craype- module prefixes that are not valid CPU targets. - non_targets = ('hugepages', 'network', 'target', 'accel', 'xtpe') - pattern = r'craype-(?!{0})(\S*)'.format('|'.join(non_targets)) - for mod in modules: - if 'craype-' in mod: - targets.extend(re.findall(pattern, mod)) +def _target_name_from_craype_target_name(name): + return _craype_name_to_target_name.get(name, name) class Cray(Platform): @@ -47,40 +49,34 @@ def __init__(self): # Make all craype targets available. for target in self._avail_targets(): - name = target.replace('-', '_') + name = _target_name_from_craype_target_name(target) self.add_target(name, Target(name, 'craype-%s' % target)) - self.add_target("x86_64", Target("x86_64")) - self.add_target("front_end", Target("x86_64")) - self.front_end = "x86_64" - - # Get aliased targets from config or best guess from environment: - for name in ('front_end', 'back_end'): - _target = getattr(self, name, None) - if _target is None: - _target = os.environ.get('SPACK_' + name.upper()) - if _target is None and name == 'back_end': - _target = self._default_target_from_env() - if _target is not None: - safe_name = _target.replace('-', '_') - setattr(self, name, safe_name) - self.add_target(name, self.targets[safe_name]) - - if self.back_end is not None: - self.default = self.back_end - self.add_target('default', self.targets[self.back_end]) - else: + self.back_end = os.environ.get('SPACK_BACK_END', + self._default_target_from_env()) + self.default = self.back_end + if self.back_end not in self.targets: + # We didn't find a target module for the backend raise NoPlatformError() + # Setup frontend targets + for name in cpu.targets: + if name not in self.targets: + self.add_target(name, Target(name)) + self.front_end = os.environ.get('SPACK_FRONT_END', cpu.host().name) + if self.front_end not in self.targets: + self.add_target(self.front_end, Target(self.front_end)) + front_distro = CrayFrontend() - 
back_distro = Cnl() + back_distro = CrayBackend() self.default_os = str(back_distro) self.back_os = self.default_os self.front_os = str(front_distro) self.add_operating_system(self.back_os, back_distro) - self.add_operating_system(self.front_os, front_distro) + if self.front_os != self.back_os: + self.add_operating_system(self.front_os, front_distro) @classmethod def setup_platform_environment(cls, pkg, env): @@ -104,9 +100,28 @@ def setup_platform_environment(cls, pkg, env): env.append_path("PKG_CONFIG_PATH", "/usr/lib64/pkgconfig") env.append_path("PKG_CONFIG_PATH", "/usr/local/lib64/pkgconfig") + # CRAY_LD_LIBRARY_PATH is used at build time by the cray compiler + # wrappers to augment LD_LIBRARY_PATH. This is to avoid long load + # times at runtime. This behavior is not always respected on cray + # "cluster" systems, so we reproduce it here. + if os.environ.get('CRAY_LD_LIBRARY_PATH'): + env.prepend_path('LD_LIBRARY_PATH', + os.environ['CRAY_LD_LIBRARY_PATH']) + @classmethod def detect(cls): - return os.environ.get('CRAYPE_VERSION') is not None + """ + Detect whether this system is a cray machine. + + We detect the cray platform based on the availability through `module` + of the cray programming environment. If this environment is available, + we can use it to find compilers, target modules, etc. If the cray + programming environment is not available via modules, then we will + treat it as a standard linux system, as the cray compiler wrappers + and other components of the cray programming environment are + irrelevant without module support. 
+ """ + return 'opt/cray' in os.environ.get('MODULEPATH', '') def _default_target_from_env(self): '''Set and return the default CrayPE target loaded in a clean login @@ -119,22 +134,66 @@ def _default_target_from_env(self): if getattr(self, 'default', None) is None: bash = Executable('/bin/bash') output = bash( - '-lc', 'echo $CRAY_CPU_TARGET', + '--norc', '--noprofile', '-lc', 'echo $CRAY_CPU_TARGET', env={'TERM': os.environ.get('TERM', '')}, - output=str, - error=os.devnull + output=str, error=os.devnull ) - output = ''.join(output.split()) # remove all whitespace - if output: - self.default = output - tty.debug("Found default module:%s" % self.default) - return self.default + default_from_module = ''.join(output.split()) # rm all whitespace + if default_from_module: + tty.debug("Found default module:%s" % default_from_module) + return default_from_module + else: + front_end = cpu.host().name + if front_end in list( + map(lambda x: _target_name_from_craype_target_name(x), + self._avail_targets()) + ): + tty.debug("default to front-end architecture") + return cpu.host().name + else: + return platform.machine() def _avail_targets(self): '''Return a list of available CrayPE CPU targets.''' + + def modules_in_output(output): + """Returns a list of valid modules parsed from modulecmd output""" + return [i for i in re.split(r'\s\s+|\n', output)] + + def target_names_from_modules(modules): + # Craype- module prefixes that are not valid CPU targets. 
+ targets = [] + for mod in modules: + if 'craype-' in mod: + name = mod[7:] + _n = name.replace('-', '_') # test for mic-knl/mic_knl + is_target_name = name in cpu.targets or _n in cpu.targets + is_cray_target_name = name in _craype_name_to_target_name + if is_target_name or is_cray_target_name: + targets.append(name) + + return targets + + def modules_from_listdir(): + craype_default_path = '/opt/cray/pe/craype/default/modulefiles' + if os.path.isdir(craype_default_path): + return os.listdir(craype_default_path) + return [] + if getattr(self, '_craype_targets', None) is None: - output = module('avail', '-t', 'craype-') - craype_modules = _get_modules_in_modulecmd_output(output) - self._craype_targets = targets = [] - _fill_craype_targets_from_modules(targets, craype_modules) + strategies = [ + lambda: modules_in_output(module('avail', '-t', 'craype-')), + modules_from_listdir + ] + for available_craype_modules in strategies: + craype_modules = available_craype_modules() + craype_targets = target_names_from_modules(craype_modules) + if craype_targets: + self._craype_targets = craype_targets + break + else: + # If nothing is found add platform.machine() + # to avoid Spack erroring out + self._craype_targets = [platform.machine()] + return self._craype_targets diff --git a/lib/spack/spack/projections.py b/lib/spack/spack/projections.py new file mode 100644 index 00000000000..b91d321436a --- /dev/null +++ b/lib/spack/spack/projections.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +def get_projection(projections, spec): + """ + Get the projection for a spec from a projections dict. 
+ """ + all_projection = None + for spec_like, projection in projections.items(): + if spec.satisfies(spec_like, strict=True): + return projection + elif spec_like == 'all': + all_projection = projection + return all_projection diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py index 9f8669f3d45..56e7c6632cd 100644 --- a/lib/spack/spack/relocate.py +++ b/lib/spack/spack/relocate.py @@ -11,6 +11,7 @@ import llnl.util.tty as tty import macholib.MachO import macholib.mach_o +import spack.architecture import spack.cmd import spack.repo import spack.spec @@ -97,70 +98,98 @@ def _patchelf(): return exe_path if os.path.exists(exe_path) else None -def get_existing_elf_rpaths(path_name): - """ - Return the RPATHS returned by patchelf --print-rpath path_name - as a list of strings. +def _elf_rpaths_for(path): + """Return the RPATHs for an executable or a library. + + The RPATHs are obtained by ``patchelf --print-rpath PATH``. + + Args: + path (str): full path to the executable or library + + Return: + RPATHs as a list of strings. 
""" + # If we're relocating patchelf itself, use it + patchelf_path = path if path.endswith("/bin/patchelf") else _patchelf() + patchelf = executable.Executable(patchelf_path) - # if we're relocating patchelf itself, use it - - if path_name.endswith("/bin/patchelf"): - patchelf = executable.Executable(path_name) - else: - patchelf = executable.Executable(_patchelf()) - - rpaths = list() + output = '' try: - output = patchelf('--print-rpath', '%s' % - path_name, output=str, error=str) - rpaths = output.rstrip('\n').split(':') + output = patchelf('--print-rpath', path, output=str, error=str) + output = output.strip('\n') except executable.ProcessError as e: - msg = 'patchelf --print-rpath %s produced an error %s' % (path_name, e) - tty.warn(msg) - return rpaths + msg = 'patchelf --print-rpath {0} produced an error [{1}]' + tty.warn(msg.format(path, str(e))) + + return output.split(':') if output else [] -def get_relative_elf_rpaths(path_name, orig_layout_root, orig_rpaths): +def _make_relative(reference_file, path_root, paths): + """Return a list where any path in ``paths`` that starts with + ``path_root`` is made relative to the directory in which the + reference file is stored. + + After a path is made relative it is prefixed with the ``$ORIGIN`` + string. + + Args: + reference_file (str): file from which the reference directory + is computed + path_root (str): root of the relative paths + paths: paths to be examined + + Returns: + List of relative paths """ - Replaces orig rpath with relative path from dirname(path_name) if an rpath - in orig_rpaths contains orig_layout_root. Prefixes $ORIGIN - to relative paths and returns replacement rpaths. 
- """ - rel_rpaths = [] - for rpath in orig_rpaths: - if re.match(orig_layout_root, rpath): - rel = os.path.relpath(rpath, start=os.path.dirname(path_name)) - rel_rpaths.append(os.path.join('$ORIGIN', '%s' % rel)) - else: - rel_rpaths.append(rpath) - return rel_rpaths + start_directory = os.path.dirname(reference_file) + pattern = re.compile(path_root) + relative_paths = [] + + for path in paths: + if pattern.match(path): + rel = os.path.relpath(path, start=start_directory) + path = os.path.join('$ORIGIN', rel) + + relative_paths.append(path) + + return relative_paths -def get_normalized_elf_rpaths(orig_path_name, rel_rpaths): +def _normalize_relative_paths(start_path, relative_paths): + """Normalize the relative paths with respect to the original path name + of the file (``start_path``). + + The paths that are passed to this function existed or were relevant + on another filesystem, so os.path.abspath cannot be used. + + A relative path may contain the signifier $ORIGIN. Assuming that + ``start_path`` is absolute, this implies that the relative path + (relative to start_path) should be replaced with an absolute path. + + Args: + start_path (str): path from which the starting directory + is extracted + relative_paths (str): list of relative paths as obtained by a + call to :ref:`_make_relative` + + Returns: + List of normalized paths """ - Normalize the relative rpaths with respect to the original path name - of the file. If the rpath starts with $ORIGIN replace $ORIGIN with the - dirname of the original path name and then normalize the rpath. - A dictionary mapping relativized rpaths to normalized rpaths is returned. 
- """ - norm_rpaths = list() - for rpath in rel_rpaths: - if rpath.startswith('$ORIGIN'): - sub = re.sub(re.escape('$ORIGIN'), - os.path.dirname(orig_path_name), - rpath) - norm = os.path.normpath(sub) - norm_rpaths.append(norm) - else: - norm_rpaths.append(rpath) - return norm_rpaths + normalized_paths = [] + pattern = re.compile(re.escape('$ORIGIN')) + start_directory = os.path.dirname(start_path) + + for path in relative_paths: + if path.startswith('$ORIGIN'): + sub = pattern.sub(start_directory, path) + path = os.path.normpath(sub) + normalized_paths.append(path) + + return normalized_paths -def set_placeholder(dirname): - """ - return string of @'s with same length - """ +def _placeholder(dirname): + """String of of @'s with same length of the argument""" return '@' * len(dirname) @@ -357,57 +386,80 @@ def macholib_get_paths(cur_path): return (rpaths, deps, ident) -def modify_elf_object(path_name, new_rpaths): - """ - Replace orig_rpath with new_rpath in RPATH of elf object path_name +def _set_elf_rpaths(target, rpaths): + """Replace the original RPATH of the target with the paths passed + as arguments. + + This function uses ``patchelf`` to set RPATHs. + + Args: + target: target executable. Must be an ELF object. 
+ rpaths: paths to be set in the RPATH + + Returns: + A string concatenating the stdout and stderr of the call + to ``patchelf`` """ + # Join the paths using ':' as a separator + rpaths_str = ':'.join(rpaths) - new_joined = ':'.join(new_rpaths) - - # if we're relocating patchelf itself, use it - bak_path = path_name + ".bak" - - if path_name[-13:] == "/bin/patchelf": - shutil.copy(path_name, bak_path) - patchelf = executable.Executable(bak_path) - else: - patchelf = executable.Executable(_patchelf()) + # If we're relocating patchelf itself, make a copy and use it + bak_path = None + if target.endswith("/bin/patchelf"): + bak_path = target + ".bak" + shutil.copy(target, bak_path) + patchelf, output = executable.Executable(bak_path or _patchelf()), None try: - patchelf('--force-rpath', '--set-rpath', '%s' % new_joined, - '%s' % path_name, output=str, error=str) + # TODO: revisit the use of --force-rpath as it might be conditional + # TODO: if we want to support setting RUNPATH from binary packages + patchelf_args = ['--force-rpath', '--set-rpath', rpaths_str, target] + output = patchelf(*patchelf_args, output=str, error=str) except executable.ProcessError as e: - msg = 'patchelf --force-rpath --set-rpath %s failed with error %s' % ( - path_name, e) - tty.warn(msg) - if os.path.exists(bak_path): - os.remove(bak_path) + msg = 'patchelf --force-rpath --set-rpath {0} failed with error {1}' + tty.warn(msg.format(target, e)) + finally: + if bak_path and os.path.exists(bak_path): + os.remove(bak_path) + return output def needs_binary_relocation(m_type, m_subtype): - """ - Check whether the given filetype is a binary that may need relocation. + """Returns True if the file with MIME type/subtype passed as arguments + needs binary relocation, False otherwise. 
+ + Args: + m_type (str): MIME type of the file + m_subtype (str): MIME subtype of the file """ if m_type == 'application': - if (m_subtype == 'x-executable' or m_subtype == 'x-sharedlib' or - m_subtype == 'x-mach-binary'): + if m_subtype in ('x-executable', 'x-sharedlib', 'x-mach-binary'): return True return False def needs_text_relocation(m_type, m_subtype): + """Returns True if the file with MIME type/subtype passed as arguments + needs text relocation, False otherwise. + + Args: + m_type (str): MIME type of the file + m_subtype (str): MIME subtype of the file """ - Check whether the given filetype is text that may need relocation. - """ - return (m_type == "text") + return m_type == 'text' -def replace_prefix_text(path_name, old_dir, new_dir): +def _replace_prefix_text(filename, old_dir, new_dir): + """Replace all the occurrences of the old install prefix with a + new install prefix in text files that are utf-8 encoded. + + Args: + filename (str): target text file (utf-8 encoded) + old_dir (str): directory to be searched in the file + new_dir (str): substitute for the old directory """ - Replace old install prefix with new install prefix - in text files using utf-8 encoded strings. - """ - with open(path_name, 'rb+') as f: + # TODO: cache regexes globally to speedup computation + with open(filename, 'rb+') as f: data = f.read() f.seek(0) # Replace old_dir with new_dir if it appears at the beginning of a path @@ -426,13 +478,18 @@ def replace_prefix_text(path_name, old_dir, new_dir): f.truncate() -def replace_prefix_bin(path_name, old_dir, new_dir): - """ - Attempt to replace old install prefix with new install prefix - in binary files by prefixing new install prefix with os.sep - until the lengths of the prefixes are the same. - """ +def _replace_prefix_bin(filename, old_dir, new_dir): + """Replace all the occurrences of the old install prefix with a + new install prefix in binary files. 
+ The new install prefix is prefixed with ``os.sep`` until the + lengths of the prefixes are the same. + + Args: + filename (str): target binary file + old_dir (str): directory to be searched in the file + new_dir (str): substitute for the old directory + """ def replace(match): occurances = match.group().count(old_dir.encode('utf-8')) olen = len(old_dir.encode('utf-8')) @@ -440,11 +497,12 @@ def replace(match): padding = (olen - nlen) * occurances if padding < 0: return data - return match.group().replace(old_dir.encode('utf-8'), - os.sep.encode('utf-8') * padding + - new_dir.encode('utf-8')) + return match.group().replace( + old_dir.encode('utf-8'), + os.sep.encode('utf-8') * padding + new_dir.encode('utf-8') + ) - with open(path_name, 'rb+') as f: + with open(filename, 'rb+') as f: data = f.read() f.seek(0) original_data_len = len(data) @@ -454,43 +512,7 @@ def replace(match): ndata = pat.sub(replace, data) if not len(ndata) == original_data_len: raise BinaryStringReplacementError( - path_name, original_data_len, len(ndata)) - f.write(ndata) - f.truncate() - - -def replace_prefix_nullterm(path_name, old_dir, new_dir): - """ - Attempt to replace old install prefix with new install prefix - in binary files by replacing with null terminated string - that is the same length unless the old path is shorter - Used on linux to replace mach-o rpaths - """ - - def replace(match): - occurances = match.group().count(old_dir.encode('utf-8')) - olen = len(old_dir.encode('utf-8')) - nlen = len(new_dir.encode('utf-8')) - padding = (olen - nlen) * occurances - if padding < 0: - return data - return match.group().replace(old_dir.encode('utf-8'), - new_dir.encode('utf-8')) + b'\0' * padding - - if len(new_dir) > len(old_dir): - raise BinaryTextReplaceError(old_dir, new_dir) - - with open(path_name, 'rb+') as f: - data = f.read() - f.seek(0) - original_data_len = len(data) - pat = re.compile(re.escape(old_dir).encode('utf-8') + b'([^\0]*?)\0') - if not pat.search(data): - return - 
ndata = pat.sub(replace, data) - if not len(ndata) == original_data_len: - raise BinaryStringReplacementError( - path_name, original_data_len, len(ndata)) + filename, original_data_len, len(ndata)) f.write(ndata) f.truncate() @@ -569,62 +591,104 @@ def relocate_macho_binaries(path_names, old_layout_root, new_layout_root, paths_to_paths) -def elf_find_paths(orig_rpaths, old_layout_root, prefix_to_prefix): - new_rpaths = list() +def _transform_rpaths(orig_rpaths, orig_root, new_prefixes): + """Return an updated list of RPATHs where each entry in the original list + starting with the old root is relocated to another place according to the + mapping passed as argument. + + Args: + orig_rpaths (list): list of the original RPATHs + orig_root (str): original root to be substituted + new_prefixes (dict): dictionary that maps the original prefixes to + where they should be relocated + + Returns: + List of paths + """ + new_rpaths = [] for orig_rpath in orig_rpaths: - if orig_rpath.startswith(old_layout_root): - for old_prefix, new_prefix in prefix_to_prefix.items(): - if orig_rpath.startswith(old_prefix): - new_rpaths.append(re.sub(re.escape(old_prefix), - new_prefix, orig_rpath)) - else: + # If the original RPATH doesn't start with the target root + # append it verbatim and proceed + if not orig_rpath.startswith(orig_root): new_rpaths.append(orig_rpath) + continue + + # Otherwise inspect the mapping and transform + append any prefix + # that starts with a registered key + for old_prefix, new_prefix in new_prefixes.items(): + if orig_rpath.startswith(old_prefix): + new_rpaths.append( + re.sub(re.escape(old_prefix), new_prefix, orig_rpath) + ) + return new_rpaths -def relocate_elf_binaries(path_names, old_layout_root, new_layout_root, - prefix_to_prefix, rel, old_prefix, new_prefix): - """ - Use patchelf to get the original rpaths and then replace them with +def relocate_elf_binaries(binaries, orig_root, new_root, + new_prefixes, rel, orig_prefix, new_prefix): + """Relocate 
the binaries passed as arguments by changing their RPATHs. + + Use patchelf to get the original RPATHs and then replace them with rpaths in the new directory layout. - New rpaths are determined from a dictionary mapping the prefixes in the + + New RPATHs are determined from a dictionary mapping the prefixes in the old directory layout to the prefixes in the new directory layout if the rpath was in the old layout root, i.e. system paths are not replaced. + + Args: + binaries (list): list of binaries that might need relocation, located + in the new prefix + orig_root (str): original root to be substituted + new_root (str): new root to be used, only relevant for relative RPATHs + new_prefixes (dict): dictionary that maps the original prefixes to + where they should be relocated + rel (bool): True if the RPATHs are relative, False if they are absolute + orig_prefix (str): prefix where the executable was originally located + new_prefix (str): prefix where we want to relocate the executable """ - for path_name in path_names: - orig_rpaths = get_existing_elf_rpaths(path_name) - new_rpaths = list() + for new_binary in binaries: + orig_rpaths = _elf_rpaths_for(new_binary) + # TODO: Can we deduce `rel` from the original RPATHs? 
if rel: - # get the file path in the old_prefix - orig_path_name = re.sub(re.escape(new_prefix), old_prefix, - path_name) - # get the normalized rpaths in the old prefix using the file path + # Get the file path in the original prefix + orig_binary = re.sub( + re.escape(new_prefix), orig_prefix, new_binary + ) + + # Get the normalized RPATHs in the old prefix using the file path # in the orig prefix - orig_norm_rpaths = get_normalized_elf_rpaths(orig_path_name, - orig_rpaths) - # get the normalize rpaths in the new prefix - norm_rpaths = elf_find_paths(orig_norm_rpaths, old_layout_root, - prefix_to_prefix) - # get the relativized rpaths in the new prefix - new_rpaths = get_relative_elf_rpaths(path_name, new_layout_root, - norm_rpaths) - modify_elf_object(path_name, new_rpaths) + orig_norm_rpaths = _normalize_relative_paths( + orig_binary, orig_rpaths + ) + # Get the normalize RPATHs in the new prefix + new_norm_rpaths = _transform_rpaths( + orig_norm_rpaths, orig_root, new_prefixes + ) + # Get the relative RPATHs in the new prefix + new_rpaths = _make_relative( + new_binary, new_root, new_norm_rpaths + ) + _set_elf_rpaths(new_binary, new_rpaths) else: - new_rpaths = elf_find_paths(orig_rpaths, old_layout_root, - prefix_to_prefix) - modify_elf_object(path_name, new_rpaths) + new_rpaths = _transform_rpaths( + orig_rpaths, orig_root, new_prefixes + ) + _set_elf_rpaths(new_binary, new_rpaths) -def make_link_relative(cur_path_names, orig_path_names): +def make_link_relative(new_links, orig_links): + """Compute the relative target from the original link and + make the new link relative. + + Args: + new_links (list): new links to be made relative + orig_links (list): original links """ - Change absolute links to relative links. 
- """ - for cur_path, orig_path in zip(cur_path_names, orig_path_names): - target = os.readlink(orig_path) - relative_target = os.path.relpath(target, os.path.dirname(orig_path)) - - os.unlink(cur_path) - os.symlink(relative_target, cur_path) + for new_link, orig_link in zip(new_links, orig_links): + target = os.readlink(orig_link) + relative_target = os.path.relpath(target, os.path.dirname(orig_link)) + os.unlink(new_link) + os.symlink(relative_target, new_link) def make_macho_binaries_relative(cur_path_names, orig_path_names, @@ -646,97 +710,147 @@ def make_macho_binaries_relative(cur_path_names, orig_path_names, paths_to_paths) -def make_elf_binaries_relative(cur_path_names, orig_path_names, - old_layout_root): +def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root): + """Replace the original RPATHs in the new binaries making them + relative to the original layout root. + + Args: + new_binaries (list): new binaries whose RPATHs is to be made relative + orig_binaries (list): original binaries + orig_layout_root (str): path to be used as a base for making + RPATHs relative """ - Replace old RPATHs with paths relative to old_dir in binary files - """ - for cur_path, orig_path in zip(cur_path_names, orig_path_names): - orig_rpaths = get_existing_elf_rpaths(cur_path) + for new_binary, orig_binary in zip(new_binaries, orig_binaries): + orig_rpaths = _elf_rpaths_for(new_binary) if orig_rpaths: - new_rpaths = get_relative_elf_rpaths(orig_path, old_layout_root, - orig_rpaths) - modify_elf_object(cur_path, new_rpaths) + new_rpaths = _make_relative( + orig_binary, orig_layout_root, orig_rpaths + ) + _set_elf_rpaths(new_binary, new_rpaths) -def check_files_relocatable(cur_path_names, allow_root): +def raise_if_not_relocatable(binaries, allow_root): + """Raise an error if any binary in the list is not relocatable. 
+ + Args: + binaries (list): list of binaries to check + allow_root (bool): whether root dir is allowed or not in a binary + + Raises: + InstallRootStringError: if the file is not relocatable """ - Check binary files for the current install root - """ - for cur_path in cur_path_names: - if (not allow_root and - not file_is_relocatable(cur_path)): - raise InstallRootStringError( - cur_path, spack.store.layout.root) + for binary in binaries: + if not (allow_root or file_is_relocatable(binary)): + raise InstallRootStringError(binary, spack.store.layout.root) -def relocate_links(linknames, old_layout_root, new_layout_root, - old_install_prefix, new_install_prefix, prefix_to_prefix): - """ - The symbolic links in filenames are absolute links or placeholder links. +def relocate_links(links, orig_layout_root, + orig_install_prefix, new_install_prefix): + """Relocate links to a new install prefix. + + The symbolic links are relative to the original installation prefix. The old link target is read and the placeholder is replaced by the old layout root. If the old link target is in the old install prefix, the new link target is create by replacing the old install prefix with the new install prefix. 
+ + Args: + links (list): list of links to be relocated + orig_layout_root (str): original layout root + orig_install_prefix (str): install prefix of the original installation + new_install_prefix (str): install prefix where we want to relocate """ - placeholder = set_placeholder(old_layout_root) - link_names = [os.path.join(new_install_prefix, linkname) - for linkname in linknames] - for link_name in link_names: - link_target = os.readlink(link_name) - link_target = re.sub(placeholder, old_layout_root, link_target) - if link_target.startswith(old_install_prefix): - new_link_target = re.sub( - old_install_prefix, new_install_prefix, link_target) - os.unlink(link_name) - os.symlink(new_link_target, link_name) + placeholder = _placeholder(orig_layout_root) + abs_links = [os.path.join(new_install_prefix, link) for link in links] + for abs_link in abs_links: + link_target = os.readlink(abs_link) + link_target = re.sub(placeholder, orig_layout_root, link_target) + # If the link points to a file in the original install prefix, + # compute the corresponding target in the new prefix and relink + if link_target.startswith(orig_install_prefix): + link_target = re.sub( + orig_install_prefix, new_install_prefix, link_target + ) + os.unlink(abs_link) + os.symlink(link_target, abs_link) + + # If the link is absolute and has not been relocated then + # warn the user about that if (os.path.isabs(link_target) and not link_target.startswith(new_install_prefix)): - msg = 'Link target %s' % link_target - msg += ' for symbolic link %s is outside' % link_name - msg += ' of the newinstall prefix %s.\n' % new_install_prefix - tty.warn(msg) + msg = ('Link target "{0}" for symbolic link "{1}" is outside' + ' of the new install prefix {2}') + tty.warn(msg.format(link_target, abs_link, new_install_prefix)) -def relocate_text(path_names, old_layout_root, new_layout_root, - old_install_prefix, new_install_prefix, - old_spack_prefix, new_spack_prefix, - prefix_to_prefix): +def relocate_text( + 
files, orig_layout_root, new_layout_root, orig_install_prefix, + new_install_prefix, orig_spack, new_spack, new_prefixes +): + """Relocate text file from the original installation prefix to the + new prefix. + + Relocation also affects the the path in Spack's sbang script. + + Args: + files (list): text files to be relocated + orig_layout_root (str): original layout root + new_layout_root (str): new layout root + orig_install_prefix (str): install prefix of the original installation + new_install_prefix (str): install prefix where we want to relocate + orig_spack (str): path to the original Spack + new_spack (str): path to the new Spack + new_prefixes (dict): dictionary that maps the original prefixes to + where they should be relocated """ - Replace old paths with new paths in text files - including the path the the spack sbang script + # TODO: reduce the number of arguments (8 seems too much) + sbang_regex = r'#!/bin/bash {0}/bin/sbang'.format(orig_spack) + new_sbang = r'#!/bin/bash {0}/bin/sbang'.format(new_spack) + + for file in files: + _replace_prefix_text(file, orig_install_prefix, new_install_prefix) + for orig_dep_prefix, new_dep_prefix in new_prefixes.items(): + _replace_prefix_text(file, orig_dep_prefix, new_dep_prefix) + _replace_prefix_text(file, orig_layout_root, new_layout_root) + _replace_prefix_text(file, sbang_regex, new_sbang) + + +def relocate_text_bin( + binaries, orig_install_prefix, new_install_prefix, + orig_spack, new_spack, new_prefixes +): + """Replace null terminated path strings hard coded into binaries. + + The new install prefix must be shorter than the original one. 
+ + Args: + binaries (list): binaries to be relocated + orig_install_prefix (str): install prefix of the original installation + new_install_prefix (str): install prefix where we want to relocate + orig_spack (str): path to the original Spack + new_spack (str): path to the new Spack + new_prefixes (dict): dictionary that maps the original prefixes to + where they should be relocated + + Raises: + BinaryTextReplaceError: when the new path in longer than the old path """ - sbangre = '#!/bin/bash %s/bin/sbang' % old_spack_prefix - sbangnew = '#!/bin/bash %s/bin/sbang' % new_spack_prefix + # Raise if the new install prefix is longer than the + # original one, since it means we can't change the original + # binary to relocate it + new_prefix_is_shorter = len(new_install_prefix) <= len(orig_install_prefix) + if not new_prefix_is_shorter and len(binaries) > 0: + raise BinaryTextReplaceError(orig_install_prefix, new_install_prefix) - for path_name in path_names: - replace_prefix_text(path_name, old_install_prefix, new_install_prefix) - for orig_dep_prefix, new_dep_prefix in prefix_to_prefix.items(): - replace_prefix_text(path_name, orig_dep_prefix, new_dep_prefix) - replace_prefix_text(path_name, old_layout_root, new_layout_root) - replace_prefix_text(path_name, sbangre, sbangnew) + for binary in binaries: + for old_dep_prefix, new_dep_prefix in new_prefixes.items(): + if len(new_dep_prefix) <= len(old_dep_prefix): + _replace_prefix_bin(binary, old_dep_prefix, new_dep_prefix) + _replace_prefix_bin(binary, orig_install_prefix, new_install_prefix) - -def relocate_text_bin(path_names, old_layout_root, new_layout_root, - old_install_prefix, new_install_prefix, - old_spack_prefix, new_spack_prefix, - prefix_to_prefix): - """ - Replace null terminated path strings hard coded into binaries. - Raise an exception when the new path in longer than the old path - because this breaks the binary. 
- """ - if len(new_install_prefix) <= len(old_install_prefix): - for path_name in path_names: - for old_dep_prefix, new_dep_prefix in prefix_to_prefix.items(): - if len(new_dep_prefix) <= len(old_dep_prefix): - replace_prefix_bin( - path_name, old_dep_prefix, new_dep_prefix) - replace_prefix_bin(path_name, old_spack_prefix, new_spack_prefix) - else: - if len(path_names) > 0: - raise BinaryTextReplaceError( - old_install_prefix, new_install_prefix) + # Note: Replacement of spack directory should not be done. This causes + # an incorrect replacement path in the case where the install root is a + # subdirectory of the spack directory. def is_relocatable(spec): @@ -810,7 +924,7 @@ def file_is_relocatable(file, paths_to_relocate=None): if platform.system().lower() == 'linux': if m_subtype == 'x-executable' or m_subtype == 'x-sharedlib': - rpaths = ':'.join(get_existing_elf_rpaths(file)) + rpaths = ':'.join(_elf_rpaths_for(file)) set_of_strings.discard(rpaths) if platform.system().lower() == 'darwin': if m_subtype == 'x-mach-binary': @@ -869,4 +983,4 @@ def mime_type(file): if '/' not in output: output += '/' split_by_slash = output.strip().split('/') - return (split_by_slash[0], "/".join(split_by_slash[1:])) + return split_by_slash[0], "/".join(split_by_slash[1:]) diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 764f1f9168b..8d31f2cf497 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -759,8 +759,8 @@ def _create_namespace(self): """ parent = None - for l in range(1, len(self._names) + 1): - ns = '.'.join(self._names[:l]) + for i in range(1, len(self._names) + 1): + ns = '.'.join(self._names[:i]) if ns not in sys.modules: module = SpackNamespace(ns) @@ -773,7 +773,7 @@ def _create_namespace(self): # This ensures that we can do things like: # import spack.pkg.builtin.mpich as mpich if parent: - modname = self._names[l - 1] + modname = self._names[i - 1] setattr(parent, modname, module) else: # no need to set up a module @@ -882,9 
+882,7 @@ def get(self, spec): raise UnknownPackageError(spec.name) if spec.namespace and spec.namespace != self.namespace: - raise UnknownPackageError( - "Repository %s does not contain package %s" - % (self.namespace, spec.fullname)) + raise UnknownPackageError(spec.name, self.namespace) package_class = self.get_pkg_class(spec.name) try: @@ -1271,16 +1269,22 @@ class UnknownPackageError(UnknownEntityError): def __init__(self, name, repo=None): msg = None - if repo: - msg = "Package '%s' not found in repository '%s'" % (name, repo) - else: - msg = "Package '%s' not found." % name - - # special handling for specs that may have been intended as filenames - # prompt the user to ask whether they intended to write './' long_msg = None - if name.endswith(".yaml"): - long_msg = "Did you mean to specify a filename with './%s'?" % name + if name: + if repo: + msg = "Package '{0}' not found in repository '{1}'" + msg = msg.format(name, repo) + else: + msg = "Package '{0}' not found.".format(name) + + # Special handling for specs that may have been intended as + # filenames: prompt the user to ask whether they intended to write + # './'. + if name.endswith(".yaml"): + long_msg = "Did you mean to specify a filename with './{0}'?" + long_msg = long_msg.format(name) + else: + msg = "Attempting to retrieve anonymous package." 
super(UnknownPackageError, self).__init__(msg, long_msg) self.name = name diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py index a05af2f4384..d56321d09c3 100644 --- a/lib/spack/spack/schema/config.py +++ b/lib/spack/spack/schema/config.py @@ -73,6 +73,7 @@ {'type': 'null'} ], }, + 'allow_sgid': {'type': 'boolean'}, }, }, } diff --git a/lib/spack/spack/schema/container.py b/lib/spack/spack/schema/container.py index cb1ed8d63ac..65e9f1bad86 100644 --- a/lib/spack/spack/schema/container.py +++ b/lib/spack/spack/schema/container.py @@ -29,7 +29,10 @@ }, 'spack': { 'type': 'string', - 'enum': ['develop', '0.14', '0.14.0'] + 'enum': [ + 'develop', + '0.14', '0.14.0', '0.14.1', '0.14.2', '0.14.3' + ] } }, 'required': ['image', 'spack'] diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py index 1fbbf614c88..d03dbfb4100 100644 --- a/lib/spack/spack/schema/modules.py +++ b/lib/spack/spack/schema/modules.py @@ -9,15 +9,16 @@ :lines: 13- """ import spack.schema.environment - +import spack.schema.projections #: Matches a spec or a multi-valued variant but not another #: valid keyword. #: #: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT #: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE -spec_regex = r'(?!hierarchy|verbose|hash_length|whitelist|' \ - r'blacklist|naming_scheme|core_compilers|all)(^\w[\w-]*)' +spec_regex = r'(?!hierarchy|core_specs|verbose|hash_length|whitelist|' \ + r'blacklist|projections|naming_scheme|core_compilers|all)' \ + r'(^\w[\w-]*)' #: Matches an anonymous spec, i.e. a spec without a root name anonymous_spec_regex = r'^[\^@%+~]' @@ -72,6 +73,8 @@ } } +projections_scheme = spack.schema.projections.properties['projections'] + module_type_configuration = { 'type': 'object', 'default': {}, @@ -95,6 +98,7 @@ 'naming_scheme': { 'type': 'string' # Can we be more specific here? 
}, + 'projections': projections_scheme, 'all': module_file_configuration, } }, @@ -145,7 +149,8 @@ 'type': 'object', 'properties': { 'core_compilers': array_of_strings, - 'hierarchy': array_of_strings + 'hierarchy': array_of_strings, + 'core_specs': array_of_strings, }, } # Specific lmod extensions ] diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index c6fe2da7627..b09d72003e6 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2134,6 +2134,8 @@ def concretize(self, tests=False): consistent with requirements of its packages. See flatten() and normalize() for more details on this. """ + import spack.concretize + if not self.name: raise spack.error.SpecError( "Attempting to concretize anonymous spec") @@ -2145,7 +2147,6 @@ def concretize(self, tests=False): force = False user_spec_deps = self.flat_dependencies(copy=False) - import spack.concretize concretizer = spack.concretize.Concretizer(self.copy()) while changed: changes = (self.normalize(force, tests=tests, @@ -2239,7 +2240,11 @@ def concretize(self, tests=False): for mod in compiler.modules: md.load_module(mod) - s.external_path = md.get_path_from_module(s.external_module) + # get the path from the module + # the package can override the default + s.external_path = getattr(s.package, 'external_prefix', + md.get_path_from_module( + s.external_module)) # Mark everything in the spec as concrete, as well. self._mark_concrete() @@ -3940,7 +3945,8 @@ def __init__(self): # Filenames match before identifiers, so no initial filename # component is parsed as a spec (e.g., in subdir/spec.yaml) - (r'[/\w.-]+\.yaml[^\b]*', lambda scanner, v: self.token(FILE, v)), + (r'[/\w.-]*/[/\w/-]+\.yaml[^\b]*', + lambda scanner, v: self.token(FILE, v)), # Hash match after filename. No valid filename can be a hash # (files end w/.yaml), but a hash can match a filename prefix. 
@@ -4091,11 +4097,6 @@ def spec_from_file(self): """ path = self.token.value - # don't treat builtin.yaml, builtin.yaml-cpp, etc. as filenames - if re.match(spec_id_re + '$', path): - self.push_tokens([spack.parse.Token(ID, self.token.value)]) - return None - # Special case where someone omits a space after a filename. Consider: # # libdwarf^/some/path/to/libelf.yamllibdwarf ^../../libelf.yaml @@ -4107,7 +4108,6 @@ def spec_from_file(self): raise SpecFilenameError( "Spec filename must end in .yaml: '{0}'".format(path)) - # if we get here, we're *finally* interpreting path as a filename if not os.path.exists(path): raise NoSuchSpecFileError("No such spec file: '{0}'".format(path)) diff --git a/lib/spack/spack/spec_list.py b/lib/spack/spack/spec_list.py index 63bd29a7a9d..bc473f530cc 100644 --- a/lib/spack/spack/spec_list.py +++ b/lib/spack/spack/spec_list.py @@ -5,6 +5,7 @@ import itertools from six import string_types +import spack.variant from spack.spec import Spec from spack.error import SpackError @@ -120,23 +121,42 @@ def update_reference(self, reference): self._constraints = None self._specs = None + def _parse_reference(self, name): + sigil = '' + name = name[1:] + + # Parse specs as constraints + if name.startswith('^') or name.startswith('%'): + sigil = name[0] + name = name[1:] + + # Make sure the reference is valid + if name not in self._reference: + msg = 'SpecList %s refers to ' % self.name + msg += 'named list %s ' % name + msg += 'which does not appear in its reference dict' + raise UndefinedReferenceError(msg) + + return (name, sigil) + def _expand_references(self, yaml): if isinstance(yaml, list): - for idx, item in enumerate(yaml): + ret = [] + + for item in yaml: + # if it's a reference, expand it if isinstance(item, string_types) and item.startswith('$'): - name = item[1:] - if name in self._reference: - ret = [self._expand_references(i) for i in yaml[:idx]] - ret += self._reference[name].specs_as_yaml_list - ret += 
self._expand_references(yaml[idx + 1:]) - return ret - else: - msg = 'SpecList %s refers to ' % self.name - msg += 'named list %s ' % name - msg += 'which does not appear in its reference dict' - raise UndefinedReferenceError(msg) - # No references in this - return [self._expand_references(item) for item in yaml] + # replace the reference and apply the sigil if needed + name, sigil = self._parse_reference(item) + referent = [ + _sigilify(item, sigil) + for item in self._reference[name].specs_as_yaml_list + ] + ret.extend(referent) + else: + # else just recurse + ret.append(self._expand_references(item)) + return ret elif isinstance(yaml, dict): # There can't be expansions in dicts return dict((name, self._expand_references(val)) @@ -159,21 +179,39 @@ def _expand_matrix_constraints(object, specify=True): new_row = [] for r in row: if isinstance(r, dict): - new_row.extend(_expand_matrix_constraints(r, specify=False)) + new_row.extend( + [[' '.join(c)] + for c in _expand_matrix_constraints(r, specify=False)]) else: new_row.append([r]) expanded_rows.append(new_row) - results = [] excludes = object.get('exclude', []) # only compute once + sigil = object.get('sigil', '') + + results = [] for combo in itertools.product(*expanded_rows): # Construct a combined spec to test against excludes flat_combo = [constraint for list in combo for constraint in list] ordered_combo = sorted(flat_combo, key=spec_ordering_key) + test_spec = Spec(' '.join(ordered_combo)) + # Abstract variants don't have normal satisfaction semantics + # Convert all variants to concrete types. + # This method is best effort, so all existing variants will be + # converted before any error is raised. 
+ # Catch exceptions because we want to be able to operate on + # abstract specs without needing package information + try: + spack.variant.substitute_abstract_variants(test_spec) + except spack.variant.UnknownVariantError: + pass if any(test_spec.satisfies(x) for x in excludes): continue + if sigil: # add sigil if necessary + ordered_combo[0] = sigil + ordered_combo[0] + # Add to list of constraints if specify: results.append([Spec(x) for x in ordered_combo]) @@ -182,6 +220,15 @@ def _expand_matrix_constraints(object, specify=True): return results +def _sigilify(item, sigil): + if isinstance(item, dict): + if sigil: + item['sigil'] = sigil + return item + else: + return sigil + item + + class SpecListError(SpackError): """Error class for all errors related to SpecList objects.""" diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 04f0c1d18c0..5d3b0db5021 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -754,7 +754,7 @@ def purge(): def get_checksums_for_versions( url_dict, name, first_stage_function=None, keep_stage=False, - fetch_options=None): + fetch_options=None, batch=False): """Fetches and checksums archives from URLs. 
This function is called by both ``spack checksum`` and ``spack @@ -768,6 +768,8 @@ def get_checksums_for_versions( first_stage_function (callable): function that takes a Stage and a URL; this is run on the stage of the first URL downloaded keep_stage (bool): whether to keep staging area when command completes + batch (bool): whether to ask user how many versions to fetch (false) + or fetch all versions (true) fetch_options (dict): Options used for the fetcher (such as timeout or cookies) @@ -789,8 +791,11 @@ def get_checksums_for_versions( for v in sorted_versions])) print() - archives_to_fetch = tty.get_number( - "How many would you like to checksum?", default=1, abort='q') + if batch: + archives_to_fetch = len(sorted_versions) + else: + archives_to_fetch = tty.get_number( + "How many would you like to checksum?", default=1, abort='q') if not archives_to_fetch: tty.die("Aborted.") diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index 552bc324bf9..48cec134d27 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -1,4 +1,3 @@ - # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# @@ -41,7 +40,7 @@ def test_dict_functions_for_architecture(): def test_platform(): output_platform_class = spack.architecture.real_platform() - if os.environ.get('CRAYPE_VERSION') is not None: + if os.path.exists('/opt/cray/pe'): my_platform_class = Cray() elif os.path.exists('/bgsys'): my_platform_class = Bgq() @@ -210,8 +209,8 @@ def test_optimization_flags_with_custom_versions( target = spack.architecture.Target(target_str) if real_version: monkeypatch.setattr( - spack.compiler.Compiler, 'cc_version', lambda x, y: real_version - ) + spack.compiler.Compiler, 'get_real_version', + lambda x: real_version) opt_flags = target.optimization_flags(compiler) assert opt_flags == expected_flags diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index c1e1db914f8..7b8d34fbde2 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -338,6 +338,36 @@ def test_ccld_deps(): test_args_without_paths) +def test_ccld_deps_isystem(): + """Ensure all flags are added in ccld mode. + When a build uses -isystem, Spack should inject it's + include paths using -isystem. 
Spack will insert these + after any provided -isystem includes, but before any + system directories included using -isystem""" + with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc', + SPACK_RPATH_DIRS='xlib:ylib:zlib', + SPACK_LINK_DIRS='xlib:ylib:zlib'): + mytest_args = test_args + ['-isystemfooinc'] + check_args( + cc, mytest_args, + [real_cc] + + test_include_paths + + ['-isystemfooinc', + '-isystemxinc', + '-isystemyinc', + '-isystemzinc'] + + test_library_paths + + ['-Lxlib', + '-Lylib', + '-Lzlib'] + + ['-Wl,--disable-new-dtags'] + + test_wl_rpaths + + ['-Wl,-rpath,xlib', + '-Wl,-rpath,ylib', + '-Wl,-rpath,zlib'] + + test_args_without_paths) + + def test_cc_deps(): """Ensure -L and RPATHs are not added in cc mode.""" with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc', @@ -390,6 +420,44 @@ def test_ccld_with_system_dirs(): test_args_without_paths) +def test_ccld_with_system_dirs_isystem(): + """Ensure all flags are added in ccld mode. + Ensure that includes are in the proper + place when a build uses -isystem, and uses + system directories in the include paths""" + with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc', + SPACK_RPATH_DIRS='xlib:ylib:zlib', + SPACK_LINK_DIRS='xlib:ylib:zlib'): + + sys_path_args = ['-isystem/usr/include', + '-L/usr/local/lib', + '-Wl,-rpath,/usr/lib64', + '-isystem/usr/local/include', + '-L/lib64/'] + check_args( + cc, sys_path_args + test_args, + [real_cc] + + test_include_paths + + ['-isystemxinc', + '-isystemyinc', + '-isystemzinc'] + + ['-isystem/usr/include', + '-isystem/usr/local/include'] + + test_library_paths + + ['-Lxlib', + '-Lylib', + '-Lzlib'] + + ['-L/usr/local/lib', + '-L/lib64/'] + + ['-Wl,--disable-new-dtags'] + + test_wl_rpaths + + ['-Wl,-rpath,xlib', + '-Wl,-rpath,ylib', + '-Wl,-rpath,zlib'] + + ['-Wl,-rpath,/usr/lib64'] + + test_args_without_paths) + + def test_ld_deps(): """Ensure no (extra) -I args or -Wl, are passed in ld mode.""" with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc', diff --git 
a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 89ed4718687..ab733244112 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -7,8 +7,6 @@ import os import pytest -import llnl.util.filesystem as fs - import spack import spack.ci as ci import spack.config @@ -18,7 +16,7 @@ import spack.paths as spack_paths import spack.repo as repo from spack.spec import Spec -from spack.test.conftest import MockPackage, MockPackageMultiRepo +from spack.util.mock_package import MockPackageMultiRepo import spack.util.executable as exe import spack.util.spack_yaml as syaml import spack.util.gpg @@ -48,37 +46,6 @@ def env_deactivate(): os.environ.pop('SPACK_ENV', None) -def initialize_new_repo(repo_path, initial_commit=False): - if not os.path.exists(repo_path): - os.makedirs(repo_path) - - with fs.working_dir(repo_path): - init_args = ['init', '.'] - # if not initial_commit: - # init_args.append('--bare') - - git(*init_args) - - if initial_commit: - readme_contents = "This is the project README\n" - readme_path = os.path.join(repo_path, 'README.md') - with open(readme_path, 'w') as fd: - fd.write(readme_contents) - git('add', '.') - git('commit', '-m', 'Project initial commit') - - -def get_repo_status(repo_path): - with fs.working_dir(repo_path): - output = git('rev-parse', '--abbrev-ref', 'HEAD', output=str) - current_branch = output.split()[0] - - output = git('rev-parse', 'HEAD', output=str) - current_sha = output.split()[0] - - return current_branch, current_sha - - def set_env_var(key, val): os.environ[key] = val @@ -101,15 +68,14 @@ def test_specs_staging(config): """ default = ('build', 'link') - g = MockPackage('g', [], []) - f = MockPackage('f', [], []) - e = MockPackage('e', [], []) - d = MockPackage('d', [f, g], [default, default]) - c = MockPackage('c', [], []) - b = MockPackage('b', [d, e], [default, default]) - a = MockPackage('a', [b, c], [default, default]) - - mock_repo = MockPackageMultiRepo([a, b, c, d, e, f, 
g]) + mock_repo = MockPackageMultiRepo() + g = mock_repo.add_package('g', [], []) + f = mock_repo.add_package('f', [], []) + e = mock_repo.add_package('e', [], []) + d = mock_repo.add_package('d', [f, g], [default, default]) + c = mock_repo.add_package('c', [], []) + b = mock_repo.add_package('b', [d, e], [default, default]) + mock_repo.add_package('a', [b, c], [default, default]) with repo.swap(mock_repo): spec_a = Spec('a') @@ -205,6 +171,144 @@ def test_ci_generate_with_env(tmpdir, mutable_mock_env_path, env_deactivate, assert(yaml_contents['stages'][5] == 'stage-rebuild-index') +def _validate_needs_graph(yaml_contents, needs_graph, artifacts): + for job_name, job_def in yaml_contents.items(): + for needs_def_name, needs_list in needs_graph.items(): + if job_name.startswith(needs_def_name): + # check job needs against the expected needs definition + assert all([job_needs['job'][:job_needs['job'].index('/')] + in needs_list for job_needs in job_def['needs']]) + assert all([job_needs['artifacts'] == artifacts + for job_needs in job_def['needs']]) + break + + +def test_ci_generate_bootstrap_gcc(tmpdir, mutable_mock_env_path, + env_deactivate, install_mockery, + mock_packages): + """Test that we can bootstrap a compiler and use it as the + compiler for a spec in the environment""" + filename = str(tmpdir.join('spack.yaml')) + with open(filename, 'w') as f: + f.write("""\ +spack: + definitions: + - bootstrap: + - gcc@3.0 + specs: + - dyninst%gcc@3.0 + mirrors: + some-mirror: https://my.fake.mirror + gitlab-ci: + bootstrap: + - name: bootstrap + compiler-agnostic: true + mappings: + - match: + - arch=test-debian6-x86_64 + runner-attributes: + tags: + - donotcare +""") + + needs_graph = { + '(bootstrap) conflict': [], + '(bootstrap) gcc': [ + '(bootstrap) conflict', + ], + '(specs) libelf': [ + '(bootstrap) gcc', + ], + '(specs) libdwarf': [ + '(bootstrap) gcc', + '(specs) libelf', + ], + '(specs) dyninst': [ + '(bootstrap) gcc', + '(specs) libelf', + '(specs) 
libdwarf', + ], + } + + with tmpdir.as_cwd(): + env_cmd('create', 'test', './spack.yaml') + outputfile = str(tmpdir.join('.gitlab-ci.yml')) + + with ev.read('test'): + ci_cmd('generate', '--output-file', outputfile) + + with open(outputfile) as f: + contents = f.read() + yaml_contents = syaml.load(contents) + _validate_needs_graph(yaml_contents, needs_graph, False) + + +def test_ci_generate_bootstrap_artifacts_buildcache(tmpdir, + mutable_mock_env_path, + env_deactivate, + install_mockery, + mock_packages): + """Test that we can bootstrap a compiler when artifacts buildcache + is turned on""" + filename = str(tmpdir.join('spack.yaml')) + with open(filename, 'w') as f: + f.write("""\ +spack: + definitions: + - bootstrap: + - gcc@3.0 + specs: + - dyninst%gcc@3.0 + mirrors: + some-mirror: https://my.fake.mirror + gitlab-ci: + bootstrap: + - name: bootstrap + compiler-agnostic: true + mappings: + - match: + - arch=test-debian6-x86_64 + runner-attributes: + tags: + - donotcare + enable-artifacts-buildcache: True +""") + + needs_graph = { + '(bootstrap) conflict': [], + '(bootstrap) gcc': [ + '(bootstrap) conflict', + ], + '(specs) libelf': [ + '(bootstrap) gcc', + '(bootstrap) conflict', + ], + '(specs) libdwarf': [ + '(bootstrap) gcc', + '(bootstrap) conflict', + '(specs) libelf', + ], + '(specs) dyninst': [ + '(bootstrap) gcc', + '(bootstrap) conflict', + '(specs) libelf', + '(specs) libdwarf', + ], + } + + with tmpdir.as_cwd(): + env_cmd('create', 'test', './spack.yaml') + outputfile = str(tmpdir.join('.gitlab-ci.yml')) + + with ev.read('test'): + ci_cmd('generate', '--output-file', outputfile) + + with open(outputfile) as f: + contents = f.read() + yaml_contents = syaml.load(contents) + _validate_needs_graph(yaml_contents, needs_graph, True) + + def test_ci_generate_with_env_missing_section(tmpdir, mutable_mock_env_path, env_deactivate, install_mockery, mock_packages): @@ -283,6 +387,110 @@ def test_ci_generate_with_cdash_token(tmpdir, mutable_mock_env_path, 
assert(filecmp.cmp(orig_file, copy_to_file) is True) +def test_ci_generate_pkg_with_deps(tmpdir, mutable_mock_env_path, + env_deactivate, install_mockery, + mock_packages): + """Test pipeline generation for a package w/ dependencies""" + filename = str(tmpdir.join('spack.yaml')) + with open(filename, 'w') as f: + f.write("""\ +spack: + specs: + - flatten-deps + mirrors: + some-mirror: https://my.fake.mirror + gitlab-ci: + enable-artifacts-buildcache: True + mappings: + - match: + - flatten-deps + runner-attributes: + tags: + - donotcare + - match: + - dependency-install + runner-attributes: + tags: + - donotcare +""") + + with tmpdir.as_cwd(): + env_cmd('create', 'test', './spack.yaml') + outputfile = str(tmpdir.join('.gitlab-ci.yml')) + + with ev.read('test'): + ci_cmd('generate', '--output-file', outputfile) + + with open(outputfile) as f: + contents = f.read() + print('generated contents: ') + print(contents) + yaml_contents = syaml.load(contents) + found = [] + for ci_key in yaml_contents.keys(): + ci_obj = yaml_contents[ci_key] + if 'dependency-install' in ci_key: + assert('stage' in ci_obj) + assert(ci_obj['stage'] == 'stage-0') + found.append('dependency-install') + if 'flatten-deps' in ci_key: + assert('stage' in ci_obj) + assert(ci_obj['stage'] == 'stage-1') + found.append('flatten-deps') + + assert('flatten-deps' in found) + assert('dependency-install' in found) + + +def test_ci_generate_for_pr_pipeline(tmpdir, mutable_mock_env_path, + env_deactivate, install_mockery, + mock_packages): + """Test that PR pipelines do not include a final stage job for + rebuilding the mirror index, even if that job is specifically + configured""" + filename = str(tmpdir.join('spack.yaml')) + with open(filename, 'w') as f: + f.write("""\ +spack: + specs: + - flatten-deps + mirrors: + some-mirror: https://my.fake.mirror + gitlab-ci: + enable-artifacts-buildcache: True + mappings: + - match: + - flatten-deps + runner-attributes: + tags: + - donotcare + - match: + - 
dependency-install + runner-attributes: + tags: + - donotcare + final-stage-rebuild-index: + image: donotcare + tags: [donotcare] +""") + + with tmpdir.as_cwd(): + env_cmd('create', 'test', './spack.yaml') + outputfile = str(tmpdir.join('.gitlab-ci.yml')) + + with ev.read('test'): + os.environ['SPACK_IS_PR_PIPELINE'] = 'True' + ci_cmd('generate', '--output-file', outputfile) + + with open(outputfile) as f: + contents = f.read() + print('generated contents: ') + print(contents) + yaml_contents = syaml.load(contents) + + assert('rebuild-index' not in yaml_contents) + + def test_ci_generate_with_external_pkg(tmpdir, mutable_mock_env_path, env_deactivate, install_mockery, mock_packages): @@ -459,49 +667,6 @@ def test_ci_rebuild_basic(tmpdir, mutable_mock_env_path, env_deactivate, print(rebuild_output) -def test_ci_pushyaml(tmpdir): - fake_yaml_contents = """generate ci jobs: - script: - - "./share/spack/qa/gitlab/generate-gitlab-ci-yml.sh" - tags: - - "spack-pre-ci" - artifacts: - paths: - - ci-generation - when: always - """ - local_repo_path = tmpdir.join('local_repo') - initialize_new_repo(local_repo_path.strpath, True) - - remote_repo_path = tmpdir.join('remote_repo') - initialize_new_repo(remote_repo_path.strpath) - - current_branch, current_sha = get_repo_status(local_repo_path.strpath) - - print('local repo info: {0}, {1}'.format(current_branch, current_sha)) - - local_jobs_yaml = local_repo_path.join('.gitlab-ci.yml') - with local_jobs_yaml.open('w') as f: - f.write(fake_yaml_contents) - - pushyaml_args = [ - 'pushyaml', - '--downstream-repo', remote_repo_path.strpath, - '--branch-name', current_branch, - '--commit-sha', current_sha, - ] - - with fs.working_dir(local_repo_path.strpath): - ci_cmd(*pushyaml_args) - - with fs.working_dir(remote_repo_path.strpath): - branch_to_checkout = 'multi-ci-{0}'.format(current_branch) - git('checkout', branch_to_checkout) - with open('.gitlab-ci.yml') as fd: - pushed_contents = fd.read() - assert pushed_contents == 
fake_yaml_contents - - @pytest.mark.disable_clean_stage_check @pytest.mark.skipif(not has_gpg(), reason='This test requires gpg') def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate, diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py new file mode 100644 index 00000000000..0476275a5f1 --- /dev/null +++ b/lib/spack/spack/test/cmd/compiler.py @@ -0,0 +1,105 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os + +import pytest + +import llnl.util.filesystem +import spack.main +import spack.version + +compiler = spack.main.SpackCommand('compiler') + + +@pytest.fixture +def no_compilers_yaml(mutable_config, monkeypatch): + """Creates a temporary configuration without compilers.yaml""" + + for scope, local_config in mutable_config.scopes.items(): + compilers_yaml = os.path.join( + local_config.path, scope, 'compilers.yaml' + ) + if os.path.exists(compilers_yaml): + os.remove(compilers_yaml) + + +@pytest.fixture +def mock_compiler_version(): + return '4.5.3' + + +@pytest.fixture() +def mock_compiler_dir(tmpdir, mock_compiler_version): + """Return a directory containing a fake, but detectable compiler.""" + + tmpdir.ensure('bin', dir=True) + bin_dir = tmpdir.join('bin') + + gcc_path = bin_dir.join('gcc') + gxx_path = bin_dir.join('g++') + gfortran_path = bin_dir.join('gfortran') + + gcc_path.write("""\ +#!/bin/sh + +for arg in "$@"; do + if [ "$arg" = -dumpversion ]; then + echo '%s' + fi +done +""" % mock_compiler_version) + + # Create some mock compilers in the temporary directory + llnl.util.filesystem.set_executable(str(gcc_path)) + gcc_path.copy(gxx_path, mode=True) + gcc_path.copy(gfortran_path, mode=True) + + return str(tmpdir) + + +@pytest.mark.regression('11678,13138') +def test_compiler_find_without_paths(no_compilers_yaml, working_env, 
tmpdir): + with tmpdir.as_cwd(): + with open('gcc', 'w') as f: + f.write("""\ +#!/bin/bash +echo "0.0.0" +""") + os.chmod('gcc', 0o700) + + os.environ['PATH'] = str(tmpdir) + output = compiler('find', '--scope=site') + + assert 'gcc' in output + + +def test_compiler_remove(mutable_config, mock_packages): + args = spack.util.pattern.Bunch( + all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None + ) + spack.cmd.compiler.compiler_remove(args) + compilers = spack.compilers.all_compiler_specs() + assert spack.spec.CompilerSpec("gcc@4.5.0") not in compilers + + +def test_compiler_add( + mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version +): + # Compilers available by default. + old_compilers = set(spack.compilers.all_compiler_specs()) + + args = spack.util.pattern.Bunch( + all=None, + compiler_spec=None, + add_paths=[mock_compiler_dir], + scope=None + ) + spack.cmd.compiler.compiler_find(args) + + # Ensure new compiler is in there + new_compilers = set(spack.compilers.all_compiler_specs()) + new_compiler = new_compilers - old_compilers + assert any(c.version == spack.version.Version(mock_compiler_version) + for c in new_compiler) diff --git a/lib/spack/spack/test/cmd/compiler_command.py b/lib/spack/spack/test/cmd/compiler_command.py deleted file mode 100644 index 15949d08d48..00000000000 --- a/lib/spack/spack/test/cmd/compiler_command.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import pytest - -import os - -import spack.main - -compiler = spack.main.SpackCommand('compiler') - - -@pytest.fixture -def no_compilers_yaml(mutable_config, monkeypatch): - """Creates a temporary configuration without compilers.yaml""" - - for scope, local_config in mutable_config.scopes.items(): - compilers_yaml = os.path.join( - local_config.path, scope, 'compilers.yaml' - ) - if os.path.exists(compilers_yaml): - os.remove(compilers_yaml) - - -@pytest.mark.regression('11678,13138') -def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir): - with tmpdir.as_cwd(): - with open('gcc', 'w') as f: - f.write("""\ -#!/bin/bash -echo "0.0.0" -""") - os.chmod('gcc', 0o700) - - os.environ['PATH'] = str(tmpdir) - output = compiler('find', '--scope=site') - - assert 'gcc' in output diff --git a/lib/spack/spack/test/cmd/dependents.py b/lib/spack/spack/test/cmd/dependents.py index 1001e8764f4..22a3acd0c32 100644 --- a/lib/spack/spack/test/cmd/dependents.py +++ b/lib/spack/spack/test/cmd/dependents.py @@ -26,7 +26,7 @@ def test_transitive_dependents(mock_packages): out = dependents('--transitive', 'libelf') actual = set(re.split(r'\s+', out.strip())) assert actual == set( - ['callpath', 'dyninst', 'libdwarf', 'mpileaks', 'multivalue_variant', + ['callpath', 'dyninst', 'libdwarf', 'mpileaks', 'multivalue-variant', 'singlevalue-variant-dependent', 'patch-a-dependency', 'patch-several-dependencies']) @@ -36,8 +36,8 @@ def test_immediate_installed_dependents(mock_packages, database): with color_when(False): out = dependents('--installed', 'libelf') - lines = [l for l in out.strip().split('\n') if not l.startswith('--')] - hashes = set([re.split(r'\s+', l)[0] for l in lines]) + lines = [li for li in out.strip().split('\n') if not li.startswith('--')] + hashes = set([re.split(r'\s+', li)[0] for li in lines]) expected = set([spack.store.db.query_one(s).dag_hash(7) for s in ['dyninst', 'libdwarf']]) @@ -53,8 
+53,8 @@ def test_transitive_installed_dependents(mock_packages, database): with color_when(False): out = dependents('--installed', '--transitive', 'fake') - lines = [l for l in out.strip().split('\n') if not l.startswith('--')] - hashes = set([re.split(r'\s+', l)[0] for l in lines]) + lines = [li for li in out.strip().split('\n') if not li.startswith('--')] + hashes = set([re.split(r'\s+', li)[0] for li in lines]) expected = set([spack.store.db.query_one(s).dag_hash(7) for s in ['zmpi', 'callpath^zmpi', 'mpileaks^zmpi']]) diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py index a0075130676..94c7690de6d 100644 --- a/lib/spack/spack/test/cmd/dev_build.py +++ b/lib/spack/spack/test/cmd/dev_build.py @@ -3,8 +3,9 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import pytest import spack.spec -from spack.main import SpackCommand +from spack.main import SpackCommand, SpackCommandError dev_build = SpackCommand('dev-build') @@ -23,6 +24,22 @@ def test_dev_build_basics(tmpdir, mock_packages, install_mockery): assert f.read() == spec.package.replacement_string +def test_dev_build_before(tmpdir, mock_packages, install_mockery): + spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized() + + with tmpdir.as_cwd(): + with open(spec.package.filename, 'w') as f: + f.write(spec.package.original_string) + + dev_build('-b', 'edit', 'dev-build-test-install@0.0.0') + + assert spec.package.filename in os.listdir(os.getcwd()) + with open(spec.package.filename, 'r') as f: + assert f.read() == spec.package.original_string + + assert not os.path.exists(spec.prefix) + + def test_dev_build_until(tmpdir, mock_packages, install_mockery): spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized() @@ -37,6 +54,66 @@ def test_dev_build_until(tmpdir, mock_packages, install_mockery): assert f.read() == spec.package.replacement_string assert not os.path.exists(spec.prefix) + assert not spack.store.db.query(spec, 
installed=True) + + +def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery): + # Test that we ignore the last_phase argument if it is already last + spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized() + + with tmpdir.as_cwd(): + with open(spec.package.filename, 'w') as f: + f.write(spec.package.original_string) + + dev_build('-u', 'install', 'dev-build-test-install@0.0.0') + + assert spec.package.filename in os.listdir(os.getcwd()) + with open(spec.package.filename, 'r') as f: + assert f.read() == spec.package.replacement_string + + assert os.path.exists(spec.prefix) + assert spack.store.db.query(spec, installed=True) + + +def test_dev_build_before_until(tmpdir, mock_packages, install_mockery): + spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized() + + with tmpdir.as_cwd(): + with open(spec.package.filename, 'w') as f: + f.write(spec.package.original_string) + + with pytest.raises(SystemExit): + dev_build('-u', 'edit', '-b', 'edit', + 'dev-build-test-install@0.0.0') + + with pytest.raises(SpackCommandError): + dev_build('-u', 'phase_that_does_not_exist', + 'dev-build-test-install@0.0.0') + + with pytest.raises(SpackCommandError): + dev_build('-b', 'phase_that_does_not_exist', + 'dev-build-test-install@0.0.0') + + +def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, + install_mockery): + def print_spack_cc(*args): + # Eat arguments and print environment variable to test + print(os.environ.get('CC', '')) + monkeypatch.setattr(os, 'execvp', print_spack_cc) + + # `module unload cray-libsci` in test environment causes failure + # It does not fail for actual installs + # build_environment.py imports module directly, so we monkeypatch it there + # rather than in module_cmd + def module(*args): + pass + monkeypatch.setattr(spack.build_environment, 'module', module) + + with tmpdir.as_cwd(): + output = dev_build('-b', 'edit', '--drop-in', 'sh', + 'dev-build-test-install@0.0.0') + assert "lib/spack/env" in 
output def test_dev_build_fails_already_installed(tmpdir, mock_packages, diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py index 4f3abb4438d..57037387318 100644 --- a/lib/spack/spack/test/cmd/env.py +++ b/lib/spack/spack/test/cmd/env.py @@ -20,8 +20,9 @@ from spack.stage import stage_prefix from spack.spec_list import SpecListError -from spack.test.conftest import MockPackage, MockPackageMultiRepo +from spack.util.mock_package import MockPackageMultiRepo import spack.util.spack_json as sjson +from spack.util.path import substitute_path_variables # everything here uses the mock_env_path @@ -163,6 +164,28 @@ def test_env_install_single_spec(install_mockery, mock_fetch): assert e.specs_by_hash[e.concretized_order[0]].name == 'cmake-client' +def test_env_modifications_error_on_activate( + install_mockery, mock_fetch, monkeypatch, capfd): + env('create', 'test') + install = SpackCommand('install') + + e = ev.read('test') + with e: + install('cmake-client') + + def setup_error(pkg, env): + raise RuntimeError("cmake-client had issues!") + + pkg = spack.repo.path.get_pkg_class("cmake-client") + monkeypatch.setattr(pkg, "setup_run_environment", setup_error) + with e: + pass + + _, err = capfd.readouterr() + assert "cmake-client had issues!" 
in err + assert "Warning: couldn't get environment settings" in err + + def test_env_install_same_spec_twice(install_mockery, mock_fetch, capfd): env('create', 'test') @@ -517,6 +540,35 @@ def test_env_with_included_config_scope(): for x in e._get_environment_specs()) +def test_env_with_included_config_var_path(): + config_var_path = os.path.join('$tempdir', 'included-config.yaml') + test_config = """\ +env: + include: + - %s + specs: + - mpileaks +""" % config_var_path + + _env_create('test', StringIO(test_config)) + e = ev.read('test') + + config_real_path = substitute_path_variables(config_var_path) + fs.mkdirp(os.path.dirname(config_real_path)) + with open(config_real_path, 'w') as f: + f.write("""\ +packages: + mpileaks: + version: [2.2] +""") + + with e: + e.concretize() + + assert any(x.satisfies('mpileaks@2.2') + for x in e._get_environment_specs()) + + def test_env_config_precedence(): test_config = """\ env: @@ -733,10 +785,10 @@ def create_v1_lockfile_dict(roots, all_specs): def test_read_old_lock_and_write_new(tmpdir): build_only = ('build',) - y = MockPackage('y', [], []) - x = MockPackage('x', [y], [build_only]) + mock_repo = MockPackageMultiRepo() + y = mock_repo.add_package('y', [], []) + mock_repo.add_package('x', [y], [build_only]) - mock_repo = MockPackageMultiRepo([x, y]) with spack.repo.swap(mock_repo): x = Spec('x') x.concretize() @@ -765,9 +817,9 @@ def test_read_old_lock_creates_backup(tmpdir): """When reading a version-1 lockfile, make sure that a backup of that file is created. 
""" - y = MockPackage('y', [], []) + mock_repo = MockPackageMultiRepo() + y = mock_repo.add_package('y', [], []) - mock_repo = MockPackageMultiRepo([y]) with spack.repo.swap(mock_repo): y = Spec('y') y.concretize() @@ -796,11 +848,10 @@ def test_indirect_build_dep(): default = ('build', 'link') build_only = ('build',) - z = MockPackage('z', [], []) - y = MockPackage('y', [z], [build_only]) - x = MockPackage('x', [y], [default]) - - mock_repo = MockPackageMultiRepo([x, y, z]) + mock_repo = MockPackageMultiRepo() + z = mock_repo.add_package('z', [], []) + y = mock_repo.add_package('y', [z], [build_only]) + mock_repo.add_package('x', [y], [default]) def noop(*args): pass @@ -838,11 +889,10 @@ def test_store_different_build_deps(): default = ('build', 'link') build_only = ('build',) - z = MockPackage('z', [], []) - y = MockPackage('y', [z], [build_only]) - x = MockPackage('x', [y, z], [default, build_only]) - - mock_repo = MockPackageMultiRepo([x, y, z]) + mock_repo = MockPackageMultiRepo() + z = mock_repo.add_package('z', [], []) + y = mock_repo.add_package('y', [z], [build_only]) + mock_repo.add_package('x', [y, z], [default, build_only]) def noop(*args): pass @@ -1042,6 +1092,55 @@ def test_stack_yaml_definitions(tmpdir): assert Spec('callpath') in test.user_specs +def test_stack_yaml_definitions_as_constraints(tmpdir): + filename = str(tmpdir.join('spack.yaml')) + with open(filename, 'w') as f: + f.write("""\ +env: + definitions: + - packages: [mpileaks, callpath] + - mpis: [mpich, openmpi] + specs: + - matrix: + - [$packages] + - [$^mpis] +""") + with tmpdir.as_cwd(): + env('create', 'test', './spack.yaml') + test = ev.read('test') + + assert Spec('mpileaks^mpich') in test.user_specs + assert Spec('callpath^mpich') in test.user_specs + assert Spec('mpileaks^openmpi') in test.user_specs + assert Spec('callpath^openmpi') in test.user_specs + + +def test_stack_yaml_definitions_as_constraints_on_matrix(tmpdir): + filename = str(tmpdir.join('spack.yaml')) + with 
open(filename, 'w') as f: + f.write("""\ +env: + definitions: + - packages: [mpileaks, callpath] + - mpis: + - matrix: + - [mpich] + - ['@3.0.4', '@3.0.3'] + specs: + - matrix: + - [$packages] + - [$^mpis] +""") + with tmpdir.as_cwd(): + env('create', 'test', './spack.yaml') + test = ev.read('test') + + assert Spec('mpileaks^mpich@3.0.4') in test.user_specs + assert Spec('callpath^mpich@3.0.4') in test.user_specs + assert Spec('mpileaks^mpich@3.0.3') in test.user_specs + assert Spec('callpath^mpich@3.0.3') in test.user_specs + + @pytest.mark.regression('12095') def test_stack_yaml_definitions_write_reference(tmpdir): filename = str(tmpdir.join('spack.yaml')) diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py new file mode 100644 index 00000000000..0bdf67fe3ee --- /dev/null +++ b/lib/spack/spack/test/cmd/external.py @@ -0,0 +1,177 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import pytest +import os +import stat + +import spack +from spack.spec import Spec +from spack.cmd.external import ExternalPackageEntry +from spack.main import SpackCommand + + +@pytest.fixture() +def create_exe(tmpdir_factory): + def _create_exe(exe_name, content): + base_prefix = tmpdir_factory.mktemp('base-prefix') + base_prefix.ensure('bin', dir=True) + exe_path = str(base_prefix.join('bin', exe_name)) + with open(exe_path, 'w') as f: + f.write("""\ +#!/bin/bash + +echo "{0}" +""".format(content)) + + st = os.stat(exe_path) + os.chmod(exe_path, st.st_mode | stat.S_IEXEC) + return exe_path + + yield _create_exe + + +def test_find_external_single_package(create_exe): + pkgs_to_check = [spack.repo.get('cmake')] + + cmake_path = create_exe("cmake", "cmake version 1.foo") + system_path_to_exe = {cmake_path: 'cmake'} + + pkg_to_entries = spack.cmd.external._get_external_packages( + pkgs_to_check, system_path_to_exe) + + pkg, entries = next(iter(pkg_to_entries.items())) + single_entry = next(iter(entries)) + + assert single_entry.spec == Spec('cmake@1.foo') + + +def test_find_external_two_instances_same_package(create_exe): + pkgs_to_check = [spack.repo.get('cmake')] + + # Each of these cmake instances is created in a different prefix + cmake_path1 = create_exe("cmake", "cmake version 1.foo") + cmake_path2 = create_exe("cmake", "cmake version 3.17.2") + system_path_to_exe = { + cmake_path1: 'cmake', + cmake_path2: 'cmake'} + + pkg_to_entries = spack.cmd.external._get_external_packages( + pkgs_to_check, system_path_to_exe) + + pkg, entries = next(iter(pkg_to_entries.items())) + spec_to_path = dict((e.spec, e.base_dir) for e in entries) + assert spec_to_path[Spec('cmake@1.foo')] == ( + spack.cmd.external._determine_base_dir(os.path.dirname(cmake_path1))) + assert spec_to_path[Spec('cmake@3.17.2')] == ( + spack.cmd.external._determine_base_dir(os.path.dirname(cmake_path2))) + + +def 
test_find_external_update_config(mutable_config): + pkg_to_entries = { + 'cmake': [ + ExternalPackageEntry(Spec('cmake@1.foo'), '/x/y1/'), + ExternalPackageEntry(Spec('cmake@3.17.2'), '/x/y2/'), + ] + } + + spack.cmd.external._update_pkg_config(pkg_to_entries, False) + + pkgs_cfg = spack.config.get('packages') + cmake_cfg = pkgs_cfg['cmake'] + cmake_paths_cfg = cmake_cfg['paths'] + + assert cmake_paths_cfg['cmake@1.foo'] == '/x/y1/' + assert cmake_paths_cfg['cmake@3.17.2'] == '/x/y2/' + + +def test_get_executables(working_env, create_exe): + cmake_path1 = create_exe("cmake", "cmake version 1.foo") + + os.environ['PATH'] = ':'.join([os.path.dirname(cmake_path1)]) + path_to_exe = spack.cmd.external._get_system_executables() + assert path_to_exe[cmake_path1] == 'cmake' + + +external = SpackCommand('external') + + +def test_find_external_cmd(mutable_config, working_env, create_exe): + """Test invoking 'spack external find' with additional package arguments, + which restricts the set of packages that Spack looks for. + """ + cmake_path1 = create_exe("cmake", "cmake version 1.foo") + + os.environ['PATH'] = ':'.join([os.path.dirname(cmake_path1)]) + external('find', 'cmake') + + pkgs_cfg = spack.config.get('packages') + cmake_cfg = pkgs_cfg['cmake'] + cmake_paths_cfg = cmake_cfg['paths'] + + assert 'cmake@1.foo' in cmake_paths_cfg + + +def test_find_external_cmd_not_buildable( + mutable_config, working_env, create_exe): + """When the user invokes 'spack external find --not-buildable', the config + for any package where Spack finds an external version should be marked as + not buildable. 
+ """ + cmake_path1 = create_exe("cmake", "cmake version 1.foo") + os.environ['PATH'] = ':'.join([os.path.dirname(cmake_path1)]) + external('find', '--not-buildable', 'cmake') + pkgs_cfg = spack.config.get('packages') + assert not pkgs_cfg['cmake']['buildable'] + + +def test_find_external_cmd_full_repo( + mutable_config, working_env, create_exe, mutable_mock_repo): + """Test invoking 'spack external find' with no additional arguments, which + iterates through each package in the repository. + """ + + exe_path1 = create_exe( + "find-externals1-exe", "find-externals1 version 1.foo") + + os.environ['PATH'] = ':'.join([os.path.dirname(exe_path1)]) + external('find') + + pkgs_cfg = spack.config.get('packages') + pkg_cfg = pkgs_cfg['find-externals1'] + pkg_paths_cfg = pkg_cfg['paths'] + + assert 'find-externals1@1.foo' in pkg_paths_cfg + + +def test_find_external_merge(mutable_config, mutable_mock_repo): + """Check that 'spack find external' doesn't overwrite an existing spec + entry in packages.yaml. 
+ """ + pkgs_cfg_init = { + 'find-externals1': { + 'paths': { + 'find-externals1@1.1': '/preexisting-prefix/' + }, + 'buildable': False + } + } + + mutable_config.update_config('packages', pkgs_cfg_init) + + pkg_to_entries = { + 'find-externals1': [ + ExternalPackageEntry(Spec('find-externals1@1.1'), '/x/y1/'), + ExternalPackageEntry(Spec('find-externals1@1.2'), '/x/y2/'), + ] + } + spack.cmd.external._update_pkg_config(pkg_to_entries, False) + + pkgs_cfg = spack.config.get('packages') + pkg_cfg = pkgs_cfg['find-externals1'] + pkg_paths_cfg = pkg_cfg['paths'] + + assert pkg_paths_cfg['find-externals1@1.1'] == '/preexisting-prefix/' + assert pkg_paths_cfg['find-externals1@1.2'] == '/x/y2/' diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py index 8516569592d..8d3c68d31aa 100644 --- a/lib/spack/spack/test/cmd/find.py +++ b/lib/spack/spack/test/cmd/find.py @@ -324,7 +324,7 @@ def test_find_prefix_in_env(mutable_mock_env_path, install_mockery, mock_fetch, def test_find_loaded(database, working_env): output = find('--loaded', '--group') - assert output == '' # 0 packages installed printed separately + assert output == '' os.environ[uenv.spack_loaded_hashes_var] = ':'.join( [x.dag_hash() for x in spack.store.db.query()]) diff --git a/lib/spack/spack/test/cmd/init_py_functions.py b/lib/spack/spack/test/cmd/init_py_functions.py new file mode 100644 index 00000000000..2e631062837 --- /dev/null +++ b/lib/spack/spack/test/cmd/init_py_functions.py @@ -0,0 +1,29 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pytest +from spack.cmd import require_python_name, python_name, PythonNameError, \ + require_cmd_name, cmd_name, CommandNameError + + +def test_require_python_name(): + """Python module names should not contain dashes---ensure that + require_python_name() raises the appropriate exception if one is + detected. + """ + require_python_name("okey_dokey") + with pytest.raises(PythonNameError): + require_python_name("okey-dokey") + require_python_name(python_name("okey-dokey")) + + +def test_require_cmd_name(): + """By convention, Spack command names should contain dashes rather than + underscores---ensure that require_cmd_name() raises the appropriate + exception if underscores are detected. + """ + require_cmd_name("okey-dokey") + with pytest.raises(CommandNameError): + require_cmd_name("okey_dokey") + require_cmd_name(cmd_name("okey_dokey")) diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py index 8cebee273ea..5d4ad78c38a 100644 --- a/lib/spack/spack/test/cmd/install.py +++ b/lib/spack/spack/test/cmd/install.py @@ -15,11 +15,12 @@ import llnl.util.filesystem as fs import spack.config +import spack.compilers as compilers import spack.hash_types as ht import spack.package import spack.cmd.install from spack.error import SpackError -from spack.spec import Spec +from spack.spec import Spec, CompilerSpec from spack.main import SpackCommand import spack.environment as ev @@ -758,3 +759,30 @@ def test_cdash_auth_token(tmpdir, install_mockery, capfd): '--log-format=cdash', 'a') assert 'Using CDash auth token from environment' in out + + +def test_compiler_bootstrap( + install_mockery_mutable_config, mock_packages, mock_fetch, + mock_archive, mutable_config, monkeypatch): + monkeypatch.setattr(spack.concretize.Concretizer, + 'check_for_compiler_existence', False) + spack.config.set('config:install_missing_compilers', True) + assert CompilerSpec('gcc@2.0') not in 
compilers.all_compiler_specs() + + # Test succeeds if it does not raise an error + install('a%gcc@2.0') + + +@pytest.mark.regression('16221') +def test_compiler_bootstrap_already_installed( + install_mockery_mutable_config, mock_packages, mock_fetch, + mock_archive, mutable_config, monkeypatch): + monkeypatch.setattr(spack.concretize.Concretizer, + 'check_for_compiler_existence', False) + spack.config.set('config:install_missing_compilers', True) + + assert CompilerSpec('gcc@2.0') not in compilers.all_compiler_specs() + + # Test succeeds if it does not raise an error + install('gcc@2.0') + install('a%gcc@2.0') diff --git a/lib/spack/spack/test/cmd/is_git_repo.py b/lib/spack/spack/test/cmd/is_git_repo.py new file mode 100644 index 00000000000..724925e5e48 --- /dev/null +++ b/lib/spack/spack/test/cmd/is_git_repo.py @@ -0,0 +1,67 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from __future__ import print_function + +import spack +import pytest + +from llnl.util.filesystem import mkdirp + +from spack.util.executable import which +from spack.version import ver + + +git = which("git") +git_required_version = '2.17.0' + + +def check_git_version(): + """Check if git version is new enough for worktree functionality. + Return True if requirements are met. + + The latest required functionality is `worktree remove` which was only added + in 2.17.0. 
+ + Refer: + https://github.com/git/git/commit/cc73385cf6c5c229458775bc92e7dbbe24d11611 + """ + git_version = ver(git('--version', output=str).lstrip('git version ')) + return git_version >= ver(git_required_version) + + +pytestmark = pytest.mark.skipif( + not git or not check_git_version(), + reason="we need git to test if we are in a git repo" +) + + +@pytest.fixture(scope="function") +def git_tmp_worktree(tmpdir): + """Create new worktree in a temporary folder and monkeypatch + spack.paths.prefix to point to it. + """ + worktree_root = str(tmpdir.join("tmp_worktree")) + mkdirp(worktree_root) + + git("worktree", "add", "--detach", worktree_root, "HEAD") + + yield worktree_root + + git("worktree", "remove", "--force", worktree_root) + + +def test_is_git_repo_in_worktree(git_tmp_worktree): + """Verify that spack.cmd.spack_is_git_repo() can identify a git repository + in a worktree. + """ + assert spack.cmd.is_git_repo(git_tmp_worktree) + + +def test_spack_is_git_repo_nongit(tmpdir, monkeypatch): + """Verify that spack.cmd.spack_is_git_repo() correctly returns False if we + are in a non-git directory. 
+ """ + assert not spack.cmd.is_git_repo(str(tmpdir)) diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py index 4bb4fad2248..f6fe0b24ddb 100644 --- a/lib/spack/spack/test/cmd/mirror.py +++ b/lib/spack/spack/test/cmd/mirror.py @@ -89,6 +89,56 @@ def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config, set(['trivial-pkg-with-valid-hash'])) +class MockMirrorArgs(object): + def __init__(self, specs=None, all=False, file=None, + versions_per_spec=None, dependencies=False, + exclude_file=None, exclude_specs=None): + self.specs = specs or [] + self.all = all + self.file = file + self.versions_per_spec = versions_per_spec + self.dependencies = dependencies + self.exclude_file = exclude_file + self.exclude_specs = exclude_specs + + +def test_exclude_specs(mock_packages): + args = MockMirrorArgs( + specs=['mpich'], + versions_per_spec='all', + exclude_specs="mpich@3.0.1:3.0.2 mpich@1.0") + + mirror_specs = spack.cmd.mirror._determine_specs_to_mirror(args) + expected_include = set(spack.spec.Spec(x) for x in + ['mpich@3.0.3', 'mpich@3.0.4', 'mpich@3.0']) + expected_exclude = set(spack.spec.Spec(x) for x in + ['mpich@3.0.1', 'mpich@3.0.2', 'mpich@1.0']) + assert expected_include <= set(mirror_specs) + assert (not expected_exclude & set(mirror_specs)) + + +def test_exclude_file(mock_packages, tmpdir): + exclude_path = os.path.join(str(tmpdir), 'test-exclude.txt') + with open(exclude_path, 'w') as exclude_file: + exclude_file.write("""\ +mpich@3.0.1:3.0.2 +mpich@1.0 +""") + + args = MockMirrorArgs( + specs=['mpich'], + versions_per_spec='all', + exclude_file=exclude_path) + + mirror_specs = spack.cmd.mirror._determine_specs_to_mirror(args) + expected_include = set(spack.spec.Spec(x) for x in + ['mpich@3.0.3', 'mpich@3.0.4', 'mpich@3.0']) + expected_exclude = set(spack.spec.Spec(x) for x in + ['mpich@3.0.1', 'mpich@3.0.2', 'mpich@1.0']) + assert expected_include <= set(mirror_specs) + assert (not expected_exclude & set(mirror_specs)) + 
+ def test_mirror_crud(tmp_scope, capsys): with capsys.disabled(): mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io') diff --git a/lib/spack/spack/test/cmd/providers.py b/lib/spack/spack/test/cmd/providers.py index 3d537c553fb..dd0c8a9eb72 100644 --- a/lib/spack/spack/test/cmd/providers.py +++ b/lib/spack/spack/test/cmd/providers.py @@ -22,8 +22,7 @@ def test_it_just_runs(pkg): @pytest.mark.parametrize('vpkg,provider_list', [ - (('mpi',), ['charmpp@6.7.1:', - 'intel-mpi', + (('mpi',), ['intel-mpi', 'intel-parallel-studio', 'mpich', 'mpich@1:', diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py deleted file mode 100644 index 3c34f720ec9..00000000000 --- a/lib/spack/spack/test/cmd/test_compiler_cmd.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import pytest -import llnl.util.filesystem - -import spack.cmd.compiler -import spack.compilers -import spack.spec -import spack.util.pattern -from spack.version import Version - -test_version = '4.5.3' - - -@pytest.fixture() -def mock_compiler_dir(tmpdir): - """Return a directory containing a fake, but detectable compiler.""" - - tmpdir.ensure('bin', dir=True) - bin_dir = tmpdir.join('bin') - - gcc_path = bin_dir.join('gcc') - gxx_path = bin_dir.join('g++') - gfortran_path = bin_dir.join('gfortran') - - gcc_path.write("""\ -#!/bin/sh - -for arg in "$@"; do - if [ "$arg" = -dumpversion ]; then - echo '%s' - fi -done -""" % test_version) - - # Create some mock compilers in the temporary directory - llnl.util.filesystem.set_executable(str(gcc_path)) - gcc_path.copy(gxx_path, mode=True) - gcc_path.copy(gfortran_path, mode=True) - - return str(tmpdir) - - -@pytest.mark.usefixtures('config', 'mock_packages') -class TestCompilerCommand(object): - - def 
test_compiler_remove(self): - args = spack.util.pattern.Bunch( - all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None - ) - spack.cmd.compiler.compiler_remove(args) - compilers = spack.compilers.all_compiler_specs() - assert spack.spec.CompilerSpec("gcc@4.5.0") not in compilers - - def test_compiler_add(self, mock_compiler_dir): - # Compilers available by default. - old_compilers = set(spack.compilers.all_compiler_specs()) - - args = spack.util.pattern.Bunch( - all=None, - compiler_spec=None, - add_paths=[mock_compiler_dir], - scope=None - ) - spack.cmd.compiler.compiler_find(args) - - # Ensure new compiler is in there - new_compilers = set(spack.compilers.all_compiler_specs()) - new_compiler = new_compilers - old_compilers - assert any(c.version == Version(test_version) for c in new_compiler) diff --git a/lib/spack/spack/test/cmd/view.py b/lib/spack/spack/test/cmd/view.py index c52cd123252..4ff15920357 100644 --- a/lib/spack/spack/test/cmd/view.py +++ b/lib/spack/spack/test/cmd/view.py @@ -24,7 +24,8 @@ def create_projection_file(tmpdir, projection): return projection_file -@pytest.mark.parametrize('cmd', ['hardlink', 'symlink', 'hard', 'add']) +@pytest.mark.parametrize('cmd', ['hardlink', 'symlink', 'hard', 'add', + 'copy', 'relocate']) def test_view_link_type( tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, cmd): @@ -33,10 +34,29 @@ def test_view_link_type( view(cmd, viewpath, 'libdwarf') package_prefix = os.path.join(viewpath, 'libdwarf') assert os.path.exists(package_prefix) - assert os.path.islink(package_prefix) == (not cmd.startswith('hard')) + + # Check that we use symlinks for and only for the appropriate subcommands + is_link_cmd = cmd in ('symlink', 'add') + assert os.path.islink(package_prefix) == is_link_cmd -@pytest.mark.parametrize('cmd', ['hardlink', 'symlink', 'hard', 'add']) +@pytest.mark.parametrize('add_cmd', ['hardlink', 'symlink', 'hard', 'add', + 'copy', 'relocate']) +def test_view_link_type_remove( + 
tmpdir, mock_packages, mock_archive, mock_fetch, config, + install_mockery, add_cmd): + install('needs-relocation') + viewpath = str(tmpdir.mkdir('view_{0}'.format(add_cmd))) + view(add_cmd, viewpath, 'needs-relocation') + bindir = os.path.join(viewpath, 'bin') + assert os.path.exists(bindir) + + view('remove', viewpath, 'needs-relocation') + assert not os.path.exists(bindir) + + +@pytest.mark.parametrize('cmd', ['hardlink', 'symlink', 'hard', 'add', + 'copy', 'relocate']) def test_view_projections( tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, cmd): @@ -54,7 +74,10 @@ def test_view_projections( package_prefix = os.path.join(viewpath, 'libdwarf-20130207/libdwarf') assert os.path.exists(package_prefix) - assert os.path.islink(package_prefix) == (not cmd.startswith('hard')) + + # Check that we use symlinks for and only for the appropriate subcommands + is_symlink_cmd = cmd in ('symlink', 'add') + assert os.path.islink(package_prefix) == is_symlink_cmd def test_view_multiple_projections( diff --git a/lib/spack/spack/test/cmd_extensions.py b/lib/spack/spack/test/cmd_extensions.py index 486ac925cf7..cb166092780 100644 --- a/lib/spack/spack/test/cmd_extensions.py +++ b/lib/spack/spack/test/cmd_extensions.py @@ -5,24 +5,71 @@ import pytest +import contextlib +import os import sys +import spack.cmd import spack.config +import spack.extensions import spack.main -@pytest.fixture() -def extension_root(tmpdir): - root = tmpdir.mkdir('spack-testcommand') - root.ensure('testcommand', 'cmd', dir=True) - return root +class Extension: + """Helper class to simplify the creation of simple command extension + directory structures with a conventional format for testing. + """ + def __init__(self, name, root): + """Create a command extension. + + Args: + name (str): The name of the command extension. + root (path object): The temporary root for the command extension + (e.g. from tmpdir.mkdir()). 
+ """ + self.name = name + self.pname = spack.cmd.python_name(name) + self.root = root + self.main = self.root.ensure(self.pname, dir=True) + self.cmd = self.main.ensure('cmd', dir=True) + + def add_command(self, command_name, contents): + """Add a command to this command extension. + + Args: + command_name (str): The name of the command. + contents (str): the desired contents of the new command module + file.""" + spack.cmd.require_cmd_name(command_name) + python_name = spack.cmd.python_name(command_name) + cmd = self.cmd.ensure(python_name + '.py') + cmd.write(contents) -@pytest.fixture() -def hello_world_cmd(extension_root): - """Simple extension command with code contained in a single file.""" - hello = extension_root.ensure('testcommand', 'cmd', 'hello.py') - hello.write(""" +@pytest.fixture(scope='function') +def extension_creator(tmpdir, config): + """Create a basic extension command directory structure""" + @contextlib.contextmanager + def _ce(extension_name='testcommand'): + root = tmpdir.mkdir('spack-' + extension_name) + extension = Extension(extension_name, root) + with spack.config.override('config:extensions', + [str(extension.root)]): + yield extension + list_of_modules = list(sys.modules.keys()) + try: + yield _ce + finally: + to_be_deleted = [x for x in sys.modules if x not in list_of_modules] + for module_name in to_be_deleted: + del sys.modules[module_name] + + +@pytest.fixture(scope='function') +def hello_world_extension(extension_creator): + """Create an extension with a hello-world command.""" + with extension_creator() as extension: + extension.add_command('hello-world', """ description = "hello world extension command" section = "test command" level = "long" @@ -31,27 +78,33 @@ def setup_parser(subparser): pass -def hello(parser, args): +def hello_world(parser, args): print('Hello world!') """) - list_of_modules = list(sys.modules.keys()) - with spack.config.override('config:extensions', [str(extension_root)]): - yield 
spack.main.SpackCommand('hello') - - to_be_deleted = [x for x in sys.modules if x not in list_of_modules] - for module_name in to_be_deleted: - del sys.modules[module_name] + yield extension -@pytest.fixture() -def hello_world_with_module_in_root(extension_root): - """Extension command with additional code in the root folder.""" - extension_root.ensure('testcommand', '__init__.py') - command_root = extension_root.join('testcommand', 'cmd') - hello = command_root.ensure('hello.py') - hello.write(""" +@pytest.fixture(scope='function') +def hello_world_cmd(hello_world_extension): + """Create and return an invokable "hello-world" extension command.""" + yield spack.main.SpackCommand('hello-world') + + +@pytest.fixture(scope='function') +def hello_world_with_module_in_root(extension_creator): + """Create a "hello-world" extension command with additional code in the + root folder. + """ + @contextlib.contextmanager + def _hwwmir(extension_name=None): + with extension_creator(extension_name) \ + if extension_name else \ + extension_creator() as extension: + # Note that the namespace of the extension is derived from the + # fixture. 
+ extension.add_command('hello', """ # Test an absolute import -from spack.extensions.testcommand.implementation import hello_world +from spack.extensions.{ext_pname}.implementation import hello_world # Test a relative import from ..implementation import hello_folks @@ -79,33 +132,143 @@ def hello(parser, args): hello_folks() elif args.subcommand == 'global': print(global_message) -""") - implementation = extension_root.ensure('testcommand', 'implementation.py') - implementation.write(""" +""".format(ext_pname=extension.pname)) + + extension.main.ensure('__init__.py') + implementation \ + = extension.main.ensure('implementation.py') + implementation.write(""" def hello_world(): print('Hello world!') def hello_folks(): print('Hello folks!') """) - list_of_modules = list(sys.modules.keys()) - with spack.config.override('config:extensions', [str(extension_root)]): - yield spack.main.SpackCommand('hello') + yield spack.main.SpackCommand('hello') - to_be_deleted = [x for x in sys.modules if x not in list_of_modules] - for module_name in to_be_deleted: - del sys.modules[module_name] + yield _hwwmir def test_simple_command_extension(hello_world_cmd): + """Basic test of a functioning command.""" output = hello_world_cmd() assert 'Hello world!' in output -def test_command_with_import(hello_world_with_module_in_root): - output = hello_world_with_module_in_root('world') - assert 'Hello world!' in output - output = hello_world_with_module_in_root('folks') - assert 'Hello folks!' in output - output = hello_world_with_module_in_root('global') - assert 'bar' in output +def test_multi_extension_search(hello_world_extension, extension_creator): + """Ensure we can find an extension command even if it's not in the first + place we look. + """ + + with extension_creator('testcommand2'): + assert ('Hello world') in spack.main.SpackCommand('hello-world')() + + +def test_duplicate_module_load(hello_world_cmd, capsys): + """Ensure duplicate module load attempts are successful. 
+ + The command module will already have been loaded once by the + hello_world_cmd fixture. + """ + parser = spack.main.make_argument_parser() + args = [] + hw_cmd = spack.cmd.get_command(hello_world_cmd.command_name) + hw_cmd(parser, args) + captured = capsys.readouterr() + assert captured == ('Hello world!\n', '') + + +@pytest.mark.parametrize('extension_name', + [None, 'hyphenated-extension'], + ids=['simple', 'hyphenated_extension_name']) +def test_command_with_import(extension_name, hello_world_with_module_in_root): + """Ensure we can write a functioning command with multiple imported + subcommands, including where the extension name contains a hyphen. + """ + with hello_world_with_module_in_root(extension_name) as hello_world: + output = hello_world('world') + assert 'Hello world!' in output + output = hello_world('folks') + assert 'Hello folks!' in output + output = hello_world('global') + assert 'bar' in output + + +def test_missing_command(): + """Ensure that we raise the expected exception if the desired command is + not present. 
+ """ + with pytest.raises(spack.extensions.CommandNotFoundError): + spack.cmd.get_module("no-such-command") + + +@pytest.mark.\ + parametrize('extension_path,expected_exception', + [('/my/bad/extension', + spack.extensions.ExtensionNamingError), + ('', spack.extensions.ExtensionNamingError), + ('/my/bad/spack--extra-hyphen', + spack.extensions.ExtensionNamingError), + ('/my/good/spack-extension', + spack.extensions.CommandNotFoundError), + ('/my/still/good/spack-extension/', + spack.extensions.CommandNotFoundError), + ('/my/spack-hyphenated-extension', + spack.extensions.CommandNotFoundError)], + ids=['no_stem', 'vacuous', 'leading_hyphen', + 'basic_good', 'trailing_slash', 'hyphenated']) +def test_extension_naming(extension_path, expected_exception, config): + """Ensure that we are correctly validating configured extension paths + for conformity with the rules: the basename should match + ``spack-``; may have embedded hyphens but not begin with one. + """ + with spack.config.override('config:extensions', [extension_path]): + with pytest.raises(expected_exception): + spack.cmd.get_module("no-such-command") + + +def test_missing_command_function(extension_creator, capsys): + """Ensure we die as expected if a command module does not have the + expected command function defined. + """ + with extension_creator() as extension: + extension.\ + add_command('bad-cmd', + """\ndescription = "Empty command implementation"\n""") + with pytest.raises(SystemExit): + spack.cmd.get_module('bad-cmd') + capture = capsys.readouterr() + assert "must define function 'bad_cmd'." 
in capture[1] + + +def test_get_command_paths(config): + """Exercise the construction of extension command search paths.""" + extensions = ('extension-1', 'extension-2') + ext_paths = [] + expected_cmd_paths = [] + for ext in extensions: + ext_path = os.path.join('my', 'path', 'to', 'spack-' + ext) + ext_paths.append(ext_path) + expected_cmd_paths.append(os.path.join(ext_path, + spack.cmd.python_name(ext), + 'cmd')) + + with spack.config.override('config:extensions', ext_paths): + assert spack.extensions.get_command_paths() == expected_cmd_paths + + +@pytest.mark.parametrize('command_name,contents,exception', + [('bad-cmd', 'from oopsie.daisy import bad\n', + ImportError), + ('bad-cmd', """var = bad_function_call('blech')\n""", + NameError), + ('bad-cmd', ')\n', SyntaxError)], + ids=['ImportError', 'NameError', 'SyntaxError']) +def test_failing_command(command_name, contents, exception, extension_creator): + """Ensure that the configured command fails to import with the specified + error. + """ + with extension_creator() as extension: + extension.add_command(command_name, contents) + with pytest.raises(exception): + spack.extensions.get_module(command_name) diff --git a/lib/spack/spack/test/compilers/__init__.py b/lib/spack/spack/test/compilers/__init__.py new file mode 100644 index 00000000000..9f87532b851 --- /dev/null +++ b/lib/spack/spack/test/compilers/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/spack/test/compilers.py b/lib/spack/spack/test/compilers/basics.py similarity index 70% rename from lib/spack/spack/test/compilers.py rename to lib/spack/spack/test/compilers/basics.py index 24115ba562e..faf18e38715 100644 --- a/lib/spack/spack/test/compilers.py +++ b/lib/spack/spack/test/compilers/basics.py @@ -2,29 +2,21 @@ # Spack Project Developers. 
See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - +"""Test basic behavior of compilers in Spack""" import pytest import sys +import os from copy import copy from six import iteritems +import llnl.util.filesystem as fs + import spack.spec import spack.compiler import spack.compilers as compilers -import spack.compilers.arm -import spack.compilers.cce -import spack.compilers.clang -import spack.compilers.fj -import spack.compilers.gcc -import spack.compilers.intel -import spack.compilers.nag -import spack.compilers.pgi -import spack.compilers.xl -import spack.compilers.xl_r - from spack.compiler import Compiler @@ -142,8 +134,8 @@ def test_compiler_flags_from_config_are_grouped(): 'paths': { 'cc': 'cc-path', 'cxx': 'cxx-path', - 'fc': None, - 'f77': None + 'fc': 'fc-path', + 'f77': 'f77-path' }, 'flags': {}, 'modules': None @@ -154,13 +146,16 @@ def test_compiler_flags_from_config_are_grouped(): class MockCompiler(Compiler): def __init__(self): super(MockCompiler, self).__init__( - "badcompiler@1.0.0", - default_compiler_entry['operating_system'], - None, - [default_compiler_entry['paths']['cc'], - default_compiler_entry['paths']['cxx'], - default_compiler_entry['paths']['fc'], - default_compiler_entry['paths']['f77']]) + cspec="badcompiler@1.0.0", + operating_system=default_compiler_entry['operating_system'], + target=None, + paths=[default_compiler_entry['paths']['cc'], + default_compiler_entry['paths']['cxx'], + default_compiler_entry['paths']['fc'], + default_compiler_entry['paths']['f77']], + environment={}) + + _get_compiler_link_paths = Compiler._get_compiler_link_paths @property def name(self): @@ -170,6 +165,12 @@ def name(self): def version(self): return "1.0.0" + _verbose_flag = "--verbose" + + @property + def verbose_flag(self): + return self._verbose_flag + required_libs = ['libgfortran'] @@ -189,6 +190,99 @@ def try_all_dirs(*args): assert set(retrieved_rpaths) == expected_rpaths +no_flag_dirs = 
['/path/to/first/lib', '/path/to/second/lib64'] +no_flag_output = 'ld -L%s -L%s' % tuple(no_flag_dirs) + +flag_dirs = ['/path/to/first/with/flag/lib', '/path/to/second/lib64'] +flag_output = 'ld -L%s -L%s' % tuple(flag_dirs) + + +def call_compiler(exe, *args, **kwargs): + # This method can replace Executable.__call__ to emulate a compiler that + # changes libraries depending on a flag. + if '--correct-flag' in exe.exe: + return flag_output + return no_flag_output + + +@pytest.mark.parametrize('exe,flagname', [ + ('cxx', ''), + ('cxx', 'cxxflags'), + ('cxx', 'cppflags'), + ('cxx', 'ldflags'), + ('cc', ''), + ('cc', 'cflags'), + ('cc', 'cppflags'), + ('fc', ''), + ('fc', 'fflags'), + ('f77', 'fflags'), + ('f77', 'cppflags'), +]) +def test_get_compiler_link_paths(monkeypatch, exe, flagname): + # create fake compiler that emits mock verbose output + compiler = MockCompiler() + monkeypatch.setattr( + spack.util.executable.Executable, '__call__', call_compiler) + + # Grab executable path to test + paths = [getattr(compiler, exe)] + + # Test without flags + dirs = compiler._get_compiler_link_paths(paths) + assert dirs == no_flag_dirs + + if flagname: + # set flags and test + setattr(compiler, 'flags', {flagname: ['--correct-flag']}) + dirs = compiler._get_compiler_link_paths(paths) + assert dirs == flag_dirs + + +def test_get_compiler_link_paths_no_path(): + compiler = MockCompiler() + compiler.cc = None + compiler.cxx = None + compiler.f77 = None + compiler.fc = None + + dirs = compiler._get_compiler_link_paths([compiler.cxx]) + assert dirs == [] + + +def test_get_compiler_link_paths_no_verbose_flag(): + compiler = MockCompiler() + compiler._verbose_flag = None + + dirs = compiler._get_compiler_link_paths([compiler.cxx]) + assert dirs == [] + + +def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir): + gcc = str(tmpdir.join('gcc')) + with open(gcc, 'w') as f: + f.write("""#!/bin/bash +if [[ $ENV_SET == "1" && $MODULE_LOADED == "1" ]]; then + echo 
'""" + no_flag_output + """' +fi +""") + fs.set_executable(gcc) + + # Set module load to turn compiler on + def module(*args): + if args[0] == 'show': + return '' + elif args[0] == 'load': + os.environ['MODULE_LOADED'] = "1" + monkeypatch.setattr(spack.util.module_cmd, 'module', module) + + compiler = MockCompiler() + compiler.environment = {'set': {'ENV_SET': '1'}} + compiler.modules = ['turn_on'] + + dirs = compiler._get_compiler_link_paths([gcc]) + assert dirs == no_flag_dirs + + # Get the desired flag from the specified compiler spec. def flag_value(flag, spec): compiler = None @@ -238,6 +332,8 @@ def test_default_flags(): supported_flag_test("cxx_pic_flag", "-fPIC") supported_flag_test("f77_pic_flag", "-fPIC") supported_flag_test("fc_pic_flag", "-fPIC") + supported_flag_test("debug_flags", ["-g"]) + supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3"]) # Verify behavior of particular compiler definitions. @@ -252,6 +348,9 @@ def test_arm_flags(): supported_flag_test("cxx_pic_flag", "-fPIC", "arm@1.0") supported_flag_test("f77_pic_flag", "-fPIC", "arm@1.0") supported_flag_test("fc_pic_flag", "-fPIC", "arm@1.0") + supported_flag_test("opt_flags", + ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast'], + 'arm@1.0') def test_cce_flags(): @@ -259,13 +358,15 @@ def test_cce_flags(): supported_flag_test("cxx11_flag", "-h std=c++11", "cce@1.0") unsupported_flag_test("c99_flag", "cce@8.0") supported_flag_test("c99_flag", "-h c99,noconform,gnu", "cce@8.1") - supported_flag_test("c99_flag", "-h stc=c99,noconform,gnu", "cce@8.4") + supported_flag_test("c99_flag", "-h std=c99,noconform,gnu", "cce@8.4") unsupported_flag_test("c11_flag", "cce@8.4") supported_flag_test("c11_flag", "-h std=c11,noconform,gnu", "cce@8.5") supported_flag_test("cc_pic_flag", "-h PIC", "cce@1.0") supported_flag_test("cxx_pic_flag", "-h PIC", "cce@1.0") supported_flag_test("f77_pic_flag", "-h PIC", "cce@1.0") supported_flag_test("fc_pic_flag", "-h PIC", "cce@1.0") + 
supported_flag_test("debug_flags", ['-g', '-G0', '-G1', '-G2', '-Gfast'], + 'cce@1.0') def test_clang_flags(): @@ -288,6 +389,7 @@ def test_clang_flags(): supported_flag_test("fc_pic_flag", "-fPIC", "clang@2.0.0-apple") # non-Apple Clang. + supported_flag_test("version_argument", "--version", "clang@foo.bar") supported_flag_test("openmp_flag", "-fopenmp", "clang@3.3") unsupported_flag_test("cxx11_flag", "clang@3.2") supported_flag_test("cxx11_flag", "-std=c++11", "clang@3.3") @@ -304,6 +406,15 @@ def test_clang_flags(): supported_flag_test("cxx_pic_flag", "-fPIC", "clang@3.3") supported_flag_test("f77_pic_flag", "-fPIC", "clang@3.3") supported_flag_test("fc_pic_flag", "-fPIC", "clang@3.3") + supported_flag_test("debug_flags", + ['-gcodeview', '-gdwarf-2', '-gdwarf-3', '-gdwarf-4', + '-gdwarf-5', '-gline-tables-only', '-gmodules', '-gz', + '-g'], + 'clang@3.3') + supported_flag_test("opt_flags", + ['-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os', '-Oz', + '-Og', '-O', '-O4'], + 'clang@3.3') def test_fj_flags(): @@ -317,6 +428,8 @@ def test_fj_flags(): supported_flag_test("cxx_pic_flag", "-KPIC", "fj@4.0.0") supported_flag_test("f77_pic_flag", "-KPIC", "fj@4.0.0") supported_flag_test("fc_pic_flag", "-KPIC", "fj@4.0.0") + supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'], + 'fj@4.0.0') def test_gcc_flags(): @@ -342,6 +455,14 @@ def test_gcc_flags(): supported_flag_test("f77_pic_flag", "-fPIC", "gcc@4.0") supported_flag_test("fc_pic_flag", "-fPIC", "gcc@4.0") supported_flag_test("stdcxx_libs", ("-lstdc++",), "gcc@4.1") + supported_flag_test("debug_flags", + ['-g', '-gstabs+', '-gstabs', '-gxcoff+', '-gxcoff', + '-gvms'], + 'gcc@4.0') + supported_flag_test("opt_flags", + ['-O', '-O0', '-O1', '-O2', '-O3', '-Os', '-Ofast', + '-Og'], + 'gcc@4.0') def test_intel_flags(): @@ -362,6 +483,12 @@ def test_intel_flags(): supported_flag_test("f77_pic_flag", "-fPIC", "intel@1.0") supported_flag_test("fc_pic_flag", "-fPIC", "intel@1.0") 
supported_flag_test("stdcxx_libs", ("-cxxlib",), "intel@1.0") + supported_flag_test("debug_flags", + ['-debug', '-g', '-g0', '-g1', '-g2', '-g3'], + 'intel@1.0') + supported_flag_test("opt_flags", + ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os'], + 'intel@1.0') def test_nag_flags(): @@ -376,6 +503,9 @@ def test_nag_flags(): supported_flag_test("f77_rpath_arg", "-Wl,-Wl,,-rpath,,", "nag@1.0") supported_flag_test("fc_rpath_arg", "-Wl,-Wl,,-rpath,,", "nag@1.0") supported_flag_test("linker_arg", "-Wl,-Wl,,", "nag@1.0") + supported_flag_test("debug_flags", ['-g', '-gline', '-g90'], 'nag@1.0') + supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'], + 'nag@1.0') def test_pgi_flags(): @@ -389,6 +519,9 @@ def test_pgi_flags(): supported_flag_test("cxx_pic_flag", "-fpic", "pgi@1.0") supported_flag_test("f77_pic_flag", "-fpic", "pgi@1.0") supported_flag_test("fc_pic_flag", "-fpic", "pgi@1.0") + supported_flag_test("debug_flags", ['-g', '-gopt'], 'pgi@1.0') + supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'], + 'pgi@1.0') def test_xl_flags(): @@ -406,6 +539,13 @@ def test_xl_flags(): supported_flag_test("f77_pic_flag", "-qpic", "xl@1.0") supported_flag_test("fc_pic_flag", "-qpic", "xl@1.0") supported_flag_test("fflags", "-qzerosize", "xl@1.0") + supported_flag_test("debug_flags", + ['-g', '-g0', '-g1', '-g2', '-g8', '-g9'], + 'xl@1.0') + supported_flag_test("opt_flags", + ['-O', '-O0', '-O1', '-O2', '-O3', '-O4', '-O5', + '-Ofast'], + 'xl@1.0') def test_xl_r_flags(): @@ -423,165 +563,13 @@ def test_xl_r_flags(): supported_flag_test("f77_pic_flag", "-qpic", "xl_r@1.0") supported_flag_test("fc_pic_flag", "-qpic", "xl_r@1.0") supported_flag_test("fflags", "-qzerosize", "xl_r@1.0") - - -@pytest.mark.parametrize('version_str,expected_version', [ - ('Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n' # NOQA - 'Target: aarch64--linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir:\n' - 
'/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA - '19.0.0.73'), - ('Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n' # NOQA - 'Target: aarch64--linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir:\n' - '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA - '19.3.1.75') -]) -def test_arm_version_detection(version_str, expected_version): - version = spack.compilers.arm.Arm.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - ('Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n', '8.4.6'), - ('Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n', '8.4.6'), - ('Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n', '8.4.6') -]) -def test_cce_version_detection(version_str, expected_version): - version = spack.compilers.cce.Cce.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.regression('10191') -@pytest.mark.parametrize('version_str,expected_version', [ - # macOS clang - ('Apple clang version 11.0.0 (clang-1100.0.33.8)\n' - 'Target: x86_64-apple-darwin18.7.0\n' - 'Thread model: posix\n' - 'InstalledDir: /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin\n', # noqa - '11.0.0-apple'), - ('Apple LLVM version 7.0.2 (clang-700.1.81)\n' - 'Target: x86_64-apple-darwin15.2.0\n' - 'Thread model: posix\n', '7.0.2-apple'), - # Other platforms - ('clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n' # noqa - 'Target: x86_64-pc-linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir: /usr/bin\n', '6.0.1'), - ('clang version 3.1 (trunk 149096)\n' - 'Target: x86_64-unknown-linux-gnu\n' - 'Thread model: posix\n', '3.1'), - ('clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n' - 'Target: x86_64-pc-linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir: /usr/bin\n', 
'8.0.0'), - ('clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n' # noqa - 'Target: x86_64-pc-linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir: /usr/bin\n', '9.0.1'), - ('clang version 8.0.0-3 (tags/RELEASE_800/final)\n' - 'Target: aarch64-unknown-linux-gnu\n' - 'Thread model: posix\n' - 'InstalledDir: /usr/bin\n', '8.0.0') -]) -def test_clang_version_detection(version_str, expected_version): - version = compilers.clang.Clang.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - # C compiler - ('fcc (FCC) 4.0.0 20190314\n' - 'simulating gcc version 6.1\n' - 'Copyright FUJITSU LIMITED 2019', - '4.0.0'), - # C++ compiler - ('FCC (FCC) 4.0.0 20190314\n' - 'simulating gcc version 6.1\n' - 'Copyright FUJITSU LIMITED 2019', - '4.0.0'), - # Fortran compiler - ('frt (FRT) 4.0.0 20190314\n' - 'Copyright FUJITSU LIMITED 2019', - '4.0.0') -]) -def test_fj_version_detection(version_str, expected_version): - version = spack.compilers.fj.Fj.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - # Output of -dumpversion changed to return only major from GCC 7 - ('4.4.7\n', '4.4.7'), - ('7\n', '7') -]) -def test_gcc_version_detection(version_str, expected_version): - version = spack.compilers.gcc.Gcc.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - ('icpc (ICC) 12.1.5 20120612\n' - 'Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n', - '12.1.5'), - ('ifort (IFORT) 12.1.5 20120612\n' - 'Copyright (C) 1985-2012 Intel Corporation. 
All rights reserved.\n', - '12.1.5') -]) -def test_intel_version_detection(version_str, expected_version): - version = compilers.intel.Intel.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - ('NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n' - 'Product NPL6A60NA for x86-64 Linux\n', '6.0') -]) -def test_nag_version_detection(version_str, expected_version): - version = spack.compilers.nag.Nag.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - # Output on x86-64 - ('pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n' - 'The Portland Group - PGI Compilers and Tools\n' - 'Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.\n', - '15.10'), - # Output on PowerPC - ('pgcc 17.4-0 linuxpower target on Linuxpower\n' - 'PGI Compilers and Tools\n' - 'Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.\n', - '17.4'), - # Output when LLVM-enabled - ('pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n' - 'PGI Compilers and Tools\n' - 'Copyright (c) 2018, NVIDIA CORPORATION. 
All rights reserved.\n', - '18.4') -]) -def test_pgi_version_detection(version_str, expected_version): - version = spack.compilers.pgi.Pgi.extract_version_from_output(version_str) - assert version == expected_version - - -@pytest.mark.parametrize('version_str,expected_version', [ - ('IBM XL C/C++ for Linux, V11.1 (5724-X14)\n' - 'Version: 11.01.0000.0000\n', '11.1'), - ('IBM XL Fortran for Linux, V13.1 (5724-X16)\n' - 'Version: 13.01.0000.0000\n', '13.1'), - ('IBM XL C/C++ for AIX, V11.1 (5724-X13)\n' - 'Version: 11.01.0000.0009\n', '11.1'), - ('IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n' - 'Version: 09.00.0000.0017\n', '9.0') -]) -def test_xl_version_detection(version_str, expected_version): - version = spack.compilers.xl.Xl.extract_version_from_output(version_str) - assert version == expected_version - - version = spack.compilers.xl_r.XlR.extract_version_from_output(version_str) - assert version == expected_version + supported_flag_test("debug_flags", + ['-g', '-g0', '-g1', '-g2', '-g8', '-g9'], + 'xl@1.0') + supported_flag_test("opt_flags", + ['-O', '-O0', '-O1', '-O2', '-O3', '-O4', '-O5', + '-Ofast'], + 'xl@1.0') @pytest.mark.parametrize('compiler_spec,expected_result', [ @@ -615,3 +603,53 @@ def test_raising_if_compiler_target_is_over_specific(config): cfg = spack.compilers.get_compiler_config() with pytest.raises(ValueError): spack.compilers.get_compilers(cfg, 'gcc@9.0.1', arch_spec) + + +def test_compiler_get_real_version(working_env, monkeypatch, tmpdir): + # Test variables + test_version = '2.2.2' + + # Create compiler + gcc = str(tmpdir.join('gcc')) + with open(gcc, 'w') as f: + f.write("""#!/bin/bash +if [[ $CMP_ON == "1" ]]; then + echo "$CMP_VER" +fi +""") + fs.set_executable(gcc) + + # Add compiler to config + compiler_info = { + 'spec': 'gcc@foo', + 'paths': { + 'cc': gcc, + 'cxx': None, + 'f77': None, + 'fc': None, + }, + 'flags': {}, + 'operating_system': 'fake', + 'target': 'fake', + 'modules': ['turn_on'], + 'environment': { + 'set': 
{'CMP_VER': test_version}, + }, + 'extra_rpaths': [], + } + compiler_dict = {'compiler': compiler_info} + + # Set module load to turn compiler on + def module(*args): + if args[0] == 'show': + return '' + elif args[0] == 'load': + os.environ['CMP_ON'] = "1" + monkeypatch.setattr(spack.util.module_cmd, 'module', module) + + # Run and confirm output + compilers = spack.compilers.get_compilers([compiler_dict]) + assert len(compilers) == 1 + compiler = compilers[0] + version = compiler.get_real_version() + assert version == test_version diff --git a/lib/spack/spack/test/compilers/detection.py b/lib/spack/spack/test/compilers/detection.py new file mode 100644 index 00000000000..90311ad2d32 --- /dev/null +++ b/lib/spack/spack/test/compilers/detection.py @@ -0,0 +1,180 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Test detection of compiler version""" +import pytest + +import spack.compilers.arm +import spack.compilers.cce +import spack.compilers.clang +import spack.compilers.fj +import spack.compilers.gcc +import spack.compilers.intel +import spack.compilers.nag +import spack.compilers.pgi +import spack.compilers.xl +import spack.compilers.xl_r + + +@pytest.mark.parametrize('version_str,expected_version', [ + ('Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n' # NOQA + 'Target: aarch64--linux-gnu\n' + 'Thread model: posix\n' + 'InstalledDir:\n' + '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA + '19.0.0.73'), + ('Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n' # NOQA + 'Target: aarch64--linux-gnu\n' + 'Thread model: posix\n' + 'InstalledDir:\n' + '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA + '19.3.1.75') +]) +def test_arm_version_detection(version_str, 
expected_version): + version = spack.compilers.arm.Arm.extract_version_from_output(version_str) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + ('Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n', '8.4.6'), + ('Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n', '8.4.6'), + ('Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n', '8.4.6') +]) +def test_cce_version_detection(version_str, expected_version): + version = spack.compilers.cce.Cce.extract_version_from_output(version_str) + assert version == expected_version + + +@pytest.mark.regression('10191') +@pytest.mark.parametrize('version_str,expected_version', [ + # macOS clang + ('Apple clang version 11.0.0 (clang-1100.0.33.8)\n' + 'Target: x86_64-apple-darwin18.7.0\n' + 'Thread model: posix\n' + 'InstalledDir: /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin\n', # noqa + '11.0.0-apple'), + ('Apple LLVM version 7.0.2 (clang-700.1.81)\n' + 'Target: x86_64-apple-darwin15.2.0\n' + 'Thread model: posix\n', '7.0.2-apple'), + # Other platforms + ('clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n' # noqa + 'Target: x86_64-pc-linux-gnu\n' + 'Thread model: posix\n' + 'InstalledDir: /usr/bin\n', '6.0.1'), + ('clang version 3.1 (trunk 149096)\n' + 'Target: x86_64-unknown-linux-gnu\n' + 'Thread model: posix\n', '3.1'), + ('clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n' + 'Target: x86_64-pc-linux-gnu\n' + 'Thread model: posix\n' + 'InstalledDir: /usr/bin\n', '8.0.0'), + ('clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n' # noqa + 'Target: x86_64-pc-linux-gnu\n' + 'Thread model: posix\n' + 'InstalledDir: /usr/bin\n', '9.0.1'), + ('clang version 8.0.0-3 (tags/RELEASE_800/final)\n' + 'Target: aarch64-unknown-linux-gnu\n' + 'Thread model: posix\n' + 'InstalledDir: /usr/bin\n', '8.0.0') +]) +def test_clang_version_detection(version_str, expected_version): 
+ version = spack.compilers.clang.Clang.extract_version_from_output( + version_str + ) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + # C compiler + ('fcc (FCC) 4.0.0 20190314\n' + 'simulating gcc version 6.1\n' + 'Copyright FUJITSU LIMITED 2019', + '4.0.0'), + # C++ compiler + ('FCC (FCC) 4.0.0 20190314\n' + 'simulating gcc version 6.1\n' + 'Copyright FUJITSU LIMITED 2019', + '4.0.0'), + # Fortran compiler + ('frt (FRT) 4.0.0 20190314\n' + 'Copyright FUJITSU LIMITED 2019', + '4.0.0') +]) +def test_fj_version_detection(version_str, expected_version): + version = spack.compilers.fj.Fj.extract_version_from_output(version_str) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + # Output of -dumpversion changed to return only major from GCC 7 + ('4.4.7\n', '4.4.7'), + ('7\n', '7') +]) +def test_gcc_version_detection(version_str, expected_version): + version = spack.compilers.gcc.Gcc.extract_version_from_output(version_str) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + ('icpc (ICC) 12.1.5 20120612\n' + 'Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n', + '12.1.5'), + ('ifort (IFORT) 12.1.5 20120612\n' + 'Copyright (C) 1985-2012 Intel Corporation. 
All rights reserved.\n', + '12.1.5') +]) +def test_intel_version_detection(version_str, expected_version): + version = spack.compilers.intel.Intel.extract_version_from_output( + version_str + ) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + ('NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n' + 'Product NPL6A60NA for x86-64 Linux\n', '6.0') +]) +def test_nag_version_detection(version_str, expected_version): + version = spack.compilers.nag.Nag.extract_version_from_output(version_str) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + # Output on x86-64 + ('pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n' + 'The Portland Group - PGI Compilers and Tools\n' + 'Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.\n', + '15.10'), + # Output on PowerPC + ('pgcc 17.4-0 linuxpower target on Linuxpower\n' + 'PGI Compilers and Tools\n' + 'Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.\n', + '17.4'), + # Output when LLVM-enabled + ('pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n' + 'PGI Compilers and Tools\n' + 'Copyright (c) 2018, NVIDIA CORPORATION. 
All rights reserved.\n', + '18.4') +]) +def test_pgi_version_detection(version_str, expected_version): + version = spack.compilers.pgi.Pgi.extract_version_from_output(version_str) + assert version == expected_version + + +@pytest.mark.parametrize('version_str,expected_version', [ + ('IBM XL C/C++ for Linux, V11.1 (5724-X14)\n' + 'Version: 11.01.0000.0000\n', '11.1'), + ('IBM XL Fortran for Linux, V13.1 (5724-X16)\n' + 'Version: 13.01.0000.0000\n', '13.1'), + ('IBM XL C/C++ for AIX, V11.1 (5724-X13)\n' + 'Version: 11.01.0000.0009\n', '11.1'), + ('IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n' + 'Version: 09.00.0000.0017\n', '9.0') +]) +def test_xl_version_detection(version_str, expected_version): + version = spack.compilers.xl.Xl.extract_version_from_output(version_str) + assert version == expected_version + + version = spack.compilers.xl_r.XlR.extract_version_from_output(version_str) + assert version == expected_version diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 9024c99cadf..fd0e1851685 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -12,10 +12,9 @@ from spack.concretize import find_spec, NoValidVersionError from spack.error import SpecError -from spack.package_prefs import PackagePrefs from spack.spec import Spec, CompilerSpec, ConflictsInSpecError from spack.version import ver -from spack.test.conftest import MockPackage, MockPackageMultiRepo +from spack.util.mock_package import MockPackageMultiRepo import spack.compilers import spack.platforms.test @@ -103,8 +102,6 @@ def current_host(request, monkeypatch): monkeypatch.setattr(spack.platforms.test.Test, 'default', cpu) yield target else: - # There's a cache that needs to be cleared for unit tests - PackagePrefs._packages_config_cache = None with spack.config.override('packages:all', {'target': [cpu]}): yield target @@ -112,7 +109,10 @@ def current_host(request, monkeypatch): 
spack.architecture.get_platform.cache.clear() -@pytest.mark.usefixtures('config', 'mock_packages') +# This must use the mutable_config fixture because the test +# adjusting_default_target_based_on_compiler uses the current_host fixture, +# which changes the config. +@pytest.mark.usefixtures('mutable_config', 'mock_packages') class TestConcretize(object): def test_concretize(self, spec): check_concretize(spec) @@ -235,10 +235,10 @@ def test_architecture_deep_inheritance(self): """ default_dep = ('link', 'build') - bazpkg = MockPackage('bazpkg', [], []) - barpkg = MockPackage('barpkg', [bazpkg], [default_dep]) - foopkg = MockPackage('foopkg', [barpkg], [default_dep]) - mock_repo = MockPackageMultiRepo([foopkg, barpkg, bazpkg]) + mock_repo = MockPackageMultiRepo() + bazpkg = mock_repo.add_package('bazpkg', [], []) + barpkg = mock_repo.add_package('barpkg', [bazpkg], [default_dep]) + mock_repo.add_package('foopkg', [barpkg], [default_dep]) with spack.repo.swap(mock_repo): spec = Spec('foopkg %clang@3.3 os=CNL target=footar' + @@ -633,3 +633,8 @@ def test_compiler_version_matches_any_entry_in_compilers_yaml(self): s = Spec('mpileaks %gcc@4.5:') s.concretize() assert str(s.compiler.version) == '4.5.0' + + def test_concretize_anonymous(self): + with pytest.raises(spack.error.SpecError): + s = Spec('+variant') + s.concretize() diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py index 922d5a11d87..df46ed9fe80 100644 --- a/lib/spack/spack/test/concretize_preferences.py +++ b/lib/spack/spack/test/concretize_preferences.py @@ -100,7 +100,7 @@ def test_preferred_compilers(self): # Try the last available compiler compiler = str(compiler_list[-1]) update_packages('mpileaks', 'compiler', [compiler]) - spec = concretize('mpileaks') + spec = concretize('mpileaks os=redhat6 target=x86') assert spec.compiler == spack.spec.CompilerSpec(compiler) def test_preferred_target(self, mutable_mock_repo): @@ -239,6 +239,70 @@ def 
mock_module(cmd, module): spec.concretize() assert spec['mpich'].external_path == '/dummy/path' + def test_buildable_false(self): + conf = syaml.load_config("""\ +libelf: + buildable: false +""") + spack.config.set('packages', conf, scope='concretize') + spec = Spec('libelf') + assert not spack.package_prefs.is_spec_buildable(spec) + + spec = Spec('mpich') + assert spack.package_prefs.is_spec_buildable(spec) + + def test_buildable_false_virtual(self): + conf = syaml.load_config("""\ +mpi: + buildable: false +""") + spack.config.set('packages', conf, scope='concretize') + spec = Spec('libelf') + assert spack.package_prefs.is_spec_buildable(spec) + + spec = Spec('mpich') + assert not spack.package_prefs.is_spec_buildable(spec) + + def test_buildable_false_all(self): + conf = syaml.load_config("""\ +all: + buildable: false +""") + spack.config.set('packages', conf, scope='concretize') + spec = Spec('libelf') + assert not spack.package_prefs.is_spec_buildable(spec) + + spec = Spec('mpich') + assert not spack.package_prefs.is_spec_buildable(spec) + + def test_buildable_false_all_true_package(self): + conf = syaml.load_config("""\ +all: + buildable: false +libelf: + buildable: true +""") + spack.config.set('packages', conf, scope='concretize') + spec = Spec('libelf') + assert spack.package_prefs.is_spec_buildable(spec) + + spec = Spec('mpich') + assert not spack.package_prefs.is_spec_buildable(spec) + + def test_buildable_false_all_true_virtual(self): + conf = syaml.load_config("""\ +all: + buildable: false +mpi: + buildable: true +""") + spack.config.set('packages', conf, scope='concretize') + spec = Spec('libelf') + assert not spack.package_prefs.is_spec_buildable(spec) + + spec = Spec('mpich') + assert spack.package_prefs.is_spec_buildable(spec) + def test_config_permissions_from_all(self, configure_permissions): # Although these aren't strictly about concretization, they are # configured in the same file and therefore convenient to test here. 
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index b8598616d50..8212db6c214 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -23,7 +23,7 @@ import spack.schema.mirrors import spack.schema.repos import spack.util.spack_yaml as syaml -from spack.util.path import canonicalize_path +import spack.util.path as spack_path # sample config data @@ -272,31 +272,31 @@ def test_substitute_config_variables(mock_low_high_config): assert os.path.join( '/foo/bar/baz', prefix - ) == canonicalize_path('/foo/bar/baz/$spack') + ) == spack_path.canonicalize_path('/foo/bar/baz/$spack') assert os.path.join( spack.paths.prefix, 'foo/bar/baz' - ) == canonicalize_path('$spack/foo/bar/baz/') + ) == spack_path.canonicalize_path('$spack/foo/bar/baz/') assert os.path.join( '/foo/bar/baz', prefix, 'foo/bar/baz' - ) == canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/') + ) == spack_path.canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/') assert os.path.join( '/foo/bar/baz', prefix - ) == canonicalize_path('/foo/bar/baz/${spack}') + ) == spack_path.canonicalize_path('/foo/bar/baz/${spack}') assert os.path.join( spack.paths.prefix, 'foo/bar/baz' - ) == canonicalize_path('${spack}/foo/bar/baz/') + ) == spack_path.canonicalize_path('${spack}/foo/bar/baz/') assert os.path.join( '/foo/bar/baz', prefix, 'foo/bar/baz' - ) == canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/') + ) == spack_path.canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/') assert os.path.join( '/foo/bar/baz', prefix, 'foo/bar/baz' - ) != canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/') + ) != spack_path.canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/') packages_merge_low = { @@ -345,19 +345,43 @@ def test_merge_with_defaults(mock_low_high_config, write_config_file): def test_substitute_user(mock_low_high_config): user = getpass.getuser() - assert '/foo/bar/' + user + '/baz' == canonicalize_path( + assert '/foo/bar/' + user + '/baz' == 
spack_path.canonicalize_path( '/foo/bar/$user/baz' ) def test_substitute_tempdir(mock_low_high_config): tempdir = tempfile.gettempdir() - assert tempdir == canonicalize_path('$tempdir') - assert tempdir + '/foo/bar/baz' == canonicalize_path( + assert tempdir == spack_path.canonicalize_path('$tempdir') + assert tempdir + '/foo/bar/baz' == spack_path.canonicalize_path( '$tempdir/foo/bar/baz' ) +def test_substitute_padding(mock_low_high_config): + max_system_path = spack_path.get_system_path_max() + expected_length = (max_system_path - + spack_path.SPACK_MAX_INSTALL_PATH_LENGTH) + + install_path = spack_path.canonicalize_path('/foo/bar/${padding}/baz') + + assert spack_path.SPACK_PATH_PADDING_CHARS in install_path + assert len(install_path) == expected_length + + install_path = spack_path.canonicalize_path('/foo/bar/baz/gah/$padding') + + assert spack_path.SPACK_PATH_PADDING_CHARS in install_path + assert len(install_path) == expected_length + + i_path = spack_path.canonicalize_path('/foo/$padding:10') + i_expect = os.path.join('/foo', spack_path.SPACK_PATH_PADDING_CHARS[:10]) + assert i_path == i_expect + + i_path = spack_path.canonicalize_path('/foo/${padding:20}') + i_expect = os.path.join('/foo', spack_path.SPACK_PATH_PADDING_CHARS[:20]) + assert i_path == i_expect + + def test_read_config(mock_low_high_config, write_config_file): write_config_file('config', config_low, 'low') assert spack.config.get('config') == config_low['config'] diff --git a/lib/spack/spack/test/config_values.py b/lib/spack/spack/test/config_values.py new file mode 100644 index 00000000000..ff97f26db42 --- /dev/null +++ b/lib/spack/spack/test/config_values.py @@ -0,0 +1,41 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import spack.spec + + +def test_set_install_hash_length(mock_packages, mutable_config, monkeypatch): + # spack.store.layout caches initial config values, so we monkeypatch + mutable_config.set('config:install_hash_length', 5) + monkeypatch.setattr(spack.store, 'store', spack.store._store()) + + spec = spack.spec.Spec('libelf').concretized() + prefix = spec.prefix + hash = prefix.rsplit('-')[-1] + + assert len(hash) == 5 + + mutable_config.set('config:install_hash_length', 9) + monkeypatch.setattr(spack.store, 'store', spack.store._store()) + + spec = spack.spec.Spec('libelf').concretized() + prefix = spec.prefix + hash = prefix.rsplit('-')[-1] + + assert len(hash) == 9 + + +def test_set_install_hash_length_upper_case(mock_packages, mutable_config, + monkeypatch): + # spack.store.layout caches initial config values, so we monkeypatch + mutable_config.set('config:install_path_scheme', '{name}-{HASH}') + mutable_config.set('config:install_hash_length', 5) + monkeypatch.setattr(spack.store, 'store', spack.store._store()) + + spec = spack.spec.Spec('libelf').concretized() + prefix = spec.prefix + hash = prefix.rsplit('-')[-1] + + assert len(hash) == 5 diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 8912c0219b7..bac5bcebf60 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -14,7 +14,6 @@ import tempfile import xml.etree.ElementTree -import ordereddict_backport import py import pytest import ruamel.yaml as yaml @@ -38,11 +37,8 @@ import spack.util.gpg from spack.util.pattern import Bunch -from spack.dependency import Dependency from spack.fetch_strategy import FetchStrategyComposite, URLFetchStrategy from spack.fetch_strategy import FetchError -from spack.spec import Spec -from spack.version import Version @pytest.fixture @@ -427,13 +423,13 @@ def config(mock_configuration): @pytest.fixture(scope='function') -def mutable_config(tmpdir_factory, 
configuration_dir, monkeypatch): +def mutable_config(tmpdir_factory, configuration_dir): """Like config, but tests can modify the configuration.""" mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp') configuration_dir.copy(mutable_dir) cfg = spack.config.Configuration( - *[spack.config.ConfigScope(name, str(mutable_dir)) + *[spack.config.ConfigScope(name, str(mutable_dir.join(name))) for name in ['site', 'system', 'user']]) with use_configuration(cfg): @@ -606,6 +602,26 @@ def install_mockery(tmpdir, config, mock_packages, monkeypatch): spack.store.store = real_store +@pytest.fixture(scope='function') +def install_mockery_mutable_config( + tmpdir, mutable_config, mock_packages, monkeypatch): + """Hooks a fake install directory, DB, and stage directory into Spack. + + This is specifically for tests which want to use 'install_mockery' but + also need to modify configuration (and hence would want to use + 'mutable config'): 'install_mockery' does not support this. + """ + real_store = spack.store.store + spack.store.store = spack.store.Store(str(tmpdir.join('opt'))) + + # We use a fake package, so temporarily disable checksumming + with spack.config.override('config:checksum', False): + yield + + tmpdir.join('opt').remove() + spack.store.store = real_store + + @pytest.fixture() def mock_fetch(mock_archive): """Fake the URL for a package so it downloads from a file.""" @@ -1004,75 +1020,6 @@ def installation_dir_with_headers(tmpdir_factory): return root -########## -# Mock packages -########## - - -class MockPackage(object): - def __init__(self, name, dependencies, dependency_types, conditions=None, - versions=None): - self.name = name - self.spec = None - self.dependencies = ordereddict_backport.OrderedDict() - self._installed_upstream = False - - assert len(dependencies) == len(dependency_types) - for dep, dtype in zip(dependencies, dependency_types): - d = Dependency(self, Spec(dep.name), type=dtype) - if not conditions or dep.name not in conditions: - 
self.dependencies[dep.name] = {Spec(name): d} - else: - dep_conditions = conditions[dep.name] - dep_conditions = dict( - (Spec(x), Dependency(self, Spec(y), type=dtype)) - for x, y in dep_conditions.items()) - self.dependencies[dep.name] = dep_conditions - - if versions: - self.versions = versions - else: - versions = list(Version(x) for x in [1, 2, 3]) - self.versions = dict((x, {'preferred': False}) for x in versions) - - self.variants = {} - self.provided = {} - self.conflicts = {} - self.patches = {} - - def provides(self, vname): - return vname in self.provided - - @property - def virtuals_provided(self): - return [v.name for v, c in self.provided] - - -class MockPackageMultiRepo(object): - def __init__(self, packages): - self.spec_to_pkg = dict((x.name, x) for x in packages) - self.spec_to_pkg.update( - dict(('mockrepo.' + x.name, x) for x in packages)) - - def get(self, spec): - if not isinstance(spec, spack.spec.Spec): - spec = Spec(spec) - return self.spec_to_pkg[spec.name] - - def get_pkg_class(self, name): - return self.spec_to_pkg[name] - - def exists(self, name): - return name in self.spec_to_pkg - - def is_virtual(self, name): - return False - - def repo_for_pkg(self, name): - import collections - Repo = collections.namedtuple('Repo', ['namespace']) - return Repo('mockrepo') - ########## # Specs of various kind ########## @@ -1152,3 +1099,20 @@ def clear_directive_functions(): # proceeding with subsequent tests that may depend on the original # functions. spack.directives.DirectiveMeta._directives_to_be_executed = [] + + +@pytest.fixture +def mock_executable(tmpdir): + """Factory to create a mock executable in a temporary directory that + output a custom string when run. 
+ """ + import jinja2 + + def _factory(name, output, subdir=('bin',)): + f = tmpdir.mkdir(*subdir).join(name) + t = jinja2.Template('#!/bin/bash\n{{ output }}\n') + f.write(t.render(output=output)) + f.chmod(0o755) + return str(f) + + return _factory diff --git a/lib/spack/spack/test/data/compiler_verbose_output/collect2-6.3.0-gnu-ld.txt b/lib/spack/spack/test/data/compiler_verbose_output/collect2-6.3.0-gnu-ld.txt new file mode 100644 index 00000000000..cceff7fc658 --- /dev/null +++ b/lib/spack/spack/test/data/compiler_verbose_output/collect2-6.3.0-gnu-ld.txt @@ -0,0 +1,3 @@ +collect2 version 6.5.0 +/usr/bin/ld -plugin /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/liblto_plugin.so -plugin-opt=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/lto-wrapper -plugin-opt=-fresolution=/tmp/ccbFmewQ.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -rpath /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib64 --eh-frame-hdr -m elf_x86_64 -dynamic-linker /lib64/ld-linux-x86-64.so.2 -o output /usr/lib/x86_64-linux-gnu/crt1.o /usr/lib/x86_64-linux-gnu/crti.o /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/crtbegin.o -L/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0 -L/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/../../../../lib64 -L/lib/x86_64-linux-gnu -L/lib/../lib64 -L/usr/lib/x86_64-linux-gnu -L/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/../../.. 
-v /tmp/ccxz6i1I.o -lstdc++ -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/crtend.o /usr/lib/x86_64-linux-gnu/crtn.o +GNU ld (GNU Binutils for Debian) 2.28 diff --git a/lib/spack/spack/test/data/compiler_verbose_output/nag-6.2-gcc-6.5.0.txt b/lib/spack/spack/test/data/compiler_verbose_output/nag-6.2-gcc-6.5.0.txt new file mode 100644 index 00000000000..9fa3dc69bae --- /dev/null +++ b/lib/spack/spack/test/data/compiler_verbose_output/nag-6.2-gcc-6.5.0.txt @@ -0,0 +1,13 @@ +NAG Fortran Compiler Release 6.2(Chiyoda) Build 6223 +Reading specs from /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/specs +COLLECT_GCC=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/bin/gcc +COLLECT_LTO_WRAPPER=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/lto-wrapper +Target: x86_64-pc-linux-gnu +Configured with: /tmp/m300488/spack-stage/spack-stage-gcc-6.5.0-4sdjgrsboy3lowtq3t7pmp7rx3ogkqtz/spack-src/configure --prefix=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs --with-pkgversion='Spack GCC' --with-bugurl=https://github.com/spack/spack/issues --disable-multilib --enable-languages=c,c++,fortran --disable-nls --with-mpfr=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/mpfr-3.1.6-w63rspk --with-gmp=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gmp-6.1.2-et64cuj --with-system-zlib --with-mpc=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/mpc-1.1.0-en66k4t --with-isl=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/isl-0.18-62v4uyg +Thread model: posix +gcc version 6.5.0 (Spack GCC) 
+COMPILER_PATH=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/ +LIBRARY_PATH=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/../../../../lib64/:/lib/x86_64-linux-gnu/:/lib/../lib64/:/usr/lib/x86_64-linux-gnu/:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/../../../:/lib/:/usr/lib/ +COLLECT_GCC_OPTIONS='-m64' '-o' 'output' '-v' '-mtune=generic' '-march=x86-64' + /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/collect2 -plugin /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/liblto_plugin.so -plugin-opt=/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/libexec/gcc/x86_64-pc-linux-gnu/6.5.0/lto-wrapper -plugin-opt=-fresolution=/tmp/ccBpU203.res -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lgcc_s -rpath /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib:/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib64 --eh-frame-hdr -m elf_x86_64 -dynamic-linker /lib64/ld-linux-x86-64.so.2 -o output /usr/lib/x86_64-linux-gnu/crt1.o /usr/lib/x86_64-linux-gnu/crti.o 
/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/crtbegin.o -L/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0 -L/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/../../../../lib64 -L/lib/x86_64-linux-gnu -L/lib/../lib64 -L/usr/lib/x86_64-linux-gnu -L/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/../../.. /sw/stretch-x64/nag/nag-6.2/lib/NAG_Fortran/f62init.o /sw/stretch-x64/nag/nag-6.2/lib/NAG_Fortran/quickfit.o /tmp/main.000786.o -rpath /sw/stretch-x64/nag/nag-6.2/lib/NAG_Fortran /sw/stretch-x64/nag/nag-6.2/lib/NAG_Fortran/libf62rts.so /sw/stretch-x64/nag/nag-6.2/lib/NAG_Fortran/libf62rts.a -lm -lgcc --as-needed -lgcc_s --no-as-needed -lc -lgcc --as-needed -lgcc_s --no-as-needed /scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0/crtend.o /usr/lib/x86_64-linux-gnu/crtn.o +COLLECT_GCC_OPTIONS='-m64' '-o' 'output' '-v' '-mtune=generic' '-march=x86-64' diff --git a/lib/spack/spack/test/data/config/packages.yaml b/lib/spack/spack/test/data/config/packages.yaml index c7256ddb334..63e63e525d6 100644 --- a/lib/spack/spack/test/data/config/packages.yaml +++ b/lib/spack/spack/test/data/config/packages.yaml @@ -1,4 +1,7 @@ packages: + all: + providers: + mpi: [openmpi, mpich] externaltool: buildable: False paths: diff --git a/lib/spack/spack/test/data/modules/lmod/complex_hierarchy.yaml b/lib/spack/spack/test/data/modules/lmod/complex_hierarchy.yaml index ada3c691cb5..5cad95c7fae 100644 --- a/lib/spack/spack/test/data/modules/lmod/complex_hierarchy.yaml +++ b/lib/spack/spack/test/data/modules/lmod/complex_hierarchy.yaml @@ -6,6 +6,9 @@ lmod: core_compilers: - 'clang@3.3' + core_specs: + - 'mpich@3.0.1' + hierarchy: - lapack - blas diff --git a/lib/spack/spack/test/data/modules/lmod/projections.yaml 
b/lib/spack/spack/test/data/modules/lmod/projections.yaml new file mode 100644 index 00000000000..1efd93bc89a --- /dev/null +++ b/lib/spack/spack/test/data/modules/lmod/projections.yaml @@ -0,0 +1,6 @@ +enable: + - lmod +lmod: + projections: + all: '{name}/v{version}' + mpileaks: '{name}-mpiprojection' diff --git a/lib/spack/spack/test/data/modules/tcl/conflicts.yaml b/lib/spack/spack/test/data/modules/tcl/conflicts.yaml index 66494b1ce19..0183753e55f 100644 --- a/lib/spack/spack/test/data/modules/tcl/conflicts.yaml +++ b/lib/spack/spack/test/data/modules/tcl/conflicts.yaml @@ -1,7 +1,8 @@ enable: - tcl tcl: - naming_scheme: '{name}/{version}-{compiler.name}' + projections: + all: '{name}/{version}-{compiler.name}' all: conflict: - '{name}' diff --git a/lib/spack/spack/test/data/modules/tcl/invalid_naming_scheme.yaml b/lib/spack/spack/test/data/modules/tcl/invalid_naming_scheme.yaml index f523f0bbef0..36b58670daa 100644 --- a/lib/spack/spack/test/data/modules/tcl/invalid_naming_scheme.yaml +++ b/lib/spack/spack/test/data/modules/tcl/invalid_naming_scheme.yaml @@ -2,4 +2,5 @@ enable: - tcl tcl: # {variants} is not allowed in the naming scheme, see #2884 - naming_scheme: '{name}/{version}-{compiler.name}-{variants}' + projections: + all: '{name}/{version}-{compiler.name}-{variants}' diff --git a/lib/spack/spack/test/data/modules/tcl/naming_scheme.yaml b/lib/spack/spack/test/data/modules/tcl/naming_scheme.yaml new file mode 100644 index 00000000000..cc4cf8f7829 --- /dev/null +++ b/lib/spack/spack/test/data/modules/tcl/naming_scheme.yaml @@ -0,0 +1,4 @@ +enable: + - tcl +tcl: + naming_scheme: '{name}/{version}-{compiler.name}' diff --git a/lib/spack/spack/test/data/modules/tcl/projections.yaml b/lib/spack/spack/test/data/modules/tcl/projections.yaml new file mode 100644 index 00000000000..11dbd053f9f --- /dev/null +++ b/lib/spack/spack/test/data/modules/tcl/projections.yaml @@ -0,0 +1,6 @@ +enable: + - tcl +tcl: + projections: + all: '{name}/{version}-{compiler.name}' 
+ mpileaks: '{name}-mpiprojection' diff --git a/lib/spack/spack/test/data/modules/tcl/wrong_conflicts.yaml b/lib/spack/spack/test/data/modules/tcl/wrong_conflicts.yaml index a4bd97257be..22377816c8b 100644 --- a/lib/spack/spack/test/data/modules/tcl/wrong_conflicts.yaml +++ b/lib/spack/spack/test/data/modules/tcl/wrong_conflicts.yaml @@ -1,7 +1,8 @@ enable: - tcl tcl: - naming_scheme: '{name}/{version}-{compiler.name}' + projections: + all: '{name}/{version}-{compiler.name}' all: conflict: - '{name}/{compiler.name}' diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index ebd4a960f31..d47b7dbd441 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -29,7 +29,7 @@ import spack.database import spack.package import spack.spec -from spack.test.conftest import MockPackage, MockPackageMultiRepo +from spack.util.mock_package import MockPackageMultiRepo from spack.util.executable import Executable @@ -87,11 +87,11 @@ def test_installed_upstream(upstream_and_downstream_db): downstream_db, downstream_layout = (upstream_and_downstream_db) default = ('build', 'link') - x = MockPackage('x', [], []) - z = MockPackage('z', [], []) - y = MockPackage('y', [z], [default]) - w = MockPackage('w', [x, y], [default, default]) - mock_repo = MockPackageMultiRepo([w, x, y, z]) + mock_repo = MockPackageMultiRepo() + x = mock_repo.add_package('x', [], []) + z = mock_repo.add_package('z', [], []) + y = mock_repo.add_package('y', [z], [default]) + mock_repo.add_package('w', [x, y], [default, default]) with spack.repo.swap(mock_repo): spec = spack.spec.Spec('w') @@ -130,9 +130,9 @@ def test_removed_upstream_dep(upstream_and_downstream_db): downstream_db, downstream_layout = (upstream_and_downstream_db) default = ('build', 'link') - z = MockPackage('z', [], []) - y = MockPackage('y', [z], [default]) - mock_repo = MockPackageMultiRepo([y, z]) + mock_repo = MockPackageMultiRepo() + z = mock_repo.add_package('z', [], []) + 
mock_repo.add_package('y', [z], [default]) with spack.repo.swap(mock_repo): spec = spack.spec.Spec('y') @@ -164,8 +164,8 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db): upstream_write_db, upstream_db, upstream_layout,\ downstream_db, downstream_layout = (upstream_and_downstream_db) - x = MockPackage('x', [], []) - mock_repo = MockPackageMultiRepo([x]) + mock_repo = MockPackageMultiRepo() + mock_repo.add_package('x', [], []) with spack.repo.swap(mock_repo): spec = spack.spec.Spec('x') @@ -197,8 +197,8 @@ def test_cannot_write_upstream(tmpdir_factory, test_store, gen_mock_layout): roots = [str(tmpdir_factory.mktemp(x)) for x in ['a', 'b']] layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/']] - x = MockPackage('x', [], []) - mock_repo = MockPackageMultiRepo([x]) + mock_repo = MockPackageMultiRepo() + mock_repo.add_package('x', [], []) # Instantiate the database that will be used as the upstream DB and make # sure it has an index file @@ -223,11 +223,10 @@ def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout): layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/', '/rc/']] default = ('build', 'link') - z = MockPackage('z', [], []) - y = MockPackage('y', [z], [default]) - x = MockPackage('x', [y], [default]) - - mock_repo = MockPackageMultiRepo([x, y, z]) + mock_repo = MockPackageMultiRepo() + z = mock_repo.add_package('z', [], []) + y = mock_repo.add_package('y', [z], [default]) + mock_repo.add_package('x', [y], [default]) with spack.repo.swap(mock_repo): spec = spack.spec.Spec('x') @@ -689,7 +688,7 @@ def test_115_reindex_with_packages_not_in_repo(mutable_database): # Dont add any package definitions to this repository, the idea is that # packages should not have to be defined in the repository once they # are installed - with spack.repo.swap(MockPackageMultiRepo([])): + with spack.repo.swap(MockPackageMultiRepo()): spack.store.store.reindex() _check_db_sanity(mutable_database) diff --git 
a/lib/spack/spack/test/link_paths.py b/lib/spack/spack/test/link_paths.py index 27e42d2194b..4ae0a35cf82 100644 --- a/lib/spack/spack/test/link_paths.py +++ b/lib/spack/spack/test/link_paths.py @@ -32,8 +32,8 @@ def check_link_paths(filename, paths): def test_icc16_link_paths(): check_link_paths('icc-16.0.3.txt', [ - '/usr/tce/packages/intel/intel-16.0.3/compilers_and_libraries_2016.3.210/linux/compiler/lib/intel64_lin', # noqa - '/usr/tce/packages/gcc/gcc-4.9.3/lib64/gcc/x86_64-unknown-linux-gnu/4.9.3', # noqa + '/usr/tce/packages/intel/intel-16.0.3/compilers_and_libraries_2016.3.210/linux/compiler/lib/intel64_lin', # noqa + '/usr/tce/packages/gcc/gcc-4.9.3/lib64/gcc/x86_64-unknown-linux-gnu/4.9.3', # noqa '/usr/tce/packages/gcc/gcc-4.9.3/lib64']) @@ -82,6 +82,28 @@ def test_clang_apple_ld_link_paths(): '/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/usr/lib']) # noqa +def test_nag_mixed_gcc_gnu_ld_link_paths(): + # This is a test of a mixed NAG/GCC toolchain, i.e. 'cxx' is set to g++ and + # is used for the rpath detection. The reference compiler output is a + # result of + # '/path/to/gcc/bin/g++ -Wl,-v ./main.c'. + check_link_paths('collect2-6.3.0-gnu-ld.txt', [ + '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0', # noqa + '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib64', # noqa + '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib']) # noqa + + +def test_nag_link_paths(): + # This is a test of a NAG-only toolchain, i.e. 'cc' and 'cxx' are empty, + # and therefore 'fc' is used for the rpath detection). The reference + # compiler output is a result of + # 'nagfor -Wc=/path/to/gcc/bin/gcc -Wl,-v ./main.c'. 
+ check_link_paths('nag-6.2-gcc-6.5.0.txt', [ + '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib/gcc/x86_64-pc-linux-gnu/6.5.0', # noqa + '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib64', # noqa + '/scratch/local1/spack/opt/spack/gcc-6.3.0-haswell/gcc-6.5.0-4sdjgrs/lib']) # noqa + + def test_obscure_parsing_rules(): check_link_paths('obscure-parsing-rules.txt', [ '/first/path', diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py index b2b7cf85acc..a959ea0c73a 100644 --- a/lib/spack/spack/test/llnl/util/lock.py +++ b/lib/spack/spack/test/llnl/util/lock.py @@ -1143,6 +1143,8 @@ def read(): assert vals['read'] == 1 +@pytest.mark.skipif('macos' in os.environ.get('GITHUB_WORKFLOW', ''), + reason="Skip failing test for GA on MacOS") def test_lock_debug_output(lock_path): host = socket.getfqdn() diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py index 12b6d71aa53..6d2ee7bc390 100644 --- a/lib/spack/spack/test/modules/lmod.py +++ b/lib/spack/spack/test/modules/lmod.py @@ -27,6 +27,7 @@ def compiler(request): @pytest.fixture(params=[ ('mpich@3.0.4', ('mpi',)), + ('mpich@3.0.1', []), ('openblas@0.2.15', ('blas',)), ('openblas-with-lapack@0.2.15', ('blas', 'lapack')) ]) @@ -54,7 +55,8 @@ def test_file_layout( # Check that the compiler part of the path has no hash and that it # is transformed to r"Core" if the compiler is listed among core # compilers - if compiler == 'clang@3.3': + # Check that specs listed as core_specs are transformed to "Core" + if compiler == 'clang@3.3' or spec_string == 'mpich@3.0.1': assert 'Core' in layout.available_path_parts else: assert compiler.replace('@', '/') in layout.available_path_parts @@ -278,3 +280,39 @@ def test_only_generic_microarchitectures_in_root( assert str(spec.target.family) in writer.layout.arch_dirname if spec.target.family != spec.target: assert str(spec.target) not in writer.layout.arch_dirname + + 
def test_projections_specific(self, factory, module_configuration): + """Tests reading the correct naming scheme.""" + + # This configuration has no error, so check the conflicts directives + # are there + module_configuration('projections') + + # Test we read the expected configuration for the naming scheme + writer, _ = factory('mpileaks') + expected = { + 'all': '{name}/v{version}', + 'mpileaks': '{name}-mpiprojection' + } + + assert writer.conf.projections == expected + projection = writer.spec.format(writer.conf.projections['mpileaks']) + assert projection in writer.layout.use_name + + def test_projections_all(self, factory, module_configuration): + """Tests reading the correct naming scheme.""" + + # This configuration has no error, so check the conflicts directives + # are there + module_configuration('projections') + + # Test we read the expected configuration for the naming scheme + writer, _ = factory('libelf') + expected = { + 'all': '{name}/v{version}', + 'mpileaks': '{name}-mpiprojection' + } + + assert writer.conf.projections == expected + projection = writer.spec.format(writer.conf.projections['all']) + assert projection in writer.layout.use_name diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py index 40cb7be5ef7..7672cdc676d 100644 --- a/lib/spack/spack/test/modules/tcl.py +++ b/lib/spack/spack/test/modules/tcl.py @@ -142,18 +142,55 @@ def test_blacklist(self, modulefile_content, module_configuration): assert len([x for x in content if 'is-loaded' in x]) == 1 assert len([x for x in content if 'module load ' in x]) == 1 - def test_naming_scheme(self, factory, module_configuration): + def test_naming_scheme_compat(self, factory, module_configuration): + """Tests backwards compatibility for naming_scheme key""" + module_configuration('naming_scheme') + + # Test we read the expected configuration for the naming scheme + writer, _ = factory('mpileaks') + expected = { + 'all': '{name}/{version}-{compiler.name}' + } + + 
assert writer.conf.projections == expected + projection = writer.spec.format(writer.conf.projections['all']) + assert projection in writer.layout.use_name + + def test_projections_specific(self, factory, module_configuration): """Tests reading the correct naming scheme.""" # This configuration has no error, so check the conflicts directives # are there - module_configuration('conflicts') + module_configuration('projections') # Test we read the expected configuration for the naming scheme writer, _ = factory('mpileaks') - expected = '{name}/{version}-{compiler.name}' + expected = { + 'all': '{name}/{version}-{compiler.name}', + 'mpileaks': '{name}-mpiprojection' + } - assert writer.conf.naming_scheme == expected + assert writer.conf.projections == expected + projection = writer.spec.format(writer.conf.projections['mpileaks']) + assert projection in writer.layout.use_name + + def test_projections_all(self, factory, module_configuration): + """Tests reading the correct naming scheme.""" + + # This configuration has no error, so check the conflicts directives + # are there + module_configuration('projections') + + # Test we read the expected configuration for the naming scheme + writer, _ = factory('libelf') + expected = { + 'all': '{name}/{version}-{compiler.name}', + 'mpileaks': '{name}-mpiprojection' + } + + assert writer.conf.projections == expected + projection = writer.spec.format(writer.conf.projections['all']) + assert projection in writer.layout.use_name def test_invalid_naming_scheme(self, factory, module_configuration): """Tests the evaluation of an invalid naming scheme.""" diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py index 221712e5ef2..97def3feda2 100644 --- a/lib/spack/spack/test/operating_system.py +++ b/lib/spack/spack/test/operating_system.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import spack.operating_systems.cnl as cnl +import spack.operating_systems.cray_backend as 
cray_backend def test_read_cle_release_file(tmpdir, monkeypatch): @@ -20,8 +20,9 @@ def test_read_cle_release_file(tmpdir, monkeypatch): DUMMY=foo=bar """) - monkeypatch.setattr(cnl, '_cle_release_file', str(cle_release_path)) - attrs = cnl.read_cle_release_file() + monkeypatch.setattr(cray_backend, '_cle_release_file', + str(cle_release_path)) + attrs = cray_backend.read_cle_release_file() assert attrs['RELEASE'] == '6.0.UP07' assert attrs['BUILD'] == '6.0.7424' @@ -31,7 +32,7 @@ def test_read_cle_release_file(tmpdir, monkeypatch): assert attrs['PATCHSET'] == '35-201906112304' assert attrs['DUMMY'] == 'foo=bar' - assert cnl.Cnl._detect_crayos_version() == 6 + assert cray_backend.CrayBackend._detect_crayos_version() == 6 def test_read_clerelease_file(tmpdir, monkeypatch): @@ -40,12 +41,12 @@ def test_read_clerelease_file(tmpdir, monkeypatch): with clerelease_path.open('w') as f: f.write('5.2.UP04\n') - monkeypatch.setattr(cnl, '_clerelease_file', str(clerelease_path)) - v = cnl.read_clerelease_file() + monkeypatch.setattr(cray_backend, '_clerelease_file', str(clerelease_path)) + v = cray_backend.read_clerelease_file() assert v == '5.2.UP04' - assert cnl.Cnl._detect_crayos_version() == 5 + assert cray_backend.CrayBackend._detect_crayos_version() == 5 def test_cle_release_precedence(tmpdir, monkeypatch): @@ -67,7 +68,8 @@ def test_cle_release_precedence(tmpdir, monkeypatch): with clerelease_path.open('w') as f: f.write('5.2.UP04\n') - monkeypatch.setattr(cnl, '_clerelease_file', str(clerelease_path)) - monkeypatch.setattr(cnl, '_cle_release_file', str(cle_release_path)) + monkeypatch.setattr(cray_backend, '_clerelease_file', str(clerelease_path)) + monkeypatch.setattr(cray_backend, '_cle_release_file', + str(cle_release_path)) - assert cnl.Cnl._detect_crayos_version() == 6 + assert cray_backend.CrayBackend._detect_crayos_version() == 6 diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py index 39da7c3ae5e..7d4adfe9755 100644 --- 
a/lib/spack/spack/test/packaging.py +++ b/lib/spack/spack/test/packaging.py @@ -25,11 +25,9 @@ from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite from spack.relocate import needs_binary_relocation, needs_text_relocation from spack.relocate import relocate_text, relocate_links -from spack.relocate import get_relative_elf_rpaths -from spack.relocate import get_normalized_elf_rpaths from spack.relocate import macho_make_paths_relative from spack.relocate import macho_make_paths_normal -from spack.relocate import set_placeholder, macho_find_paths +from spack.relocate import _placeholder, macho_find_paths from spack.relocate import file_is_relocatable @@ -228,7 +226,7 @@ def test_relocate_links(tmpdir): old_install_prefix = os.path.join( '%s' % old_layout_root, 'debian6', 'test') old_binname = os.path.join(old_install_prefix, 'binfile') - placeholder = set_placeholder(old_layout_root) + placeholder = _placeholder(old_layout_root) re.sub(old_layout_root, placeholder, old_binname) filenames = ['link.ln', 'outsideprefix.ln'] new_layout_root = os.path.join( @@ -244,9 +242,8 @@ def test_relocate_links(tmpdir): os.utime(new_binname, None) os.symlink(old_binname, new_linkname) os.symlink('/usr/lib/libc.so', new_linkname2) - relocate_links(filenames, old_layout_root, new_layout_root, - old_install_prefix, new_install_prefix, - {old_install_prefix: new_install_prefix}) + relocate_links(filenames, old_layout_root, + old_install_prefix, new_install_prefix) assert os.readlink(new_linkname) == new_binname assert os.readlink(new_linkname2) == '/usr/lib/libc.so' @@ -561,15 +558,3 @@ def test_macho_make_paths(): '/Users/Shared/spack/pkgB/libB.dylib', '/usr/local/lib/libloco.dylib': '/usr/local/lib/libloco.dylib'} - - -def test_elf_paths(): - out = get_relative_elf_rpaths( - '/usr/bin/test', '/usr', - ('/usr/lib', '/usr/lib64', '/opt/local/lib')) - assert out == ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib'] - - out = get_normalized_elf_rpaths( - 
'/usr/bin/test', - ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib']) - assert out == ['/usr/lib', '/usr/lib64', '/opt/local/lib'] diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py index 0a9e9f7f0a3..551f1596f7b 100644 --- a/lib/spack/spack/test/relocate.py +++ b/lib/spack/spack/test/relocate.py @@ -2,10 +2,10 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import collections import os.path import platform +import re import shutil import llnl.util.filesystem @@ -20,6 +20,23 @@ import spack.util.executable +def rpaths_for(new_binary): + """Return the RPATHs or RUNPATHs of a binary.""" + patchelf = spack.util.executable.which('patchelf') + output = patchelf('--print-rpath', str(new_binary), output=str) + return output.strip() + + +def text_in_bin(text, binary): + with open(str(binary), "rb") as f: + data = f.read() + f.seek(0) + pat = re.compile(text.encode('utf-8')) + if not pat.search(data): + return False + return True + + @pytest.fixture(params=[True, False]) def is_relocatable(request): return request.param @@ -89,6 +106,56 @@ def do_install_mock(self, **kwargs): return expected_path +@pytest.fixture() +def mock_patchelf(tmpdir, mock_executable): + def _factory(output): + return mock_executable('patchelf', output=output) + return _factory + + +@pytest.fixture() +def hello_world(tmpdir): + """Factory fixture that compiles an ELF binary setting its RPATH. Relative + paths are encoded with `$ORIGIN` prepended. 
+ """ + def _factory(rpaths, message="Hello world!"): + source = tmpdir.join('main.c') + source.write(""" + #include + int main(){{ + printf("{0}"); + }} + """.format(message)) + gcc = spack.util.executable.which('gcc') + executable = source.dirpath('main.x') + # Encode relative RPATHs using `$ORIGIN` as the root prefix + rpaths = [x if os.path.isabs(x) else os.path.join('$ORIGIN', x) + for x in rpaths] + rpath_str = ':'.join(rpaths) + opts = [ + '-Wl,--disable-new-dtags', + '-Wl,-rpath={0}'.format(rpath_str), + str(source), '-o', str(executable) + ] + gcc(*opts) + return executable + + return _factory + + +@pytest.fixture() +def copy_binary(): + """Returns a function that copies a binary somewhere and + returns the new location. + """ + def _copy_somewhere(orig_binary): + new_root = orig_binary.mkdtemp() + new_binary = new_root.join('main.x') + shutil.copy(str(orig_binary), str(new_binary)) + return new_binary + return _copy_somewhere + + @pytest.mark.requires_executables( '/usr/bin/gcc', 'patchelf', 'strings', 'file' ) @@ -104,9 +171,7 @@ def test_file_is_relocatable(source_file, is_relocatable): assert spack.relocate.file_is_relocatable(executable) is is_relocatable -@pytest.mark.requires_executables( - 'patchelf', 'strings', 'file' -) +@pytest.mark.requires_executables('patchelf', 'strings', 'file') def test_patchelf_is_relocatable(): patchelf = spack.relocate._patchelf() assert llnl.util.filesystem.is_exe(patchelf) @@ -140,3 +205,178 @@ def test_file_is_relocatable_errors(tmpdir): def test_search_patchelf(expected_patchelf_path): current = spack.relocate._patchelf() assert current == expected_patchelf_path + + +@pytest.mark.parametrize('patchelf_behavior,expected', [ + ('echo ', []), + ('echo /opt/foo/lib:/opt/foo/lib64', ['/opt/foo/lib', '/opt/foo/lib64']), + ('exit 1', []) +]) +def test_existing_rpaths(patchelf_behavior, expected, mock_patchelf): + # Here we are mocking an executable that is always called "patchelf" + # because that will skip the part where 
we try to build patchelf + # by ourselves. The executable will output some rpaths like + # `patchelf --print-rpath` would. + path = mock_patchelf(patchelf_behavior) + rpaths = spack.relocate._elf_rpaths_for(path) + assert rpaths == expected + + +@pytest.mark.parametrize('start_path,path_root,paths,expected', [ + ('/usr/bin/test', '/usr', ['/usr/lib', '/usr/lib64', '/opt/local/lib'], + ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib']) +]) +def test_make_relative_paths(start_path, path_root, paths, expected): + relatives = spack.relocate._make_relative(start_path, path_root, paths) + assert relatives == expected + + +@pytest.mark.parametrize('start_path,relative_paths,expected', [ + # $ORIGIN will be replaced with os.path.dirname('usr/bin/test') + # and then normalized + ('/usr/bin/test', + ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib'], + ['/usr/lib', '/usr/lib64', '/opt/local/lib']), + # Relative path without $ORIGIN + ('/usr/bin/test', ['../local/lib'], ['../local/lib']), +]) +def test_normalize_relative_paths(start_path, relative_paths, expected): + normalized = spack.relocate._normalize_relative_paths( + start_path, relative_paths + ) + assert normalized == expected + + +def test_set_elf_rpaths(mock_patchelf): + # Try to relocate a mock version of patchelf and check + # the call made to patchelf itself + patchelf = mock_patchelf('echo $@') + rpaths = ['/usr/lib', '/usr/lib64', '/opt/local/lib'] + output = spack.relocate._set_elf_rpaths(patchelf, rpaths) + + # Assert that the arguments of the call to patchelf are as expected + assert '--force-rpath' in output + assert '--set-rpath ' + ':'.join(rpaths) in output + assert patchelf in output + + +def test_set_elf_rpaths_warning(mock_patchelf): + # Mock a failing patchelf command and ensure it warns users + patchelf = mock_patchelf('exit 1') + rpaths = ['/usr/lib', '/usr/lib64', '/opt/local/lib'] + # To avoid using capfd in order to check if the warning was triggered + # here we just check that 
output is not set + output = spack.relocate._set_elf_rpaths(patchelf, rpaths) + assert output is None + + +@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc') +def test_replace_prefix_bin(hello_world): + # Compile an "Hello world!" executable and set RPATHs + executable = hello_world(rpaths=['/usr/lib', '/usr/lib64']) + + # Relocate the RPATHs + spack.relocate._replace_prefix_bin(str(executable), '/usr', '/foo') + + # Some compilers add rpaths so ensure changes included in final result + assert '/foo/lib:/foo/lib64' in rpaths_for(executable) + + +@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc') +def test_relocate_elf_binaries_absolute_paths( + hello_world, copy_binary, tmpdir +): + # Create an executable, set some RPATHs, copy it to another location + orig_binary = hello_world(rpaths=[str(tmpdir.mkdir('lib')), '/usr/lib64']) + new_binary = copy_binary(orig_binary) + + spack.relocate.relocate_elf_binaries( + binaries=[str(new_binary)], + orig_root=str(orig_binary.dirpath()), + new_root=None, # Not needed when relocating absolute paths + new_prefixes={ + str(tmpdir): '/foo' + }, + rel=False, + # Not needed when relocating absolute paths + orig_prefix=None, new_prefix=None + ) + + # Some compilers add rpaths so ensure changes included in final result + assert '/foo/lib:/usr/lib64' in rpaths_for(new_binary) + + +@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc') +def test_relocate_elf_binaries_relative_paths(hello_world, copy_binary): + # Create an executable, set some RPATHs, copy it to another location + orig_binary = hello_world(rpaths=['lib', 'lib64', '/opt/local/lib']) + new_binary = copy_binary(orig_binary) + + spack.relocate.relocate_elf_binaries( + binaries=[str(new_binary)], + orig_root=str(orig_binary.dirpath()), + new_root=str(new_binary.dirpath()), + new_prefixes={str(orig_binary.dirpath()): '/foo'}, + rel=True, + orig_prefix=str(orig_binary.dirpath()), + new_prefix=str(new_binary.dirpath()) + 
) + + # Some compilers add rpaths so ensure changes included in final result + assert '/foo/lib:/foo/lib64:/opt/local/lib' in rpaths_for(new_binary) + + +@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc') +def test_make_elf_binaries_relative(hello_world, copy_binary, tmpdir): + orig_binary = hello_world(rpaths=[ + str(tmpdir.mkdir('lib')), str(tmpdir.mkdir('lib64')), '/opt/local/lib' + ]) + new_binary = copy_binary(orig_binary) + + spack.relocate.make_elf_binaries_relative( + [str(new_binary)], [str(orig_binary)], str(orig_binary.dirpath()) + ) + + assert rpaths_for(new_binary) == '$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib' + + +def test_raise_if_not_relocatable(monkeypatch): + monkeypatch.setattr(spack.relocate, 'file_is_relocatable', lambda x: False) + with pytest.raises(spack.relocate.InstallRootStringError): + spack.relocate.raise_if_not_relocatable( + ['an_executable'], allow_root=False + ) + + +@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc') +def test_relocate_text_bin(hello_world, copy_binary, tmpdir): + orig_binary = hello_world(rpaths=[ + str(tmpdir.mkdir('lib')), str(tmpdir.mkdir('lib64')), '/opt/local/lib' + ], message=str(tmpdir)) + new_binary = copy_binary(orig_binary) + + # Check original directory is in the executabel and the new one is not + assert text_in_bin(str(tmpdir), new_binary) + assert not text_in_bin(str(new_binary.dirpath()), new_binary) + + # Check this call succeed + spack.relocate.relocate_text_bin( + [str(new_binary)], + str(orig_binary.dirpath()), str(new_binary.dirpath()), + spack.paths.spack_root, spack.paths.spack_root, + {str(orig_binary.dirpath()): str(new_binary.dirpath())} + ) + + # Check original directory is not there anymore and it was + # substituted with the new one + assert not text_in_bin(str(tmpdir), new_binary) + assert text_in_bin(str(new_binary.dirpath()), new_binary) + + +def test_relocate_text_bin_raise_if_new_prefix_is_longer(): + short_prefix = '/short' + long_prefix = 
'/much/longer' + with pytest.raises(spack.relocate.BinaryTextReplaceError): + spack.relocate.relocate_text_bin( + ['item'], short_prefix, long_prefix, None, None, None + ) diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py index 2cd1c0fa4a1..d10349e6ecc 100644 --- a/lib/spack/spack/test/repo.py +++ b/lib/spack/spack/test/repo.py @@ -51,6 +51,11 @@ def test_repo_unknown_pkg(mutable_mock_repo): mutable_mock_repo.get('builtin.mock.nonexistentpackage') +def test_repo_anonymous_pkg(mutable_mock_repo): + with pytest.raises(spack.repo.UnknownPackageError): + mutable_mock_repo.get('+variant') + + @pytest.mark.maybeslow def test_repo_last_mtime(): latest_mtime = max(os.path.getmtime(p.module.__file__) diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index e031f02c255..0b638ada04f 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -12,7 +12,7 @@ from spack.spec import Spec from spack.dependency import all_deptypes, Dependency, canonical_deptype -from spack.test.conftest import MockPackage, MockPackageMultiRepo +from spack.util.mock_package import MockPackageMultiRepo def check_links(spec_to_check): @@ -69,12 +69,12 @@ def test_test_deptype(): default = ('build', 'link') test_only = ('test',) - x = MockPackage('x', [], []) - z = MockPackage('z', [], []) - y = MockPackage('y', [z], [test_only]) - w = MockPackage('w', [x, y], [test_only, default]) + mock_repo = MockPackageMultiRepo() + x = mock_repo.add_package('x', [], []) + z = mock_repo.add_package('z', [], []) + y = mock_repo.add_package('y', [z], [test_only]) + w = mock_repo.add_package('w', [x, y], [test_only, default]) - mock_repo = MockPackageMultiRepo([w, x, y, z]) with spack.repo.swap(mock_repo): spec = Spec('w') spec.concretize(tests=(w.name,)) @@ -93,8 +93,9 @@ def test_installed_deps(): default = ('build', 'link') build_only = ('build',) - e = MockPackage('e', [], []) - d = MockPackage('d', [], []) + mock_repo = 
MockPackageMultiRepo() + e = mock_repo.add_package('e', [], []) + d = mock_repo.add_package('d', [], []) c_conditions = { d.name: { 'c': 'd@2' @@ -103,11 +104,10 @@ def test_installed_deps(): 'c': 'e@2' } } - c = MockPackage('c', [d, e], [build_only, default], - conditions=c_conditions) - b = MockPackage('b', [d, e], [default, default]) - a = MockPackage('a', [b, c], [default, default]) - mock_repo = MockPackageMultiRepo([a, b, c, d, e]) + c = mock_repo.add_package('c', [d, e], [build_only, default], + conditions=c_conditions) + b = mock_repo.add_package('b', [d, e], [default, default]) + mock_repo.add_package('a', [b, c], [default, default]) with spack.repo.swap(mock_repo): c_spec = Spec('c') @@ -133,10 +133,10 @@ def test_specify_preinstalled_dep(): """ default = ('build', 'link') - c = MockPackage('c', [], []) - b = MockPackage('b', [c], [default]) - a = MockPackage('a', [b], [default]) - mock_repo = MockPackageMultiRepo([a, b, c]) + mock_repo = MockPackageMultiRepo() + c = mock_repo.add_package('c', [], []) + b = mock_repo.add_package('b', [c], [default]) + mock_repo.add_package('a', [b], [default]) with spack.repo.swap(mock_repo): b_spec = Spec('b') @@ -161,15 +161,15 @@ def test_conditional_dep_with_user_constraints(): """ default = ('build', 'link') - y = MockPackage('y', [], []) + mock_repo = MockPackageMultiRepo() + y = mock_repo.add_package('y', [], []) x_on_y_conditions = { y.name: { 'x@2:': 'y' } } - x = MockPackage('x', [y], [default], conditions=x_on_y_conditions) + mock_repo.add_package('x', [y], [default], conditions=x_on_y_conditions) - mock_repo = MockPackageMultiRepo([x, y]) with spack.repo.swap(mock_repo): spec = Spec('x ^y@2') spec.concretize() diff --git a/lib/spack/spack/test/spec_list.py b/lib/spack/spack/test/spec_list.py index 9bbbc435e24..27567b4080e 100644 --- a/lib/spack/spack/test/spec_list.py +++ b/lib/spack/spack/test/spec_list.py @@ -2,6 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pytest import itertools from spack.spec_list import SpecList from spack.spec import Spec @@ -156,3 +157,26 @@ def test_spec_list_nested_matrices(self): ['+shared', '~shared']) expected = [Spec(' '.join(combo)) for combo in expected_components] assert set(speclist.specs) == set(expected) + + @pytest.mark.regression('16897') + def test_spec_list_recursion_specs_as_constraints(self): + input = ['mpileaks', '$mpis', + {'matrix': [['hypre'], ['$%gccs', '$%clangs']]}, + 'libelf'] + + reference = {'gccs': SpecList('gccs', ['gcc@4.5.0']), + 'clangs': SpecList('clangs', ['clang@3.3']), + 'mpis': SpecList('mpis', ['zmpi@1.0', 'mpich@3.0'])} + + speclist = SpecList('specs', input, reference) + + assert speclist.specs_as_yaml_list == self.default_expansion + assert speclist.specs_as_constraints == self.default_constraints + assert speclist.specs == self.default_specs + + def test_spec_list_matrix_exclude(self, mock_packages): + # Test on non-boolean variants for regression for #16841 + matrix = [{'matrix': [['multivalue-variant'], ['foo=bar', 'foo=baz']], + 'exclude': ['foo=bar']}] + speclist = SpecList('specs', matrix) + assert len(speclist.specs) == 1 diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index b55fa27ae1f..d908ce7d89a 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -275,27 +275,27 @@ def test_satisfies_matching_variant(self): def test_satisfies_multi_value_variant(self): # Check quoting - check_satisfies('multivalue_variant foo="bar,baz"', - 'multivalue_variant foo="bar,baz"') - check_satisfies('multivalue_variant foo=bar,baz', - 'multivalue_variant foo=bar,baz') - check_satisfies('multivalue_variant foo="bar,baz"', - 'multivalue_variant foo=bar,baz') + check_satisfies('multivalue-variant foo="bar,baz"', + 'multivalue-variant foo="bar,baz"') + check_satisfies('multivalue-variant foo=bar,baz', + 
'multivalue-variant foo=bar,baz') + check_satisfies('multivalue-variant foo="bar,baz"', + 'multivalue-variant foo=bar,baz') # A more constrained spec satisfies a less constrained one - check_satisfies('multivalue_variant foo="bar,baz"', - 'multivalue_variant foo="bar"') + check_satisfies('multivalue-variant foo="bar,baz"', + 'multivalue-variant foo="bar"') - check_satisfies('multivalue_variant foo="bar,baz"', - 'multivalue_variant foo="baz"') + check_satisfies('multivalue-variant foo="bar,baz"', + 'multivalue-variant foo="baz"') - check_satisfies('multivalue_variant foo="bar,baz,barbaz"', - 'multivalue_variant foo="bar,baz"') + check_satisfies('multivalue-variant foo="bar,baz,barbaz"', + 'multivalue-variant foo="bar,baz"') - check_satisfies('multivalue_variant foo="bar,baz"', + check_satisfies('multivalue-variant foo="bar,baz"', 'foo="bar,baz"') - check_satisfies('multivalue_variant foo="bar,baz"', + check_satisfies('multivalue-variant foo="bar,baz"', 'foo="bar"') def test_satisfies_single_valued_variant(self): @@ -325,7 +325,7 @@ def test_unsatisfied_single_valued_variant(self): a.concretize() assert '^b' not in a - mv = Spec('multivalue_variant') + mv = Spec('multivalue-variant') mv.concretize() assert 'a@1.0' not in mv @@ -340,9 +340,9 @@ def test_unsatisfiable_multi_value_variant(self): # Depending on whether the spec is concrete or not a = make_spec( - 'multivalue_variant foo="bar"', concrete=True + 'multivalue-variant foo="bar"', concrete=True ) - spec_str = 'multivalue_variant foo="bar,baz"' + spec_str = 'multivalue-variant foo="bar,baz"' b = Spec(spec_str) assert not a.satisfies(b) assert not a.satisfies(spec_str) @@ -350,8 +350,8 @@ def test_unsatisfiable_multi_value_variant(self): with pytest.raises(UnsatisfiableSpecError): a.constrain(b) - a = Spec('multivalue_variant foo="bar"') - spec_str = 'multivalue_variant foo="bar,baz"' + a = Spec('multivalue-variant foo="bar"') + spec_str = 'multivalue-variant foo="bar,baz"' b = Spec(spec_str) # The specs are 
abstract and they **could** be constrained assert a.satisfies(b) @@ -360,9 +360,9 @@ def test_unsatisfiable_multi_value_variant(self): assert a.constrain(b) a = make_spec( - 'multivalue_variant foo="bar,baz"', concrete=True + 'multivalue-variant foo="bar,baz"', concrete=True ) - spec_str = 'multivalue_variant foo="bar,baz,quux"' + spec_str = 'multivalue-variant foo="bar,baz,quux"' b = Spec(spec_str) assert not a.satisfies(b) assert not a.satisfies(spec_str) @@ -370,8 +370,8 @@ def test_unsatisfiable_multi_value_variant(self): with pytest.raises(UnsatisfiableSpecError): a.constrain(b) - a = Spec('multivalue_variant foo="bar,baz"') - spec_str = 'multivalue_variant foo="bar,baz,quux"' + a = Spec('multivalue-variant foo="bar,baz"') + spec_str = 'multivalue-variant foo="bar,baz,quux"' b = Spec(spec_str) # The specs are abstract and they **could** be constrained assert a.satisfies(b) @@ -384,8 +384,8 @@ def test_unsatisfiable_multi_value_variant(self): a.concretize() # This time we'll try to set a single-valued variant - a = Spec('multivalue_variant fee="bar"') - spec_str = 'multivalue_variant fee="baz"' + a = Spec('multivalue-variant fee="bar"') + spec_str = 'multivalue-variant fee="baz"' b = Spec(spec_str) # The specs are abstract and they **could** be constrained, # as before concretization I don't know which type of variant @@ -405,20 +405,20 @@ def test_unsatisfiable_variant_types(self): # FIXME: these needs to be checked as the new relaxed # FIXME: semantic makes them fail (constrain does not raise) - # check_unsatisfiable('multivalue_variant +foo', - # 'multivalue_variant foo="bar"') - # check_unsatisfiable('multivalue_variant ~foo', - # 'multivalue_variant foo="bar"') + # check_unsatisfiable('multivalue-variant +foo', + # 'multivalue-variant foo="bar"') + # check_unsatisfiable('multivalue-variant ~foo', + # 'multivalue-variant foo="bar"') check_unsatisfiable( - target_spec='multivalue_variant foo="bar"', - constraint_spec='multivalue_variant +foo', + 
target_spec='multivalue-variant foo="bar"', + constraint_spec='multivalue-variant +foo', target_concrete=True ) check_unsatisfiable( - target_spec='multivalue_variant foo="bar"', - constraint_spec='multivalue_variant ~foo', + target_spec='multivalue-variant foo="bar"', + constraint_spec='multivalue-variant ~foo', target_concrete=True ) @@ -597,15 +597,15 @@ def test_constrain_variants(self): def test_constrain_multi_value_variant(self): check_constrain( - 'multivalue_variant foo="bar,baz"', - 'multivalue_variant foo="bar"', - 'multivalue_variant foo="baz"' + 'multivalue-variant foo="bar,baz"', + 'multivalue-variant foo="bar"', + 'multivalue-variant foo="baz"' ) check_constrain( - 'multivalue_variant foo="bar,baz,barbaz"', - 'multivalue_variant foo="bar,barbaz"', - 'multivalue_variant foo="baz"' + 'multivalue-variant foo="bar,baz,barbaz"', + 'multivalue-variant foo="bar,barbaz"', + 'multivalue-variant foo="baz"' ) def test_constrain_compiler_flags(self): @@ -734,7 +734,7 @@ def test_exceptional_paths_for_constructor(self): Spec('libelf foo') def test_spec_formatting(self): - spec = Spec("multivalue_variant cflags=-O2") + spec = Spec("multivalue-variant cflags=-O2") spec.concretize() # Since the default is the full spec see if the string rep of @@ -806,7 +806,7 @@ def test_spec_formatting(self): assert expected == actual def test_spec_formatting_escapes(self): - spec = Spec('multivalue_variant cflags=-O2') + spec = Spec('multivalue-variant cflags=-O2') spec.concretize() sigil_mismatches = [ @@ -895,7 +895,7 @@ def test_spec_flags_maintain_order(self): def test_any_combination_of(self): # Test that using 'none' and another value raise during concretization - spec = Spec('multivalue_variant foo=none,bar') + spec = Spec('multivalue-variant foo=none,bar') with pytest.raises(spack.error.SpecError) as exc_info: spec.concretize() diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py index 6b31af54ce9..9eacddfccb3 100644 --- 
a/lib/spack/spack/test/spec_syntax.py +++ b/lib/spack/spack/test/spec_syntax.py @@ -177,7 +177,7 @@ def test_full_specs(self): " ^stackwalker@8.1_1e") self.check_parse( "mvapich_foo" - " ^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2 ~qt_4" + " ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2" " ^stackwalker@8.1_1e") self.check_parse( 'mvapich_foo' @@ -185,9 +185,35 @@ def test_full_specs(self): ' ^stackwalker@8.1_1e') self.check_parse( "mvapich_foo" - " ^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2 ~qt_4" + " ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2" " ^stackwalker@8.1_1e arch=test-redhat6-x86") + def test_yaml_specs(self): + self.check_parse( + "yaml-cpp@0.1.8%intel@12.1" + " ^boost@3.1.4") + tempspec = r"builtin.yaml-cpp%gcc" + self.check_parse( + tempspec.strip("builtin."), + spec=tempspec) + tempspec = r"testrepo.yaml-cpp%gcc" + self.check_parse( + tempspec.strip("testrepo."), + spec=tempspec) + tempspec = r"builtin.yaml-cpp@0.1.8%gcc" + self.check_parse( + tempspec.strip("builtin."), + spec=tempspec) + tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0" + self.check_parse( + tempspec.strip("builtin."), + spec=tempspec) + tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0" \ + r" ^boost@3.1.4" + self.check_parse( + tempspec.strip("builtin."), + spec=tempspec) + def test_canonicalize(self): self.check_parse( "mvapich_foo" diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index f9b41df19a1..98fb1e68fe4 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -26,7 +26,7 @@ from spack import repo from spack.spec import Spec, save_dependency_spec_yamls from spack.util.spack_yaml import syaml_dict -from spack.test.conftest import MockPackage, MockPackageMultiRepo +from spack.util.mock_package import MockPackageMultiRepo def check_yaml_round_trip(spec): @@ -69,7 +69,7 @@ def test_concrete_spec(config, mock_packages): def test_yaml_multivalue(config, mock_packages): - spec = Spec('multivalue_variant foo="bar,baz"') + 
spec = Spec('multivalue-variant foo="bar,baz"') spec.concretize() check_yaml_round_trip(spec) @@ -301,15 +301,14 @@ def test_save_dependency_spec_yamls_subset(tmpdir, config): default = ('build', 'link') - g = MockPackage('g', [], []) - f = MockPackage('f', [], []) - e = MockPackage('e', [], []) - d = MockPackage('d', [f, g], [default, default]) - c = MockPackage('c', [], []) - b = MockPackage('b', [d, e], [default, default]) - a = MockPackage('a', [b, c], [default, default]) - - mock_repo = MockPackageMultiRepo([a, b, c, d, e, f, g]) + mock_repo = MockPackageMultiRepo() + g = mock_repo.add_package('g', [], []) + f = mock_repo.add_package('f', [], []) + e = mock_repo.add_package('e', [], []) + d = mock_repo.add_package('d', [f, g], [default, default]) + c = mock_repo.add_package('c', [], []) + b = mock_repo.add_package('b', [d, e], [default, default]) + mock_repo.add_package('a', [b, c], [default, default]) with repo.swap(mock_repo): spec_a = Spec('a') diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py index 06515620073..846e24d242c 100644 --- a/lib/spack/spack/test/url_parse.py +++ b/lib/spack/spack/test/url_parse.py @@ -464,6 +464,8 @@ def test_url_parse_offset(name, noffset, ver, voffset, path): 'http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz'), ('vampirtrace', '5.14.4', 'http://wwwpub.zih.tu-dresden.de/~mlieber/dcount/dcount.php?package=vampirtrace&get=VampirTrace-5.14.4.tar.gz'), + ('EvtGen', '01.07.00', + 'https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz'), # (we don't actually look for these, they are picked up # during the preliminary stem parsing) ('octopus', '6.0', 'http://octopus-code.org/down.php?file=6.0/octopus-6.0.tar.gz'), diff --git a/lib/spack/spack/test/util/executable.py b/lib/spack/spack/test/util/executable.py index 26719e94d1c..8baae3b4539 100644 --- a/lib/spack/spack/test/util/executable.py +++ 
b/lib/spack/spack/test/util/executable.py @@ -4,18 +4,19 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import sys +import os import llnl.util.filesystem as fs - +import spack import spack.util.executable as ex from spack.hooks.sbang import filter_shebangs_in_directory -def test_read_unicode(tmpdir): +def test_read_unicode(tmpdir, working_env): script_name = 'print_unicode.py' with tmpdir.as_cwd(): - + os.environ['LD_LIBRARY_PATH'] = spack.main.spack_ld_library_path # make a script that prints some unicode with open(script_name, 'w') as f: f.write('''#!{0} diff --git a/lib/spack/spack/test/util/mock_package.py b/lib/spack/spack/test/util/mock_package.py new file mode 100644 index 00000000000..376ac581bd5 --- /dev/null +++ b/lib/spack/spack/test/util/mock_package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import spack.repo +from spack.util.mock_package import MockPackageMultiRepo + + +def test_mock_package_possible_dependencies(): + mock_repo = MockPackageMultiRepo() + e = mock_repo.add_package('e') + d = mock_repo.add_package('d', [e]) + c = mock_repo.add_package('c', [d]) + b = mock_repo.add_package('b', [d]) + a = mock_repo.add_package('a', [b, c]) + + with spack.repo.swap(mock_repo): + assert set(a.possible_dependencies()) == set(['a', 'b', 'c', 'd', 'e']) + assert set(b.possible_dependencies()) == set(['b', 'd', 'e']) + assert set(c.possible_dependencies()) == set(['c', 'd', 'e']) + assert set(d.possible_dependencies()) == set(['d', 'e']) + assert set(e.possible_dependencies()) == set(['e']) + + assert set( + a.possible_dependencies(transitive=False)) == set(['a', 'b', 'c']) + assert set( + b.possible_dependencies(transitive=False)) == set(['b', 'd']) + assert set( + c.possible_dependencies(transitive=False)) == set(['c', 'd']) + assert set( + 
d.possible_dependencies(transitive=False)) == set(['d', 'e']) + assert set( + e.possible_dependencies(transitive=False)) == set(['e']) + + +def test_mock_repo_is_virtual(): + mock_repo = MockPackageMultiRepo() + + # current implementation is always false + assert mock_repo.is_virtual("foo") is False + assert mock_repo.is_virtual("bar") is False + assert mock_repo.is_virtual("baz") is False diff --git a/lib/spack/spack/test/variant.py b/lib/spack/spack/test/variant.py index d1657b71b73..10e8ea7e7fa 100644 --- a/lib/spack/spack/test/variant.py +++ b/lib/spack/spack/test/variant.py @@ -694,7 +694,7 @@ def test_str(self): c['foobar'] = SingleValuedVariant('foobar', 'fee') c['feebar'] = SingleValuedVariant('feebar', 'foo') c['shared'] = BoolValuedVariant('shared', True) - assert str(c) == ' feebar=foo foo=bar,baz foobar=fee +shared' + assert str(c) == '+shared feebar=foo foo=bar,baz foobar=fee' def test_disjoint_set_initialization_errors(): diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py index ae62301319b..dfca41c95bb 100644 --- a/lib/spack/spack/test/web.py +++ b/lib/spack/spack/test/web.py @@ -2,125 +2,101 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -"""Tests for web.py.""" import os + +import ordereddict_backport import pytest - -from ordereddict_backport import OrderedDict - import spack.paths -import spack.util.web as web_util +import spack.util.web from spack.version import ver -web_data_path = os.path.join(spack.paths.test_path, 'data', 'web') - -root = 'file://' + web_data_path + '/index.html' -root_tarball = 'file://' + web_data_path + '/foo-0.0.0.tar.gz' - -page_1 = 'file://' + os.path.join(web_data_path, '1.html') -page_2 = 'file://' + os.path.join(web_data_path, '2.html') -page_3 = 'file://' + os.path.join(web_data_path, '3.html') -page_4 = 'file://' + os.path.join(web_data_path, '4.html') +def _create_url(relative_url): + web_data_path = os.path.join(spack.paths.test_path, 'data', 'web') + return 'file://' + os.path.join(web_data_path, relative_url) -def test_spider_0(): - pages, links = web_util.spider(root, depth=0) - - assert root in pages - assert page_1 not in pages - assert page_2 not in pages - assert page_3 not in pages - assert page_4 not in pages - - assert "This is the root page." 
in pages[root] - - assert root not in links - assert page_1 in links - assert page_2 not in links - assert page_3 not in links - assert page_4 not in links +root = _create_url('index.html') +root_tarball = _create_url('foo-0.0.0.tar.gz') +page_1 = _create_url('1.html') +page_2 = _create_url('2.html') +page_3 = _create_url('3.html') +page_4 = _create_url('4.html') -def test_spider_1(): - pages, links = web_util.spider(root, depth=1) +@pytest.mark.parametrize( + 'depth,expected_found,expected_not_found,expected_text', [ + (0, + {'pages': [root], 'links': [page_1]}, + {'pages': [page_1, page_2, page_3, page_4], + 'links': [root, page_2, page_3, page_4]}, + {root: "This is the root page."}), + (1, + {'pages': [root, page_1], 'links': [page_1, page_2]}, + {'pages': [page_2, page_3, page_4], + 'links': [root, page_3, page_4]}, + {root: "This is the root page.", + page_1: "This is page 1."}), + (2, + {'pages': [root, page_1, page_2], + 'links': [page_1, page_2, page_3, page_4]}, + {'pages': [page_3, page_4], 'links': [root]}, + {root: "This is the root page.", + page_1: "This is page 1.", + page_2: "This is page 2."}), + (3, + {'pages': [root, page_1, page_2, page_3, page_4], + 'links': [root, page_1, page_2, page_3, page_4]}, + {'pages': [], 'links': []}, + {root: "This is the root page.", + page_1: "This is page 1.", + page_2: "This is page 2.", + page_3: "This is page 3.", + page_4: "This is page 4."}), + ]) +def test_spider(depth, expected_found, expected_not_found, expected_text): + pages, links = spack.util.web.spider(root, depth=depth) - assert root in pages - assert page_1 in pages - assert page_2 not in pages - assert page_3 not in pages - assert page_4 not in pages + for page in expected_found['pages']: + assert page in pages - assert "This is the root page." in pages[root] - assert "This is page 1." 
in pages[page_1] + for page in expected_not_found['pages']: + assert page not in pages - assert root not in links - assert page_1 in links - assert page_2 in links - assert page_3 not in links - assert page_4 not in links + for link in expected_found['links']: + assert link in links + + for link in expected_not_found['links']: + assert link not in links + + for page, text in expected_text.items(): + assert text in pages[page] -def test_spider_2(): - pages, links = web_util.spider(root, depth=2) - - assert root in pages - assert page_1 in pages - assert page_2 in pages - assert page_3 not in pages - assert page_4 not in pages - - assert "This is the root page." in pages[root] - assert "This is page 1." in pages[page_1] - assert "This is page 2." in pages[page_2] - - assert root not in links - assert page_1 in links - assert page_1 in links - assert page_2 in links - assert page_3 in links - assert page_4 in links - - -def test_spider_3(): - pages, links = web_util.spider(root, depth=3) - - assert root in pages - assert page_1 in pages - assert page_2 in pages - assert page_3 in pages - assert page_4 in pages - - assert "This is the root page." in pages[root] - assert "This is page 1." in pages[page_1] - assert "This is page 2." in pages[page_2] - assert "This is page 3." in pages[page_3] - assert "This is page 4." 
in pages[page_4] - - assert root in links # circular link on page 3 - assert page_1 in links - assert page_1 in links - assert page_2 in links - assert page_3 in links - assert page_4 in links +def test_spider_no_response(monkeypatch): + # Mock the absence of a response + monkeypatch.setattr( + spack.util.web, 'read_from_url', lambda x, y: (None, None, None) + ) + pages, links = spack.util.web.spider(root, depth=0) + assert not pages and not links def test_find_versions_of_archive_0(): - versions = web_util.find_versions_of_archive( + versions = spack.util.web.find_versions_of_archive( root_tarball, root, list_depth=0) assert ver('0.0.0') in versions def test_find_versions_of_archive_1(): - versions = web_util.find_versions_of_archive( + versions = spack.util.web.find_versions_of_archive( root_tarball, root, list_depth=1) assert ver('0.0.0') in versions assert ver('1.0.0') in versions def test_find_versions_of_archive_2(): - versions = web_util.find_versions_of_archive( + versions = spack.util.web.find_versions_of_archive( root_tarball, root, list_depth=2) assert ver('0.0.0') in versions assert ver('1.0.0') in versions @@ -128,14 +104,14 @@ def test_find_versions_of_archive_2(): def test_find_exotic_versions_of_archive_2(): - versions = web_util.find_versions_of_archive( + versions = spack.util.web.find_versions_of_archive( root_tarball, root, list_depth=2) # up for grabs to make this better. 
assert ver('2.0.0b2') in versions def test_find_versions_of_archive_3(): - versions = web_util.find_versions_of_archive( + versions = spack.util.web.find_versions_of_archive( root_tarball, root, list_depth=3) assert ver('0.0.0') in versions assert ver('1.0.0') in versions @@ -145,7 +121,7 @@ def test_find_versions_of_archive_3(): def test_find_exotic_versions_of_archive_3(): - versions = web_util.find_versions_of_archive( + versions = spack.util.web.find_versions_of_archive( root_tarball, root, list_depth=3) assert ver('2.0.0b2') in versions assert ver('3.0a1') in versions @@ -159,35 +135,35 @@ def test_get_header(): # looking up headers should just work like a plain dict # lookup when there is an entry with the right key - assert(web_util.get_header(headers, 'Content-type') == 'text/plain') + assert(spack.util.web.get_header(headers, 'Content-type') == 'text/plain') # looking up headers should still work if there is a fuzzy match - assert(web_util.get_header(headers, 'contentType') == 'text/plain') + assert(spack.util.web.get_header(headers, 'contentType') == 'text/plain') # ...unless there is an exact match for the "fuzzy" spelling. 
headers['contentType'] = 'text/html' - assert(web_util.get_header(headers, 'contentType') == 'text/html') + assert(spack.util.web.get_header(headers, 'contentType') == 'text/html') # If lookup has to fallback to fuzzy matching and there are more than one # fuzzy match, the result depends on the internal ordering of the given # mapping - headers = OrderedDict() + headers = ordereddict_backport.OrderedDict() headers['Content-type'] = 'text/plain' headers['contentType'] = 'text/html' - assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/plain') + assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/plain') del headers['Content-type'] - assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/html') + assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/html') # Same as above, but different ordering - headers = OrderedDict() + headers = ordereddict_backport.OrderedDict() headers['contentType'] = 'text/html' headers['Content-type'] = 'text/plain' - assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/html') + assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/html') del headers['contentType'] - assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/plain') + assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/plain') # If there isn't even a fuzzy match, raise KeyError with pytest.raises(KeyError): - web_util.get_header(headers, 'ContentLength') + spack.util.web.get_header(headers, 'ContentLength') diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index a728c46a713..4969d5f3530 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -549,27 +549,23 @@ def parse_version_offset(path): # 8th Pass: Query strings # e.g. https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0 - (r'\?sha=[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)$', suffix), - + # e.g. 
https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-2.1.1 # e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0 - (r'\?ref=[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)$', suffix), - # e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1 # e.g. https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef - (r'[?&]version=v?(\d[\da-zA-Z._-]*)$', suffix), + (r'[?&](?:sha|ref|version)=[a-zA-Z\d+-]*[_-]?v?(\d[\da-zA-Z._-]*)$', suffix), # noqa: E501 # e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz # e.g. http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz - (r'[?&]filename=[a-zA-Z\d+-]+[_-]v?(\d[\da-zA-Z.]*)', stem), - + # e.g. https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz # e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz - (r'&get=[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem), # noqa + (r'[?&](?:filename|f|get)=[a-zA-Z\d+-]+[_-]v?(\d[\da-zA-Z.]*)', stem), # 9th Pass: Version in path # github.com/repo/name/releases/download/vver/name # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow - (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path), # noqa + (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path), # noqa: E501 # e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz (r'(\d[\da-zA-Z._-]*)/[^/]+$', path), @@ -696,6 +692,10 @@ def parse_name_offset(path, v=None): # e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz (r'\?filename=([A-Za-z\d+-]+)$', stem), + # ?f=name-ver.ver + # e.g. https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz + (r'\?f=([A-Za-z\d+-]+)$', stem), + # ?package=name # e.g. 
http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz (r'\?package=([A-Za-z\d+-]+)', stem), diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py new file mode 100644 index 00000000000..3d8ae30b103 --- /dev/null +++ b/lib/spack/spack/util/mock_package.py @@ -0,0 +1,161 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +"""Infrastructure used by tests for mocking packages and repos.""" + +import ordereddict_backport + +import spack.util.naming +from spack.dependency import Dependency +from spack.spec import Spec +from spack.version import Version + +__all__ = ["MockPackageMultiRepo"] + + +class MockPackageBase(object): + """Internal base class for mocking ``spack.package.PackageBase``. + + Use ``MockPackageMultiRepo.add_package()`` to create new instances. + + """ + def __init__(self, dependencies, dependency_types, + conditions=None, versions=None): + """Instantiate a new MockPackageBase. + + This is not for general use; it needs to be constructed by a + ``MockPackageMultiRepo``, as we need to know about *all* packages + to find possible depenencies. 
+ + """ + self.spec = None + self._installed_upstream = False + + def provides(self, vname): + return vname in self.provided + + @property + def virtuals_provided(self): + return [v.name for v, c in self.provided] + + @classmethod + def possible_dependencies( + cls, transitive=True, deptype='all', visited=None, virtuals=None): + visited = {} if visited is None else visited + + for name, conditions in cls.dependencies.items(): + # check whether this dependency could be of the type asked for + types = [dep.type for cond, dep in conditions.items()] + types = set.union(*types) + if not any(d in types for d in deptype): + continue + + visited.setdefault(cls.name, set()) + for dep_name in cls.dependencies: + if dep_name in visited: + continue + + visited.setdefault(dep_name, set()) + + if not transitive: + continue + + cls._repo.get(dep_name).possible_dependencies( + transitive, deptype, visited, virtuals) + + return visited + + +class MockPackageMultiRepo(object): + """Mock package repository, mimicking ``spack.repo.Repo``.""" + + def __init__(self): + self.spec_to_pkg = {} + + def get(self, spec): + if not isinstance(spec, spack.spec.Spec): + spec = Spec(spec) + return self.spec_to_pkg[spec.name] + + def get_pkg_class(self, name): + return self.spec_to_pkg[name] + + def exists(self, name): + return name in self.spec_to_pkg + + def is_virtual(self, name): + return False + + def repo_for_pkg(self, name): + import collections + Repo = collections.namedtuple('Repo', ['namespace']) + return Repo('mockrepo') + + def add_package(self, name, dependencies=None, dependency_types=None, + conditions=None): + """Factory method for creating mock packages. + + This creates a new subclass of ``MockPackageBase``, ensures that its + ``name`` and ``__name__`` properties are set up correctly, and + returns a new instance. + + We use a factory function here because many functions and properties + of packages need to be class functions. 
+ + Args: + name (str): name of the new package + dependencies (list): list of mock packages to be dependencies + for this new package (optional; no deps if not provided) + dependency_type (list): list of deptypes for each dependency + (optional; will be default_deptype if not provided) + conditions (list): condition specs for each dependency (optional) + + """ + if not dependencies: + dependencies = [] + + if not dependency_types: + dependency_types = [ + spack.dependency.default_deptype] * len(dependencies) + + assert len(dependencies) == len(dependency_types) + + # new class for the mock package + class MockPackage(MockPackageBase): + pass + MockPackage.__name__ = spack.util.naming.mod_to_class(name) + MockPackage.name = name + MockPackage._repo = self + + # set up dependencies + MockPackage.dependencies = ordereddict_backport.OrderedDict() + for dep, dtype in zip(dependencies, dependency_types): + d = Dependency(MockPackage, Spec(dep.name), type=dtype) + if not conditions or dep.name not in conditions: + MockPackage.dependencies[dep.name] = {Spec(name): d} + else: + dep_conditions = conditions[dep.name] + dep_conditions = dict( + (Spec(x), Dependency(MockPackage, Spec(y), type=dtype)) + for x, y in dep_conditions.items()) + MockPackage.dependencies[dep.name] = dep_conditions + + # each package has some fake versions + versions = list(Version(x) for x in [1, 2, 3]) + MockPackage.versions = dict( + (x, {'preferred': False}) for x in versions + ) + + MockPackage.variants = {} + MockPackage.provided = {} + MockPackage.conflicts = {} + MockPackage.patches = {} + + mock_package = MockPackage( + dependencies, dependency_types, conditions, versions) + self.spec_to_pkg[name] = mock_package + self.spec_to_pkg["mockrepo." 
+ name] = mock_package + + return mock_package diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py index 74790156ae0..7017b2ecb6d 100644 --- a/lib/spack/spack/util/module_cmd.py +++ b/lib/spack/spack/util/module_cmd.py @@ -13,12 +13,13 @@ import json import re +import spack import llnl.util.tty as tty # This list is not exhaustive. Currently we only use load and unload # If we need another option that changes the environment, add it here. module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse'] -py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'" +py_cmd = 'import os;import json;print(json.dumps(dict(os.environ)))' _cmd_template = "'module ' + ' '.join(args) + ' 2>&1'" @@ -27,7 +28,31 @@ def module(*args): if args[0] in module_change_commands: # Do the module manipulation, then output the environment in JSON # and read the JSON back in the parent process to update os.environ - module_cmd += ' >/dev/null;' + sys.executable + ' -c %s' % py_cmd + # For python, we use the same python running the Spack process, because + # we can guarantee its existence. We have to do some LD_LIBRARY_PATH + # shenanigans to ensure python will run. + + # LD_LIBRARY_PATH under which Spack ran + os.environ['SPACK_LD_LIBRARY_PATH'] = spack.main.spack_ld_library_path + + # suppress output from module function + module_cmd += ' >/dev/null;' + + # Capture the new LD_LIBRARY_PATH after `module` was run + module_cmd += 'export SPACK_NEW_LD_LIBRARY_PATH="$LD_LIBRARY_PATH";' + + # Set LD_LIBRARY_PATH to value at Spack startup time to ensure that + # python executable finds its libraries + module_cmd += 'LD_LIBRARY_PATH="$SPACK_LD_LIBRARY_PATH" ' + + # Execute the python command + module_cmd += '%s -c "%s";' % (sys.executable, py_cmd) + + # If LD_LIBRARY_PATH was set after `module`, dump the old value because + # we have since corrupted it to ensure python would run. 
+ # dump SPACKIGNORE as a placeholder for parsing if LD_LIBRARY_PATH null + module_cmd += 'echo "${SPACK_NEW_LD_LIBRARY_PATH:-SPACKIGNORE}"' + module_p = subprocess.Popen(module_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, @@ -36,13 +61,24 @@ def module(*args): # Cray modules spit out warnings that we cannot supress. # This hack skips to the last output (the environment) - env_output = str(module_p.communicate()[0].decode()) - env = env_output.strip().split('\n')[-1] + env_out = str(module_p.communicate()[0].decode()).strip().split('\n') + + # The environment dumped as json + env_json = env_out[-2] + # Either the uncorrupted $LD_LIBRARY_PATH or SPACKIGNORE + new_ld_library_path = env_out[-1] # Update os.environ with new dict - env_dict = json.loads(env) + env_dict = json.loads(env_json) os.environ.clear() os.environ.update(env_dict) + + # Override restored LD_LIBRARY_PATH with pre-python value + if new_ld_library_path == 'SPACKIGNORE': + os.environ.pop('LD_LIBRARY_PATH', None) + else: + os.environ['LD_LIBRARY_PATH'] = new_ld_library_path + else: # Simply execute commands that don't change state and return output module_p = subprocess.Popen(module_cmd, @@ -87,7 +123,13 @@ def get_path_args_from_module_line(line): words_and_symbols = line.split(lua_quote) path_arg = words_and_symbols[-2] else: - path_arg = line.split()[2] + # The path arg is the 3rd "word" of the line in a TCL module + # OPERATION VAR_NAME PATH_ARG + words = line.split() + if len(words) > 2: + path_arg = words[2] + else: + return [] paths = path_arg.split(':') return paths diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py index 9d5413c6091..8bcf598882b 100644 --- a/lib/spack/spack/util/path.py +++ b/lib/spack/spack/util/path.py @@ -10,8 +10,12 @@ import os import re import getpass +import subprocess import tempfile +import llnl.util.tty as tty +from llnl.util.lang import memoized + import spack.paths @@ -27,6 +31,38 @@ 'tempdir': tempfile.gettempdir(), } +# This is 
intended to be longer than the part of the install path +# spack generates from the root path we give it. Included in the +# estimate: +# +# os-arch -> 30 +# compiler -> 30 +# package name -> 50 (longest is currently 47 characters) +# version -> 20 +# hash -> 32 +# buffer -> 138 +# --------------------- +# total -> 300 +SPACK_MAX_INSTALL_PATH_LENGTH = 300 +SPACK_PATH_PADDING_CHARS = 'spack_path_placeholder' + + +@memoized +def get_system_path_max(): + # Choose a conservative default + sys_max_path_length = 256 + try: + path_max_proc = subprocess.Popen(['getconf', 'PATH_MAX', '/'], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + proc_output = str(path_max_proc.communicate()[0].decode()) + sys_max_path_length = int(proc_output) + except (ValueError, subprocess.CalledProcessError, OSError): + tty.msg('Unable to find system max path length, using: {0}'.format( + sys_max_path_length)) + + return sys_max_path_length + def substitute_config_variables(path): """Substitute placeholders into paths. @@ -58,8 +94,45 @@ def substitute_path_variables(path): return path +def _get_padding_string(length): + spack_path_padding_size = len(SPACK_PATH_PADDING_CHARS) + num_reps = int(length / (spack_path_padding_size + 1)) + extra_chars = length % (spack_path_padding_size + 1) + reps_list = [SPACK_PATH_PADDING_CHARS for i in range(num_reps)] + reps_list.append(SPACK_PATH_PADDING_CHARS[:extra_chars]) + return os.path.sep.join(reps_list) + + +def _add_computed_padding(path): + """Substitute in padding of os-specific length. The intent is to leave + SPACK_MAX_INSTALL_PATH_LENGTH characters available for parts of the + path generated by spack. This is to allow for not-completely-known + lengths of things like os/arch, compiler, package name, hash length, + etc. 
+ """ + padding_regex = re.compile(r'(\$[\w\d\:]+\b|\$\{[\w\d\:]+\})') + m = padding_regex.search(path) + if m and m.group(0).strip('${}').startswith('padding'): + padding_part = m.group(0) + len_pad_part = len(m.group(0)) + p_match = re.search(r'\:(\d+)', padding_part) + if p_match: + computed_padding = _get_padding_string(int(p_match.group(1))) + else: + # Take whatever has been computed/substituted so far and add some + # room + path_len = len(path) - len_pad_part + SPACK_MAX_INSTALL_PATH_LENGTH + system_max_path = get_system_path_max() + needed_pad_len = system_max_path - path_len + computed_padding = _get_padding_string(needed_pad_len) + return padding_regex.sub(computed_padding, path) + return path + + def canonicalize_path(path): """Same as substitute_path_variables, but also take absolute path.""" path = substitute_path_variables(path) path = os.path.abspath(path) + path = _add_computed_padding(path) + return path diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 8039dc5fdaf..3f71dd1f719 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -7,17 +7,18 @@ import codecs import errno -import re +import multiprocessing.pool import os import os.path +import re import shutil import ssl import sys import traceback -from six.moves.urllib.request import urlopen, Request +import six from six.moves.urllib.error import URLError -import multiprocessing.pool +from six.moves.urllib.request import urlopen, Request try: # Python 2 had these in the HTMLParser package. 
@@ -63,34 +64,6 @@ def handle_starttag(self, tag, attrs): self.links.append(val) -class NonDaemonProcess(multiprocessing.Process): - """Process that allows sub-processes, so pools can have sub-pools.""" - @property - def daemon(self): - return False - - @daemon.setter - def daemon(self, value): - pass - - -if sys.version_info[0] < 3: - class NonDaemonPool(multiprocessing.pool.Pool): - """Pool that uses non-daemon processes""" - Process = NonDaemonProcess -else: - - class NonDaemonContext(type(multiprocessing.get_context())): # novm - Process = NonDaemonProcess - - class NonDaemonPool(multiprocessing.pool.Pool): - """Pool that uses non-daemon processes""" - - def __init__(self, *args, **kwargs): - kwargs['context'] = NonDaemonContext() - super(NonDaemonPool, self).__init__(*args, **kwargs) - - def uses_ssl(parsed_url): if parsed_url.scheme == 'https': return True @@ -336,109 +309,152 @@ def list_url(url): for key in _iter_s3_prefix(s3, url))) -def _spider(url, visited, root, depth, max_depth, raise_on_error): - """Fetches URL and any pages it links to up to max_depth. +def spider(root_urls, depth=0, concurrency=32): + """Get web pages from root URLs. - depth should initially be zero, and max_depth is the max depth of - links to follow from the root. + If depth is specified (e.g., depth=2), then this will also follow + up to levels of links from each root. - Prints out a warning only if the root can't be fetched; it ignores - errors with pages that the root links to. + Args: + root_urls (str or list of str): root urls used as a starting point + for spidering + depth (int): level of recursion into links + concurrency (int): number of simultaneous requests that can be sent - Returns a tuple of: - - pages: dict of pages visited (URL) mapped to their full text. - - links: set of links encountered while visiting the pages. + Returns: + A dict of pages visited (URL) mapped to their full text and the + set of visited links. 
""" - pages = {} # dict from page URL -> text content. - links = set() # set of all links seen on visited pages. + # Cache of visited links, meant to be captured by the closure below + _visited = set() - try: - response_url, _, response = read_from_url(url, 'text/html') - if not response_url or not response: - return pages, links + def _spider(url, collect_nested): + """Fetches URL and any pages it links to. - page = codecs.getreader('utf-8')(response).read() - pages[response_url] = page + Prints out a warning only if the root can't be fetched; it ignores + errors with pages that the root links to. - # Parse out the links in the page - link_parser = LinkParser() + Args: + url (str): url being fetched and searched for links + collect_nested (bool): whether we want to collect arguments + for nested spidering on the links found in this url + + Returns: + A tuple of: + - pages: dict of pages visited (URL) mapped to their full text. + - links: set of links encountered while visiting the pages. + - spider_args: argument for subsequent call to spider + """ + pages = {} # dict from page URL -> text content. + links = set() # set of all links seen on visited pages. 
subcalls = [] - link_parser.feed(page) - while link_parser.links: - raw_link = link_parser.links.pop() - abs_link = url_util.join( - response_url, - raw_link.strip(), - resolve_href=True) - links.add(abs_link) + try: + response_url, _, response = read_from_url(url, 'text/html') + if not response_url or not response: + return pages, links, subcalls - # Skip stuff that looks like an archive - if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): - continue + page = codecs.getreader('utf-8')(response).read() + pages[response_url] = page - # Skip things outside the root directory - if not abs_link.startswith(root): - continue + # Parse out the links in the page + link_parser = LinkParser() + link_parser.feed(page) - # Skip already-visited links - if abs_link in visited: - continue + while link_parser.links: + raw_link = link_parser.links.pop() + abs_link = url_util.join( + response_url, + raw_link.strip(), + resolve_href=True) + links.add(abs_link) - # If we're not at max depth, follow links. - if depth < max_depth: - subcalls.append((abs_link, visited, root, - depth + 1, max_depth, raise_on_error)) - visited.add(abs_link) + # Skip stuff that looks like an archive + if any(raw_link.endswith(s) for s in ALLOWED_ARCHIVE_TYPES): + continue - if subcalls: - pool = NonDaemonPool(processes=len(subcalls)) - try: - results = pool.map(_spider_wrapper, subcalls) + # Skip already-visited links + if abs_link in _visited: + continue - for sub_pages, sub_links in results: - pages.update(sub_pages) - links.update(sub_links) + # If we're not at max depth, follow links. + if collect_nested: + subcalls.append((abs_link,)) + _visited.add(abs_link) - finally: - pool.terminate() - pool.join() + except URLError as e: + tty.debug(str(e)) - except URLError as e: - tty.debug(e) + if hasattr(e, 'reason') and isinstance(e.reason, ssl.SSLError): + tty.warn("Spack was unable to fetch url list due to a " + "certificate verification problem. 
You can try " + "running spack -k, which will not check SSL " + "certificates. Use this at your own risk.") - if hasattr(e, 'reason') and isinstance(e.reason, ssl.SSLError): - tty.warn("Spack was unable to fetch url list due to a certificate " - "verification problem. You can try running spack -k, " - "which will not check SSL certificates. Use this at your " - "own risk.") + except HTMLParseError as e: + # This error indicates that Python's HTML parser sucks. + msg = "Got an error parsing HTML." - if raise_on_error: - raise NoNetworkConnectionError(str(e), url) + # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing. + if sys.version_info[:3] < (2, 7, 3): + msg += " Use Python 2.7.3 or newer for better HTML parsing." - except HTMLParseError as e: - # This error indicates that Python's HTML parser sucks. - msg = "Got an error parsing HTML." + tty.warn(msg, url, "HTMLParseError: " + str(e)) - # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing. - if sys.version_info[:3] < (2, 7, 3): - msg += " Use Python 2.7.3 or newer for better HTML parsing." + except Exception as e: + # Other types of errors are completely ignored, + # except in debug mode + tty.debug("Error in _spider: %s:%s" % (type(e), str(e)), + traceback.format_exc()) - tty.warn(msg, url, "HTMLParseError: " + str(e)) + finally: + tty.debug("SPIDER: [url={0}]".format(url)) - except Exception as e: - # Other types of errors are completely ignored, except in debug mode. 
- tty.debug("Error in _spider: %s:%s" % (type(e), e), - traceback.format_exc()) + return pages, links, subcalls + + # TODO: Needed until we drop support for Python 2.X + def star(func): + def _wrapper(args): + return func(*args) + return _wrapper + + if isinstance(root_urls, six.string_types): + root_urls = [root_urls] + + # Clear the local cache of visited pages before starting the search + _visited.clear() + + current_depth = 0 + pages, links, spider_args = {}, set(), [] + + collect = current_depth < depth + for root in root_urls: + root = url_util.parse(root) + spider_args.append((root, collect)) + + tp = multiprocessing.pool.ThreadPool(processes=concurrency) + try: + while current_depth <= depth: + tty.debug("SPIDER: [depth={0}, max_depth={1}, urls={2}]".format( + current_depth, depth, len(spider_args)) + ) + results = tp.map(star(_spider), spider_args) + spider_args = [] + collect = current_depth < depth + for sub_pages, sub_links, sub_spider_args in results: + sub_spider_args = [x + (collect,) for x in sub_spider_args] + pages.update(sub_pages) + links.update(sub_links) + spider_args.extend(sub_spider_args) + + current_depth += 1 + finally: + tp.terminate() + tp.join() return pages, links -def _spider_wrapper(args): - """Wrapper for using spider with multiprocessing.""" - return _spider(*args) - - def _urlopen(req, *args, **kwargs): """Wrapper for compatibility with old versions of Python.""" url = req @@ -460,37 +476,22 @@ def _urlopen(req, *args, **kwargs): return opener(req, *args, **kwargs) -def spider(root, depth=0): - """Gets web pages from a root URL. - - If depth is specified (e.g., depth=2), then this will also follow - up to levels of links from the root. - - This will spawn processes to fetch the children, for much improved - performance over a sequential fetch. 
- - """ - root = url_util.parse(root) - pages, links = _spider(root, set(), root, 0, depth, False) - return pages, links - - -def find_versions_of_archive(archive_urls, list_url=None, list_depth=0): +def find_versions_of_archive( + archive_urls, list_url=None, list_depth=0, concurrency=32 +): """Scrape web pages for new versions of a tarball. - Arguments: + Args: archive_urls (str or list or tuple): URL or sequence of URLs for different versions of a package. Typically these are just the tarballs from the package file itself. By default, this searches the parent directories of archives. - - Keyword Arguments: list_url (str or None): URL for a listing of archives. Spack will scrape these pages for download links that look like the archive URL. - - list_depth (int): Max depth to follow links on list_url pages. + list_depth (int): max depth to follow links on list_url pages. Defaults to 0. + concurrency (int): maximum number of concurrent requests """ if not isinstance(archive_urls, (list, tuple)): archive_urls = [archive_urls] @@ -511,12 +512,7 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0): list_urls |= additional_list_urls # Grab some web pages to scrape. - pages = {} - links = set() - for lurl in list_urls: - pg, lnk = spider(lurl, depth=list_depth) - pages.update(pg) - links.update(lnk) + pages, links = spider(list_urls, depth=list_depth, concurrency=concurrency) # Scrape them for archive URLs regexes = [] diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py index 3915fe00fa5..e43a002182a 100644 --- a/lib/spack/spack/variant.py +++ b/lib/spack/spack/variant.py @@ -567,25 +567,24 @@ def __str__(self): # print keys in order sorted_keys = sorted(self.keys()) + # Separate boolean variants from key-value pairs as they print + # differently. All booleans go first to avoid ' ~foo' strings that + # break spec reuse in zsh. 
+ bool_keys = [] + kv_keys = [] + for key in sorted_keys: + bool_keys.append(key) if isinstance(self[key].value, bool) \ + else kv_keys.append(key) + # add spaces before and after key/value variants. string = StringIO() - kv = False - for key in sorted_keys: - vspec = self[key] + for key in bool_keys: + string.write(str(self[key])) - if not isinstance(vspec.value, bool): - # add space before all kv pairs. - string.write(' ') - kv = True - else: - # not a kv pair this time - if kv: - # if it was LAST time, then pad after. - string.write(' ') - kv = False - - string.write(str(vspec)) + for key in kv_keys: + string.write(' ') + string.write(str(self[key])) return string.getvalue() @@ -594,19 +593,30 @@ def substitute_abstract_variants(spec): """Uses the information in `spec.package` to turn any variant that needs it into a SingleValuedVariant. + This method is best effort. All variants that can be substituted will be + substituted before any error is raised. + Args: spec: spec on which to operate the substitution """ + # This method needs to be best effort so that it works in matrix exclusion + # in $spack/lib/spack/spack/spec_list.py + failed = [] for name, v in spec.variants.items(): if name in spack.directives.reserved_names: continue pkg_variant = spec.package_class.variants.get(name, None) if not pkg_variant: - raise UnknownVariantError(spec, [name]) + failed.append(name) + continue new_variant = pkg_variant.make_variant(v._original_value) pkg_variant.validate_or_raise(new_variant, spec.package_class) spec.variants.substitute(new_variant) + # Raise all errors at once + if failed: + raise UnknownVariantError(spec, failed) + # The class below inherit from Sequence to disguise as a tuple and comply # with the semantic expected by the 'values' argument of the variant directive diff --git a/share/spack/bash/spack-completion.in b/share/spack/bash/spack-completion.in index 164fc5c5b3d..80ba94ed876 100755 --- a/share/spack/bash/spack-completion.in +++ 
b/share/spack/bash/spack-completion.in @@ -304,6 +304,13 @@ _pretty_print() { complete -o bashdefault -o default -F _bash_completion_spack spack +# Completion for spacktivate +complete -o bashdefault -o default -F _bash_completion_spack spacktivate + +_spacktivate() { + _spack_env_activate +} + # Spack commands # # Everything below here is auto-generated. diff --git a/share/spack/docker/centos-6.dockerfile b/share/spack/docker/centos-6.dockerfile index 72aa934f99a..8c971124a53 100644 --- a/share/spack/docker/centos-6.dockerfile +++ b/share/spack/docker/centos-6.dockerfile @@ -16,46 +16,43 @@ COPY share $SPACK_ROOT/share COPY var $SPACK_ROOT/var RUN mkdir -p $SPACK_ROOT/opt/spack -RUN yum update -y \ - && yum install -y epel-release \ - && yum update -y \ +RUN ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/docker-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/interactive-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/spack-env + +RUN yum update -y \ + && yum install -y epel-release \ + && yum update -y \ && yum --enablerepo epel groupinstall -y "Development Tools" \ - && yum --enablerepo epel install -y \ - curl findutils gcc-c++ gcc \ - gcc-gfortran git gnupg2 hostname \ - iproute Lmod make patch \ - openssh-server python python-pip tcl \ - unzip which \ - && pip install boto3 \ - && rm -rf /var/cache/yum \ + && yum --enablerepo epel install -y \ + curl \ + findutils \ + gcc-c++ \ + gcc \ + gcc-gfortran \ + git \ + gnupg2 \ + hostname \ + iproute \ + Lmod \ + make \ + patch \ + python \ + python-pip \ + python-setuptools \ + tcl \ + unzip \ + which \ + && pip install boto3 \ + && rm -rf /var/cache/yum \ && yum clean all -RUN ( echo ". /usr/share/lmod/lmod/init/bash" \ - && echo ". \$SPACK_ROOT/share/spack/setup-env.sh" \ - && echo "if [ \"\$CURRENTLY_BUILDING_DOCKER_IMAGE\" '!=' '1' ]" \ - && echo "then" \ - && echo " . 
\$SPACK_ROOT/share/spack/spack-completion.bash" \ - && echo "fi" ) \ - >> /etc/profile.d/spack.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-ssh.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . \"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-ssh.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-ssh.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-prompt.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . \"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-prompt.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-prompt.sh \ - && mkdir -p /root/.spack \ - && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ - /root/.spack/modules.yaml \ +RUN mkdir -p /root/.spack \ + && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ + /root/.spack/modules.yaml \ && rm -rf /root/*.* /run/nologin $SPACK_ROOT/.git # [WORKAROUND] @@ -66,10 +63,10 @@ RUN [ -f ~/.profile ] \ || true WORKDIR /root -SHELL ["/bin/bash", "-l", "-c"] +SHELL ["docker-shell"] # TODO: add a command to Spack that (re)creates the package cache RUN spack spec hdf5+mpi ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"] -CMD ["docker-shell"] +CMD ["interactive-shell"] diff --git a/share/spack/docker/centos-7.dockerfile b/share/spack/docker/centos-7.dockerfile index 6aa969fef82..292663d9222 100644 --- a/share/spack/docker/centos-7.dockerfile +++ b/share/spack/docker/centos-7.dockerfile @@ -16,46 +16,43 @@ COPY share $SPACK_ROOT/share COPY var $SPACK_ROOT/var RUN mkdir -p $SPACK_ROOT/opt/spack -RUN yum update -y \ - && yum install -y epel-release \ - && yum update -y \ +RUN ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/docker-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/interactive-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/spack-env + +RUN yum update -y \ + && yum 
install -y epel-release \ + && yum update -y \ && yum --enablerepo epel groupinstall -y "Development Tools" \ - && yum --enablerepo epel install -y \ - curl findutils gcc-c++ gcc \ - gcc-gfortran git gnupg2 hostname \ - iproute Lmod make patch \ - openssh-server python python-pip tcl \ - unzip which \ - && pip install boto3 \ - && rm -rf /var/cache/yum \ + && yum --enablerepo epel install -y \ + curl \ + findutils \ + gcc-c++ \ + gcc \ + gcc-gfortran \ + git \ + gnupg2 \ + hostname \ + iproute \ + Lmod \ + make \ + patch \ + python \ + python-pip \ + python-setuptools \ + tcl \ + unzip \ + which \ + && pip install boto3 \ + && rm -rf /var/cache/yum \ && yum clean all -RUN ( echo ". /usr/share/lmod/lmod/init/bash" \ - && echo ". \$SPACK_ROOT/share/spack/setup-env.sh" \ - && echo "if [ \"\$CURRENTLY_BUILDING_DOCKER_IMAGE\" '!=' '1' ]" \ - && echo "then" \ - && echo " . \$SPACK_ROOT/share/spack/spack-completion.bash" \ - && echo "fi" ) \ - >> /etc/profile.d/spack.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-ssh.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . \"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-ssh.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-ssh.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-prompt.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . 
\"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-prompt.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-prompt.sh \ - && mkdir -p /root/.spack \ - && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ - /root/.spack/modules.yaml \ +RUN mkdir -p /root/.spack \ + && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ + /root/.spack/modules.yaml \ && rm -rf /root/*.* /run/nologin $SPACK_ROOT/.git # [WORKAROUND] @@ -66,10 +63,10 @@ RUN [ -f ~/.profile ] \ || true WORKDIR /root -SHELL ["/bin/bash", "-l", "-c"] +SHELL ["docker-shell"] # TODO: add a command to Spack that (re)creates the package cache RUN spack spec hdf5+mpi ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"] -CMD ["docker-shell"] +CMD ["interactive-shell"] diff --git a/share/spack/docker/entrypoint.bash b/share/spack/docker/entrypoint.bash index 775747d904d..fbd7baf2bcd 100755 --- a/share/spack/docker/entrypoint.bash +++ b/share/spack/docker/entrypoint.bash @@ -1,45 +1,143 @@ -#! /usr/bin/env bash -e +#! /usr/bin/env bash # # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -unset CURRENTLY_BUILDING_DOCKER_IMAGE +mode=oneshot -if [ "$1" '=' 'docker-shell' ] ; then - if [ -t 0 ] ; then - exec bash -il - else - ( - echo -n "It looks like you're trying to run an intractive shell" - echo -n " session, but either no psuedo-TTY is allocateed for this" - echo -n " container's STDIN, or it is closed." - echo - - echo -n "Make sure you run docker with the --interactive and --tty" - echo -n " options." - echo - ) >&2 - - exit 1 - fi -else - exec 3>&1 - exec 4>&2 - - exec 1>&- - exec 2>&- - - source /etc/profile.d/spack.sh - source /etc/profile.d/handle-ssh.sh - - exec 1>&3 - exec 2>&4 - - exec 3>&- - exec 4>&- - - spack "$@" - exit $? 
+if [ "$( basename "$0" )" '=' 'spack-env' ] ; then + mode=spackenv +elif [ "$( basename "$0" )" '=' 'docker-shell' ] ; then + mode=dockershell +elif [ "$( basename "$0" )" '=' 'interactive-shell' ] ; then + mode=interactiveshell +elif [ "$1" '=' 'docker-shell' ] ; then + mode=dockershell + shift +elif [ "$1" '=' 'interactive-shell' ] ; then + mode=interactiveshell + shift fi + +case "$mode" in + "spackenv") + # Scenario 1: Run as if the image had no ENTRYPOINT + # + # Necessary for use cases where the command to run and all + # arguments must be accepted in the CMD portion. (e.g.: Gitlab CI + # Runners) + # + # Roughly equivalent to + # docker run ... --entrypoint spack-env ... sh -c "..." + # + # The shell script runs with spack pre-loaded and ready to use. + . $SPACK_ROOT/share/spack/setup-env.sh + unset CURRENTLY_BUILDING_DOCKER_IMAGE + exec "$@" + ;; + + "dockershell") + # Scenario 2: Accept shell code from a RUN command in a + # Dockerfile + # + # For new Docker images that start FROM this image as its base. + # Prepared so that subsequent RUN commands can take advantage of + # Spack without having to manually (re)initialize. + # + # Example: + # FROM spack/centos7 + # COPY spack.yaml . + # RUN spack install # <- Spack is loaded and ready to use. + # # No manual initialization necessary. + . $SPACK_ROOT/share/spack/setup-env.sh + exec bash -c "$*" + ;; + + "interactiveshell") + # Scenario 3: Run an interactive shell session with Spack + # preloaded. + # + # Create a container meant for an interactive shell session. + # Additional checks are performed to ensure that stdin is a tty + # and additional shell completion files are sourced. The user is + # presented with a shell prompt from which they may issue Spack + # commands. + # + # This is the default behavior when running with no CMD or + # ENTRYPOINT overrides: + # docker run -it spack/centos7 + if [ -t 0 ] ; then + . $SPACK_ROOT/share/spack/setup-env.sh + . 
$SPACK_ROOT/share/spack/spack-completion.bash + unset CURRENTLY_BUILDING_DOCKER_IMAGE + exec bash -i + else + ( + echo -n "It looks like you're trying to run an" + echo -n " interactive shell session, but either no" + echo -n " pseudo-TTY is allocated for this container's" + echo " STDIN, or it is closed." + echo + + echo -n "Make sure you run docker with the --interactive" + echo -n " and --tty options." + echo + ) >&2 + + exit 1 + fi + ;; + + "oneshot") + # Scenario 4: Run a one-shot Spack command from the host command + # line. + # + # Triggered by providing arguments to `docker run`. Arguments + # are passed along to the container's underlying spack + # installation, allowing users to use the image as if it were + # spack, itself. Pass volume mount information to `docker run` + # to persist the effects of running in this mode. + # + # This is the default behavior when running with a CMD override. + # + # Examples: + # # concretize the same spec on different OSes + # docker run --rm spack/ubuntu-xenial spec zlib + # docker run --rm spack/centos7 spec zlib + # + # # a "wetter" dry-run; + # # install a package and then throw away the results. + # docker run --rm spack/centos7 install libiconv + # docker run --rm spack/centos7 find libiconv + # ==> No package matches the query: libiconv + # + # # use docker volumes to persist changes + # docker run --rm -v ...:/spack spack/centos7 install ... + # docker run --rm -v ...:/spack spack/centos7 install ... + # docker run --rm -v ...:/spack spack/centos7 install ... + exec 3>&1 + exec 4>&2 + + exec 1>&- + exec 2>&- + + . $SPACK_ROOT/share/spack/setup-env.sh + unset CURRENTLY_BUILDING_DOCKER_IMAGE + + exec 1>&3 + exec 2>&4 + + exec 3>&- + exec 4>&- + + spack "$@" + exit $? 
+ ;; + + *) + echo "INTERNAL ERROR - UNRECOGNIZED MODE: $mode" >&2 + exit 1 + ;; +esac diff --git a/share/spack/docker/handle-prompt.sh b/share/spack/docker/handle-prompt.sh deleted file mode 100644 index a01047463e2..00000000000 --- a/share/spack/docker/handle-prompt.sh +++ /dev/null @@ -1,173 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -if [ "$CURRENTLY_BUILDING_DOCKER_IMAGE" '!=' '1' ] ; then - -if [ x$SPACK_PROMPT '!=' x0 ] ; then - -__tmp="`mktemp -d`" - -__trylock() { - local dir - dir="$__tmp/$1.lock" - mkdir "$dir" &>/dev/null - return $? -} - -__queue_init() { - local r - local w - - mkdir "$__tmp/$1.read.lock" ; r=$? - mkdir "$__tmp/$1.write.lock" ; w=$? - - if [ "$r" '=' '0' -a "$w" '=' '0' ] ; then - return 0 - else - return 1 - fi -} - -__queue_try_read() { - __trylock "$1.read" - return $? -} - -__queue_try_write() { - __trylock "$1.write" - return $? -} - -__queue_make_readable() { - rm -r "$__tmp/$1.read.lock" &>/dev/null - return $? -} - -__queue_make_writable() { - rm -r "$__tmp/$1.write.lock" &>/dev/null - return $? -} - -__read() { - cat "$__tmp/$1" 2> /dev/null - return $? -} - -__write() { - cat > "$__tmp/$1" 2> /dev/null - return $? -} - -__revparse_head() { - head="`git -C "$SPACK_ROOT" rev-parse $@ HEAD 2>/dev/null`" - result="$?" - if [ "$result" '!=' '0' ] ; then - head="`git --git-dir="$SPACK_ROOT"/.git \\ - --work-tree="$SPACK_ROOT" rev-parse $@ HEAD 2>/dev/null`" - result="$?" - fi - - echo "$head" - return $result -} - -__git_head() { - head="`__revparse_head --abbrev-ref`" - if [ "$?" '=' '0' ] ; then - if [ "$head" '=' 'HEAD' ] ; then - head="`__revparse_head | cut -c1-8`..." 
- fi - - echo "$head" - fi -} - -__update_prompt() { - local prompt - prompt='' - linux_distro="$DOCKERFILE_DISTRO" - if [ -n "$linux_distro" ] ; then - linux_distro='\[\e[1;34m\][\[\e[0;34m\]'"$linux_distro"'\[\e[1;34m\]]' - if [ -n "$prompt" ] ; then - prompt="$prompt " - fi - prompt="$prompt$linux_distro" - fi - - git_head="`__git_head`" - - if [ -n "$git_head" ] ; then - git_head='\[\e[1;32m\](\[\e[0;32m\]'"$git_head"'\[\e[1;32m\])' - if [ -n "$prompt" ] ; then - prompt="$prompt " - fi - prompt="$prompt$git_head" - fi - - if [ -n "$prompt" ] ; then - prompt="$prompt " - fi - prompt="$prompt"'\[\e[0;m\]\W: ' - echo "$prompt" | __write prompt -} - -set -m -( - __queue_init query - __queue_init prompt - - __update_prompt - __queue_make_readable prompt - - __queue_make_writable query - - while sleep 0.010 ; do - last_q_time='' - - while sleep 0.010 ; do - q_time="`date +%s%N`" - if __queue_try_read query ; then - last_q_time="$q_time" - __queue_make_writable query - fi - - if [ -n "$last_q_time" -a \ - "$(( (q_time - last_q_time)/10000000 > 100 ))" '=' '1' ] ; then - break - fi - done - - __update_prompt - __queue_make_readable prompt - done -) &>/dev/null & -set +m - -__update_prompt_main_first_call=1 -__update_prompt_main() { - if [ "$__update_prompt_main_first_call" '=' '1' ] ; then - while sleep 0.001 ; do - if __queue_try_read prompt ; then - PS1="`__read prompt`" - break - fi - done - __update_prompt_main_first_call=0 - else - if __queue_try_read prompt ; then - PS1="`__read prompt`" - fi - fi - - if __queue_try_write query ; then - __queue_make_readable query - fi -} - -PROMPT_COMMAND=__update_prompt_main - -fi # [ x$SPACK_PROMPT '!=' x0 ] - -fi # [ "$CURRENTLY_BUILDING_DOCKER_IMAGE" '!=' '1' ] diff --git a/share/spack/docker/handle-ssh.sh b/share/spack/docker/handle-ssh.sh deleted file mode 100644 index 18e5ae259d8..00000000000 --- a/share/spack/docker/handle-ssh.sh +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2013-2020 Lawrence Livermore National Security, 
LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -if [ "$CURRENTLY_BUILDING_DOCKER_IMAGE" '!=' '1' ] ; then - -uid="`id -u`" -if [ "$uid" '=' '0' ] ; then - key_types="dsa ecdsa rsa" - if [ "$DOCKERFILE_BASE" '!=' 'centos:6' ] ; then - key_types="${key_types} ed25519" - fi - - for key_type in $key_types ; do - private_key_file="/etc/ssh/ssh_host_${key_type}_key" - public_key_file="$private_key_file.pub" - - if [ '!' -f "$private_key_file" ] ; then - ssh-keygen \ - -q -t "$key_type" -N "" -f "$private_key_file" - chmod 600 "$private_key_file" - chmod 644 "$public_key_file" - fi - done - - mkdir -p /var/run/sshd - - pgrep -u 0 -U 0 sshd &> /dev/null - if [ '!' "$?" '=' '0' ] ; then - nohup /usr/sbin/sshd -f /etc/ssh/sshd_config < /dev/null &> /dev/null - fi -fi - -if [ '!' -f "$HOME/.ssh/id_rsa" ] ; then - ssh-keygen \ - -t rsa -C "spack.developer@docker.host" -N "" -f "$HOME/.ssh/id_rsa" - cat "$HOME/.ssh/id_rsa.pub" >> "$HOME/.ssh/authorized_keys" - chmod 600 "$HOME/.ssh/authorized_keys" - - docker_ip="`ip address show dev eth0 | - grep inet | - cut -d' ' -f 6 | - cut -d/ -f 1`" - - ssh-keyscan -t rsa 127.0.0.1 localhost "$docker_ip" "`hostname`" \ - > "$HOME/.ssh/known_hosts" 2> /dev/null -fi - -fi # [ "$CURRENTLY_BUILDING_DOCKER_IMAGE" '!=' '1' ] diff --git a/share/spack/docker/ubuntu-1604.dockerfile b/share/spack/docker/ubuntu-1604.dockerfile index e7a6783d24c..1f7db8bea9c 100644 --- a/share/spack/docker/ubuntu-1604.dockerfile +++ b/share/spack/docker/ubuntu-1604.dockerfile @@ -16,29 +16,36 @@ COPY share $SPACK_ROOT/share COPY var $SPACK_ROOT/var RUN mkdir -p $SPACK_ROOT/opt/spack -RUN apt-get -yqq update \ - && apt-get -yqq install --no-install-recommends \ - build-essential \ - ca-certificates \ - curl \ - file \ - g++ \ - gcc \ - gfortran \ - git \ - gnupg2 \ - iproute2 \ - lmod \ - locales \ - lua-posix \ - make \ - openssh-server \ - python3 \ - python3-pip \ - tcl 
\ - unzip \ - && locale-gen en_US.UTF-8 \ - && pip3 install boto3 \ +RUN ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/docker-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/interactive-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/spack-env + +RUN apt-get -yqq update \ + && apt-get -yqq install --no-install-recommends \ + build-essential \ + ca-certificates \ + curl \ + file \ + g++ \ + gcc \ + gfortran \ + git \ + gnupg2 \ + iproute2 \ + lmod \ + locales \ + lua-posix \ + make \ + python3 \ + python3-pip \ + python3-setuptools \ + tcl \ + unzip \ + && locale-gen en_US.UTF-8 \ + && pip3 install boto3 \ && rm -rf /var/lib/apt/lists/* # Add LANG default to en_US.UTF-8 @@ -46,33 +53,10 @@ ENV LANGUAGE en_US.UTF-8 ENV LANG en_US.UTF-8 ENV LC_ALL en_US.UTF-8 -RUN ( echo ". /usr/share/lmod/lmod/init/bash" \ - && echo ". \$SPACK_ROOT/share/spack/setup-env.sh" \ - && echo "if [ \"\$CURRENTLY_BUILDING_DOCKER_IMAGE\" '!=' '1' ]" \ - && echo "then" \ - && echo " . \$SPACK_ROOT/share/spack/spack-completion.bash" \ - && echo "fi" ) \ - >> /etc/profile.d/spack.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-ssh.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . \"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-ssh.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-ssh.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-prompt.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . 
\"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-prompt.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-prompt.sh \ - && mkdir -p /root/.spack \ - && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ - /root/.spack/modules.yaml \ - && rm -rf /root/*.* $SPACK_ROOT/.git +RUN mkdir -p /root/.spack \ + && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ + /root/.spack/modules.yaml \ + && rm -rf /root/*.* /run/nologin $SPACK_ROOT/.git # [WORKAROUND] # https://superuser.com/questions/1241548/ @@ -82,10 +66,10 @@ RUN [ -f ~/.profile ] \ || true WORKDIR /root -SHELL ["/bin/bash", "-l", "-c"] +SHELL ["docker-shell"] # TODO: add a command to Spack that (re)creates the package cache RUN spack spec hdf5+mpi ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"] -CMD ["docker-shell"] +CMD ["interactive-shell"] diff --git a/share/spack/docker/ubuntu-1804.dockerfile b/share/spack/docker/ubuntu-1804.dockerfile index 5950908e1ee..a8d9577b664 100644 --- a/share/spack/docker/ubuntu-1804.dockerfile +++ b/share/spack/docker/ubuntu-1804.dockerfile @@ -16,29 +16,36 @@ COPY share $SPACK_ROOT/share COPY var $SPACK_ROOT/var RUN mkdir -p $SPACK_ROOT/opt/spack -RUN apt-get -yqq update \ - && apt-get -yqq install --no-install-recommends \ - build-essential \ - ca-certificates \ - curl \ - file \ - g++ \ - gcc \ - gfortran \ - git \ - gnupg2 \ - iproute2 \ - lmod \ - locales \ - lua-posix \ - make \ - openssh-server \ - python3 \ - python3-pip \ - tcl \ - unzip \ - && locale-gen en_US.UTF-8 \ - && pip3 install boto3 \ +RUN ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/docker-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/interactive-shell \ + && ln -s $SPACK_ROOT/share/spack/docker/entrypoint.bash \ + /usr/local/bin/spack-env + +RUN apt-get -yqq update \ + && apt-get -yqq install --no-install-recommends \ + build-essential \ + ca-certificates \ + curl \ + file \ + g++ \ + gcc \ + 
gfortran \ + git \ + gnupg2 \ + iproute2 \ + lmod \ + locales \ + lua-posix \ + make \ + python3 \ + python3-pip \ + python3-setuptools \ + tcl \ + unzip \ + && locale-gen en_US.UTF-8 \ + && pip3 install boto3 \ && rm -rf /var/lib/apt/lists/* # Add LANG default to en_US.UTF-8 @@ -46,33 +53,10 @@ ENV LANGUAGE en_US.UTF-8 ENV LANG en_US.UTF-8 ENV LC_ALL en_US.UTF-8 -RUN ( echo ". /usr/share/lmod/lmod/init/bash" \ - && echo ". \$SPACK_ROOT/share/spack/setup-env.sh" \ - && echo "if [ \"\$CURRENTLY_BUILDING_DOCKER_IMAGE\" '!=' '1' ]" \ - && echo "then" \ - && echo " . \$SPACK_ROOT/share/spack/spack-completion.bash" \ - && echo "fi" ) \ - >> /etc/profile.d/spack.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-ssh.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . \"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-ssh.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-ssh.sh \ - && ( echo "f=\"\$SPACK_ROOT/share/spack/docker/handle-prompt.sh\"" \ - && echo "if [ -f \"\$f\" ]" \ - && echo "then" \ - && echo " . 
\"\$f\"" \ - && echo "else" \ - && cat $SPACK_ROOT/share/spack/docker/handle-prompt.sh \ - && echo "fi" ) \ - >> /etc/profile.d/handle-prompt.sh \ - && mkdir -p /root/.spack \ - && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ - /root/.spack/modules.yaml \ - && rm -rf /root/*.* $SPACK_ROOT/.git +RUN mkdir -p /root/.spack \ + && cp $SPACK_ROOT/share/spack/docker/modules.yaml \ + /root/.spack/modules.yaml \ + && rm -rf /root/*.* /run/nologin $SPACK_ROOT/.git # [WORKAROUND] # https://superuser.com/questions/1241548/ @@ -86,10 +70,10 @@ RUN [ -f ~/.profile ] \ RUN ln -s posix_c.so /usr/lib/x86_64-linux-gnu/lua/5.2/posix.so WORKDIR /root -SHELL ["/bin/bash", "-l", "-c"] +SHELL ["docker-shell"] # TODO: add a command to Spack that (re)creates the package cache RUN spack spec hdf5+mpi ENTRYPOINT ["/bin/bash", "/opt/spack/share/spack/docker/entrypoint.bash"] -CMD ["docker-shell"] +CMD ["interactive-shell"] diff --git a/share/spack/gitlab/pr_pipeline.yml b/share/spack/gitlab/pr_pipeline.yml new file mode 100644 index 00000000000..4eb08f3434f --- /dev/null +++ b/share/spack/gitlab/pr_pipeline.yml @@ -0,0 +1,20 @@ +pr_pipeline: + only: + - external_pull_requests + variables: + SPACK_REPO: https://github.com/spack/spack.git + SPACK_REF: ${CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME} + SPACK_IS_PR_PIPELINE: "True" + trigger: + project: spack/e4s + strategy: depend + +merge_pipeline: + only: + - develop + variables: + SPACK_REPO: https://github.com/spack/spack.git + SPACK_REF: develop + trigger: + project: spack/e4s + strategy: depend \ No newline at end of file diff --git a/share/spack/qa/run-bootstrap-tests b/share/spack/qa/run-bootstrap-tests deleted file mode 100755 index 9532d2d801a..00000000000 --- a/share/spack/qa/run-bootstrap-tests +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -e -# -# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -# -# Description: -# Checks that Spack shell integration with modules works correctly. -# -# Usage: -# run-bootstrap-tests -# -. "$(dirname $0)/setup.sh" -check_dependencies ${coverage} git hg svn - -# Fetch the sources in a mirror, and add it to Spack -mkdir -p ~/.mirror -bin/spack mirror add travis ~/.mirror -bin/spack mirror create -D -d ~/.mirror environment-modules~X - - -# Move to root directory of Spack -# Allows script to be run from anywhere -cd "$SPACK_ROOT" - -# Print compiler information -spack config get compilers - -# Run some build smoke tests, potentially with code coverage -${coverage_run} bin/spack bootstrap - -# Check module integration -. "share/spack/setup-env.sh" -module av || exit 1 -spack load tcl || exit 1 diff --git a/share/spack/qa/run-shell-tests b/share/spack/qa/run-shell-tests new file mode 100755 index 00000000000..31c1c1548b1 --- /dev/null +++ b/share/spack/qa/run-shell-tests @@ -0,0 +1,43 @@ +#!/bin/bash -e +# +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +# +# Description: +# Runs Spack shell tests. +# +# Usage: +# run-shell-tests + +#----------------------------------------------------------- +# Run a few initial commands and set up test environment +#----------------------------------------------------------- +ORIGINAL_PATH="$PATH" + +. 
"$(dirname $0)/setup.sh" +check_dependencies $coverage git hg svn + +# Clean the environment by removing Spack from the path and getting rid of +# the spack shell function +export PATH="$ORIGINAL_PATH" +unset spack + +# Start in the spack root directory +cd "$SPACK_ROOT" + +# Run bash tests with coverage enabled, but pipe output to /dev/null +# because it seems that kcov seems to undo the script's redirection +if [ "$COVERAGE" = true ]; then + "$QA_DIR/bashcov" "$QA_DIR/setup-env-test.sh" &> /dev/null + "$QA_DIR/bashcov" "$QA_DIR/completion-test.sh" &> /dev/null +else + bash "$QA_DIR/setup-env-test.sh" + bash "$QA_DIR/completion-test.sh" +fi + +# Run the test scripts for their output (these will print nicely) +zsh "$QA_DIR/setup-env-test.sh" +dash "$QA_DIR/setup-env-test.sh" diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests index 01f564e5e1f..7c4abb14130 100755 --- a/share/spack/qa/run-unit-tests +++ b/share/spack/qa/run-unit-tests @@ -37,34 +37,13 @@ bin/spack -h bin/spack help -a # Profile and print top 20 lines for a simple call to spack spec -spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170 +if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then + spack -p --lines 20 spec openmpi +else + spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170 +fi #----------------------------------------------------------- # Run unit tests with code coverage #----------------------------------------------------------- $coverage_run $(which spack) test -x --verbose - -#----------------------------------------------------------- -# Run tests for setup-env.sh -#----------------------------------------------------------- -# Clean the environment by removing Spack from the path and getting rid of -# the spack shell function -export PATH="$ORIGINAL_PATH" -unset spack - -# start in the spack root directory -cd "$SPACK_ROOT" - -# Run bash tests with coverage enabled, but pipe output to /dev/null -# because it seems that kcov seems to undo the script's redirection -if [ 
"$BASH_COVERAGE" = true ]; then - "$QA_DIR/bashcov" "$QA_DIR/setup-env-test.sh" &> /dev/null - "$QA_DIR/bashcov" "$QA_DIR/completion-test.sh" &> /dev/null -fi - -# run the test scripts for their output (these will print nicely) -bash "$QA_DIR/setup-env-test.sh" -zsh "$QA_DIR/setup-env-test.sh" -dash "$QA_DIR/setup-env-test.sh" - -bash "$QA_DIR/completion-test.sh" diff --git a/share/spack/qa/setup.sh b/share/spack/qa/setup.sh index 61fb2a7706c..65364790154 100755 --- a/share/spack/qa/setup.sh +++ b/share/spack/qa/setup.sh @@ -20,24 +20,15 @@ export SPACK_ROOT=$(realpath "$QA_DIR/../../..") coverage="" coverage_run="" -# bash coverage depends on some other factors -- there are issues with -# kcov for Python 2.6, unit tests, and build tests. -if [[ $TEST_SUITE == unit && # kcov segfaults for the MPICH build test - $TRAVIS_OS_NAME == linux && - $TRAVIS_PYTHON_VERSION != 2.6 ]]; -then - BASH_COVERAGE="true" -else - BASH_COVERAGE="false" -fi - # Set up some variables for running coverage tests. if [[ "$COVERAGE" == "true" ]]; then # these set up coverage for Python coverage=coverage coverage_run="coverage run" - if [ "$BASH_COVERAGE" = true ]; then + # bash coverage depends on some other factors -- there are issues with + # kcov for Python 2.6, unit tests, and build tests. 
+ if [[ $TRAVIS_PYTHON_VERSION != 2.6 ]]; then mkdir -p coverage cc_script="$SPACK_ROOT/lib/spack/env/cc" bashcov=$(realpath ${QA_DIR}/bashcov) diff --git a/share/spack/setup-env.csh b/share/spack/setup-env.csh index 67357c94e49..edbf51e8e28 100755 --- a/share/spack/setup-env.csh +++ b/share/spack/setup-env.csh @@ -18,6 +18,7 @@ if ($?SPACK_ROOT) then # Command aliases point at separate source files alias spack 'set _sp_args = (\!*); source $_spack_share_dir/csh/spack.csh' + alias spacktivate 'spack env activate' alias _spack_pathadd 'set _pa_args = (\!*) && source $_spack_share_dir/csh/pathadd.csh' # Set variables needed by this script diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index d3aed61d1a7..032247cd8f3 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -242,6 +242,8 @@ if [ "$_sp_shell" = bash ]; then export -f spack fi +alias spacktivate="spack env activate" + # # Figure out where this file is. # diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 34ed2bd1484..84fda993125 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -304,6 +304,13 @@ _pretty_print() { complete -o bashdefault -o default -F _bash_completion_spack spack +# Completion for spacktivate +complete -o bashdefault -o default -F _bash_completion_spack spacktivate + +_spacktivate() { + _spack_env_activate +} + # Spack commands # # Everything below here is auto-generated. 
@@ -313,7 +320,7 @@ _spack() { then SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars" else - SPACK_COMPREPLY="activate add arch blame bootstrap build build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config configure containerize create deactivate debug dependencies dependents deprecate dev-build diy docs edit env extensions fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test uninstall unload upload-s3 url verify versions view" + SPACK_COMPREPLY="activate add arch blame build build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config configure containerize create deactivate debug dependencies dependents deprecate dev-build diy docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test uninstall unload upload-s3 url verify versions view" fi } @@ -348,10 +355,6 @@ _spack_blame() { fi } -_spack_bootstrap() { - SPACK_COMPREPLY="-h --help -j --jobs --keep-prefix --keep-stage -n --no-checksum -v --verbose --use-cache --no-cache --cache-only --clean --dirty" -} - _spack_build() { if $list_options then @@ -455,7 +458,7 @@ _spack_cd() { _spack_checksum() { if $list_options then - SPACK_COMPREPLY="-h --help --keep-stage" + SPACK_COMPREPLY="-h --help --keep-stage -b --batch" else _all_packages fi @@ -466,20 +469,12 @@ _spack_ci() { then SPACK_COMPREPLY="-h --help" else - SPACK_COMPREPLY="start generate pushyaml rebuild" + 
SPACK_COMPREPLY="generate rebuild" fi } -_spack_ci_start() { - SPACK_COMPREPLY="-h --help --output-file --copy-to --spack-repo --spack-ref --downstream-repo --branch-name --commit-sha" -} - _spack_ci_generate() { - SPACK_COMPREPLY="-h --help --output-file --copy-to --spack-repo --spack-ref" -} - -_spack_ci_pushyaml() { - SPACK_COMPREPLY="-h --help --downstream-repo --branch-name --commit-sha" + SPACK_COMPREPLY="-h --help --output-file --copy-to --spack-repo --spack-ref --optimize" } _spack_ci_rebuild() { @@ -697,7 +692,7 @@ _spack_deprecate() { _spack_dev_build() { if $list_options then - SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --keep-prefix --skip-patch -q --quiet -u --until --clean --dirty" + SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --keep-prefix --skip-patch -q --quiet --drop-in -b --before -u --until --clean --dirty" else _all_packages fi @@ -706,7 +701,7 @@ _spack_dev_build() { _spack_diy() { if $list_options then - SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --keep-prefix --skip-patch -q --quiet -u --until --clean --dirty" + SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --keep-prefix --skip-patch -q --quiet --drop-in -b --before -u --until --clean --dirty" else _all_packages fi @@ -817,6 +812,24 @@ _spack_extensions() { fi } +_spack_external() { + if $list_options + then + SPACK_COMPREPLY="-h --help" + else + SPACK_COMPREPLY="find" + fi +} + +_spack_external_find() { + if $list_options + then + SPACK_COMPREPLY="-h --help --not-buildable" + else + _all_packages + fi +} + _spack_fetch() { if $list_options then @@ -1029,7 +1042,7 @@ _spack_mirror() { _spack_mirror_create() { if $list_options then - SPACK_COMPREPLY="-h --help -d --directory -a --all -f --file --skip-unstable-versions -D --dependencies -n --versions-per-spec" + SPACK_COMPREPLY="-h --help 
-d --directory -a --all -f --file --exclude-file --exclude-specs --skip-unstable-versions -D --dependencies -n --versions-per-spec" else _all_packages fi @@ -1497,7 +1510,7 @@ _spack_verify() { _spack_versions() { if $list_options then - SPACK_COMPREPLY="-h --help -s --safe-only" + SPACK_COMPREPLY="-h --help -s --safe-only -c --concurrency" else _all_packages fi @@ -1508,7 +1521,7 @@ _spack_view() { then SPACK_COMPREPLY="-h --help -v --verbose -e --exclude -d --dependencies" else - SPACK_COMPREPLY="symlink add soft hardlink hard remove rm statlink status check" + SPACK_COMPREPLY="symlink add soft hardlink hard copy relocate remove rm statlink status check" fi } @@ -1557,6 +1570,24 @@ _spack_view_hard() { fi } +_spack_view_copy() { + if $list_options + then + SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts" + else + _all_packages + fi +} + +_spack_view_relocate() { + if $list_options + then + SPACK_COMPREPLY="-h --help --projection-file -i --ignore-conflicts" + else + _all_packages + fi +} + _spack_view_remove() { if $list_options then diff --git a/var/spack/repos/builtin.mock/packages/find-externals1/package.py b/var/spack/repos/builtin.mock/packages/find-externals1/package.py new file mode 100644 index 00000000000..25e26dcced4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/find-externals1/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + +import os +import re + + +class FindExternals1(AutotoolsPackage): + executables = ['find-externals1-exe'] + + url = "http://www.example.com/find-externals-1.0.tar.gz" + + version('1.0', 'hash-1.0') + + @classmethod + def determine_spec_details(cls, prefix, exes_in_prefix): + exe_to_path = dict( + (os.path.basename(p), p) for p in exes_in_prefix + ) + if 'find-externals1-exe' not in exe_to_path: + return None + + exe = spack.util.executable.Executable( + exe_to_path['find-externals1-exe']) + output = exe('--version', output=str) + if output: + match = re.search(r'find-externals1.*version\s+(\S+)', output) + if match: + version_str = match.group(1) + return Spec('find-externals1@{0}'.format(version_str)) diff --git a/var/spack/repos/builtin.mock/packages/gcc/package.py b/var/spack/repos/builtin.mock/packages/gcc/package.py new file mode 100644 index 00000000000..7826e1b5cbb --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/gcc/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Gcc(Package): + """Simple compiler package.""" + + homepage = "http://www.example.com" + url = "http://www.example.com/gcc-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + version('2.0', '2.0_a_hash') + version('3.0', '3.0_a_hash') + + depends_on('conflict', when='@3.0') + + def install(self, spec, prefix): + # Create the minimal compiler that will fool `spack compiler find` + mkdirp(prefix.bin) + with open(prefix.bin.gcc, 'w') as f: + f.write('#!/bin/bash\necho "%s"' % str(spec.version)) + set_executable(prefix.bin.gcc) diff --git a/var/spack/repos/builtin.mock/packages/multivalue_variant/package.py b/var/spack/repos/builtin.mock/packages/multivalue-variant/package.py similarity index 100% rename from var/spack/repos/builtin.mock/packages/multivalue_variant/package.py rename to var/spack/repos/builtin.mock/packages/multivalue-variant/package.py diff --git a/var/spack/repos/builtin.mock/packages/needs-relocation/package.py b/var/spack/repos/builtin.mock/packages/needs-relocation/package.py new file mode 100644 index 00000000000..681a8a53ed4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/needs-relocation/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +def check(condition, msg): + """Raise an install error if condition is False.""" + if not condition: + raise InstallError(msg) + + +class NeedsRelocation(Package): + """A dumy package that encodes its prefix.""" + homepage = 'https://www.cmake.org' + url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz' + + version('0.0.0', '12345678qwertyuiasdfghjkzxcvbnm0') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + + exe = join_path(prefix.bin, 'exe') + with open(exe, 'w') as f: + f.write(prefix) + set_executable(exe) diff --git a/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent/package.py b/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent/package.py index 5507fbdc21f..de14faa51f5 100644 --- a/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent/package.py +++ b/var/spack/repos/builtin.mock/packages/singlevalue-variant-dependent/package.py @@ -14,4 +14,4 @@ class SinglevalueVariantDependent(Package): version('1.0', '0123456789abcdef0123456789abcdef') - depends_on('multivalue_variant fee=baz') + depends_on('multivalue-variant fee=baz') diff --git a/var/spack/repos/builtin/packages/abseil-cpp/package.py b/var/spack/repos/builtin/packages/abseil-cpp/package.py index 90fd6cbd879..c40b2353bf8 100644 --- a/var/spack/repos/builtin/packages/abseil-cpp/package.py +++ b/var/spack/repos/builtin/packages/abseil-cpp/package.py @@ -14,10 +14,18 @@ class AbseilCpp(CMakePackage): maintainers = ['jcftang'] + version('20200225.1', sha256='0db0d26f43ba6806a8a3338da3e646bb581f0ca5359b3a201d8fb8e4752fd5f8') version('20190808', sha256='8100085dada279bf3ee00cd064d43b5f55e5d913be0dfe2906f06f8f28d5b37e') version('20181200', sha256='e2b53bfb685f5d4130b84c4f3050c81bf48c497614dc85d91dbd3ed9129bce6d') version('20180600', sha256='794d483dd9a19c43dc1fbbe284ce8956eb7f2600ef350dac4c602f9b4eb26e90') + variant('shared', default=True, + description='Build shared 
instead of static libraries') + + conflicts('+shared', when='@:20190808') + def cmake_args(self): args = ["-DBUILD_TESTING=OFF", "-DCMAKE_CXX_STANDARD=11"] + args.append('-DBUILD_SHARED_LIBS:Bool={0}'.format( + 'ON' if '+shared' in self.spec else 'OFF')) return args diff --git a/var/spack/repos/builtin/packages/accumulo/package.py b/var/spack/repos/builtin/packages/accumulo/package.py new file mode 100644 index 00000000000..219d313aa10 --- /dev/null +++ b/var/spack/repos/builtin/packages/accumulo/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Accumulo(Package): + """Apache Accumulo is a sorted, distributed key/value store that + provides robust, scalable data storage and retrieval.""" + + homepage = "https://accumulo.apache.org/" + url = "https://github.com/apache/accumulo/archive/rel/2.0.0.tar.gz" + + version('2.0.0', sha256='2564056dc24398aa464763c21bae10ef09356fe3261600d27744071cf965c265') + version('1.9.3', sha256='d9548d5b9cf9f494f027f0fe59d5d6d45d09064359d7761cade62991ce2a5d0c') + version('1.9.2', sha256='11ab028143ad6313cd5fc701b36b4c35e46a4a3fa2ce663869860b9f6bf5ee4d') + + depends_on('maven', type='build') + depends_on('java', type=('build', 'run')) + + def install(self, spec, prefix): + mvn = which('mvn') + mvn('package', '-DskipTests') + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/acts-core/package.py b/var/spack/repos/builtin/packages/acts/package.py similarity index 68% rename from var/spack/repos/builtin/packages/acts-core/package.py rename to var/spack/repos/builtin/packages/acts/package.py index ea3498b4f3a..849c326b18a 100644 --- a/var/spack/repos/builtin/packages/acts-core/package.py +++ b/var/spack/repos/builtin/packages/acts/package.py @@ -6,9 +6,9 @@ from spack import * -class ActsCore(CMakePackage): 
+class Acts(CMakePackage): """ - A Common Tracking Software (ACTS) + A Common Tracking Software (Acts) This project contains an experiment-independent set of track reconstruction tools. The main philosophy is to provide high-level track reconstruction @@ -33,7 +33,12 @@ class ActsCore(CMakePackage): git = "https://github.com/acts-project/acts.git" maintainers = ['HadrienG2'] + # Supported Acts versions version('master', branch='master') + version('0.24.0', commit='ef4699c8500bfea59a5fe88bed67fde2f00f0adf') + version('0.23.0', commit='dc443dd7e663bc4d7fb3c1e3f1f75aaf57ffd4e4') + version('0.22.1', commit='ca1b8b1645db6b552f44c48d2ff34c8c29618f3a') + version('0.22.0', commit='2c8228f5843685fc0ae69a8b95dd8fc001139efb') version('0.21.0', commit='10b719e68ddaca15b28ac25b3daddce8c0d3368d') version('0.20.0', commit='1d37a849a9c318e8ca4fa541ef8433c1f004637b') version('0.19.0', commit='408335636486c421c6222a64372250ef12544df6') @@ -63,13 +68,13 @@ class ActsCore(CMakePackage): version('0.08.1', commit='289bdcc320f0b3ff1d792e29e462ec2d3ea15df6') version('0.08.0', commit='99eedb38f305e3a1cd99d9b4473241b7cd641fa9') - # Variants that affect the core ACTS library + # Variants that affect the core Acts library variant('benchmarks', default=False, description='Build the performance benchmarks') variant('examples', default=False, description='Build the examples') - variant('tests', default=False, description='Build the unit tests') variant('integration_tests', default=False, description='Build the integration tests') + variant('unit_tests', default=False, description='Build the unit tests') - # Variants the enable / disable ACTS plugins + # Variants that enable / disable Acts plugins variant('dd4hep', default=False, description='Build the DD4hep plugin') variant('digitization', default=False, description='Build the geometric digitization plugin') variant('fatras', default=False, description='Build the FAst TRAcking Simulation package') @@ -78,15 +83,44 @@ class 
ActsCore(CMakePackage): variant('legacy', default=False, description='Build the Legacy package') variant('tgeo', default=False, description='Build the TGeo plugin') - depends_on('cmake @3.11:', type='build') + # Variants that only affect Acts examples for now + variant('geant4', default=False, description='Build the Geant4-based examples') + variant('hepmc3', default=False, description='Build the HepMC3-based examples') + variant('pythia8', default=False, description='Build the Pythia8-based examples') + + # Build dependencies depends_on('boost @1.62:1.69.99 +program_options +test', when='@:0.10.3') - depends_on('boost @1.62: +program_options +test', when='@0.10.4:0.18.0') - depends_on('boost @1.69: +program_options +test', when='@0.19.0:') + depends_on('boost @1.69: +filesystem +program_options +test', when='@0.10.4:') + depends_on('cmake @3.11:', type='build') + depends_on('dd4hep @1.10: +xercesc', when='+dd4hep') + depends_on('dd4hep @1.10: +geant4 +xercesc', when='+dd4hep +geant4') depends_on('eigen @3.2.9:', type='build') - depends_on('nlohmann-json @3.2.0:', when='@0.14.0: +json') + depends_on('geant4', when='+geant4') + depends_on('hepmc3@3.1:', when='+hepmc3') + depends_on('heppdt', when='+hepmc3') + depends_on('intel-tbb', when='+examples') + depends_on('nlohmann-json @3.2.0:', when='@0.14: +json') + depends_on('pythia8', when='+pythia8') depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0') depends_on('root @6.10: cxxstd=17', when='+tgeo @0.8.1:') - depends_on('dd4hep @1.2: +xercesc', when='+dd4hep') + + # Some variant combinations do not make sense + conflicts('+benchmarks', when='@:0.15') + conflicts('+dd4hep', when='-tgeo') + conflicts('+examples', when='@:0.22') + conflicts('+examples', when='-digitization') + conflicts('+examples', when='-fatras') + conflicts('+examples', when='-identification') + conflicts('+examples', when='-json') + conflicts('+examples', when='-tgeo') + conflicts('+fatras', when='@:0.15') + conflicts('+geant4', when='@:0.22') 
+ conflicts('+geant4', when='-examples') + conflicts('+hepmc3', when='@:0.22') + conflicts('+hepmc3', when='-examples') + conflicts('+pythia8', when='@:0.22') + conflicts('+pythia8', when='-examples') + conflicts('+tgeo', when='-identification') def cmake_args(self): spec = self.spec @@ -95,22 +129,30 @@ def cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies('+' + spack_variant) return "-DACTS_BUILD_{0}={1}".format(cmake_label, enabled) + def example_cmake_variant(cmake_label, spack_variant): + enabled = spec.satisfies('+examples +' + spack_variant) + return "-DACTS_BUILD_EXAMPLES_{0}={1}".format(cmake_label, enabled) + integration_tests_label = "INTEGRATIONTESTS" - tests_label = "UNITTESTS" + unit_tests_label = "UNITTESTS" if spec.satisfies('@:0.15.99'): integration_tests_label = "INTEGRATION_TESTS" - tests_label = "TESTS" + unit_tests_label = "TESTS" args = [ cmake_variant("BENCHMARKS", "benchmarks"), - cmake_variant("EXAMPLES", "examples"), - cmake_variant(tests_label, "tests"), - cmake_variant(integration_tests_label, "integration_tests"), - cmake_variant("DIGITIZATION_PLUGIN", "digitization"), cmake_variant("DD4HEP_PLUGIN", "dd4hep"), + cmake_variant("DIGITIZATION_PLUGIN", "digitization"), + cmake_variant("EXAMPLES", "examples"), + example_cmake_variant("DD4HEP", "dd4hep"), + example_cmake_variant("GEANT4", "geant4"), + example_cmake_variant("HEPMC3", "hepmc3"), + example_cmake_variant("PYTHIA8", "pythia8"), cmake_variant("FATRAS", "fatras"), cmake_variant("IDENTIFICATION_PLUGIN", "identification"), + cmake_variant(integration_tests_label, "integration_tests"), cmake_variant("JSON_PLUGIN", "json"), + cmake_variant(unit_tests_label, "unit_tests"), cmake_variant("LEGACY", "legacy"), cmake_variant("TGEO_PLUGIN", "tgeo") ] diff --git a/var/spack/repos/builtin/packages/adios/package.py b/var/spack/repos/builtin/packages/adios/package.py index c850b3058b5..4bcd3f69d6f 100644 --- a/var/spack/repos/builtin/packages/adios/package.py +++ 
b/var/spack/repos/builtin/packages/adios/package.py @@ -127,6 +127,11 @@ def with_or_without_hdf5(self, activated): return '--without-phdf5' + def setup_build_environment(self, env): + # https://github.com/ornladios/ADIOS/issues/206 + if self.spec.satisfies('%gcc@10: +fortran'): + env.set('FCFLAGS', '-fallow-argument-mismatch') + def configure_args(self): spec = self.spec self.validate(spec) diff --git a/var/spack/repos/builtin/packages/adios2/package.py b/var/spack/repos/builtin/packages/adios2/package.py index 9d298b4213c..13f502e050d 100644 --- a/var/spack/repos/builtin/packages/adios2/package.py +++ b/var/spack/repos/builtin/packages/adios2/package.py @@ -11,12 +11,13 @@ class Adios2(CMakePackage): developed in the Exascale Computing Program""" homepage = "https://csmd.ornl.gov/software/adios2" - url = "https://github.com/ornladios/ADIOS2/archive/v2.5.0.tar.gz" + url = "https://github.com/ornladios/ADIOS2/archive/v2.6.0.tar.gz" git = "https://github.com/ornladios/ADIOS2.git" maintainers = ['ax3l', 'chuckatkins', 'williamfgc'] version('master', branch='master') + version('2.6.0', sha256='45b41889065f8b840725928db092848b8a8b8d1bfae1b92e72f8868d1c76216c') version('2.5.0', sha256='7c8ff3bf5441dd662806df9650c56a669359cb0185ea232ecb3578de7b065329') version('2.4.0', sha256='50ecea04b1e41c88835b4b3fd4e7bf0a0a2a3129855c9cc4ba6cf6a1575106e2') version('2.3.1', sha256='3bf81ccc20a7f2715935349336a76ba4c8402355e1dc3848fcd6f4c3c5931893') @@ -76,7 +77,7 @@ class Adios2(CMakePackage): # DataMan needs dlopen conflicts('+dataman', when='~shared') - depends_on('cmake@3.6.0:', type='build') + depends_on('cmake@3.12.0:', type='build') depends_on('pkgconfig', type='build') depends_on('libffi', when='+sst') # optional in DILL @@ -118,6 +119,21 @@ class Adios2(CMakePackage): # See https://github.com/ornladios/ADIOS2/pull/1899 patch('2.5-fix-clear_cache.patch', when='@2.5.0') + @when('%fj') + def patch(self): + """ add fujitsu mpi commands #16864 """ + f = join_path('cmake', 'upstream', 
'FindMPI.cmake') + filter_file('mpcc_r)', 'mpcc_r mpifcc)', f, string=True) + filter_file('mpc++_r)', 'mpcc_r mpiFCC)', f, string=True) + filter_file('mpf77_r', 'mpf77_r mpifrt', f, string=True) + + def setup_build_environment(self, env): + # https://github.com/ornladios/ADIOS2/issues/2228 + if self.spec.satisfies('%gcc@10: +fortran'): + env.set('FFLAGS', '-fallow-argument-mismatch') + elif self.spec.satisfies('%fj +fortran'): + env.set('FFLAGS', '-Ccpp') + def cmake_args(self): spec = self.spec @@ -163,6 +179,9 @@ def cmake_args(self): args.append('-DADIOS2_USE_DataSpaces={0}'.format( 'ON' if '+dataspaces' in spec else 'OFF')) + if spec.version >= Version('2.6.0'): + args.append('-DADIOS2_USE_IME=OFF') + if '+sst' in spec: args.extend([ # Broken dependency package @@ -179,6 +198,12 @@ def cmake_args(self): args.append('-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL={0}'.format( 'ON' if '+pic' in spec else 'OFF')) + if spec.satisfies('%fj'): + args.extend([ + '-DCMAKE_Fortran_SUBMODULE_EXT=.smod', + '-DCMAKE_Fortran_SUBMODULE_SEP=.' 
+ ]) + if spec.satisfies('+python') or self.run_tests: args.append('-DPYTHON_EXECUTABLE:FILEPATH=%s' % spec['python'].command.path) diff --git a/var/spack/repos/builtin/packages/alps/mpi.patch b/var/spack/repos/builtin/packages/alps/mpi.patch new file mode 100644 index 00000000000..bd68181eb76 --- /dev/null +++ b/var/spack/repos/builtin/packages/alps/mpi.patch @@ -0,0 +1,13 @@ +diff --git a/alps/src/alps/CMakeLists.txt b/alps/src/alps/CMakeLists.txt +index ae73f13..0d1ba34 100644 +--- a/alps/src/alps/CMakeLists.txt ++++ b/alps/src/alps/CMakeLists.txt +@@ -93,7 +93,7 @@ if (Boost_FOUND) + set(ALPS_LINK_LIBS ${ALPS_LINK_LIBS} ${PYTHON_LIBRARY} ${PYTHON_EXTRA_LIBS}) + endif(PYTHONLIBS_FOUND) + if(MPI_FOUND) +- set(ALPS_LINK_LIBS ${ALPS_LINK_LIBS} ${MPI_LIBRARIES} ${MPI_EXTRA_LIBRARY}) ++ set(ALPS_LINK_LIBS ${ALPS_LINK_LIBS} ${MPI_LIBRARIES}) + endif(MPI_FOUND) + target_link_libraries(alps ${ALPS_LINK_LIBS}) + else (Boost_FOUND) diff --git a/var/spack/repos/builtin/packages/alps/package.py b/var/spack/repos/builtin/packages/alps/package.py new file mode 100644 index 00000000000..55df8729330 --- /dev/null +++ b/var/spack/repos/builtin/packages/alps/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Alps(CMakePackage): + """Algorithms for Physics Simulations + + Tags: Condensed Matter Physics, Computational Physics + """ + + homepage = "https://alps.comp-phys.org" + url = "http://alps.comp-phys.org/static/software/releases/alps-2.3.0-src.tar.gz" + + version('2.3.0', sha256='e64208d1e5acdd6f569277413c4867e1fa366cf4a224570eacbf1e9939fca2d2') + + # Refs for building from source and recipes + # http://alps.comp-phys.org/mediawiki/index.php/Building_ALPS_from_source + # https://github.com/easybuilders/easybuild-easyconfigs/tree/master/easybuild/easyconfigs/a/ALPS + # https://github.com/conda-forge/alps-feedstock/tree/master/recipe + + # Package failed to build with boost version >= 1.64 + depends_on('boost@:1.63.0 +chrono +date_time +filesystem +iostreams +mpi +numpy +program_options +python +regex +serialization +system +test +thread +timer') + depends_on('fftw') + depends_on('hdf5 ~mpi+hl') + depends_on('lapack') + # build fails for latest python@3.7 + depends_on('python@:3.6.99', type=('build', 'link', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-scipy', type=('build', 'run')) + depends_on('py-matplotlib', type=('build', 'run')) + + # build fails with gcc@7: + conflicts('%gcc@7:') + + # remove a problematic build variable + patch('mpi.patch') + + extends('python') + + root_cmakelists_dir = 'alps' + + def cmake_args(self): + args = [] + args.append('Boost_ROOT_DIR=' + self.spec['boost'].prefix) + args.append("-DCMAKE_CXX_FLAGS={0}".format(self.compiler.cxx98_flag)) + return args diff --git a/var/spack/repos/builtin/packages/alsa-lib/package.py b/var/spack/repos/builtin/packages/alsa-lib/package.py index e766a8cc16e..9b53de24e9b 100644 --- a/var/spack/repos/builtin/packages/alsa-lib/package.py +++ b/var/spack/repos/builtin/packages/alsa-lib/package.py @@ -12,6 +12,29 @@ class AlsaLib(AutotoolsPackage): space library that developers compile ALSA 
applications against.""" homepage = "https://www.alsa-project.org" - url = "ftp://ftp.alsa-project.org/pub/lib/alsa-lib-1.1.4.1.tar.bz2" + url = "ftp://ftp.alsa-project.org/pub/lib/alsa-lib-1.2.2.tar.bz2" + version('1.2.2', sha256='d8e853d8805574777bbe40937812ad1419c9ea7210e176f0def3e6ed255ab3ec') version('1.1.4.1', sha256='91bb870c14d1c7c269213285eeed874fa3d28112077db061a3af8010d0885b76') + + variant('python', default=False, description='enable python') + + patch('python.patch', when='@1.1.4:1.1.5 +python') + + depends_on('python', type=('link', 'run'), when='+python') + + def configure_args(self): + spec = self.spec + args = [] + if spec.satisfies('+python'): + args.append( + '--with-pythonlibs={0}'.format(spec['python'].libs.ld_flags) + ) + args.append( + '--with-pythonincludes={0}'.format( + spec['python'].headers.include_flags + ) + ) + else: + args.append('--disable-python') + return args diff --git a/var/spack/repos/builtin/packages/alsa-lib/python.patch b/var/spack/repos/builtin/packages/alsa-lib/python.patch new file mode 100644 index 00000000000..6cc6cdb40e6 --- /dev/null +++ b/var/spack/repos/builtin/packages/alsa-lib/python.patch @@ -0,0 +1,20 @@ +diff --git a/modules/mixer/simple/python.c b/modules/mixer/simple/python.c +index c822c52a..917a5d0a 100644 +--- a/modules/mixer/simple/python.c ++++ b/modules/mixer/simple/python.c +@@ -588,7 +588,6 @@ + pymelem_dealloc(struct pymelem *self) + { + selem_free(self->melem); +- self->ob_type->tp_free(self); + } + + static PyGetSetDef pymelem_getseters[] = { +@@ -800,7 +799,6 @@ + pymixer_dealloc(struct pymixer *self) + { + pymixer_free(self); +- self->ob_type->tp_free(self); + } + + static PyGetSetDef pymixer_getseters[] = { diff --git a/var/spack/repos/builtin/packages/ambari/package.py b/var/spack/repos/builtin/packages/ambari/package.py new file mode 100644 index 00000000000..6e54f0a44e7 --- /dev/null +++ b/var/spack/repos/builtin/packages/ambari/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence 
Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ambari(PythonPackage): + """Apache Ambari is a tool for provisioning, managing, and monitoring + Apache Hadoop clusters. Ambari consists of a set of RESTful APIs and + a browser-based management interface.""" + + homepage = "https://cwiki.apache.org/confluence/display/AMBARI/Ambari" + url = "https://github.com/apache/ambari/archive/release-2.7.5.tar.gz" + + version('2.7.5', sha256='f8c8687b7a61b633b92f83b1c104fd75b1e13836cd8a0e0df6db7b483b23a354') + version('2.7.4', sha256='d6796c7ea913d39c93dad52b4cb74ef411a7dce4ebf68f11b12718117f2c01a4') + version('2.7.3', sha256='30fe72e60fa6b62fe032bd193ebd0cef20b65c54b57cad92f6f44daabd3771cf') + version('2.7.1', sha256='ea4eb28f377ce9d0b9b7648f2020dda4be974c6d9a22ebaafbf1bc97890e4e42') + + depends_on('python@:2.7.999', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-mock', type='test') + depends_on('py-coilmq', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/amber/package.py b/var/spack/repos/builtin/packages/amber/package.py index eae98b53bda..d2ec13fb3e9 100644 --- a/var/spack/repos/builtin/packages/amber/package.py +++ b/var/spack/repos/builtin/packages/amber/package.py @@ -96,7 +96,7 @@ class Amber(Package, CudaPackage): depends_on('mpi', when='+mpi') # Cuda dependencies - depends_on('cuda@:10.1.243', when='@18:+cuda') + depends_on('cuda@:10.2.89', when='@18:+cuda') depends_on('cuda@7.5.18', when='@:16+cuda') # conflicts diff --git a/var/spack/repos/builtin/packages/amgx/package.py b/var/spack/repos/builtin/packages/amgx/package.py new file mode 100644 index 00000000000..c03f3599df8 --- /dev/null +++ b/var/spack/repos/builtin/packages/amgx/package.py @@ -0,0 +1,58 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project 
Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Amgx(CMakePackage, CudaPackage): + """AmgX provides a simple path to accelerated core solver technology on + NVIDIA GPUs. AmgX provides up to 10x acceleration to the computationally + intense linear solver portion of simulations, and is especially well + suited for implicit unstructured methods. It is a high performance, + state-of-the-art library and includes a flexible solver composition + system that allows a user to easily construct complex nested solvers and + preconditioners.""" + + homepage = "https://developer.nvidia.com/amgx" + url = "https://github.com/nvidia/amgx/archive/v2.1.0.tar.gz" + + maintainers = ['js947'] + + version('2.1.0', sha256='6245112b768a1dc3486b2b3c049342e232eb6281a6021fffa8b20c11631f63cc') + version('2.0.1', sha256='6f9991f1836fbf4ba2114ce9f49febd0edc069a24f533bd94fd9aa9be72435a7') + version('2.0.0', sha256='8ec7ea8412be3de216fcf7243c4e2a8bcf76878e6865468e4238630a082a431b') + + variant('cuda', default=True, description='Build with CUDA') + variant('mpi', default=True, description='Enable MPI support') + variant('mkl', default=False, description='Enable MKL support') + variant('magma', default=False, description='Enable Magma support') + + depends_on('mpi', when='+mpi') + depends_on('mkl', when='+mkl') + depends_on('magma', when='+magma') + + def cmake_args(self): + args = [] + args.append("-DCMAKE_NO_MPI={0}".format( + '1' if '+mpi' not in self.spec else '0')) + + if '+cuda' in self.spec: + args.append('-DWITH_CUDA=ON') + cuda_arch = self.spec.variants['cuda_arch'].value + if cuda_arch != 'none': + args.append('-DCUDA_ARCH={0}'.format(cuda_arch[0])) + else: + args.append('-DWITH_CUDA=OFF') + + if '+mkl' in self.spec: + args.append('-DMKL_ROOT_DIR={0}'.format( + self.spec['mkl'].prefix)) + + if '+magma' in self.spec: + args.append('-DMAGMA_ROOT_DIR={0}'.format( + self.spec['magma'].prefix)) + + 
return args diff --git a/var/spack/repos/builtin/packages/amrex/package.py b/var/spack/repos/builtin/packages/amrex/package.py index 5bd19e582ba..32da808f1d1 100644 --- a/var/spack/repos/builtin/packages/amrex/package.py +++ b/var/spack/repos/builtin/packages/amrex/package.py @@ -12,21 +12,23 @@ class Amrex(CMakePackage): mesh refinement (AMR) applications.""" homepage = "https://amrex-codes.github.io/amrex/" - url = "https://github.com/AMReX-Codes/amrex/archive/20.01.tar.gz" + url = "https://github.com/AMReX-Codes/amrex/releases/download/20.05/amrex-20.05.tar.gz" git = "https://github.com/AMReX-Codes/amrex.git" maintainers = ['mic84', 'asalmgren'] version('develop', branch='development') - version('20.04', sha256='ce951105336d6fcc07abe3eadf9f71161f0ccbe3e45f4547be4d0ae99e15f3c6') - version('20.03', sha256='a535dcc016f0d38b55d0ab8e9067c1c53e3686961f6a1fb471cb18a0ebc909e6') - version('20.02', sha256='33529a23694283d12eb37d4682aa86c9cc1240bd50124efcf4464747a7554147') - version('20.01', sha256='f7026d267ca5de79ec7e740264d54230f419776d40feae705e939be0b1d8e0d3') - version('19.10', commit='52844b32b7da11e9733b9a7f4a782e51de7f5e1e') # tag:19.10 - version('19.08', commit='bdd1146139e8727a513d451075f900c172eb81fd') # tag:19.08 - version('18.10.1', commit='260b53169badaa760b91dfc60ea6b2ea3d9ccf06') # tag:18.10.1 - version('18.10', commit='d37a266c38092e1174096e245326e9eead1f4e03') # tag:18.10 - version('18.09.1', commit='88120db4736c325a2d3d2c291adacaffd3bf224b') # tag:18.09.1 + version('20.06', sha256='be2f2a5107111fcb8b3928b76024b370c7cb01a9e5dd79484cf7fcf59d0b4858') + version('20.05', sha256='97d753bb75e845a0a959ec1a044a48e6adb86dd008b5e29ce7a01d49ed276338') + version('20.04', sha256='a7ece54d5d89cc00fd555551902a0d4d0fb50db15d2600f441353eed0dddd83b') + version('20.03', sha256='9728f20c0d7297c935fe5cbc63c1ee60f983b833a735c797340ee2765d626165') + version('20.02', sha256='2eda858b43e7455718ccb96c18f678da1778ec61031e90effdcb9c3e7e6f9bb5') + version('20.01', 
sha256='957e7a7fe90a0a9f4ae10bf9e46dba68d72448d0bec69a4a4e66a544930caca3') + version('19.10', sha256='9f30a2b3ec13711dfc6a1b59af59bd7df78449b5846ac6457b5dbbdecb20c576') + version('19.08', sha256='94b1e9a9dcfb8c5b52aef91a2ed373aef504d766dd7d0aba6731ceb94e48e940') + version('18.10.1', sha256='e648465c9c3b7ff4c696dfa8b6d079b4f61c80d96c51e27af210951c9367c201') + version('18.10', sha256='298eba03ef03d617c346079433af1089d38076d6fab2c34476c687740c1f4234') + version('18.09.1', sha256='a065ee4d1d98324b6c492ae20ea63ba12a4a4e23432bf5b3fe9788d44aa4398e') # Config options variant('dimensions', default='3', @@ -59,11 +61,19 @@ class Amrex(CMakePackage): # Build dependencies depends_on('mpi', when='+mpi') depends_on('sundials@4.0.0:4.1.0 +ARKODE +CVODE', when='@19.08: +sundials') - depends_on('python@2.7:', type='build') + depends_on('python@2.7:', type='build', when='@:20.04') depends_on('cmake@3.5:', type='build', when='@:18.10.99') - depends_on('cmake@3.13:', type='build', when='@18.11:') + depends_on('cmake@3.13:', type='build', when='@18.11:') + depends_on('cmake@3.14:', type='build', when='@19.04:') conflicts('%clang') + def url_for_version(self, version): + if version >= Version('20.05'): + url = "https://github.com/AMReX-Codes/amrex/releases/download/{0}/amrex-{0}.tar.gz" + else: + url = "https://github.com/AMReX-Codes/amrex/archive/{0}.tar.gz" + return url.format(version.dotted) + def cmake_is_on(self, option): return 'ON' if option in self.spec else 'OFF' diff --git a/var/spack/repos/builtin/packages/angsd/package.py b/var/spack/repos/builtin/packages/angsd/package.py index 561fe972018..6dae47f2204 100644 --- a/var/spack/repos/builtin/packages/angsd/package.py +++ b/var/spack/repos/builtin/packages/angsd/package.py @@ -16,18 +16,34 @@ class Angsd(MakefilePackage): homepage = "https://github.com/ANGSD/angsd" url = "https://github.com/ANGSD/angsd/archive/0.919.tar.gz" + version('0.933', sha256='2f992325dc08fa25ac525d9300ef6bd61808e74c521b4cc72a2ce00d98f402bb') 
version('0.921', sha256='8892d279ce1804f9e17fe2fc65a47e5498e78fc1c1cb84d2ca2527fd5c198772') version('0.919', sha256='c2ea718ca5a5427109f4c3415e963dcb4da9afa1b856034e25c59c003d21822a') + variant('r', default=True, description='Enable R dependency') + depends_on('htslib') conflicts('^htslib@1.6:', when='@0.919') + depends_on('zlib') + depends_on('lzma') + depends_on('curl') + + depends_on('r', type='run', when='+rlib') + def setup_run_environment(self, env): env.set('R_LIBS', self.prefix.R) def install(self, spec, prefix): + binaries = [ + 'angsd', 'misc/realSFS', 'misc/thetaStat' + ] + mkdirp(prefix.bin) - install('angsd', join_path(prefix.bin)) + + for b in binaries: + install(b, join_path(prefix.bin)) + install_tree('R', prefix.R) install_tree('RES', prefix.RES) install_tree('scripts', prefix.scripts) diff --git a/var/spack/repos/builtin/packages/antimony/package.py b/var/spack/repos/builtin/packages/antimony/package.py new file mode 100644 index 00000000000..157a0a2510b --- /dev/null +++ b/var/spack/repos/builtin/packages/antimony/package.py @@ -0,0 +1,54 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Antimony(CMakePackage): + """Human readable language for modifying sbml""" + + homepage = "http://antimony.sourceforge.net/" + url = "antimony" + + maintainers = ['rblake-llnl'] + + version('2.8', sha256='7e3e38706c074b72e241ac56ef4ce23e87ef8c718c70f29b2207f1847c43770f') + version('2.7', sha256='7ad181cac632282ae77ced09388dd92db87ea4683eed8c45f2b43861ae2acad4') + version('2.6', sha256='afc8dc5ec6bc2cd3085038f80362327456f219171b09a13f775b50550c8b1d87') + version('2.5', sha256='138d6b45df62198ca71bd3b3c8fd06920f8a78d7de7f6dbc1b89fa7ea7c7d215') + version('2.4', sha256='1597efa823f9a48f5a40373cbd40386207764807fbc0b79cf20d0f8570a7e54b') + version('2.2', sha256='795c777dd90c28fd8c3f4f8896702744b7389cff2fcf40e797b4bfafbb6f7251') + version('2.0', sha256='778146206e5f420d0e3d30dc25eabc9bad2759bfaf6b4b355bb1f72c5bc9593f') + + def url_for_version(self, version): + url = "https://downloads.sourceforge.net/project/antimony/Antimony source/{0}/antimony_src_v{1}.tar.gz".format(version, version) + return url + + variant("qt", default=False, + description="Build the QT editor.") + variant("python", default=False, + description="Build python bindings.") + + depends_on('sbml~cpp') + depends_on('swig') + depends_on('qt', when="+qt") + depends_on('python', when="+python") + + def cmake_args(self): + spec = self.spec + args = [ + '-DWITH_SBML:BOOL=ON', + '-DWITH_COMP_SBML:BOOL=ON', + '-DWITH_LIBSBML_EXPAT:BOOL=OFF', + '-DWITH_LIBSBML_LIBXML:BOOL=ON', + '-DWITH_LIBSBML_XERCES:BOOL=OFF', + '-DLIBSBML_INSTALL_DIR:PATH=' + spec['sbml'].prefix, + '-DWITH_CELLML:BOOL=OFF', + '-DWITH_SBW:BOOL=OFF', + '-DWITH_SWIG:BOOL=ON', + ] + args.append(self.define_from_variant('WITH_PYTHON', 'python')) + args.append(self.define_from_variant('WITH_QTANTIMONY', "qt")) + return args diff --git a/var/spack/repos/builtin/packages/arborx/package.py b/var/spack/repos/builtin/packages/arborx/package.py index 
2408dd1a99d..7d10ef1fabb 100644 --- a/var/spack/repos/builtin/packages/arborx/package.py +++ b/var/spack/repos/builtin/packages/arborx/package.py @@ -28,15 +28,15 @@ class Arborx(CMakePackage): # ArborX relies on Kokkos to provide devices, thus having one-to-one match # The only way to disable those devices is to make sure Kokkos does not # provide them - depends_on('kokkos@2.7.00:+cuda+enable_lambda cxxstd=c++14', when='+cuda') - depends_on('kokkos@2.7.00:+openmp cxxstd=c++14', when='+openmp') - depends_on('kokkos@2.7.00:+serial cxxstd=c++14', when='+serial') + depends_on('kokkos-legacy@2.7.00:+cuda+enable_lambda cxxstd=c++14', when='+cuda') + depends_on('kokkos-legacy@2.7.00:+openmp cxxstd=c++14', when='+openmp') + depends_on('kokkos-legacy@2.7.00:+serial cxxstd=c++14', when='+serial') def cmake_args(self): spec = self.spec options = [ - '-DCMAKE_PREFIX_PATH=%s' % spec['kokkos'].prefix, + '-DCMAKE_PREFIX_PATH=%s' % spec['kokkos-legacy'].prefix, '-DARBORX_ENABLE_TESTS=OFF', '-DARBORX_ENABLE_EXAMPLES=OFF', '-DARBORX_ENABLE_BENCHMARKS=OFF', diff --git a/var/spack/repos/builtin/packages/argobots/package.py b/var/spack/repos/builtin/packages/argobots/package.py index 8f967d020b1..2dc8212eb7f 100644 --- a/var/spack/repos/builtin/packages/argobots/package.py +++ b/var/spack/repos/builtin/packages/argobots/package.py @@ -20,20 +20,16 @@ class Argobots(AutotoolsPackage): git = "https://github.com/pmodels/argobots.git" maintainers = ['shintaro-iwasaki'] - version("master", branch="master") + version("main", branch="main") version("1.0", sha256="36a0815f7bf99900a9c9c1eef61ef9b3b76aa2cfc4594a304f6c8c3296da8def") - version("1.0rc2", sha256="7496b8bd39930a548b01aa3b1fe8f8b582c272600ef6a05ddc4398cf21dc12a2") - version("1.0rc1", sha256="2dc4487556dce602655a6535f501136f0edc3575708029c80b1af6dccd069ce7") - version("1.0b1", sha256="480b85b0e8db288400088a57c2dc5639f556843b06b0492841920c38348a2a3e") - version("1.0a1", 
sha256="bef93e06026ddeba8809474923176803e64d08e1425672cd7c5b424c797d5d9d") variant("valgrind", default=False, description="Enable Valgrind") variant("debug", default=False, description="Compiled with debugging symbols") - depends_on("m4", type=("build"), when="@master") - depends_on("autoconf", type=("build"), when="@master") - depends_on("automake", type=("build"), when="@master") - depends_on("libtool", type=("build"), when="@master") + depends_on("m4", type=("build"), when="@main") + depends_on("autoconf", type=("build"), when="@main") + depends_on("automake", type=("build"), when="@main") + depends_on("libtool", type=("build"), when="@main") depends_on("valgrind", when="+valgrind") def configure_args(self): diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 35a263541f3..1698a0e4bd1 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -85,6 +85,12 @@ def libs(self): libraries, root=self.prefix, shared=True, recursive=True ) + @when('@:3.7.0 %gcc@10:') + def setup_build_environment(self, env): + # version up to and including 3.7.0 are not ported to gcc 10 + # https://github.com/opencollab/arpack-ng/issues/242 + env.set('FFLAGS', '-fallow-argument-mismatch') + @when('@3.4.0:') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/arrayfire/package.py b/var/spack/repos/builtin/packages/arrayfire/package.py new file mode 100644 index 00000000000..d3a7b65e2c1 --- /dev/null +++ b/var/spack/repos/builtin/packages/arrayfire/package.py @@ -0,0 +1,47 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Arrayfire(CMakePackage, CudaPackage): + """ArrayFire is a high performance software library for parallel computing + with an easy-to-use API. Its array based function set makes parallel + programming more accessible.""" + + homepage = "http://arrayfire.org/docs/index.htm" + git = "https://github.com/arrayfire/arrayfire.git" + + version('master', submodules=True) + version('3.7.0', submodules=True, tag='v3.7.0') + + variant('cuda', default=False, description='Enable Cuda backend') + variant('forge', default=False, description='Enable graphics library') + variant('opencl', default=False, description='Enable OpenCL backend') + + depends_on('boost@1.65:') + depends_on('fftw') + depends_on('blas') + depends_on('cuda@7.5:', when='+cuda') + depends_on('cudnn', when='+cuda') + depends_on('opencl +icd', when='+opencl') + # TODO add more opencl backends: + # currently only Cuda backend is enabled + # https://github.com/arrayfire/arrayfire/wiki/Build-Instructions-for-Linux#opencl-backend-dependencies + + depends_on('fontconfig', when='+forge') + depends_on('glfw@3.1.4:', when='+forge') + + def cmake_args(self): + args = [] + args.extend([ + '-DAF_BUILD_CUDA={0}'.format( + 'ON' if '+cuda' in self.spec else 'OFF'), + '-DAF_BUILD_FORGE={0}'.format( + 'ON' if '+forge' in self.spec else 'OFF'), + '-DAF_BUILD_OPENCL={0}'.format( + 'ON' if '+opencl' in self.spec else 'OFF'), + ]) + return args diff --git a/var/spack/repos/builtin/packages/arrow/package.py b/var/spack/repos/builtin/packages/arrow/package.py index 8cc4a067669..ef0e1adc8c8 100644 --- a/var/spack/repos/builtin/packages/arrow/package.py +++ b/var/spack/repos/builtin/packages/arrow/package.py @@ -15,6 +15,7 @@ class Arrow(CMakePackage): homepage = "http://arrow.apache.org" url = "https://github.com/apache/arrow/archive/apache-arrow-0.9.0.tar.gz" + version('0.17.1', sha256='ecb6da20f9288c0ca31f9b457ffdd460198765a8af27c1cac4b1382a8d130f86') 
version('0.15.1', sha256='ab1c0d371a10b615eccfcead71bb79832245d788f4834cc6b278c03c3872d593') version('0.15.0', sha256='d1072d8c4bf9166949f4b722a89350a88b7c8912f51642a5d52283448acdfd58') version('0.14.1', sha256='69d9de9ec60a3080543b28a5334dbaf892ca34235b8bd8f8c1c01a33253926c1') @@ -32,6 +33,7 @@ class Arrow(CMakePackage): depends_on('snappy~shared') depends_on('zlib+pic') depends_on('zstd+pic') + depends_on('thrift+pic', when='+parquet') variant('build_type', default='Release', description='CMake build type', diff --git a/var/spack/repos/builtin/packages/at-spi2-atk/package.py b/var/spack/repos/builtin/packages/at-spi2-atk/package.py index 3b2fd04e053..371b661b456 100644 --- a/var/spack/repos/builtin/packages/at-spi2-atk/package.py +++ b/var/spack/repos/builtin/packages/at-spi2-atk/package.py @@ -15,6 +15,7 @@ class AtSpi2Atk(MesonPackage): list_url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-atk" list_depth = 1 + version('2.34.2', sha256='901323cee0eef05c01ec4dee06c701aeeca81a314a7d60216fa363005e27f4f0') version('2.26.2', sha256='61891f0abae1689f6617a963105a3f1dcdab5970c4a36ded9c79a7a544b16a6e') version('2.26.1', sha256='b4f0c27b61dbffba7a5b5ba2ff88c8cee10ff8dac774fa5b79ce906853623b75') diff --git a/var/spack/repos/builtin/packages/at-spi2-core/package.py b/var/spack/repos/builtin/packages/at-spi2-core/package.py index 11695fd4c50..e7480e26a23 100644 --- a/var/spack/repos/builtin/packages/at-spi2-core/package.py +++ b/var/spack/repos/builtin/packages/at-spi2-core/package.py @@ -16,6 +16,7 @@ class AtSpi2Core(MesonPackage): list_url = "http://ftp.gnome.org/pub/gnome/sources/at-spi2-core" list_depth = 1 + version('2.36.0', sha256='88da57de0a7e3c60bc341a974a80fdba091612db3547c410d6deab039ca5c05a') version('2.28.0', sha256='42a2487ab11ce43c288e73b2668ef8b1ab40a0e2b4f94e80fca04ad27b6f1c87') depends_on('glib@2.56.1:') @@ -23,12 +24,13 @@ class AtSpi2Core(MesonPackage): depends_on('gettext') depends_on('libx11') depends_on('libxi') - depends_on('libxtst', 
type='build') - depends_on('recordproto', type='build') - depends_on('inputproto', type='build') - depends_on('fixesproto', type='build') + depends_on('libxtst') + depends_on('recordproto') + depends_on('inputproto') + depends_on('fixesproto') depends_on('pkgconfig', type='build') depends_on('python', type='build') + depends_on('gobject-introspection') def url_for_version(self, version): """Handle gnome's version-based custom URLs.""" diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py index 9bb7aeb2965..a28b053f01a 100644 --- a/var/spack/repos/builtin/packages/atk/package.py +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -17,6 +17,7 @@ class Atk(Package): list_url = "https://ftp.gnome.org/pub/gnome/sources/atk" list_depth = 1 + version('2.36.0', sha256='fb76247e369402be23f1f5c65d38a9639c1164d934e40f6a9cf3c9e96b652788') version('2.30.0', sha256='dd4d90d4217f2a0c1fee708a555596c2c19d26fef0952e1ead1938ab632c027b') version('2.28.1', sha256='cd3a1ea6ecc268a2497f0cd018e970860de24a6d42086919d6bf6c8e8d53f4fc') version('2.20.0', sha256='493a50f6c4a025f588d380a551ec277e070b28a82e63ef8e3c06b3ee7c1238f0') diff --git a/var/spack/repos/builtin/packages/atop/package.py b/var/spack/repos/builtin/packages/atop/package.py index c06d27cbd7b..3c0772fc4e4 100644 --- a/var/spack/repos/builtin/packages/atop/package.py +++ b/var/spack/repos/builtin/packages/atop/package.py @@ -20,6 +20,9 @@ class Atop(Package): depends_on('zlib') depends_on('ncurses') + def setup_build_environment(self, env): + env.append_flags('LDFLAGS', '-ltinfo') + def install(self, spec, prefix): make() mkdirp(prefix.bin) diff --git a/var/spack/repos/builtin/packages/audacious/package.py b/var/spack/repos/builtin/packages/audacious/package.py new file mode 100644 index 00000000000..0115d4c5537 --- /dev/null +++ b/var/spack/repos/builtin/packages/audacious/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and 
other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Audacious(AutotoolsPackage): + """A lightweight and versatile audio player.""" + + homepage = "https://audacious-media-player.org/" + url = "https://github.com/audacious-media-player/audacious/archive/audacious-4.0.2.tar.gz" + + version('4.0.2', sha256='92f30a78353c50f99b536061b9d94b6b9128760d546fddbf863e3591c4ac5a8d') + version('4.0.1', sha256='203195cf0d3c2e40d23c9895269ca0ace639c4a2b4dceb624169d75337059985') + version('4.0', sha256='cdfffd0eb966856980328ebb0fff9cbce57f99db9bda15e7e839d26c89e953e6') + version('3.10.1', sha256='c478939b4bcf6704c26eee87d48cab26547e92a83741f437711178c433373fa1') + version('3.10', sha256='82710d6ac90931c2cc4a0f0fcb6380ac21ed42a7a50856d16a67d3179a96e9ae') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('glib') + depends_on('qt') + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./autogen.sh') diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py index 5327c90daf5..0e9d22cb375 100644 --- a/var/spack/repos/builtin/packages/automake/package.py +++ b/var/spack/repos/builtin/packages/automake/package.py @@ -5,6 +5,9 @@ from spack import * +import os +import re + class Automake(AutotoolsPackage, GNUMirrorPackage): """Automake -- make file builder part of autotools""" @@ -25,6 +28,24 @@ class Automake(AutotoolsPackage, GNUMirrorPackage): build_directory = 'spack-build' + executables = ['automake'] + + @classmethod + def determine_spec_details(cls, prefix, exes_in_prefix): + exe_to_path = dict( + (os.path.basename(p), p) for p in exes_in_prefix + ) + if 'automake' not in exe_to_path: + return None + + exe = spack.util.executable.Executable(exe_to_path['automake']) + output = 
exe('--version', output=str) + if output: + match = re.search(r'GNU automake\)\s+(\S+)', output) + if match: + version_str = match.group(1) + return Spec('automake@{0}'.format(version_str)) + def patch(self): # The full perl shebang might be too long files_to_be_patched_fmt = 'bin/{0}.in' diff --git a/var/spack/repos/builtin/packages/avizo/package.py b/var/spack/repos/builtin/packages/avizo/package.py index cf14c76d22d..c8f769b9b23 100644 --- a/var/spack/repos/builtin/packages/avizo/package.py +++ b/var/spack/repos/builtin/packages/avizo/package.py @@ -17,12 +17,37 @@ class Avizo(Package): interface.""" homepage = "https://www.thermofisher.com/sa/en/home/industrial/electron-microscopy/electron-microscopy-instruments-workflow-solutions/3d-visualization-analysis-software.html" - version('9.7.0', '9c9b9e81957387f4218df0c5adbb80717e9ae80ab3ca6ff8da523f7f499dcc5b', + + version('2020.1', + sha256='9321aaa276567eebf116e268353c33a4c930d768d22793f921338e1d8cefe991', + url="file://{0}/Avizo-20201-Linux64-gcc48.bin".format(os.getcwd()), + expand=False) + version('2019.4', + sha256='a637720535bcbe254ab56368004a9544c64ec36186373fa24f26cee279685248', + url="file://{0}/Avizo-20194-Linux64-gcc48.bin".format(os.getcwd()), + expand=False) + version('2019.3', + sha256='be109df81e2f7238f234862367841dae05e76cc62218c1f36b1d9bc9514ce5f7', + url="file://{0}/Avizo-20193-Linux64-gcc48.bin".format(os.getcwd()), + expand=False) + version('9.7.0', + sha256='9c9b9e81957387f4218df0c5adbb80717e9ae80ab3ca6ff8da523f7f499dcc5b', + url="file://{0}/Avizo-970-Linux64-gcc44.bin".format(os.getcwd()), expand=False) - def url_for_version(self, version): - return "file://{0}/Avizo-{1}-Linux64-gcc44.bin".format(os.getcwd(), - version.joined) + gcc_ver = { + "9.7.0": "44", + "2019.3": "48", + "2019.4": "48", + "2020.1": "48" + } + + install_dir = { + "9.7.0": 'Avizo-9.7.0', + "2019.3": join_path('..', 'Avizo'), + "2019.4": join_path('..', 'Avizo'), + "2020.1": join_path('..', 'Avizo') + } # Licensing 
license_required = True @@ -37,14 +62,17 @@ def setup_run_environment(self, env): def install(self, spec, prefix): ver = self.version.joined sh = which('sh') - sh('Avizo-{0}-Linux64-gcc44.bin'.format(ver), '--noexec', '--keep') + sh('Avizo-{0}-Linux64-gcc{1}.bin' + .format(ver, self.gcc_ver[self.version.string]), + '--noexec', '--keep') with working_dir('Avizo'): - avizo_tar = tarfile.open(name='Avizo-{0}-Linux64-gcc44.tar.bz2' - .format(self.version)) + avizo_tar = tarfile.open(name='Avizo-{0}-Linux64-gcc{1}.tar.bz2' + .format(self.version, self.gcc_ver + [self.version.string])) avizo_tar.extractall() - with working_dir('Avizo-{0}'.format(self.version)): + with working_dir(self.install_dir[self.version.string]): install_tree('bin', prefix.bin) install_tree('lib', prefix.lib) install_tree('data', prefix.data) diff --git a/var/spack/repos/builtin/packages/aws-parallelcluster/package.py b/var/spack/repos/builtin/packages/aws-parallelcluster/package.py index 5737f078551..b9d65247372 100644 --- a/var/spack/repos/builtin/packages/aws-parallelcluster/package.py +++ b/var/spack/repos/builtin/packages/aws-parallelcluster/package.py @@ -12,7 +12,7 @@ class AwsParallelcluster(PythonPackage): tool to deploy and manage HPC clusters in the AWS cloud.""" homepage = "https://github.com/aws/aws-parallelcluster" - url = "https://pypi.io/packages/source/a/aws-parallelcluster/aws-parallelcluster-2.6.1.tar.gz" + url = "https://pypi.io/packages/source/a/aws-parallelcluster/aws-parallelcluster-2.7.0.tar.gz" maintainers = [ 'sean-smith', 'demartinofra', 'enrico-usai', 'lukeseawalker', 'rexcsn', @@ -23,6 +23,7 @@ class AwsParallelcluster(PythonPackage): 'pcluster.config', 'pcluster.networking' ] + version('2.7.0', sha256='7c34995acfcc256a6996541d330575fc711e1fd5735bf3d734d4e96c1dc8df60') version('2.6.1', sha256='2ce9015d90b5d4dc88b46a44cb8a82e8fb0bb2b4cca30335fc5759202ec1b343') version('2.6.0', sha256='aaed6962cf5027206834ac24b3d312da91e0f96ae8607f555e12cb124b869f0c') version('2.5.1', 
sha256='4fd6e14583f8cf81f9e4aa1d6188e3708d3d14e6ae252de0a94caaf58be76303') diff --git a/var/spack/repos/builtin/packages/axom/package.py b/var/spack/repos/builtin/packages/axom/package.py new file mode 100644 index 00000000000..add0619a3e8 --- /dev/null +++ b/var/spack/repos/builtin/packages/axom/package.py @@ -0,0 +1,541 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + +import os +import socket +from os.path import join as pjoin + +import llnl.util.tty as tty + + +def cmake_cache_entry(name, value, comment=""): + """Generate a string for a cmake cache variable""" + return 'set({0} "{1}" CACHE PATH "{2}")\n\n'.format(name, value, comment) + + +def cmake_cache_option(name, boolean_value, comment=""): + """Generate a string for a cmake configuration option""" + + value = "ON" if boolean_value else "OFF" + return 'set({0} {1} CACHE BOOL "{2}")\n\n'.format(name, value, comment) + + +def get_spec_path(spec, package_name, path_replacements={}, use_bin=False): + """Extracts the prefix path for the given spack package + path_replacements is a dictionary with string replacements for the path. 
+ """ + + if not use_bin: + path = spec[package_name].prefix + else: + path = spec[package_name].prefix.bin + + path = os.path.realpath(path) + + for key in path_replacements: + path = path.replace(key, path_replacements[key]) + + return path + + +class Axom(CMakePackage, CudaPackage): + """Axom provides a robust, flexible software infrastructure for the development + of multi-physics applications and computational tools.""" + + maintainers = ['white238'] + + homepage = "https://github.com/LLNL/axom" + git = "https://github.com/LLNL/axom.git" + + version('master', branch='master', submodules=True) + version('develop', branch='develop', submodules=True) + version('0.3.3', tag='v0.3.3', submodules="True") + version('0.3.2', tag='v0.3.2', submodules="True") + version('0.3.1', tag='v0.3.1', submodules="True") + version('0.3.0', tag='v0.3.0', submodules="True") + version('0.2.9', tag='v0.2.9', submodules="True") + + phases = ["hostconfig", "cmake", "build", "install"] + root_cmakelists_dir = 'src' + + # ----------------------------------------------------------------------- + # Variants + # ----------------------------------------------------------------------- + variant('debug', default=False, + description='Build debug instead of optimized version') + + variant('fortran', default=True, description="Build with Fortran support") + + variant("python", default=False, description="Build python support") + + variant("mpi", default=True, description="Build MPI support") + variant('openmp', default=True, description='Turn on OpenMP support.') + + variant("mfem", default=False, description="Build with mfem") + variant("hdf5", default=True, description="Build with hdf5") + variant("lua", default=True, description="Build with Lua") + variant("scr", default=False, description="Build with SCR") + variant("umpire", default=True, description="Build with umpire") + + variant("raja", default=True, description="Build with raja") + variant("cub", default=True, + description="Build with 
RAJA's internal CUB support") + + varmsg = "Build development tools (such as Sphinx, Uncrustify, etc...)" + variant("devtools", default=False, description=varmsg) + + # ----------------------------------------------------------------------- + # Dependencies + # ----------------------------------------------------------------------- + # Basics + depends_on("cmake@3.8.2:", type='build') + depends_on("mpi", when="+mpi") + + # Libraries + depends_on("conduit~shared+python", when="+python") + depends_on("conduit~shared~python", when="~python") + depends_on("conduit~shared+python+hdf5", when="+hdf5+python") + depends_on("conduit~shared+python~hdf5", when="~hdf5+python") + depends_on("conduit~shared~python+hdf5", when="+hdf5~python") + depends_on("conduit~shared~python~hdf5", when="~hdf5~python") + + # HDF5 needs to be the same as Conduit's + depends_on("hdf5@1.8.19:1.8.999~mpi~cxx~shared~fortran", when="+hdf5") + + depends_on("lua", when="+lua") + + depends_on("scr", when="+scr") + + depends_on("raja~openmp", when="+raja~openmp") + depends_on("raja+openmp", when="+raja+openmp") + depends_on("raja+cuda", when="+raja+cuda") + + depends_on("umpire~openmp", when="+umpire~openmp") + depends_on("umpire+openmp", when="+umpire+openmp") + depends_on("umpire+cuda+deviceconst", when="+umpire+cuda") + + for sm_ in CudaPackage.cuda_arch_values: + depends_on('raja cuda_arch={0}'.format(sm_), + when='+raja cuda_arch={0}'.format(sm_)) + depends_on('umpire cuda_arch={0}'.format(sm_), + when='+umpire cuda_arch={0}'.format(sm_)) + + depends_on("mfem~mpi~hypre~metis~zlib", when="+mfem") + + depends_on("python", when="+python") + + # Devtools + depends_on("cppcheck", when="+devtools") + depends_on("doxygen", when="+devtools") + depends_on("graphviz", when="+devtools") + depends_on("python", when="+devtools") + depends_on("py-sphinx", when="+devtools") + depends_on("py-shroud", when="+devtools") + depends_on("uncrustify@0.61", when="+devtools") + + def flag_handler(self, name, flags): + if 
name in ('cflags', 'cxxflags', 'fflags'): + # the package manages these flags in another way + return (None, None, None) + return (flags, None, None) + + def _get_sys_type(self, spec): + sys_type = spec.architecture + # if on llnl systems, we can use the SYS_TYPE + if "SYS_TYPE" in env: + sys_type = env["SYS_TYPE"] + return sys_type + + def _get_host_config_path(self, spec): + hostname = socket.gethostname() + if "SYS_TYPE" in env: + # Are we on a LLNL system then strip node number + hostname = hostname.rstrip('1234567890') + filename = "{0}-{1}-{2}.cmake".format(hostname, + self._get_sys_type(spec), + spec.compiler) + dest_dir = self.stage.source_path + fullpath = os.path.abspath(pjoin(dest_dir, filename)) + return fullpath + + def hostconfig(self, spec, prefix): + """ + This method creates a 'host-config' file that specifies + all of the options used to configure and build Axom. + """ + + c_compiler = env["SPACK_CC"] + cpp_compiler = env["SPACK_CXX"] + f_compiler = None + + # see if we should enable fortran support + if "SPACK_FC" in env.keys(): + # even if this is set, it may not exist + # do one more sanity check + if os.path.isfile(env["SPACK_FC"]): + f_compiler = env["SPACK_FC"] + + # cmake + if "+cmake" in spec: + cmake_exe = pjoin(spec['cmake'].prefix.bin, "cmake") + else: + cmake_exe = which("cmake") + if cmake_exe is None: + # error could not find cmake! + crash() + cmake_exe = cmake_exe.command + cmake_exe = os.path.realpath(cmake_exe) + + host_config_path = self._get_host_config_path(spec) + cfg = open(host_config_path, "w") + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# !!!! 
This is a generated file, edit at own risk !!!!\n") + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# SYS_TYPE: {0}\n".format(self._get_sys_type(spec))) + cfg.write("# Compiler Spec: {0}\n".format(spec.compiler)) + cfg.write("#------------------{0}\n".format("-" * 60)) + # show path to cmake for reference and to be used by config-build.py + cfg.write("# CMake executable path: {0}\n".format(cmake_exe)) + cfg.write("#------------------{0}\n\n".format("-" * 60)) + + # compiler settings + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# Compilers\n") + cfg.write("#------------------{0}\n\n".format("-" * 60)) + + cfg.write(cmake_cache_entry("CMAKE_C_COMPILER", c_compiler)) + cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER", cpp_compiler)) + + if "+fortran" in spec or f_compiler is not None: + cfg.write(cmake_cache_option("ENABLE_FORTRAN", True)) + cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER", f_compiler)) + else: + cfg.write(cmake_cache_option("ENABLE_FORTRAN", False)) + + # use global spack compiler flags + cppflags = ' '.join(spec.compiler_flags['cppflags']) + if cppflags: + # avoid always ending up with ' ' with no flags defined + cppflags += ' ' + cflags = cppflags + ' '.join(spec.compiler_flags['cflags']) + if cflags: + cfg.write(cmake_cache_entry("CMAKE_C_FLAGS", cflags)) + cxxflags = cppflags + ' '.join(spec.compiler_flags['cxxflags']) + if cxxflags: + cfg.write(cmake_cache_entry("CMAKE_CXX_FLAGS", cxxflags)) + fflags = ' '.join(spec.compiler_flags['fflags']) + if fflags: + cfg.write(cmake_cache_entry("CMAKE_Fortran_FLAGS", fflags)) + + if ((f_compiler is not None) + and ("gfortran" in f_compiler) + and ("clang" in cpp_compiler)): + libdir = pjoin(os.path.dirname( + os.path.dirname(cpp_compiler)), "lib") + flags = "" + for _libpath in [libdir, libdir + "64"]: + if os.path.exists(_libpath): + flags += " -Wl,-rpath,{0}".format(_libpath) + description = ("Adds a missing libstdc++ rpath") + if flags: + 
cfg.write(cmake_cache_entry("BLT_EXE_LINKER_FLAGS", flags, + description)) + + # TPL locations + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# TPLs\n") + cfg.write("#------------------{0}\n\n".format("-" * 60)) + + # Try to find the common prefix of the TPL directory, including the + # compiler. If found, we will use this in the TPL paths + compiler_str = str(spec.compiler).replace('@', '-') + prefix_paths = prefix.split(compiler_str) + path_replacements = {} + + if len(prefix_paths) == 2: + tpl_root = os.path.realpath(pjoin(prefix_paths[0], compiler_str)) + path_replacements[tpl_root] = "${TPL_ROOT}" + cfg.write("# Root directory for generated TPLs\n") + cfg.write(cmake_cache_entry("TPL_ROOT", tpl_root)) + + conduit_dir = get_spec_path(spec, "conduit", path_replacements) + cfg.write(cmake_cache_entry("CONDUIT_DIR", conduit_dir)) + + # optional tpls + + if "+mfem" in spec: + mfem_dir = get_spec_path(spec, "mfem", path_replacements) + cfg.write(cmake_cache_entry("MFEM_DIR", mfem_dir)) + else: + cfg.write("# MFEM not built\n\n") + + if "+hdf5" in spec: + hdf5_dir = get_spec_path(spec, "hdf5", path_replacements) + cfg.write(cmake_cache_entry("HDF5_DIR", hdf5_dir)) + else: + cfg.write("# HDF5 not built\n\n") + + if "+lua" in spec: + lua_dir = get_spec_path(spec, "lua", path_replacements) + cfg.write(cmake_cache_entry("LUA_DIR", lua_dir)) + else: + cfg.write("# Lua not built\n\n") + + if "+scr" in spec: + scr_dir = get_spec_path(spec, "scr", path_replacements) + cfg.write(cmake_cache_entry("SCR_DIR", scr_dir)) + else: + cfg.write("# SCR not built\n\n") + + if "+raja" in spec: + raja_dir = get_spec_path(spec, "raja", path_replacements) + cfg.write(cmake_cache_entry("RAJA_DIR", raja_dir)) + else: + cfg.write("# RAJA not built\n\n") + + if "+umpire" in spec: + umpire_dir = get_spec_path(spec, "umpire", path_replacements) + cfg.write(cmake_cache_entry("UMPIRE_DIR", umpire_dir)) + else: + cfg.write("# Umpire not built\n\n") + + 
cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# MPI\n") + cfg.write("#------------------{0}\n\n".format("-" * 60)) + + if "+mpi" in spec: + cfg.write(cmake_cache_option("ENABLE_MPI", True)) + cfg.write(cmake_cache_entry("MPI_C_COMPILER", spec['mpi'].mpicc)) + cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", + spec['mpi'].mpicxx)) + if "+fortran" in spec or f_compiler is not None: + cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", + spec['mpi'].mpifc)) + + # Check for slurm + using_slurm = False + slurm_checks = ['+slurm', + 'schedulers=slurm', + 'process_managers=slurm'] + if any(spec['mpi'].satisfies(variant) for variant in slurm_checks): + using_slurm = True + + # Determine MPIEXEC + if using_slurm: + if spec['mpi'].external: + mpiexec = '/usr/bin/srun' + else: + mpiexec = os.path.join(spec['slurm'].prefix.bin, 'srun') + else: + mpiexec = os.path.join(spec['mpi'].prefix.bin, 'mpirun') + if not os.path.exists(mpiexec): + mpiexec = os.path.join(spec['mpi'].prefix.bin, 'mpiexec') + + if not os.path.exists(mpiexec): + msg = "Unable to determine MPIEXEC, Axom tests may fail" + cfg.write("# {0}\n\n".format(msg)) + tty.msg(msg) + else: + # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE + # vs the older versions which expect MPIEXEC + if self.spec["cmake"].satisfies('@3.10:'): + cfg.write(cmake_cache_entry("MPIEXEC_EXECUTABLE", mpiexec)) + else: + cfg.write(cmake_cache_entry("MPIEXEC", mpiexec)) + + # Determine MPIEXEC_NUMPROC_FLAG + if using_slurm: + cfg.write(cmake_cache_entry("MPIEXEC_NUMPROC_FLAG", "-n")) + else: + cfg.write(cmake_cache_entry("MPIEXEC_NUMPROC_FLAG", "-np")) + + if spec['mpi'].name == 'spectrum-mpi': + cfg.write(cmake_cache_entry("BLT_MPI_COMMAND_APPEND", + "mpibind")) + else: + cfg.write(cmake_cache_option("ENABLE_MPI", False)) + + ################################## + # Devtools + ################################## + + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# Devtools\n") + 
cfg.write("#------------------{0}\n\n".format("-" * 60)) + + # Add common prefix to path replacement list + if "+devtools" in spec: + # Grab common devtools root and strip the trailing slash + path1 = os.path.realpath(spec["uncrustify"].prefix) + path2 = os.path.realpath(spec["doxygen"].prefix) + devtools_root = os.path.commonprefix([path1, path2])[:-1] + path_replacements[devtools_root] = "${DEVTOOLS_ROOT}" + cfg.write("# Root directory for generated developer tools\n") + cfg.write(cmake_cache_entry("DEVTOOLS_ROOT", devtools_root)) + + if "+python" in spec or "+devtools" in spec: + python_path = os.path.realpath(spec['python'].command.path) + for key in path_replacements: + python_path = python_path.replace(key, path_replacements[key]) + cfg.write(cmake_cache_entry("PYTHON_EXECUTABLE", python_path)) + + if "doxygen" in spec or "py-sphinx" in spec: + cfg.write(cmake_cache_option("ENABLE_DOCS", True)) + + if "doxygen" in spec: + doxygen_bin_dir = get_spec_path(spec, "doxygen", + path_replacements, + use_bin=True) + cfg.write(cmake_cache_entry("DOXYGEN_EXECUTABLE", + pjoin(doxygen_bin_dir, + "doxygen"))) + + if "py-sphinx" in spec: + python_bin_dir = get_spec_path(spec, "python", + path_replacements, + use_bin=True) + cfg.write(cmake_cache_entry("SPHINX_EXECUTABLE", + pjoin(python_bin_dir, + "sphinx-build"))) + else: + cfg.write(cmake_cache_option("ENABLE_DOCS", False)) + + if "py-shroud" in spec: + shroud_bin_dir = get_spec_path(spec, "py-shroud", + path_replacements, use_bin=True) + cfg.write(cmake_cache_entry("SHROUD_EXECUTABLE", + pjoin(shroud_bin_dir, "shroud"))) + + if "uncrustify" in spec: + uncrustify_bin_dir = get_spec_path(spec, "uncrustify", + path_replacements, + use_bin=True) + cfg.write(cmake_cache_entry("UNCRUSTIFY_EXECUTABLE", + pjoin(uncrustify_bin_dir, + "uncrustify"))) + + if "cppcheck" in spec: + cppcheck_bin_dir = get_spec_path(spec, "cppcheck", + path_replacements, use_bin=True) + cfg.write(cmake_cache_entry("CPPCHECK_EXECUTABLE", + 
pjoin(cppcheck_bin_dir, "cppcheck"))) + + ################################## + # Other machine specifics + ################################## + + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# Other machine specifics\n") + cfg.write("#------------------{0}\n\n".format("-" * 60)) + + # OpenMP + if "+openmp" in spec: + cfg.write(cmake_cache_option("ENABLE_OPENMP", True)) + else: + cfg.write(cmake_cache_option("ENABLE_OPENMP", False)) + + # Enable death tests + if spec.satisfies('target=ppc64le:') and "+cuda" in spec: + cfg.write(cmake_cache_option("ENABLE_GTEST_DEATH_TESTS", False)) + else: + cfg.write(cmake_cache_option("ENABLE_GTEST_DEATH_TESTS", True)) + + # Override XL compiler family + familymsg = ("Override to proper compiler family for XL") + if (f_compiler is not None) and ("xlf" in f_compiler): + cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER_ID", "XL", + familymsg)) + if "xlc" in c_compiler: + cfg.write(cmake_cache_entry("CMAKE_C_COMPILER_ID", "XL", + familymsg)) + if "xlC" in cpp_compiler: + cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER_ID", "XL", + familymsg)) + + if spec.satisfies('target=ppc64le:'): + if (f_compiler is not None) and ("xlf" in f_compiler): + description = ("Converts C-style comments to Fortran style " + "in preprocessed files") + cfg.write(cmake_cache_entry("BLT_FORTRAN_FLAGS", + "-WF,-C! 
-qxlf2003=polymorphic", + description)) + # Grab lib directory for the current fortran compiler + libdir = os.path.join(os.path.dirname( + os.path.dirname(f_compiler)), "lib") + description = ("Adds a missing rpath for libraries " + "associated with the fortran compiler") + linker_flags = "${BLT_EXE_LINKER_FLAGS} -Wl,-rpath," + libdir + cfg.write(cmake_cache_entry("BLT_EXE_LINKER_FLAGS", + linker_flags, description)) + + if "+cuda" in spec: + cfg.write("#------------------{0}\n".format("-" * 60)) + cfg.write("# Cuda\n") + cfg.write("#------------------{0}\n\n".format("-" * 60)) + + cfg.write(cmake_cache_option("ENABLE_CUDA", True)) + + cudatoolkitdir = spec['cuda'].prefix + cfg.write(cmake_cache_entry("CUDA_TOOLKIT_ROOT_DIR", + cudatoolkitdir)) + cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc" + cfg.write(cmake_cache_entry("CMAKE_CUDA_COMPILER", + cudacompiler)) + + cfg.write(cmake_cache_option("CUDA_SEPARABLE_COMPILATION", + True)) + + cfg.write(cmake_cache_option("AXOM_ENABLE_ANNOTATIONS", True)) + + if "+cub" in spec: + cfg.write(cmake_cache_option("AXOM_ENABLE_CUB", True)) + else: + cfg.write(cmake_cache_option("AXOM_ENABLE_CUB", False)) + + # CUDA_FLAGS + cudaflags = "-restrict " + + if not spec.satisfies('cuda_arch=none'): + cuda_arch = spec.variants['cuda_arch'].value + axom_arch = 'sm_{0}'.format(cuda_arch[0]) + cfg.write(cmake_cache_entry("AXOM_CUDA_ARCH", axom_arch)) + cudaflags += "-arch ${AXOM_CUDA_ARCH} " + else: + cfg.write("# cuda_arch could not be determined\n\n") + + cudaflags += "-std=c++11 --expt-extended-lambda -G " + cfg.write(cmake_cache_entry("CMAKE_CUDA_FLAGS", cudaflags)) + + if "+mpi" in spec: + cfg.write(cmake_cache_entry("CMAKE_CUDA_HOST_COMPILER", + "${MPI_CXX_COMPILER}")) + else: + cfg.write(cmake_cache_entry("CMAKE_CUDA_HOST_COMPILER", + "${CMAKE_CXX_COMPILER}")) + + cfg.write("# nvcc does not like gtest's 'pthreads' flag\n") + cfg.write(cmake_cache_option("gtest_disable_pthreads", True)) + + cfg.write("\n") + cfg.close() + 
tty.info("Spack generated Axom host-config file: " + host_config_path) + + def cmake_args(self): + spec = self.spec + host_config_path = self._get_host_config_path(spec) + + options = [] + options.extend(['-C', host_config_path]) + if self.run_tests is False: + options.append('-DENABLE_TESTS=OFF') + else: + options.append('-DENABLE_TESTS=ON') + return options + + @run_after('install') + def install_cmake_cache(self): + install(self._get_host_config_path(self.spec), prefix) diff --git a/var/spack/repos/builtin/packages/bashtop/package.py b/var/spack/repos/builtin/packages/bashtop/package.py new file mode 100644 index 00000000000..47f98f14a8e --- /dev/null +++ b/var/spack/repos/builtin/packages/bashtop/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Bashtop(Package): + """Linux resource monitor.""" + + homepage = "https://github.com/aristocratos/bashtop" + url = "https://github.com/aristocratos/bashtop/archive/v0.8.17.tar.gz" + + version('0.8.17', sha256='853a7143de533437cc1654b853bc89da54ff91c629820ac45b7c8708dababf1f') + version('0.8.16', sha256='6249e5c678fdb0a2a87d6fa13b9fe1f6bd56f7dbcaba0066d2a5275a7f9a9355') + version('0.8.15', sha256='617aab0a23b1a9430f2ef7d51e4f89eb06c5b3f2ff40768cb6849fc2899ffc6a') + version('0.8.14', sha256='e2e05a36a8fb3984f256af62f66562c8bd13a901e5f2ef7b7b0056ef40e57543') + version('0.8.13', sha256='50eda3c91f36a49d7696585fce5b44ba0df53879758f30d94477010bd56c4ff1') + version('0.8.12', sha256='1e762e40527f454da0f1d050a251a1c288cbbe49645f4ee31aa30afe44e70a0f') + version('0.8.11', sha256='bf4b3f109819450ee52f42a32d1b46bf2524ae3b1def83fcafba1b8427c71241') + version('0.8.10', sha256='eac071d68d2ec08869dea696b886bbc0fc33e596e9efa73563e71b4af27c0947') + version('0.8.9', 
sha256='af2ba211d3bc1fbe910cd33c447686a6f39c2c731aaba54355b9e66184a0aec1') + version('0.8.8', sha256='63e88c6f91fdfb3c5265f347e48d7a54a3ad0582407e9adbb70694eb9039ce3f') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('bashtop', prefix.bin) diff --git a/var/spack/repos/builtin/packages/bat/package.py b/var/spack/repos/builtin/packages/bat/package.py index 50c3b5349a0..0463580f8ff 100644 --- a/var/spack/repos/builtin/packages/bat/package.py +++ b/var/spack/repos/builtin/packages/bat/package.py @@ -9,9 +9,10 @@ class Bat(Package): """A cat(1) clone with wings.""" - homepage = "https://github.com/sharkdp/bat" - url = "https://github.com/sharkdp/bat/archive/v0.10.0.tar.gz" + homepage = 'https://github.com/sharkdp/bat' + url = 'https://github.com/sharkdp/bat/archive/v0.13.0.tar.gz' + version('0.13.0', sha256='f4aee370013e2a3bc84c405738ed0ab6e334d3a9f22c18031a7ea008cd5abd2a') version('0.12.1', sha256='1dd184ddc9e5228ba94d19afc0b8b440bfc1819fef8133fe331e2c0ec9e3f8e2') depends_on('rust') @@ -19,11 +20,3 @@ class Bat(Package): def install(self, spec, prefix): cargo = which('cargo') cargo('install', '--root', prefix, '--path', '.') - - # cargo seems to need these to be set so that when it's building - # onig_sys it can run llvm-config and link against libclang. 
- def setup_build_environment(self, env): - env.append_flags('LLVM_CONFIG_PATH', - join_path(self.spec['llvm'].prefix.libexec.llvm, - 'llvm-config')) - env.append_flags('LIBCLANG_PATH', self.spec['llvm'].prefix.lib) diff --git a/var/spack/repos/builtin/packages/bazel/disabledepcheck.patch b/var/spack/repos/builtin/packages/bazel/disabledepcheck.patch new file mode 100644 index 00000000000..c15d3bf64a1 --- /dev/null +++ b/var/spack/repos/builtin/packages/bazel/disabledepcheck.patch @@ -0,0 +1,11 @@ +--- a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java.orig 2020-03-25 08:54:37.914186251 -0400 ++++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 2020-03-25 08:55:01.356250657 -0400 +@@ -148,7 +148,7 @@ + if (execPath.startsWith(execRoot)) { + execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path + } else { +- problems.add(execPathFragment.getPathString()); ++ // problems.add(execPathFragment.getPathString()); + continue; + } + } diff --git a/var/spack/repos/builtin/packages/bazel/disabledepcheck_old.patch b/var/spack/repos/builtin/packages/bazel/disabledepcheck_old.patch new file mode 100644 index 00000000000..dd23972d992 --- /dev/null +++ b/var/spack/repos/builtin/packages/bazel/disabledepcheck_old.patch @@ -0,0 +1,11 @@ +--- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java.orig 2020-06-08 13:42:14.035342560 -0400 ++++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java 2020-06-08 13:42:25.149375458 -0400 +@@ -963,7 +963,7 @@ + // are, it's probably due to a non-hermetic #include, & we should stop + // the build with an error. 
+ if (execPath.startsWith(execRoot)) { +- execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path ++ // execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path + } else { + problems.add(execPathFragment.getPathString()); + continue; diff --git a/var/spack/repos/builtin/packages/bazel/package.py b/var/spack/repos/builtin/packages/bazel/package.py index 1ea1a6ce9cf..27f2da0597d 100644 --- a/var/spack/repos/builtin/packages/bazel/package.py +++ b/var/spack/repos/builtin/packages/bazel/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import platform class Bazel(Package): @@ -14,10 +15,17 @@ class Bazel(Package): numbers of users.""" homepage = "https://bazel.build/" - url = "https://github.com/bazelbuild/bazel/releases/download/1.2.0/bazel-1.2.0-dist.zip" + url = "https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-dist.zip" maintainers = ['adamjstewart'] + version('3.1.0', sha256='d7f40d0cac95a06cea6cb5b7f7769085257caebc3ee84269dd9298da760d5615') + version('3.0.0', sha256='530f5132e0a50da7ebb0ed08d9b6f1ddfd0d7d9b5d0beb2df5d687a4c8daf6b3') + version('2.2.0', sha256='9379878a834d105a47a87d3d7b981852dd9f64bc16620eacd564b48533e169a7') + version('2.1.1', sha256='83f67f28f4e47ff69043307d1791c9bffe83949e84165d49058b84eded932647') + version('2.1.0', sha256='3371cd9050989173a3b27364668328653a65653a50a85c320adc53953b4d5f46') + version('2.0.1', sha256='a863ed9e6fc420fbd92e63a12fe1a5b9be1a7a36f11f61f1fdc582c813bbe543') + version('2.0.0', sha256='724da3c656f68e787a86ebb9844773aa1c2e3a873cc39462a8f1b336153d6cbb') version('1.2.1', sha256='255da49d0f012bc4f2c1d6d3ccdbe578e22fe97b8d124e1629a486fe2a09d3e1') version('1.2.0', sha256='9cb46b0a18b9166730307a0e82bf4c02281a1cc6da0fb11239e6fe4147bdee6e') version('1.1.0', sha256='4b66a8c93af7832ed32e7236cf454a05f3aa06d25a8576fc3f83114f142f95ab') @@ -86,11 +94,16 @@ class Bazel(Package): version('0.3.1', 
sha256='218d0e28b4d1ee34585f2ac6b18d169c81404d93958815e73e60cc0368efcbb7') version('0.3.0', sha256='357fd8bdf86034b93902616f0844bd52e9304cccca22971ab7007588bf9d5fb3') + variant('nodepfail', default=True, description='Disable failing dependency checks due to injected absolute paths - required for most builds using bazel with spack') + # https://docs.bazel.build/versions/master/install-compile-source.html#bootstrap-bazel # Until https://github.com/spack/spack/issues/14058 is fixed, use jdk to build bazel # Strict dependency on java@8 as per # https://docs.bazel.build/versions/master/install-compile-source.html#bootstrap-unix-prereq - depends_on('jdk@1.8.0:1.8.999', type=('build', 'run')) + if platform.machine() == 'aarch64': + depends_on('java@8:8.999', type=('build', 'run')) + else: + depends_on('jdk@1.8.0:1.8.999', type=('build', 'run')) depends_on('python', type=('build', 'run')) depends_on('zip', type=('build', 'run')) @@ -100,7 +113,8 @@ class Bazel(Package): patch('bazelconfiguration-0.3.patch', when='@:0.13') # Inject include paths - patch('unix_cc_configure-0.15.patch', when='@0.15:') + patch('unix_cc_configure-3.0.patch', when='@3:') + patch('unix_cc_configure-0.15.patch', when='@0.15:2') patch('unix_cc_configure-0.10.patch', when='@0.10:0.14') patch('unix_cc_configure-0.5.3.patch', when='@0.5.3:0.9') patch('cc_configure-0.5.0.patch', when='@0.5.0:0.5.2') @@ -116,6 +130,19 @@ class Bazel(Package): patch('compile-0.4.patch', when='@0.4:0.5') patch('compile-0.3.patch', when='@:0.3') + # for fcc + patch('patch_for_fcc.patch', when='@0.29.1:%fj') + patch('patch_for_fcc2.patch', when='@0.25:%fj') + conflicts( + '%fj', + when='@:0.24.1', + msg='Fujitsu Compiler cannot build 0.24.1 or less, ' + 'please use a newer release.' 
+ ) + + patch('disabledepcheck.patch', when='@0.3.2:+nodepfail') + patch('disabledepcheck_old.patch', when='@0.3.0:0.3.1+nodepfail') + phases = ['bootstrap', 'install'] def url_for_version(self, version): @@ -184,3 +211,7 @@ def test(self): def setup_dependent_package(self, module, dependent_spec): module.bazel = Executable('bazel') + + @property + def parallel(self): + return not self.spec.satisfies('%fj') diff --git a/var/spack/repos/builtin/packages/bazel/patch_for_fcc.patch b/var/spack/repos/builtin/packages/bazel/patch_for_fcc.patch new file mode 100644 index 00000000000..d90448b0bae --- /dev/null +++ b/var/spack/repos/builtin/packages/bazel/patch_for_fcc.patch @@ -0,0 +1,37 @@ +diff --git a/src/main/cpp/blaze_util_posix.cc b/src/main/cpp/blaze_util_posix.cc +index 87ba899180..1c967ee818 100644 +--- a/src/main/cpp/blaze_util_posix.cc ++++ b/src/main/cpp/blaze_util_posix.cc +@@ -565,7 +565,8 @@ static int setlk(int fd, struct flock *lock) { + // Prefer OFD locks if available. POSIX locks can be lost "accidentally" + // due to any close() on the lock file, and are not reliably preserved + // across execve() on Linux, which we need for --batch mode. 
+- if (fcntl(fd, F_OFD_SETLK, lock) == 0) return 0; ++ //if (fcntl(fd, F_OFD_SETLK, lock) == 0) return 0; ++ if (fcntl(fd, F_SETLK, lock) == 0) return 0; + if (errno != EINVAL) { + if (errno != EACCES && errno != EAGAIN) { + BAZEL_DIE(blaze_exit_code::LOCAL_ENVIRONMENTAL_ERROR) +diff --git a/tools/cpp/unix_cc_configure.bzl b/tools/cpp/unix_cc_configure.bzl +index ccb18a431a..0c5c8fd6f0 100644 +--- a/tools/cpp/unix_cc_configure.bzl ++++ b/tools/cpp/unix_cc_configure.bzl +@@ -379,7 +379,8 @@ def configure_unix_toolchain(repository_ctx, cpu_value, overriden_tools): + "", + False, + ), ":") +- gold_linker_path = _find_gold_linker_path(repository_ctx, cc) ++ #gold_linker_path = _find_gold_linker_path(repository_ctx, cc) ++ gold_linker_path = None + cc_path = repository_ctx.path(cc) + if not str(cc_path).startswith(str(repository_ctx.path(".")) + "/"): + # cc is outside the repository, set -B +@@ -468,7 +469,7 @@ def configure_unix_toolchain(repository_ctx, cpu_value, overriden_tools): + # Security hardening requires optimization. + # We need to undef it as some distributions now have it enabled by default. + "-U_FORTIFY_SOURCE", +- "-fstack-protector", ++ #"-fstack-protector", + # All warnings are enabled. Maybe enable -Werror as well? + "-Wall", + # Enable a few more warnings that aren't part of -Wall. 
diff --git a/var/spack/repos/builtin/packages/bazel/patch_for_fcc2.patch b/var/spack/repos/builtin/packages/bazel/patch_for_fcc2.patch new file mode 100644 index 00000000000..709e5ee0202 --- /dev/null +++ b/var/spack/repos/builtin/packages/bazel/patch_for_fcc2.patch @@ -0,0 +1,11 @@ +--- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java ++++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java +@@ -185,7 +185,7 @@ public class BazelRuleClassProvider { + + Map spackEnv = System.getenv(); + for (String envName : spackEnv.keySet()) { +- if (envName.startsWith("SPACK_")) { ++ if ((envName.startsWith("SPACK_")) || (envName.equals("fcc_ENV")) || (envName.equals("FCC_ENV"))) { + env.put(envName, spackEnv.get(envName)); + } + } diff --git a/var/spack/repos/builtin/packages/bazel/unix_cc_configure-3.0.patch b/var/spack/repos/builtin/packages/bazel/unix_cc_configure-3.0.patch new file mode 100644 index 00000000000..1140b0c306c --- /dev/null +++ b/var/spack/repos/builtin/packages/bazel/unix_cc_configure-3.0.patch @@ -0,0 +1,19 @@ +diff --git a/tools/cpp/unix_cc_configure.bzl b/tools/cpp/unix_cc_configure.bzl +index 5feb10b76a..cde0b8f8f1 100644 +--- a/tools/cpp/unix_cc_configure.bzl ++++ b/tools/cpp/unix_cc_configure.bzl +@@ -151,8 +151,14 @@ def get_escaped_cxx_inc_directories(repository_ctx, cc, lang_flag, additional_fl + ).stdout.strip() + "/share" + inc_directories.append(_prepare_include_path(repository_ctx, resource_dir)) + ++ env = repository_ctx.os.environ ++ if "SPACK_INCLUDE_DIRS" in env: ++ for path in env["SPACK_INCLUDE_DIRS"].split(":"): ++ inc_directories.append(path) ++ + return inc_directories + ++ + def _is_compiler_option_supported(repository_ctx, cc, option): + """Checks that `option` is supported by the C compiler. 
Doesn't %-escape the option.""" + result = repository_ctx.execute([ diff --git a/var/spack/repos/builtin/packages/bgpdump/package.py b/var/spack/repos/builtin/packages/bgpdump/package.py new file mode 100644 index 00000000000..e708860dc3f --- /dev/null +++ b/var/spack/repos/builtin/packages/bgpdump/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Bgpdump(AutotoolsPackage): + """Utility and C Library for parsing MRT files""" + + homepage = "https://github.com/RIPE-NCC/bgpdump/wiki" + git = "https://github.com/RIPE-NCC/bgpdump.git" + + version('master', branch='master') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('bzip2') diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index 60cd9563fa1..791606f5645 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -124,4 +124,7 @@ def flag_handler(self, name, flags): and (self.compiler.name == 'fj' or self.compiler.name == 'clang')\ and self.version <= ver('2.31.1'): flags.append('-Wno-narrowing') + elif name == 'cflags': + if self.spec.satisfies('@:2.34 %gcc@10:'): + flags.append('-fcommon') return (flags, None, None) diff --git a/var/spack/repos/builtin/packages/blaspp/package.py b/var/spack/repos/builtin/packages/blaspp/package.py new file mode 100644 index 00000000000..bcc5e80292a --- /dev/null +++ b/var/spack/repos/builtin/packages/blaspp/package.py @@ -0,0 +1,83 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Blaspp(CMakePackage): + """C++ API for the Basic Linear Algebra Subroutines. Developed by the + Innovative Computing Laboratory at the University of Tennessee, + Knoxville.""" + + homepage = "https://bitbucket.org/icl/blaspp" + git = "https://bitbucket.org/icl/blaspp" + maintainers = ['teonnik', 'Sely85', 'G-Ragghianti', 'mgates3'] + + version('develop', commit='6293d96') + + variant('gfort', + default=False, + description=('Use GNU Fortran interface. ' + 'Default is Intel interface. (MKL)')) + variant('ilp64', + default=False, + description=('Use 64bit integer interface. ' + 'Default is 32bit. (MKL & ESSL)')) + variant('openmp', + default=False, + description=('Use OpenMP threaded backend. ' + 'Default is sequential. (MKL & ESSL)')) + + depends_on('blas') + + # 1) The CMake options exposed by `blaspp` allow for a value called `auto`. + # The value is not needed here as the choice of dependency in the spec + # determines the appropriate flags. + # + # 2) BLASFinder.cmake handles most options. For `auto`, it searches all + # blas libraries listed in `def_lib_list`. + # + # 3) ?? Custom blas library can be supplied via `BLAS_LIBRARIES`. 
+ # + def cmake_args(self): + spec = self.spec + args = ['-DBLASPP_BUILD_TESTS:BOOL={0}'.format( + 'ON' if self.run_tests else 'OFF')] + + if '+gfort' in spec: + args.append('-DBLAS_LIBRARY_MKL="GNU gfortran conventions"') + else: + args.append('-DBLAS_LIBRARY_MKL="Intel ifort conventions"') + + if '+ilp64' in spec: + args.append('-DBLAS_LIBRARY_INTEGER="int64_t (ILP64)"') + else: + args.append('-DBLAS_LIBRARY_INTEGER="int (LP64)"') + + if '+openmp' in spec: + args.append(['-DUSE_OPENMP=ON', + '-DBLAS_LIBRARY_THREADING="threaded"']) + else: + args.append('-DBLAS_LIBRARY_THREADING="sequential"') + + # Missing: + # + # - acml : BLAS_LIBRARY="AMD ACML" + # BLAS_LIBRARY_THREADING= threaded/sequential + # + # - apple : BLAS_LIBRARY="Apple Accelerate" (veclibfort ???) + # + if '^mkl' in spec: + args.append('-DBLAS_LIBRARY="Intel MKL"') + elif '^essl' in spec: + args.append('-DBLAS_LIBRARY="IBM ESSL"') + elif '^openblas' in spec: + args.append('-DBLAS_LIBRARY="OpenBLAS"') + elif '^cray-libsci' in spec: + args.append('-DBLAS_LIBRARY="Cray LibSci"') + else: # e.g. 
netlib-lapack + args.append('-DBLAS_LIBRARY="generic"') + + return args diff --git a/var/spack/repos/builtin/packages/blis/package.py b/var/spack/repos/builtin/packages/blis/package.py index c2fc1545a3e..fd06bc455e9 100644 --- a/var/spack/repos/builtin/packages/blis/package.py +++ b/var/spack/repos/builtin/packages/blis/package.py @@ -104,7 +104,9 @@ def darwin_fix(self): @property def libs(self): - return find_libraries(['libblis'], root=self.prefix, recursive=True) + return find_libraries( + ["libblis", "libblis-mt"], root=self.prefix, recursive=True + ) class Blis(BlisBase): diff --git a/var/spack/repos/builtin/packages/blogbench/package.py b/var/spack/repos/builtin/packages/blogbench/package.py new file mode 100644 index 00000000000..d9a7fd6088b --- /dev/null +++ b/var/spack/repos/builtin/packages/blogbench/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Blogbench(AutotoolsPackage): + """A filesystem benchmark tool that simulates a realistic load.""" + + homepage = "https://openbenchmarking.org/test/pts/blogbench" + url = "http://download.pureftpd.org/pub/blogbench/blogbench-1.1.tar.gz" + + version('1.1', sha256='8cded059bfdbccb7be35bb6a2272ecfdbe3fbea43d53c92ba5572ac24f26c4df') + version('1.0', sha256='dc29261a19064a8fb64d39b27607f19d3b33ce3795908e717404167687ef33be') diff --git a/var/spack/repos/builtin/packages/bolt/package.py b/var/spack/repos/builtin/packages/bolt/package.py index d0750e615da..b4e4cf95ce9 100644 --- a/var/spack/repos/builtin/packages/bolt/package.py +++ b/var/spack/repos/builtin/packages/bolt/package.py @@ -22,11 +22,8 @@ class Bolt(CMakePackage): git = "https://github.com/pmodels/bolt.git" maintainers = ['shintaro-iwasaki'] - version("master", branch="master") - version("1.0rc3", 
sha256="beec522d26e74f0a562762ea5ae7805486a17b40013090ea1472f0c34c3379c8") - version("1.0rc2", sha256="662ab0bb9583e8d733e8af62a97b41828e8bfe4bd65902f1195b986901775a45") - version("1.0rc1", sha256="c08cde0695b9d1252ab152425be96eb29c70d764e3083e276c013804883a15a4") - version("1.0b1", sha256="fedba46ad2f8835dd1cec1a9a52bcc9d8923071dc40045d0360517d09cd1a57d") + version("main", branch="main") + version("1.0", sha256="1c0d2f75597485ca36335d313a73736594e75c8a36123c5a6f54d01b5ba5c384") depends_on('argobots') depends_on('autoconf', type='build') @@ -36,7 +33,6 @@ class Bolt(CMakePackage): def cmake_args(self): spec = self.spec options = [ - '-DLIBOMP_USE_ITT_NOTIFY=off', '-DLIBOMP_USE_ARGOBOTS=on', '-DLIBOMP_ARGOBOTS_INSTALL_DIR=' + spec['argobots'].prefix ] diff --git a/var/spack/repos/builtin/packages/bonniepp/package.py b/var/spack/repos/builtin/packages/bonniepp/package.py index 18751810fc8..6a33a15ea77 100644 --- a/var/spack/repos/builtin/packages/bonniepp/package.py +++ b/var/spack/repos/builtin/packages/bonniepp/package.py @@ -19,3 +19,7 @@ def configure_args(self): configure_args = [] configure_args.append('--enable-debug') return configure_args + + def setup_run_environment(self, env): + """Prepend the sbin directory to PATH.""" + env.prepend_path('PATH', self.prefix.sbin) diff --git a/var/spack/repos/builtin/packages/boost/1.72_boost_process.patch b/var/spack/repos/builtin/packages/boost/1.72_boost_process.patch new file mode 100644 index 00000000000..54cd2ae6715 --- /dev/null +++ b/var/spack/repos/builtin/packages/boost/1.72_boost_process.patch @@ -0,0 +1,48 @@ +From 6a4d2ff72114ef47c7afaf92e1042aca3dfa41b0 Mon Sep 17 00:00:00 2001 +From: Klemens David Morgenstern +Date: Fri, 22 Nov 2019 14:03:22 +0800 +Subject: [PATCH] added typedef executor_type; + +--- + include/boost/process/async_pipe.hpp | 2 ++ + include/boost/process/detail/posix/async_pipe.hpp | 1 + + include/boost/process/detail/windows/async_pipe.hpp | 1 + + 3 files changed, 4 insertions(+) + +diff 
--git a/include/boost/process/async_pipe.hpp b/include/boost/process/async_pipe.hpp +index 101fe1d59..a562432c0 100644 +--- a/include/boost/process/async_pipe.hpp ++++ b/include/boost/process/async_pipe.hpp +@@ -47,6 +47,8 @@ class async_pipe + */ + typedef platform_specific handle_type; + ++ typedef typename handle_type::executor_type executor_type; ++ + /** Construct a new async_pipe, does automatically open the pipe. + * Initializes source and sink with the same io_context. + * @note Windows creates a named pipe here, where the name is automatically generated. +diff --git a/include/boost/process/detail/posix/async_pipe.hpp b/include/boost/process/detail/posix/async_pipe.hpp +index 725a07890..a82c057b9 100644 +--- a/include/boost/process/detail/posix/async_pipe.hpp ++++ b/include/boost/process/detail/posix/async_pipe.hpp +@@ -23,6 +23,7 @@ class async_pipe + public: + typedef int native_handle_type; + typedef ::boost::asio::posix::stream_descriptor handle_type; ++ typedef typename handle_type::executor_type executor_type; + + inline async_pipe(boost::asio::io_context & ios) : async_pipe(ios, ios) {} + +diff --git a/include/boost/process/detail/windows/async_pipe.hpp b/include/boost/process/detail/windows/async_pipe.hpp +index 06d5f2d85..0b447f9b8 100644 +--- a/include/boost/process/detail/windows/async_pipe.hpp ++++ b/include/boost/process/detail/windows/async_pipe.hpp +@@ -48,6 +48,7 @@ class async_pipe + public: + typedef ::boost::winapi::HANDLE_ native_handle_type; + typedef ::boost::asio::windows::stream_handle handle_type; ++ typedef typename handle_type::executor_type executor_type; + + async_pipe(boost::asio::io_context & ios) : async_pipe(ios, ios, make_pipe_name(), true) {} + async_pipe(boost::asio::io_context & ios_source, boost::asio::io_context & ios_sink) diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index cc223d2f7e4..1e5bd00c2bd 100644 --- 
a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -25,6 +25,7 @@ class Boost(Package): maintainers = ['hainest'] version('develop', branch='develop', submodules=True) + version('1.73.0', sha256='4eb3b8d442b426dc35346235c8733b5ae35ba431690e38c6a8263dce9fcbb402') version('1.72.0', sha256='59c9b274bc451cf91a9ba1dd2c7fdcaf5d60b1b3aa83f2c9fa143417cc660722') version('1.71.0', sha256='d73a8da01e8bf8c7eda40b4c84915071a8c8a0df4a6734537ddde4a8580524ee') version('1.70.0', sha256='430ae8354789de4fd19ee52f3b1f739e1fba576f0aded0897c3c2bc00fb38778') @@ -125,7 +126,7 @@ def libs(self): variant('cxxstd', default='98', - values=('98', '11', '14', '17'), + values=('98', '11', '14', '17', '2a'), multi=False, description='Use the specified C++ standard when building.') variant('debug', default=False, @@ -172,6 +173,9 @@ def libs(self): conflicts('cxxstd=98', when='+fiber') # Fiber requires >=C++11. conflicts('~context', when='+fiber') # Fiber requires Context. + # C++20/2a is not support by Boost < 1.73.0 + conflicts('cxxstd=2a', when='@:1.72.99') + # C++17 is not supported by Boost<1.63.0. 
conflicts('cxxstd=17', when='@:1.62.99') @@ -209,6 +213,11 @@ def libs(self): patch('darwin_clang_version.patch', level=0, when='@1.56.0:1.72.0 platform=darwin') + # Fix: "Unable to compile code using boost/process.hpp" + # See: https://github.com/boostorg/process/issues/116 + # Patch: https://github.com/boostorg/process/commit/6a4d2ff72114ef47c7afaf92e1042aca3dfa41b0.patch + patch('1.72_boost_process.patch', level=2, when='@1.72.0') + # Fix the bootstrap/bjam build for Cray patch('bootstrap-path.patch', when='@1.39.0: platform=cray') @@ -226,6 +235,18 @@ def libs(self): patch('clang-linux_add_option.patch', when='@1.56.0:1.63.0') patch('clang-linux_add_option2.patch', when='@1.47.0:1.55.0') + # C++20 concepts fix for Beast + # See https://github.com/boostorg/beast/pull/1927 for details + patch('https://www.boost.org/patches/1_73_0/0002-beast-coroutines.patch', + sha256='4dd507e1f5a29e3b87b15321a4d8c74afdc8331433edabf7aeab89b3c405d556', + when='@1.73.0') + + # Cloning a status_code with indirecting_domain leads to segmentation fault + # See https://github.com/ned14/outcome/issues/223 for details + patch('https://www.boost.org/patches/1_73_0/0001-outcome-assert.patch', + sha256='246508e052c44b6f4e8c2542a71c06cacaa72cd1447ab8d2a542b987bc35ace9', + when='@1.73.0') + def url_for_version(self, version): if version >= Version('1.63.0'): url = "https://dl.bintray.com/boostorg/release/{0}/source/boost_{1}.tar.bz2" @@ -377,7 +398,7 @@ def determine_b2_options(self, spec, options): # and at least in clang 3.9 still fails to build # http://www.boost.org/build/doc/html/bbv2/reference/precompiled_headers.html # https://svn.boost.org/trac/boost/ticket/12496 - if spec.satisfies('%clang'): + if spec.satisfies('%clang') or spec.satisfies('%fj'): options.extend(['pch=off']) if '+clanglibcpp' in spec: cxxflags.append('-stdlib=libc++') @@ -491,3 +512,6 @@ def install(self, spec, prefix): # on Darwin; correct this if (sys.platform == 'darwin') and ('+shared' in spec): 
fix_darwin_install_name(prefix.lib) + + def setup_run_environment(self, env): + env.set('BOOST_ROOT', self.prefix) diff --git a/var/spack/repos/builtin/packages/brpc/narrow.patch b/var/spack/repos/builtin/packages/brpc/narrow.patch new file mode 100644 index 00000000000..621e32f9987 --- /dev/null +++ b/var/spack/repos/builtin/packages/brpc/narrow.patch @@ -0,0 +1,33 @@ +diff --git a/src/butil/containers/case_ignored_flat_map.cpp b/src/butil/containers/case_ignored_flat_map.cpp +index 7f2fadc5..a7471561 100644 +--- a/src/butil/containers/case_ignored_flat_map.cpp ++++ b/src/butil/containers/case_ignored_flat_map.cpp +@@ -19,7 +19,7 @@ + + namespace butil { + +-static const char g_tolower_map_base[] = { ++static const signed char g_tolower_map_base[] = { + -128, -127, -126, -125, -124, -123, -122, -121, -120, + -119, -118, -117, -116, -115, -114, -113, -112, -111, -110, + -109, -108, -107, -106, -105, -104, -103, -102, -101, -100, +@@ -48,6 +48,6 @@ static const char g_tolower_map_base[] = { + 120, 121, 122, 123, 124, 125, 126, 127 + }; + +-extern const char* const g_tolower_map = g_tolower_map_base + 128; ++extern const signed char* const g_tolower_map = g_tolower_map_base + 128; + + } // namespace butil +diff --git a/src/butil/containers/case_ignored_flat_map.h b/src/butil/containers/case_ignored_flat_map.h +index ed6e58b4..6bc1a9af 100644 +--- a/src/butil/containers/case_ignored_flat_map.h ++++ b/src/butil/containers/case_ignored_flat_map.h +@@ -27,7 +27,7 @@ namespace butil { + // NOTE: Using ascii_tolower instead of ::tolower shortens 150ns in + // FlatMapTest.perf_small_string_map (with -O2 added, -O0 by default) + inline char ascii_tolower(char c) { +- extern const char* const g_tolower_map; ++ extern const signed char* const g_tolower_map; + return g_tolower_map[(int)c]; + } diff --git a/var/spack/repos/builtin/packages/brpc/package.py b/var/spack/repos/builtin/packages/brpc/package.py new file mode 100644 index 00000000000..ba1417babf0 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/brpc/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Brpc(CMakePackage): + """An industrial-grade RPC framework used throughout Baidu, with + 1,000,000+ instances(not counting clients) and thousands kinds of + services, called "baidu-rpc" inside Baidu. Only C++ implementatioo + on is opensourced right now.""" + + homepage = "https://github.com/apache/incubator-brpc" + url = "https://github.com/apache/incubator-brpc/archive/0.9.7.tar.gz" + + version('0.9.7', sha256='722cd342baf3b05189ca78ecf6c56ea6ffec22e62fc2938335e4e5bab545a49c') + version('0.9.6', sha256='b872ca844999e0ba768acd823b409761f126590fb34cb0183da915a595161446') + version('0.9.5', sha256='11ca8942242a4c542c11345b7463a4aea33a11ca33e91d9a2f64f126df8c70e9') + + depends_on('gflags') + depends_on('protobuf') + depends_on('leveldb') + + patch('narrow.patch', sha256='d7393029443853ddda6c09e3d2185ac2f60920a36a8b685eb83b6b80c1535539', when='@:0.9.7') diff --git a/var/spack/repos/builtin/packages/buddy/package.py b/var/spack/repos/builtin/packages/buddy/package.py new file mode 100644 index 00000000000..d6e92bb9b38 --- /dev/null +++ b/var/spack/repos/builtin/packages/buddy/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import platform + + +class Buddy(AutotoolsPackage): + """A Binary Decision Diagram library.""" + + homepage = "https://sourceforge.net/projects/buddy/" + url = "https://sourceforge.net/projects/buddy/files/buddy/BuDDy%202.4/buddy-2.4.tar.gz" + list_url = "https://sourceforge.net/projects/buddy/files/buddy" + list_depth = 1 + + version('2.4', sha256='d3df80a6a669d9ae408cb46012ff17bd33d855529d20f3a7e563d0d913358836') + + def configure_args(self): + if platform.machine() == 'aarch64': + config_args = [ + '--build=aarch64-unknown-linux-gnu', + ] + return config_args diff --git a/var/spack/repos/builtin/packages/byacc/package.py b/var/spack/repos/builtin/packages/byacc/package.py new file mode 100644 index 00000000000..b555910c42f --- /dev/null +++ b/var/spack/repos/builtin/packages/byacc/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Byacc(AutotoolsPackage): + """Berkeley Yacc is an LALR(1) parser generator. Berkeley Yacc has + been made as compatible as possible with AT&T Yacc. Berkeley Yacc + can accept any input specification that conforms to the AT&T Yacc + documentation. 
Specifications that take advantage of undocumented + features of AT&T Yacc will probably be rejected.""" + + homepage = "https://github.com/grandseiken/byacc" + git = "https://github.com/grandseiken/byacc.git" + + version('master', branch='master') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/cabana/package.py b/var/spack/repos/builtin/packages/cabana/package.py index 0efff53f926..b84bfa60b08 100644 --- a/var/spack/repos/builtin/packages/cabana/package.py +++ b/var/spack/repos/builtin/packages/cabana/package.py @@ -13,18 +13,26 @@ class Cabana(CMakePackage): git = "https://github.com/ECP-copa/Cabana.git" url = "https://github.com/ECP-copa/Cabana/archive/0.1.0.tar.gz" - version('develop', branch='master') + version('master', branch='master') + version('0.3.0', sha256='fb67ab9aaf254b103ae0eb5cc913ddae3bf3cd0cf6010e9686e577a2981ca84f') + version('0.2.0', sha256='3e0c0e224e90f4997f6c7e2b92f00ffa18f8bcff72f789e0908cea0828afc2cb') version('0.1.0', sha256='3280712facf6932b9d1aff375b24c932abb9f60a8addb0c0a1950afd0cb9b9cf') version('0.1.0-rc0', sha256='73754d38aaa0c2a1e012be6959787108fec142294774c23f70292f59c1bdc6c5') variant('serial', default=True, description="enable Serial backend (default)") variant('openmp', default=False, description="enable OpenMP backend") variant('cuda', default=False, description="enable Cuda backend") + variant('shared', default=True, description='Build shared libraries') + variant('mpi', default=True, description='Build with mpi support') depends_on("cmake@3.9:", type='build') - depends_on("kokkos+serial", when="+serial") - depends_on("kokkos+openmp", when="+openmp") - depends_on("kokkos+cuda", when="+cuda") + depends_on("kokkos-legacy+serial", when="@:0.2.0+serial") + depends_on("kokkos-legacy+openmp", when="@:0.2.0+openmp") + depends_on("kokkos-legacy+cuda", when="@:0.2.0+cuda") + 
depends_on("kokkos@3.1:+serial", when="@0.3.0:+serial") + depends_on("kokkos@3.1:+openmp", when="@0.3.0:+openmp") + depends_on("kokkos@3.1:+cuda", when="@0.3.0:+cuda") + depends_on('mpi', when='+mpi') def cmake_args(self): options = [ @@ -34,7 +42,11 @@ def cmake_args(self): '-DCabana_ENABLE_OpenMP=%s' % ( 'On' if '+openmp' in self.spec else 'Off'), '-DCabana_ENABLE_Cuda=%s' % ( - 'On' if '+cuda' in self.spec else 'Off') + 'On' if '+cuda' in self.spec else 'Off'), + '-DCabana_ENABLE_MPI=%s' % ( + 'On' if '+mpi' in self.spec else 'Off'), + '-DBUILD_SHARED_LIBS=%s' % ( + 'On' if '+shared' in self.spec else 'Off') ] return options diff --git a/var/spack/repos/builtin/packages/caffe/package.py b/var/spack/repos/builtin/packages/caffe/package.py index e07f545f739..187bdc5764e 100644 --- a/var/spack/repos/builtin/packages/caffe/package.py +++ b/var/spack/repos/builtin/packages/caffe/package.py @@ -40,7 +40,7 @@ class Caffe(CMakePackage): depends_on('protobuf') depends_on('glog') depends_on('gflags') - depends_on('hdf5') + depends_on('hdf5 +hl +cxx') # Optional dependencies depends_on('opencv@3.2.0+core+highgui+imgproc', when='+opencv') @@ -81,4 +81,10 @@ def cmake_args(self): version = spec['python'].version.up_to(1) args.append('-Dpython_version=%s' % version) + if spec['hdf5'].satisfies('+mpi'): + args.extend([ + '-DCMAKE_C_COMPILER={0}'.format(self.spec['mpi'].mpicc), + '-DCMAKE_CXX_COMPILER={0}'.format(self.spec['mpi'].mpicxx) + ]) + return args diff --git a/var/spack/repos/builtin/packages/canal/package.py b/var/spack/repos/builtin/packages/canal/package.py new file mode 100644 index 00000000000..57df9ea7f26 --- /dev/null +++ b/var/spack/repos/builtin/packages/canal/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Canal(Package): + """Alibaba MySQL binlog incremental subscription & consumer components.""" + + homepage = "https://github.com/alibaba/canal/wiki" + url = "https://github.com/alibaba/canal/archive/canal-1.1.4.tar.gz" + + version('1.1.4', sha256='740e0adac56d7f281cba21eca173eef3e8d42aa3e0fb49709f92cb6a1451dfbc') + version('1.1.3', sha256='3fe75ca5eb5cb97eb35818426c1427542ccddb0de052cf154e948ef321822cbc') + version('1.1.2', sha256='097190f952bdf09b835ed68966f5a98fa8308322a6aab11c1bfd16cec1800cf2') + + depends_on('maven', type='build') + depends_on('java', type=('build', 'run')) + + def install(self, spec, prefix): + mvn = which('mvn') + mvn('install', '-DskipTests') + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/cardioid/package.py b/var/spack/repos/builtin/packages/cardioid/package.py index 73e980be247..c95035b5bf1 100644 --- a/var/spack/repos/builtin/packages/cardioid/package.py +++ b/var/spack/repos/builtin/packages/cardioid/package.py @@ -4,8 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * -import spack.environment as ev -import os class Cardioid(CMakePackage): @@ -32,20 +30,6 @@ class Cardioid(CMakePackage): depends_on('cmake@3.1:', type='build') depends_on('perl', type='build') - @property - def build_directory(self): - """Returns the directory to use when building the package - - :return: directory where to build the package - """ - env = ev.get_env(None, 'env status') - if not env: - basename = str(self.spec.arch) - else: - basename = env.name - - return os.path.join(self.stage.source_path, "build", basename) - def cmake_args(self): spec = self.spec args = [ diff --git a/var/spack/repos/builtin/packages/cassandra/package.py b/var/spack/repos/builtin/packages/cassandra/package.py index fab6d9c9c49..d6c4781cb4c 100644 --- a/var/spack/repos/builtin/packages/cassandra/package.py +++ 
b/var/spack/repos/builtin/packages/cassandra/package.py @@ -17,7 +17,10 @@ class Cassandra(Package): version('4.0-alpha2', sha256='6a8e99d8bc51efd500981c85c6aa547387b2fdbedecd692308f4632dbc1de3ba') version('4.0-alpha1', sha256='2fdf5e3d6c03a29d24a09cd52bb17575e5faccdc4c75a07edd63a9bf4f740105') - version('3.11.5', sha256='0ee3da12a2be86d7e03203fcc56c3589ddb38347b9cd031495a2b7fcf639fea6', preferred=True) + version('3.11.6', sha256='ce34edebd1b6bb35216ae97bd06d3efc338c05b273b78267556a99f85d30e45b', preferred=True) + version('3.11.5', sha256='0ee3da12a2be86d7e03203fcc56c3589ddb38347b9cd031495a2b7fcf639fea6') + + depends_on('java', type=('build', 'run')) def install(self, spec, prefix): install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/catalyst/package.py b/var/spack/repos/builtin/packages/catalyst/package.py index c00aca23e97..1a2967926d4 100644 --- a/var/spack/repos/builtin/packages/catalyst/package.py +++ b/var/spack/repos/builtin/packages/catalyst/package.py @@ -6,6 +6,7 @@ from spack import * import os import subprocess +import sys import llnl.util.tty as tty @@ -133,7 +134,12 @@ def do_stage(self, mirror_only=False): 'Editions') catalyst_source_dir = os.path.abspath(self.root_cmakelists_dir) - command = ['python', catalyst_script, + python_path = (os.path.realpath( + spec['python3'].command.path if '+python3' in self.spec else + spec['python'].command.path if '+python' in self.spec else + sys.executable)) + + command = [python_path, catalyst_script, '-r', self.stage.source_path, '-o', catalyst_source_dir] diff --git a/var/spack/repos/builtin/packages/catch2/package.py b/var/spack/repos/builtin/packages/catch2/package.py index 6961afad299..96b1534b183 100644 --- a/var/spack/repos/builtin/packages/catch2/package.py +++ b/var/spack/repos/builtin/packages/catch2/package.py @@ -14,6 +14,8 @@ class Catch2(CMakePackage): url = "https://github.com/catchorg/Catch2/archive/v2.9.1.tar.gz" maintainers = ['ax3l'] + version('2.12.1', 
sha256='e5635c082282ea518a8dd7ee89796c8026af8ea9068cd7402fb1615deacd91c3') + version('2.12.0', sha256='6606b754363d3a4521bfecf717dc1972c50dca282bd428dfb1370ec8b9c26918') version('2.11.3', sha256='9a6967138062688f04374698fce4ce65908f907d8c0fe5dfe8dc33126bd46543') version('2.11.2', sha256='a96203fa531092375678ad2d81c43317ee58c684787f24b2a55748f6c6839799') version('2.11.1', sha256='9af06ca5b10362620c6c9c729821367e1aeb0f76adfc7bc3a468da83db3c50c6') diff --git a/var/spack/repos/builtin/packages/ccache/package.py b/var/spack/repos/builtin/packages/ccache/package.py index 97ebf17441b..6bc53831a99 100644 --- a/var/spack/repos/builtin/packages/ccache/package.py +++ b/var/spack/repos/builtin/packages/ccache/package.py @@ -12,8 +12,9 @@ class Ccache(AutotoolsPackage): again.""" homepage = "https://ccache.samba.org/" - url = "https://github.com/ccache/ccache/releases/download/v3.7.1/ccache-3.7.1.tar.gz" + url = "https://github.com/ccache/ccache/releases/download/v3.7.9/ccache-3.7.9.tar.gz" + version('3.7.9', sha256='92838e2133c9e704fdab9ee2608dad86c99021278b9ac47d065aa8ff2ea8ce36') version('3.7.1', sha256='e562fcdbe766406b6fe4bf97ce5c001d2be8a17465f33bcddefc9499bbb057d8') version('3.3.4', sha256='1348b54e7c35dd2f8d17923389e03c546e599cfbde6459d2f31cf6f1521ec538') version('3.3.3', sha256='87a399a2267cfac3f36411fbc12ff8959f408cffd050ad15fe423df88e977e8f') diff --git a/var/spack/repos/builtin/packages/cctools/package.py b/var/spack/repos/builtin/packages/cctools/package.py index bf74c320ae6..ade997bbb89 100644 --- a/var/spack/repos/builtin/packages/cctools/package.py +++ b/var/spack/repos/builtin/packages/cctools/package.py @@ -12,9 +12,11 @@ class Cctools(AutotoolsPackage): machines from clusters, clouds, and grids. 
""" - homepage = "https://github.com/cooperative-computing-lab/cctools" - url = "https://github.com/cooperative-computing-lab/cctools/archive/release/7.1.2.tar.gz" + homepage = "https://cctools.readthedocs.io" + url = "https://ccl.cse.nd.edu/software/files/cctools-7.1.5-source.tar.gz" + version('7.1.5', sha256='c01415fd47a1d9626b6c556e0dc0a6b0d3cd67224fa060cabd44ff78eede1d8a') + version('7.1.3', sha256='b937878ab429dda31bc692e5d9ffb402b9eb44bb674c07a934bb769cee4165ba') version('7.1.2', sha256='ca871e9fe245d047d4c701271cf2b868e6e3a170e8834c1887157ed855985131') version('7.1.0', sha256='84748245db10ff26c0c0a7b9fd3ec20fbbb849dd4aadc5e8531fd1671abe7a81') version('7.0.18', sha256='5b6f3c87ae68dd247534a5c073eb68cb1a60176a7f04d82699fbc05e649a91c2') diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py index 4cb06132ac6..7b1929b7da5 100644 --- a/var/spack/repos/builtin/packages/cdo/package.py +++ b/var/spack/repos/builtin/packages/cdo/package.py @@ -17,7 +17,7 @@ class Cdo(AutotoolsPackage): maintainers = ['skosukhin'] - version('1.9.8', sha256='f2660ac6f8bf3fa071cf2a3a196b3ec75ad007deb3a782455e80f28680c5252a', url='https://code.mpimet.mpg.de/attachments/download/20286/cdo-1.9.8.tar.gz') + version('1.9.8', sha256='f2660ac6f8bf3fa071cf2a3a196b3ec75ad007deb3a782455e80f28680c5252a', url='https://code.mpimet.mpg.de/attachments/download/20826/cdo-1.9.8.tar.gz') version('1.9.7.1', sha256='3771952e065bcf935d43e492707370ed2a0ecb59a06bea24f9ab69d77943962c', url='https://code.mpimet.mpg.de/attachments/download/20124/cdo-1.9.7.1.tar.gz') version('1.9.6', sha256='b31474c94548d21393758caa33f35cf7f423d5dfc84562ad80a2bdcb725b5585', url='https://code.mpimet.mpg.de/attachments/download/19299/cdo-1.9.6.tar.gz') diff --git a/var/spack/repos/builtin/packages/charliecloud/package.py b/var/spack/repos/builtin/packages/charliecloud/package.py index 59f39fd5307..119e014176d 100644 --- 
a/var/spack/repos/builtin/packages/charliecloud/package.py +++ b/var/spack/repos/builtin/packages/charliecloud/package.py @@ -9,34 +9,44 @@ class Charliecloud(AutotoolsPackage): """Lightweight user-defined software stacks for HPC.""" - maintainers = ['j-ogas'] + maintainers = ['j-ogas', 'reidpr'] homepage = "https://hpc.github.io/charliecloud" url = "https://github.com/hpc/charliecloud/releases/download/v0.14/charliecloud-0.14.tar.gz" git = "https://github.com/hpc/charliecloud.git" version('master', branch='master') + version('0.16', sha256='6cdc21d414b6173090ac0a4c2c62a2a038c81659a75ae8f837b332bb7e6e9090') version('0.15', sha256='2163420d43c934151c4f44a188313bdb7f79e576d5a86ba64b9ea45f784b9921') version('0.14', sha256='4ae23c2d6442949e16902f9d5604dbd1d6059aeb5dd461b11fc5c74d49dcb194') + version('0.13', sha256='5740bff6e410ca99484c1bdf3dbe834c0f753c846d55c19d6162967a3e2718e0') depends_on('m4', type='build') depends_on('autoconf', type='build') depends_on('automake', type='build') depends_on('libtool', type='build') - depends_on('python@3.5:', type='run') - depends_on('py-lark-parser', type='run') - depends_on('py-requests', type='run') + # Use skopeo and umoci for older ch-grow version dependencies. + depends_on('skopeo', type='run', when='@0.10:0.13') + depends_on('umoci', type='run', when='@0.10:0.13') + depends_on('python+libxml2', type='run', when='@0.10:0.13') - # man pages and html docs variant + # Use python for ch-grow 0.14 and above version dependencies. + depends_on('python@3.5:', type='run', when='@0.14:') + depends_on('py-lark-parser', type='run', when='@0.14:') + depends_on('py-requests', type='run', when='@0.14:') + + # Man pages and html docs variant. variant('docs', default=False, description='Build man pages and html docs') depends_on('rsync', type='build', when='+docs') depends_on('py-sphinx', type='build', when='+docs') depends_on('py-sphinx-rtd-theme', type='build', when='+docs') + # See https://github.com/spack/spack/pull/16049. 
conflicts('platform=darwin', msg='This package does not build on macOS') - # bash automated testing harness (bats) + # Bash automated testing harness (bats). depends_on('bats@0.4.0', type='test') + depends_on('python@3.5:', type='test') def configure_args(self): diff --git a/var/spack/repos/builtin/packages/charmpp/fj.patch b/var/spack/repos/builtin/packages/charmpp/fj.patch new file mode 100755 index 00000000000..d5df680c0c2 --- /dev/null +++ b/var/spack/repos/builtin/packages/charmpp/fj.patch @@ -0,0 +1,61 @@ +diff --git a/src/arch/netlrts-linux-arm8/cc-fcc.h b/src/arch/netlrts-linux-arm8/cc-fcc.h +new file mode 100644 +index 000000000..f25b2250a +--- /dev/null ++++ b/src/arch/netlrts-linux-arm8/cc-fcc.h +@@ -0,0 +1,6 @@ ++#undef CMK_DLL_CC ++ ++#undef CMK_COMPILEMODE_ORIG ++#undef CMK_COMPILEMODE_ANSI ++#define CMK_COMPILEMODE_ORIG 1 ++#define CMK_COMPILEMODE_ANSI 0 +diff --git a/src/arch/netlrts-linux-arm8/cc-fcc.sh b/src/arch/netlrts-linux-arm8/cc-fcc.sh +new file mode 100644 +index 000000000..0efdc417e +--- /dev/null ++++ b/src/arch/netlrts-linux-arm8/cc-fcc.sh +@@ -0,0 +1,15 @@ ++# Assumes Fujitsu C/C++ compiler: ++CMK_CPP_CHARM="cpp -P" ++CMK_CPP_C="fcc$CMK_COMPILER_SUFFIX" ++CMK_CC="fcc$CMK_COMPILER_SUFFIX" ++CMK_LD="fcc$CMK_COMPILER_SUFFIX" ++CMK_CXX="FCC$CMK_COMPILER_SUFFIX" ++CMK_LDXX="FCC$CMK_COMPILER_SUFFIX" ++ ++CMK_CPP_C_FLAGS="-E" ++ ++CMK_PIC='' # empty string: will be reset to default by conv-config.sh ++CMK_PIE='-fPIE' ++ ++CMK_COMPILER='fcc' ++CMK_WARNINGS_ARE_ERRORS="-Werror" +diff --git a/src/arch/netlrts-linux-arm8/conv-mach-frt.h b/src/arch/netlrts-linux-arm8/conv-mach-frt.h +new file mode 100644 +index 000000000..d1ab56ad5 +--- /dev/null ++++ b/src/arch/netlrts-linux-arm8/conv-mach-frt.h +@@ -0,0 +1 @@ ++/* This file intentionally left blank */ +diff --git a/src/arch/netlrts-linux-arm8/conv-mach-frt.sh b/src/arch/netlrts-linux-arm8/conv-mach-frt.sh +new file mode 100644 +index 000000000..1de93a5ed +--- /dev/null ++++ 
b/src/arch/netlrts-linux-arm8/conv-mach-frt.sh +@@ -0,0 +1,15 @@ ++CMK_CF90="frt" ++ ++CMK_FPP="$CMK_CF90 -Ccpp" ++ ++CMK_CF90_FIXED="$CMK_CF90 -Fixed " ++ ++CMK_F90LIBS="-lfj90i -lfj90f -lfjsrcinfo -lfjsrcinfo -lelf" ++ ++CMK_CF77=$CMK_CF90 ++CMK_F77LIBS=$CMK_F90LIBS ++ ++CMK_MOD_NAME_ALLCAPS= ++CMK_MOD_EXT="mod" ++CMK_F90_USE_MODDIR=1 ++CMK_F90_MODINC="-I" diff --git a/var/spack/repos/builtin/packages/charmpp/package.py b/var/spack/repos/builtin/packages/charmpp/package.py index 158dd599378..a6c60ef11d3 100644 --- a/var/spack/repos/builtin/packages/charmpp/package.py +++ b/var/spack/repos/builtin/packages/charmpp/package.py @@ -40,6 +40,9 @@ class Charmpp(Package): # Patch is no longer needed in versions 6.8.0+ patch("mpi.patch", when="@:6.7.1") + # support Fujitsu compiler + patch("fj.patch", when="%fj") + # Ignore compiler warnings while configuring patch("strictpass.patch", when="@:6.8.2") @@ -58,10 +61,18 @@ class Charmpp(Package): "backend", default="netlrts", values=("mpi", "multicore", "netlrts", "verbs", "gni", - "ofi", "pami", "pamilrts"), + "ucx", "ofi", "pami", "pamilrts"), description="Set the backend to use" ) + # Process management interface + variant( + "pmi", + default="none", + values=("none", "simplepmi", "slurmpmi", "slurmpmi2", "pmix"), + description="The ucx/ofi/gni backends need PMI to run!" + ) + # Other options variant("papi", default=False, description="Enable PAPI integration") variant("syncft", default=False, description="Compile with Charm++ fault tolerance support") @@ -78,29 +89,23 @@ class Charmpp(Package): variant("production", default=True, description="Build charm++ with all optimizations") variant("tracing", default=False, description="Enable tracing modules") - # FIXME: backend=mpi also provides mpi, but spack does not support - # depends_on("mpi") and provides("mpi") in the same package currently. 
- for b in ['multicore', 'netlrts', 'verbs', 'gni', 'ofi', 'pami', - 'pamilrts']: - provides('mpi@2', when='@6.7.1: build-target=AMPI backend={0}'.format(b)) - provides('mpi@2', when='@6.7.1: build-target=LIBS backend={0}'.format(b)) - - def setup_dependent_build_environment(self, env, dependent_spec): - env.set('MPICC', self.prefix.bin.ampicc) - env.set('MPICXX', self.prefix.bin.ampicxx) - env.set('MPIF77', self.prefix.bin.ampif77) - env.set('MPIF90', self.prefix.bin.ampif90) - - def setup_dependent_package(self, module, dependent_spec): - self.spec.mpicc = self.prefix.bin.ampicc - self.spec.mpicxx = self.prefix.bin.ampicxx - self.spec.mpifc = self.prefix.bin.ampif90 - self.spec.mpif77 = self.prefix.bin.ampif77 - depends_on("mpi", when="backend=mpi") depends_on("papi", when="+papi") depends_on("cuda", when="+cuda") + depends_on("ucx", when="backend=ucx") + depends_on("slurm@:17-11-9-2", when="pmi=slurmpmi") + depends_on("slurm@17-11-9-2:", when="pmi=slurmpmi2") + + # FIXME : As of now spack's OpenMPI recipe does not have a PMIx variant + # But if users have external installs of OpenMPI with PMIx support, this + # will allow them to build charm++ with it. 
+ depends_on("openmpi", when="pmi=pmix") + + depends_on("mpi", when="pmi=simplepmi") + depends_on("mpi", when="pmi=slurmpmi") + depends_on("mpi", when="pmi=slurmpmi2") + # Git versions of Charm++ require automake and autoconf depends_on("automake", when="@develop") depends_on("autoconf", when="@develop") @@ -108,11 +113,12 @@ def setup_dependent_package(self, module, dependent_spec): conflicts("~tracing", "+papi") conflicts("backend=multicore", "+smp") + conflicts("backend=ucx", when="@:6.9.99") - def install(self, spec, prefix): - target = spec.variants["build-target"].value - + @property + def charmarch(self): plat = sys.platform + if plat.startswith("linux"): plat = "linux" elif plat.startswith("win"): @@ -123,12 +129,13 @@ def install(self, spec, prefix): plat = "cnk" mach = platform.machine() + if mach.startswith("ppc"): mach = "ppc" elif mach.startswith("arm"): mach = "arm" - comm = spec.variants['backend'].value + comm = self.spec.variants['backend'].value # Define Charm++ version names for various (plat, mach, comm) # combinations. Note that not all combinations are supported. 
@@ -145,6 +152,7 @@ def install(self, spec, prefix): ("linux", "x86_64", "netlrts"): "netlrts-linux-x86_64", ("linux", "x86_64", "verbs"): "verbs-linux-x86_64", ("linux", "x86_64", "ofi"): "ofi-linux-x86_64", + ("linux", "x86_64", "ucx"): "ucx-linux-x86_64", ("linux", "x86_64", "uth"): "uth-linux-x86_64", ("linux", "ppc", "mpi"): "mpi-linux-ppc", ("linux", "ppc", "multicore"): "multicore-linux-ppc", @@ -153,6 +161,8 @@ def install(self, spec, prefix): ("linux", "ppc", "verbs"): "verbs-linux-ppc64le", ("linux", "arm", "netlrts"): "netlrts-linux-arm7", ("linux", "arm", "multicore"): "multicore-arm7", + ("linux", "aarch64", "netlrts"): "netlrts-linux-arm8", + ("linux", "aarch64", "multicore"): "multicore-arm8", ("win", "x86_64", "mpi"): "mpi-win-x86_64", ("win", "x86_64", "multicore"): "multicore-win-x86_64", ("win", "x86_64", "netlrts"): "netlrts-win-x86_64", @@ -167,7 +177,47 @@ def install(self, spec, prefix): "The communication mechanism %s is not supported " "on a %s platform with a %s CPU" % (comm, plat, mach)) - version = versions[(plat, mach, comm)] + + return versions[(plat, mach, comm)] + + # FIXME: backend=mpi also provides mpi, but spack does not support + # depends_on("mpi") and provides("mpi") in the same package currently. + # for b in ['multicore', 'netlrts', 'verbs', 'gni', 'ofi', 'pami', + # 'pamilrts']: + # provides('mpi@2', when='@6.7.1: + # build-target=AMPI backend={0}'.format(b)) + # provides('mpi@2', when='@6.7.1: + # build-target=LIBS backend={0}'.format(b)) + + def install(self, spec, prefix): + + if not("backend=mpi" in self.spec) or \ + not("backend=netlrts" in self.spec): + if ("+pthreads" in self.spec): + raise InstallError("The pthreads option is only\ + available on the Netlrts and MPI \ + network layers.") + + if ("backend=ucx" in self.spec) or \ + ("backend=ofi" in self.spec) or \ + ("backend=gni" in self.spec): + if ("pmi=none" in self.spec): + raise InstallError("The UCX/OFI/GNI backends need \ + PMI to run. Please add pmi=... 
\ + Note that PMIx is the preferred \ + option.") + + if ("pmi=simplepmi" in self.spec) or \ + ("pmi=slurmpmi" in self.spec) or \ + ("pmi=slurmpmi2" in self.spec): + if ("^openmpi" in self.spec): + raise InstallError("To use any process management \ + interface other than PMIx, \ + a non OpenMPI based MPI must be \ + present on the system") + + target = spec.variants["build-target"].value + builddir = prefix + "/" + str(self.charmarch) # We assume that Spack's compiler wrappers make this work. If # not, then we need to query the compiler vendor from Spack @@ -176,9 +226,17 @@ def install(self, spec, prefix): os.path.basename(self.compiler.cc), os.path.basename(self.compiler.fc), "-j%d" % make_jobs, - "--destination=%s" % prefix, + "--destination=%s" % builddir, ] + if "pmi=slurmpmi" in spec: + options.append("slurmpmi") + if "pmi=slurmpmi2" in spec: + options.append("slurmpmi2") + if "pmi=pmix" in spec: + options.append("ompipmix") + options.extend(["--basedir=%s" % spec["openmpi"].prefix]) + if 'backend=mpi' in spec: # in intelmpi /include and /lib fails so --basedir # cannot be used @@ -190,6 +248,9 @@ def install(self, spec, prefix): '--libdir={0}'.format(libdir) for libdir in spec["mpi"].libs.directories ]) + + if "backend=ucx" in spec: + options.extend(["--basedir=%s" % spec["ucx"].prefix]) if "+papi" in spec: options.extend(["papi", "--basedir=%s" % spec["papi"].prefix]) if "+smp" in spec: @@ -223,11 +284,11 @@ def install(self, spec, prefix): # could dissect the build script; the build instructions say # this wouldn't be difficult. build = Executable(join_path(".", "build")) - build(target, version, *options) + build(target, self.charmarch, *options) # Charm++'s install script does not copy files, it only creates # symbolic links. Fix this. 
- for dirpath, dirnames, filenames in os.walk(prefix): + for dirpath, dirnames, filenames in os.walk(builddir): for filename in filenames: filepath = join_path(dirpath, filename) if os.path.islink(filepath): @@ -239,18 +300,32 @@ def install(self, spec, prefix): os.rename(tmppath, filepath) except (IOError, OSError): pass - shutil.rmtree(join_path(prefix, "tmp")) + shutil.rmtree(join_path(builddir, "tmp")) - # A broken 'doc' link in the prefix can break the build. - # Remove it and replace it if it is broken. - try: - os.stat(prefix.doc) - except OSError: - os.remove(prefix.doc) - mkdirp(prefix.doc) + if self.spec.satisfies('@6.9.99'): + # A broken 'doc' link in the prefix can break the build. + # Remove it and replace it if it is broken. + try: + os.stat(prefix.doc) + except OSError: + os.remove(prefix.doc) + mkdirp(prefix.doc) @run_after('install') @on_package_attributes(run_tests=True) def check_build(self): make('-C', join_path(self.stage.source_path, 'charm/tests'), 'test', parallel=False) + + def setup_dependent_build_environment(self, env, dependent_spec): + env.set('MPICC', self.prefix.bin.ampicc) + env.set('MPICXX', self.prefix.bin.ampicxx) + env.set('MPIF77', self.prefix.bin.ampif77) + env.set('MPIF90', self.prefix.bin.ampif90) + + def setup_dependent_package(self, module, dependent_spec): + self.spec.mpicc = self.prefix.bin.ampicc + self.spec.mpicxx = self.prefix.bin.ampicxx + self.spec.mpifc = self.prefix.bin.ampif90 + self.spec.mpif77 = self.prefix.bin.ampif77 + self.spec.charmarch = self.charmarch diff --git a/var/spack/repos/builtin/packages/chill/package.py b/var/spack/repos/builtin/packages/chill/package.py index d33ab4a9591..96afa55379b 100644 --- a/var/spack/repos/builtin/packages/chill/package.py +++ b/var/spack/repos/builtin/packages/chill/package.py @@ -19,15 +19,16 @@ class Chill(AutotoolsPackage): version('0.3', sha256='574b622368a6bfaadbe9c1fa02fabefdc6c006069246f67d299f943b7e1d8aa3') depends_on('boost@1.66.0 cxxstd=11', type='build') - 
depends_on('rose@0.9.10.0 +cxx11', type='build') + depends_on('rose@0.9.13.0: +cxx11', type=('build', 'run')) depends_on('autoconf', type='build') depends_on('automake@1.14:', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') - depends_on('iegenlib', type='build') - depends_on('bison@3.4', type='build') + depends_on('iegenlib', type=('build', 'run')) + depends_on('bison@3.4.2:', type='build') depends_on('flex', type='build') - depends_on('python') + # Does not currently work with Python3 + depends_on('python@2.7:2.8') build_directory = 'spack-build' diff --git a/var/spack/repos/builtin/packages/clinfo/package.py b/var/spack/repos/builtin/packages/clinfo/package.py new file mode 100644 index 00000000000..1fb78b42aa4 --- /dev/null +++ b/var/spack/repos/builtin/packages/clinfo/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Clinfo(MakefilePackage): + """Print all known information about all available OpenCL platforms and + devices in the system.""" + + homepage = "https://github.com/Oblomov/clinfo" + url = "https://github.com/Oblomov/clinfo/archive/2.2.18.04.06.tar.gz" + + maintainers = ['matthiasdiener'] + + version('2.2.18.04.06', sha256='f77021a57b3afcdebc73107e2254b95780026a9df9aa4f8db6aff11c03f0ec6c') + + depends_on('opencl') + + def install(self, spec, prefix): + make('install', 'PREFIX={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index 8f1b64af60b..d839ff3f332 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -18,13 +18,32 @@ class Clingo(CMakePackage): homepage = "https://potassco.org/clingo/" url =
"https://github.com/potassco/clingo/archive/v5.2.2.tar.gz" + git = 'https://github.com/potassco/clingo.git' + maintainers = ["tgamblin"] + + version('develop', branch='wip', submodules=True) version('5.4.0', sha256='e2de331ee0a6d254193aab5995338a621372517adcf91568092be8ac511c18f3') version('5.3.0', sha256='b0d406d2809352caef7fccf69e8864d55e81ee84f4888b0744894977f703f976') version('5.2.2', sha256='da1ef8142e75c5a6f23c9403b90d4f40b9f862969ba71e2aaee9a257d058bfcf') - depends_on('doxygen', type=('build')) - depends_on('python') + variant("docs", default=False, description="build documentation with Doxyegen") + variant("python", default=True, description="build with python bindings") + + depends_on('doxygen', type="build", when="+docs") + depends_on('re2c@0.13:', type="build") + depends_on('bison@2.5:', type="build") + + depends_on('python', type=("build", "link", "run"), when="+python") + + def patch(self): + # Doxygen is optional but can't be disabled with a -D, so patch + # it out if it's really supposed to be disabled + if '+docs' not in self.spec: + filter_file(r'find_package\(Doxygen\)', + 'message("Doxygen disabled for Spack build.")', + 'clasp/CMakeLists.txt', + 'clasp/libpotassco/CMakeLists.txt') def cmake_args(self): try: @@ -32,8 +51,11 @@ def cmake_args(self): except UnsupportedCompilerFlag: InstallError('clingo requires a C++14-compliant C++ compiler') - args = ['-DCLINGO_BUILD_WITH_PYTHON=ON', - '-DCLING_BUILD_PY_SHARED=ON', - '-DPYCLINGO_USE_INSTALL_PREFIX=ON', - '-DCLINGO_BUILD_WITH_LUA=OFF'] - return args + return [ + '-DCLINGO_REQUIRE_PYTHON=ON', + '-DCLINGO_BUILD_WITH_PYTHON=ON', + '-DCLINGO_BUILD_PY_SHARED=ON', + '-DPYCLINGO_USER_INSTALL=OFF', + '-DPYCLINGO_USE_INSTALL_PREFIX=ON', + '-DCLINGO_BUILD_WITH_LUA=OFF' + ] diff --git a/var/spack/repos/builtin/packages/cloverleaf/package.py b/var/spack/repos/builtin/packages/cloverleaf/package.py index a3d4bd02b0b..ad1360cb306 100644 --- a/var/spack/repos/builtin/packages/cloverleaf/package.py +++ 
b/var/spack/repos/builtin/packages/cloverleaf/package.py @@ -16,7 +16,7 @@ class Cloverleaf(MakefilePackage): """ homepage = "http://uk-mac.github.io/CloverLeaf" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/CloverLeaf-1.1.tar.gz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/CloverLeaf/CloverLeaf-1.1.tar.gz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/cloverleaf3d/package.py b/var/spack/repos/builtin/packages/cloverleaf3d/package.py index 17638b54e53..4e745412532 100644 --- a/var/spack/repos/builtin/packages/cloverleaf3d/package.py +++ b/var/spack/repos/builtin/packages/cloverleaf3d/package.py @@ -17,7 +17,7 @@ class Cloverleaf3d(MakefilePackage): """ homepage = "http://uk-mac.github.io/CloverLeaf3D/" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/CloverLeaf3D-1.0.tar.gz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/CloverLeaf3D/CloverLeaf3D-1.0.tar.gz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/cmake/fujitsu_add_linker_option.patch b/var/spack/repos/builtin/packages/cmake/fujitsu_add_linker_option.patch new file mode 100644 index 00000000000..4a0cf7abbf2 --- /dev/null +++ b/var/spack/repos/builtin/packages/cmake/fujitsu_add_linker_option.patch @@ -0,0 +1,10 @@ +--- spack-src/Modules/FortranCInterface/Verify/CMakeLists.txt.org 2020-06-05 15:54:59.559043595 +0900 ++++ spack-src/Modules/FortranCInterface/Verify/CMakeLists.txt 2020-06-05 15:58:28.150062948 +0900 +@@ -4,6 +4,7 @@ + cmake_minimum_required(VERSION ${CMAKE_VERSION}) + project(VerifyFortranC C Fortran) + ++set (CMAKE_EXE_LINKER_FLAGS "--linkfortran") + option(VERIFY_CXX "Whether to verify C++ and Fortran" OFF) + if(VERIFY_CXX) + enable_language(CXX) diff --git a/var/spack/repos/builtin/packages/cmake/ignore_crayxc_warnings.patch b/var/spack/repos/builtin/packages/cmake/ignore_crayxc_warnings.patch new file mode 100644 index 00000000000..85928f0e84e --- 
/dev/null +++ b/var/spack/repos/builtin/packages/cmake/ignore_crayxc_warnings.patch @@ -0,0 +1,11 @@ +diff --git a/Source/Checks/cm_cxx_features.cmake b/Source/Checks/cm_cxx_features.cmake +index fb68ed78c9..c6c1ba667f 100644 +--- a/Source/Checks/cm_cxx_features.cmake ++++ b/Source/Checks/cm_cxx_features.cmake +@@ -17,2 +17,6 @@ function(cm_check_cxx_feature name) + set(check_output "${OUTPUT}") ++ # Filter out libhugetlbfs warnings ++ string(REGEX REPLACE "[^\n]*libhugetlbfs [^\n]*: WARNING[^\n]*" "" check_output "${check_output}") ++ # Filter out icpc warnings ++ string(REGEX REPLACE "[^\n]*icpc: command line warning #10121: overriding [^\n]*" "" check_output "${check_output}") + # Filter out MSBuild output that looks like a warning. diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index ce436d2ea9f..933f5e1995a 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -5,6 +5,9 @@ from spack import * +import re +import os + class Cmake(Package): """A cross-platform, open-source build system. 
CMake is a family of @@ -13,6 +16,9 @@ class Cmake(Package): url = 'https://github.com/Kitware/CMake/releases/download/v3.15.5/cmake-3.15.5.tar.gz' maintainers = ['chuckatkins'] + executables = ['cmake'] + + version('3.17.1', sha256='3aa9114485da39cbd9665a0bfe986894a282d5f0882b1dea960a739496620727') version('3.17.0', sha256='b74c05b55115eacc4fa2b77a814981dbda05cdc95a53e279fe16b7b272f00847') version('3.16.5', sha256='5f760b50b8ecc9c0c37135fae5fbf00a2fef617059aa9d61c1bb91653e5a8bfc') version('3.16.2', sha256='8c09786ec60ca2be354c29829072c38113de9184f29928eb9da8446a5f2ce6a9') @@ -92,6 +98,13 @@ class Cmake(Package): variant('openssl', default=True, description="Enables CMake's OpenSSL features") variant('ncurses', default=True, description='Enables the build of the ncurses gui') + # Tries to build an Objective-C file from libuv with GCC's C frontend + # https://gitlab.kitware.com/cmake/cmake/-/issues/20620 + # https://github.com/libuv/libuv/issues/2805 + conflicts('%gcc platform=darwin', + msg='CMake does not compile with GCC on macOS yet, use clang. ' + 'See: https://gitlab.kitware.com/cmake/cmake/-/issues/20620') + # Really this should conflict since it's enabling or disabling openssl for # CMake's internal copy of curl. Ideally we'd want a way to have the # openssl variant disabled when ~ownlibs but there's not really a way to @@ -124,6 +137,15 @@ class Cmake(Package): # https://gitlab.kitware.com/cmake/cmake/issues/18232 patch('nag-response-files.patch', when='@3.7:3.12') + # Cray libhugetlbfs and icpc warnings failing CXX tests + # https://gitlab.kitware.com/cmake/cmake/-/merge_requests/4698 + # https://gitlab.kitware.com/cmake/cmake/-/merge_requests/4681 + patch('ignore_crayxc_warnings.patch', when='@3.7:3.17.2') + + # The Fujitsu compiler requires the '--linkfortran' option + # to combine C++ and Fortran programs. 
+ patch('fujitsu_add_linker_option.patch', when='%fj') + conflicts('+qt', when='^qt@5.4.0') # qt-5.4.0 has broken CMake modules # https://gitlab.kitware.com/cmake/cmake/issues/18166 @@ -133,6 +155,22 @@ class Cmake(Package): phases = ['bootstrap', 'build', 'install'] + @classmethod + def determine_spec_details(cls, prefix, exes_in_prefix): + exe_to_path = dict( + (os.path.basename(p), p) for p in exes_in_prefix + ) + if 'cmake' not in exe_to_path: + return None + + cmake = spack.util.executable.Executable(exe_to_path['cmake']) + output = cmake('--version', output=str) + if output: + match = re.search(r'cmake.*version\s+(\S+)', output) + if match: + version_str = match.group(1) + return Spec('cmake@{0}'.format(version_str)) + def flag_handler(self, name, flags): if name == 'cxxflags' and self.compiler.name == 'fj': cxx11plus_flags = (self.compiler.cxx11_flag, @@ -204,3 +242,9 @@ def test(self): def install(self, spec, prefix): make('install') + + if spec.satisfies('%fj'): + for f in find(self.prefix, 'FindMPI.cmake', recursive=True): + filter_file('mpcc_r)', 'mpcc_r mpifcc)', f, string=True) + filter_file('mpc++_r)', 'mpc++_r mpiFCC)', f, string=True) + filter_file('mpifc)', 'mpifc mpifrt)', f, string=True) diff --git a/var/spack/repos/builtin/packages/cmaq/package.py b/var/spack/repos/builtin/packages/cmaq/package.py new file mode 100644 index 00000000000..2f8d35e5c48 --- /dev/null +++ b/var/spack/repos/builtin/packages/cmaq/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Cmaq(Package): + """Code base for the U.S. 
EPA's Community Multiscale Air Quality Model + (CMAQ).""" + homepage = "http://www.epa.gov/CMAQ" + url = "https://github.com/USEPA/CMAQ/archive/CMAQv5.3.1_19Dec2019.tar.gz" + + version('5.3.1', sha256='659156bba27f33010e0fdc157a8d33f3b5b779b95511e2ade870284b6bcb4bc8', + url='https://github.com/USEPA/CMAQ/archive/CMAQv5.3.1_19Dec2019.tar.gz') + version('5.3', sha256='e245c291c7e88d481b13f577d1af9aeb5aef4de8c59f7fa06fa41d19bb2ed18c', + url='https://github.com/USEPA/CMAQ/archive/CMAQv5.3_27Aug2019.tar.gz') + + def install(self, spec, prefix): + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/cmockery/package.py b/var/spack/repos/builtin/packages/cmockery/package.py new file mode 100644 index 00000000000..bb4cf5b22cf --- /dev/null +++ b/var/spack/repos/builtin/packages/cmockery/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Cmockery(AutotoolsPackage): + """A lightweight library to simplify and generalize the process of + writing unit tests for C applications.""" + + homepage = "https://github.com/google/cmockery" + url = "https://github.com/google/cmockery/archive/v0.1.2.tar.gz" + + version('0.1.2', sha256='d40135ae9179201c01bde725fa64fc32d86b5899972e0ce4ad51668d261edbae') + version('0.1.1', sha256='a801d17976f781fff6dc49042ff109e55ca4ebe8efb13757fa1a511ca52316be') + version('0.1.0', sha256='9e017d48e56ab9d2ebcf5286fa54e37d42fe308d3c01fbc367793da2b9ad95e7') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./autogen.sh') diff --git a/var/spack/repos/builtin/packages/codec2/package.py b/var/spack/repos/builtin/packages/codec2/package.py new file mode 100644 index 
00000000000..55a526c8540 --- /dev/null +++ b/var/spack/repos/builtin/packages/codec2/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Codec2(CMakePackage): + """Open source speech codec designed for communications quality speech + between 450 and 3200 bit/s. The main application is low bandwidth + HF/VHF digital radio.""" + + homepage = "http://www.rowetel.com/codec2.html" + url = "https://github.com/drowe67/codec2/archive/v0.9.2.tar.gz" + + version('0.9.2', sha256='19181a446f4df3e6d616b50cabdac4485abb9cd3242cf312a0785f892ed4c76c') diff --git a/var/spack/repos/builtin/packages/collectd/package.py b/var/spack/repos/builtin/packages/collectd/package.py new file mode 100644 index 00000000000..35472a4a675 --- /dev/null +++ b/var/spack/repos/builtin/packages/collectd/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Collectd(AutotoolsPackage): + """The system statistics collection daemon.""" + + homepage = "http://collectd.org/" + url = "https://github.com/collectd/collectd/archive/collectd-5.11.0.tar.gz" + + version('5.11.0', sha256='639676d09c5980ceea90b5a97811a9647d94e368528cce7cea3d43f0f308465d') + version('5.10.0', sha256='bcde95a3997b5eee448d247d9414854994b3592cb9fb4fecd6ff78082cc28a1b') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/colordiff/package.py b/var/spack/repos/builtin/packages/colordiff/package.py index 7c7073cadc4..5d97c61d475 100644 --- a/var/spack/repos/builtin/packages/colordiff/package.py +++ b/var/spack/repos/builtin/packages/colordiff/package.py @@ -10,8 +10,9 @@ class Colordiff(Package): """Colorful diff utility.""" homepage = "https://www.colordiff.org" - url = "https://www.colordiff.org/colordiff-1.0.18.tar.gz" + url = "https://www.colordiff.org/archive/colordiff-1.0.18.tar.gz" + version('1.0.19', sha256='46e8c14d87f6c4b77a273cdd97020fda88d5b2be42cf015d5d84aca3dfff3b19') version('1.0.18', sha256='29cfecd8854d6e19c96182ee13706b84622d7b256077df19fbd6a5452c30d6e0') depends_on('perl') diff --git a/var/spack/repos/builtin/packages/conda4aarch64/package.py b/var/spack/repos/builtin/packages/conda4aarch64/package.py new file mode 100644 index 00000000000..fe7ede5c147 --- /dev/null +++ b/var/spack/repos/builtin/packages/conda4aarch64/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Conda4aarch64(Package): + """Conda for aarch64""" + + homepage = "https://anaconda.org/c4aarch64/" + url = "https://github.com/jjhelmus/conda4aarch64/releases/download/1.0.0/c4aarch64_installer-1.0.0-Linux-aarch64.sh" + + conflicts('arch=x86_64:') + conflicts('arch=ppc64:') + conflicts('arch=ppc64le:') + + version('1.0.0', sha256='165565dc7e7cc74c9ef8fd75d309fb7b81a6d1bc5e2eab48aafa7b836a7427af', expand=False) + + def install(self, spec, prefix): + conda_script = self.stage.archive_file + bash = which('bash') + bash(conda_script, '-b', '-f', '-p', self.prefix) diff --git a/var/spack/repos/builtin/packages/conduit/package.py b/var/spack/repos/builtin/packages/conduit/package.py index ed1bd4f3d31..8481ddce82f 100644 --- a/var/spack/repos/builtin/packages/conduit/package.py +++ b/var/spack/repos/builtin/packages/conduit/package.py @@ -147,6 +147,12 @@ class Conduit(Package): # build phases used by this package phases = ["configure", "build", "install"] + def flag_handler(self, name, flags): + if name in ('cflags', 'cxxflags', 'fflags'): + # the package manages these flags in another way + return (None, None, None) + return (flags, None, None) + def setup_build_environment(self, env): env.set('CTEST_OUTPUT_ON_FAILURE', '1') @@ -285,8 +291,10 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): f_compiler = None if self.compiler.fc: - # even if this is set, it may not exist so do one more sanity check - f_compiler = which(env["SPACK_FC"]) + # even if this is set, it may not exist + # do one more sanity check + if os.path.isfile(env["SPACK_FC"]): + f_compiler = env["SPACK_FC"] ####################################################################### # By directly fetching the names of the actual compilers we appear @@ -299,6 +307,9 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): if "SYS_TYPE" in env: sys_type = env["SYS_TYPE"] + # are we on a specific 
machine + on_blueos = 'blueos' in sys_type + ############################################## # Find and record what CMake is used ############################################## @@ -335,7 +346,7 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): if "+fortran" in spec and f_compiler is not None: cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "ON")) cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER", - f_compiler.path)) + f_compiler)) else: cfg.write("# no fortran compiler found\n\n") cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "OFF")) @@ -345,15 +356,34 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): else: cfg.write(cmake_cache_entry("BUILD_SHARED_LIBS", "OFF")) - # extra fun for blueos - if 'blueos_3' in sys_type and "+fortran" in spec: - if 'xl@coral' in os.getenv('SPACK_COMPILER_SPEC', ""): - # Fix missing std linker flag in xlc compiler - cfg.write(cmake_cache_entry("BLT_FORTRAN_FLAGS", - "-WF,-C! -qxlf2003=polymorphic")) - # Conduit can't link C++ into fortran for this spec, but works - # fine in host code - cfg.write(cmake_cache_entry("ENABLE_TESTS", "OFF")) + # use global spack compiler flags + cppflags = ' '.join(spec.compiler_flags['cppflags']) + if cppflags: + # avoid always ending up with ' ' with no flags defined + cppflags += ' ' + cflags = cppflags + ' '.join(spec.compiler_flags['cflags']) + if cflags: + cfg.write(cmake_cache_entry("CMAKE_C_FLAGS", cflags)) + cxxflags = cppflags + ' '.join(spec.compiler_flags['cxxflags']) + if cxxflags: + cfg.write(cmake_cache_entry("CMAKE_CXX_FLAGS", cxxflags)) + fflags = ' '.join(spec.compiler_flags['fflags']) + if fflags: + cfg.write(cmake_cache_entry("CMAKE_Fortran_FLAGS", fflags)) + + if ((f_compiler is not None) + and ("gfortran" in f_compiler) + and ("clang" in cpp_compiler)): + libdir = os.path.join(os.path.dirname( + os.path.dirname(f_compiler)), "lib") + flags = "" + for _libpath in [libdir, libdir + "64"]: + if os.path.exists(_libpath): + flags += " 
-Wl,-rpath,{0}".format(_libpath) + description = ("Adds a missing libstdc++ rpath") + if flags: + cfg.write(cmake_cache_entry("BLT_EXE_LINKER_FLAGS", flags, + description)) ####################### # Unit Tests @@ -363,6 +393,33 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): else: cfg.write(cmake_cache_entry("ENABLE_TESTS", "OFF")) + # extra fun for blueos + if on_blueos: + # All of BlueOS compilers report clang due to nvcc, + # override to proper compiler family + if "xlc" in c_compiler: + cfg.write(cmake_cache_entry("CMAKE_C_COMPILER_ID", "XL")) + if "xlC" in cpp_compiler: + cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER_ID", "XL")) + + if "+fortran" in spec: + if "xlf" in f_compiler: + cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER_ID", + "XL")) + + if 'xl@coral' in os.getenv('SPACK_COMPILER_SPEC', ""): + # Fix missing std linker flag in xlc compiler + flags = "-WF,-C! -qxlf2003=polymorphic" + cfg.write(cmake_cache_entry("BLT_FORTRAN_FLAGS", + flags)) + # Grab lib directory for the current fortran compiler + libdir = os.path.join(os.path.dirname( + os.path.dirname(f_compiler)), "lib") + flags = "${BLT_EXE_LINKER_FLAGS} -lstdc++ " + flags += "-Wl,-rpath,{0} -Wl,-rpath,{0}64".format(libdir) + cfg.write(cmake_cache_entry("BLT_EXE_LINKER_FLAGS", + flags)) + ####################### # Python ####################### @@ -412,14 +469,17 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): # use those for mpi wrappers, b/c spec['mpi'].mpicxx # etc make return the spack compiler wrappers # which can trip up mpi detection in CMake 3.14 - if cpp_compiler == "CC": + if spec['mpi'].mpicc == spack_cc: mpicc_path = "cc" mpicxx_path = "CC" mpifc_path = "ftn" cfg.write(cmake_cache_entry("ENABLE_MPI", "ON")) cfg.write(cmake_cache_entry("MPI_C_COMPILER", mpicc_path)) cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", mpicxx_path)) - cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", mpifc_path)) + if "+fortran" in spec: + 
cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", + mpifc_path)) + mpiexe_bin = join_path(spec['mpi'].prefix.bin, 'mpiexec') if os.path.isfile(mpiexe_bin): # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE @@ -456,12 +516,6 @@ def create_host_config(self, spec, prefix, py_site_pkgs_dir=None): if "+hdf5" in spec: cfg.write(cmake_cache_entry("HDF5_DIR", spec['hdf5'].prefix)) - # extra fun for BG/Q - if 'bgqos_0' in sys_type: - cfg.write(cmake_cache_entry('HDF5_C_LIBRARY_m', - '-lm', 'STRING')) - cfg.write(cmake_cache_entry('HDF5_C_LIBRARY_dl', - '-ldl', 'STRING')) else: cfg.write("# hdf5 not built by spack \n") diff --git a/var/spack/repos/builtin/packages/coreutils/package.py b/var/spack/repos/builtin/packages/coreutils/package.py index 1bc3ee1e724..569831dac7a 100644 --- a/var/spack/repos/builtin/packages/coreutils/package.py +++ b/var/spack/repos/builtin/packages/coreutils/package.py @@ -35,4 +35,4 @@ def configure_args(self): configure_args.append('--without-gmp') configure_args.append('gl_cv_func_ftello_works=yes') - return configure_args + return configure_args diff --git a/var/spack/repos/builtin/packages/cosma/package.py b/var/spack/repos/builtin/packages/cosma/package.py index efb81035da9..18d69605ee4 100644 --- a/var/spack/repos/builtin/packages/cosma/package.py +++ b/var/spack/repos/builtin/packages/cosma/package.py @@ -7,37 +7,30 @@ from spack import * -class Cosma(CMakePackage): +class Cosma(CMakePackage, CudaPackage): """ Distributed Communication-Optimal Matrix-Matrix Multiplication Library """ - maintainers = ['teonnik', 'kabicm'] + maintainers = ['haampie', 'kabicm', 'teonnik'] homepage = 'https://github.com/eth-cscs/COSMA' - url = 'https://github.com/eth-cscs/COSMA/releases/download/v2.0.2/cosma.tar.gz' + url = 'https://github.com/eth-cscs/COSMA/releases/download/v2.2.0/cosma.tar.gz' git = 'https://github.com/eth-cscs/COSMA.git' # note: The default archives produced with github do not have the archives # of the submodules. 
version('master', branch='master', submodules=True) + version('2.2.0', sha256='1eb92a98110df595070a12193b9221eecf9d103ced8836c960f6c79a2bd553ca') + version('2.0.7', sha256='8d70bfcbda6239b6a8fbeaca138790bbe58c0c3aa576879480d2632d4936cf7e') version('2.0.2', sha256='4f3354828bc718f3eef2f0098c3bdca3499297497a220da32db1acd57920c68d') - # note: this version fails to build at the moment - # version('1.0.0', - # url='https://github.com/eth-cscs/COSMA/releases/download/1.0/cosma.tar.gz', - # sha256='c142104258dcca4c17fa7faffc2990a08d2777235c7980006e93c5dca51061f6') - variant('cuda', default=False, - description='Build with the CUBLAS back end.') variant('scalapack', default=False, - description='Build with ScaLAPACK support.') + description='Build with ScaLAPACK API.') depends_on('cmake@3.12:', type='build') depends_on('mpi@3:') depends_on('blas', when='~cuda') depends_on('scalapack', when='+scalapack') - # COSMA is written entirely in C++, it may use cublasXt but a CUDA capable - # compiler is not needed. There is no need for CudaPackage in this recipe. 
- depends_on('cuda', when='+cuda') def setup_build_environment(self, env): if '+cuda' in self.spec: @@ -52,6 +45,8 @@ def cmake_args(self): if '^mkl' in spec: args += ['-DCOSMA_BLAS=MKL'] + elif '^cray-libsci' in spec: + args += ['-DCOSMA_BLAS=CRAY_LIBSCI'] elif '^netlib-lapack' in spec: args += ['-DCOSMA_BLAS=CUSTOM'] elif '^openblas' in spec: @@ -63,6 +58,8 @@ def cmake_args(self): if '+scalapack' and '^mkl' in spec: args += ['-DCOSMA_SCALAPACK=MKL'] + elif '+scalapack' and '^cray-libsci' in spec: + args += ['-DCOSMA_SCALAPACK=CRAY_LIBSCI'] elif '+scalapack' and '^netlib-scalapack' in spec: args += ['-DCOSMA_SCALAPACK=CUSTOM'] diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py index 734b2c0aa3d..0a172d025fe 100644 --- a/var/spack/repos/builtin/packages/cp2k/package.py +++ b/var/spack/repos/builtin/packages/cp2k/package.py @@ -20,16 +20,16 @@ class Cp2k(MakefilePackage, CudaPackage): git = 'https://github.com/cp2k/cp2k.git' list_url = 'https://github.com/cp2k/cp2k/releases' + maintainers = ['dev-zero'] + version('7.1', sha256='ccd711a09a426145440e666310dd01cc5772ab103493c4ae6a3470898cd0addb') version('6.1', sha256='af803558e0a6b9e9d9ce8a3ab955ba32bacd179922455424e061c82c9fefa34b') version('5.1', sha256='e23613b593354fa82e0b8410e17d94c607a0b8c6d9b5d843528403ab09904412') version('4.1', sha256='4a3e4a101d8a35ebd80a9e9ecb02697fb8256364f1eccdbe4e5a85d31fe21343') version('3.0', sha256='1acfacef643141045b7cbade7006f9b7538476d861eeecd9658c9e468dc61151') - version('develop', branch='master', submodules="True") + version('master', branch='master', submodules="True") variant('mpi', default=True, description='Enable MPI support') - variant('blas', default='openblas', values=('openblas', 'mkl', 'accelerate'), - description='Enable the use of OpenBlas/MKL/Accelerate') variant('openmp', default=False, description='Enable OpenMP support') variant('smm', default='libxsmm', values=('libxsmm', 'libsmm', 'blas'), 
description='Library for small matrix multiplications') @@ -73,18 +73,9 @@ class Cp2k(MakefilePackage, CudaPackage): depends_on('python', type='build') - depends_on('fftw@3:', when='~openmp') - depends_on('fftw@3:+openmp', when='+openmp') - - # see #1712 for the reason to enumerate BLAS libraries here - depends_on('openblas threads=none', when='blas=openblas ~openmp') - depends_on('openblas threads=openmp', when='blas=openblas +openmp') - depends_on('lapack', when='blas=openblas ~openmp') - - depends_on('intel-mkl', when="blas=mkl ~openmp") - depends_on('intel-mkl threads=openmp', when='blas=mkl +openmp') - - conflicts('blas=accelerate', '+openmp') # there is no Accelerate with OpenMP support + depends_on('blas') + depends_on('lapack') + depends_on('fftw-api@3') # require libxsmm-1.11+ since 1.10 can leak file descriptors in Fortran depends_on('libxsmm@1.11:~header-only', when='smm=libxsmm') @@ -110,6 +101,7 @@ class Cp2k(MakefilePackage, CudaPackage): depends_on('cosma+cuda+scalapack', when='+cosma+cuda') depends_on('elpa@2011.12:2016.13+openmp', when='+openmp+elpa@:5.999') depends_on('elpa@2011.12:2017.11+openmp', when='+openmp+elpa@6.0:') + depends_on('elpa@2018.05:+openmp', when='+openmp+elpa@7.0:') depends_on('elpa@2011.12:2016.13~openmp', when='~openmp+elpa@:5.999') depends_on('elpa@2011.12:2017.11~openmp', when='~openmp+elpa@6.0:') depends_on('elpa@2018.05:~openmp', when='~openmp+elpa@7.0:') @@ -150,9 +142,7 @@ class Cp2k(MakefilePackage, CudaPackage): # CP2K needs compiler specific compilation flags, e.g. optflags conflicts('%clang') - conflicts('%cray') conflicts('%nag') - conflicts('%xl') @property def makefile_architecture(self): @@ -176,9 +166,42 @@ def makefile(self): def archive_files(self): return [os.path.join(self.stage.source_path, self.makefile)] - def edit(self, spec, prefix): + def consistency_check(self, spec): + """ + Consistency checks. + Due to issue #1712 we can not put them into depends_on/conflicts. 
+ """ - fftw = spec['fftw:openmp' if '+openmp' in spec else 'fftw'] + if '+openmp' in spec: + if '^openblas' in spec and '^openblas threads=openmp' not in spec: + raise InstallError( + '^openblas threads=openmp required for cp2k+openmp' + ' with openblas') + + if '^fftw' in spec and '^fftw +openmp' not in spec: + raise InstallError( + '^fftw +openmp required for cp2k+openmp' + ' with fftw') + + # MKL doesn't need to be checked since they are + # OMP thread-safe when using mkl_sequential + # BUT: we should check the version of MKL IF it is used for FFTW + # since there we need at least v14 of MKL to be safe! + + def edit(self, spec, prefix): + self.consistency_check(spec) + + pkgconf = which('pkg-config') + + if '^fftw' in spec: + fftw = spec['fftw:openmp' if '+openmp' in spec else 'fftw'] + fftw_header_dir = fftw.headers.directories[0] + elif '^intel-mkl' in spec: + fftw = spec['intel-mkl'] + fftw_header_dir = fftw.headers.directories[0] + '/fftw' + elif '^intel-parallel-studio+mkl' in spec: + fftw = spec['intel-parallel-studio'] + fftw_header_dir = fftw.headers.directories[0] + '/fftw' optimization_flags = { 'gcc': [ @@ -186,15 +209,17 @@ def edit(self, spec, prefix): '-funroll-loops', '-ftree-vectorize', ], - 'intel': ['-O2', '-pc64', '-unroll'], + 'intel': ['-O2', '-pc64', '-unroll', ], 'pgi': ['-fast'], + 'cray': ['-O2'], + 'xl': ['-O3'], } dflags = ['-DNDEBUG'] cppflags = [ '-D__LIBINT', '-D__FFTW3', - fftw.headers.cpp_flags, + '-I{0}'.format(fftw_header_dir), ] if '@:6.9' in spec: @@ -220,19 +245,24 @@ def edit(self, spec, prefix): cflags.append('-fp-model precise') cxxflags.append('-fp-model precise') fcflags += [ - '-fp-model source', + '-fp-model precise', '-heap-arrays 64', '-g', '-traceback', ] elif '%gcc' in spec: - fcflags.extend([ + fcflags += [ '-ffree-form', '-ffree-line-length-none', '-ggdb', # make sure we get proper Fortran backtraces - ]) + ] elif '%pgi' in spec: - fcflags.extend(['-Mfreeform', '-Mextend']) + fcflags += ['-Mfreeform', 
'-Mextend'] + elif '%cray' in spec: + fcflags += ['-emf', '-ffree', '-hflex_mp=strict'] + elif '%xl' in spec: + fcflags += ['-qpreprocess', '-qstrict', '-q64'] + ldflags += ['-Wl,--allow-multiple-definition'] if '+openmp' in spec: cflags.append(self.compiler.openmp_flag) @@ -241,6 +271,15 @@ def edit(self, spec, prefix): ldflags.append(self.compiler.openmp_flag) nvflags.append('-Xcompiler="{0}"'.format( self.compiler.openmp_flag)) + elif '%cray' in spec: # Cray enables OpenMP by default + cflags += ['-hnoomp'] + cxxflags += ['-hnoomp'] + fcflags += ['-hnoomp'] + ldflags += ['-hnoomp'] + + if '@7:' in spec: # recent versions of CP2K use C++14 CUDA code + cxxflags.append(self.compiler.cxx14_flag) + nvflags.append(self.compiler.cxx14_flag) ldflags.append(fftw.libs.search_flags) @@ -257,8 +296,8 @@ def edit(self, spec, prefix): os.path.join(spec['libint'].libs.directories[0], 'libint.a'), ]) else: - fcflags += ['$(shell pkg-config --cflags libint2)'] - libs += ['$(shell pkg-config --libs libint2)'] + fcflags += pkgconf('--cflags', 'libint2', output=str).split() + libs += pkgconf('--libs', 'libint2', output=str).split() if '+plumed' in self.spec: dflags.extend(['-D__PLUMED2']) @@ -268,15 +307,16 @@ def edit(self, spec, prefix): 'libplumed.{0}'.format(dso_suffix)) ]) - fc = self.compiler.fc if '~mpi' in spec else self.spec['mpi'].mpifc + cc = spack_cc if '~mpi' in spec else spec['mpi'].mpicc + cxx = spack_cxx if '~mpi' in spec else spec['mpi'].mpicxx + fc = spack_fc if '~mpi' in spec else spec['mpi'].mpifc # Intel - if '%intel' in self.spec: + if '%intel' in spec: cppflags.extend([ '-D__INTEL', '-D__HAS_ISO_C_BINDING', '-D__USE_CP2K_TRACE', - '-D__MKL' ]) fcflags.extend([ '-diag-disable 8290,8291,10010,10212,11060', @@ -290,9 +330,9 @@ def edit(self, spec, prefix): ldflags.append((lapack + blas).search_flags) libs.extend([str(x) for x in (fftw.libs, lapack, blas)]) - if self.spec.variants['blas'].value == 'mkl': + if '^intel-mkl' in spec or '^intel-parallel-studio+mkl' 
in spec: cppflags += ['-D__MKL'] - elif self.spec.variants['blas'].value == 'accelerate': + elif '^accelerate' in spec: cppflags += ['-D__ACCELERATE'] if '+cosma' in spec: @@ -302,7 +342,7 @@ def edit(self, spec, prefix): libs.extend(cosma) # MPI - if '+mpi' in self.spec: + if '+mpi' in spec: cppflags.extend([ '-D__parallel', '-D__SCALAPACK' @@ -312,7 +352,7 @@ def edit(self, spec, prefix): ldflags.append(scalapack.search_flags) libs.extend(scalapack) - libs.extend(self.spec['mpi:cxx'].libs) + libs.extend(spec['mpi:cxx'].libs) libs.extend(self.compiler.stdcxx_libs) if 'wannier90' in spec: @@ -331,10 +371,10 @@ def edit(self, spec, prefix): ldflags.append(libxc.libs.search_flags) libs.append(str(libxc.libs)) else: - fcflags += ['$(shell pkg-config --cflags libxcf03)'] - libs += ['$(shell pkg-config --libs libxcf03)'] + fcflags += pkgconf('--cflags', 'libxcf03', output=str).split() + libs += pkgconf('--libs', 'libxcf03', output=str).split() - if '+pexsi' in self.spec: + if '+pexsi' in spec: cppflags.append('-D__LIBPEXSI') fcflags.append('-I' + os.path.join( spec['pexsi'].prefix, 'fortran')) @@ -353,7 +393,7 @@ def edit(self, spec, prefix): ), ]) - if '+elpa' in self.spec: + if '+elpa' in spec: elpa = spec['elpa'] elpa_suffix = '_openmp' if '+openmp' in elpa else '' elpa_incdir = elpa.headers.directories[0] @@ -377,27 +417,27 @@ def edit(self, spec, prefix): int(elpa.version[1]))) fcflags += ['-I{0}'.format(os.path.join(elpa_incdir, 'elpa'))] - if self.spec.satisfies('+sirius'): + if spec.satisfies('+sirius'): sirius = spec['sirius'] cppflags.append('-D__SIRIUS') fcflags += ['-I{0}'.format(os.path.join(sirius.prefix, 'fortran'))] libs += list(sirius.libs) - if self.spec.satisfies('+cuda'): + if spec.satisfies('+cuda'): cppflags += ['-D__ACC'] libs += ['-lcudart', '-lnvrtc', '-lcuda'] - if self.spec.satisfies('+cuda_blas'): + if spec.satisfies('+cuda_blas'): cppflags += ['-D__DBCSR_ACC=2'] libs += ['-lcublas'] else: cppflags += ['-D__DBCSR_ACC'] - if 
self.spec.satisfies('+cuda_fft'): + if spec.satisfies('+cuda_fft'): cppflags += ['-D__PW_CUDA'] libs += ['-lcufft', '-lcublas'] - cuda_arch = self.spec.variants['cuda_arch'].value + cuda_arch = spec.variants['cuda_arch'].value if cuda_arch: gpuver = { '35': 'K40', @@ -407,7 +447,7 @@ def edit(self, spec, prefix): }[cuda_arch] if (cuda_arch == '35' - and self.spec.satisfies('+cuda_arch_35_k20x')): + and spec.satisfies('+cuda_arch_35_k20x')): gpuver = 'K20X' if 'smm=libsmm' in spec: @@ -430,12 +470,12 @@ def edit(self, spec, prefix): libs.append('-lsmm') elif 'smm=libxsmm' in spec: - cppflags.extend([ - '-D__LIBXSMM', - '$(shell pkg-config --cflags-only-other libxsmmf)', - ]) - fcflags.append('$(shell pkg-config --cflags-only-I libxsmmf)') - libs.append('$(shell pkg-config --libs libxsmmf)') + cppflags += ['-D__LIBXSMM'] + cppflags += pkgconf('--cflags-only-other', 'libxsmmf', + output=str).split() + fcflags += pkgconf('--cflags-only-I', 'libxsmmf', + output=str).split() + libs += pkgconf('--libs', 'libxsmmf', output=str).split() dflags.extend(cppflags) cflags.extend(cppflags) @@ -444,14 +484,21 @@ def edit(self, spec, prefix): nvflags.extend(cppflags) with open(self.makefile, 'w') as mkf: - if '+plumed' in self.spec: - # Include Plumed.inc in the Makefile + if '+plumed' in spec: + mkf.write('# include Plumed.inc as recommended by' + 'PLUMED to include libraries and flags') mkf.write('include {0}\n'.format( - self.spec['plumed'].package.plumed_inc + spec['plumed'].package.plumed_inc )) - mkf.write('CC = {0.compiler.cc}\n'.format(self)) - if '%intel' in self.spec: + mkf.write('\n# COMPILER, LINKER, TOOLS\n\n') + mkf.write('FC = {0}\n' + 'CC = {1}\n' + 'CXX = {2}\n' + 'LD = {3}\n' + .format(fc, cc, cxx, fc)) + + if '%intel' in spec: intel_bin_dir = ancestor(self.compiler.cc) # CPP is a commented command in Intel arch of CP2K # This is the hack through which cp2k developers avoid doing : @@ -459,33 +506,38 @@ def edit(self, spec, prefix): # ${CPP} .F > .f90 # # and use 
`-fpp` instead - mkf.write('CPP = # {0.compiler.cc} -P\n\n'.format(self)) - mkf.write('AR = {0}/xiar -r\n\n'.format(intel_bin_dir)) + mkf.write('CPP = # {0} -P\n'.format(spack_cc)) + mkf.write('AR = {0}/xiar -r\n'.format(intel_bin_dir)) else: - mkf.write('CPP = # {0.compiler.cc} -E\n\n'.format(self)) - mkf.write('AR = ar -r\n\n') - mkf.write('FC = {0}\n'.format(fc)) - mkf.write('LD = {0}\n'.format(fc)) + mkf.write('CPP = # {0} -E\n'.format(spack_cc)) + mkf.write('AR = ar -r\n') - if self.spec.satisfies('+cuda'): + if spec.satisfies('+cuda'): mkf.write('NVCC = {0}\n'.format( - os.path.join(self.spec['cuda'].prefix, 'bin', 'nvcc'))) + os.path.join(spec['cuda'].prefix, 'bin', 'nvcc'))) # Write compiler flags to file - mkf.write('DFLAGS = {0}\n\n'.format(' '.join(dflags))) - mkf.write('CPPFLAGS = {0}\n\n'.format(' '.join(cppflags))) - mkf.write('CFLAGS = {0}\n\n'.format(' '.join(cflags))) - mkf.write('CXXFLAGS = {0}\n\n'.format(' '.join(cxxflags))) - mkf.write('NVFLAGS = {0}\n\n'.format(' '.join(nvflags))) - mkf.write('FCFLAGS = {0}\n\n'.format(' '.join(fcflags))) - mkf.write('LDFLAGS = {0}\n\n'.format(' '.join(ldflags))) + def fflags(var, lst): + return '{0} = {1}\n\n'.format( + var, + ' \\\n\t'.join(lst)) + + mkf.write('\n# FLAGS & LIBRARIES\n') + mkf.write(fflags('DFLAGS', dflags)) + mkf.write(fflags('CPPFLAGS', cppflags)) + mkf.write(fflags('CFLAGS', cflags)) + mkf.write(fflags('CXXFLAGS', cxxflags)) + mkf.write(fflags('NVFLAGS', nvflags)) + mkf.write(fflags('FCFLAGS', fcflags)) + mkf.write(fflags('LDFLAGS', ldflags)) + mkf.write(fflags('LIBS', libs)) + if '%intel' in spec: - mkf.write('LDFLAGS_C = {0}\n\n'.format( - ' '.join(ldflags) + ' -nofor_main') - ) - mkf.write('LIBS = {0}\n\n'.format(' '.join(libs))) - mkf.write('GPUVER = {0}\n\n'.format(gpuver)) - mkf.write('DATA_DIR = {0}\n\n'.format(self.prefix.share.data)) + mkf.write(fflags('LDFLAGS_C', ldflags + ['-nofor_main'])) + + mkf.write('# CP2K-specific flags\n\n') + mkf.write('GPUVER = {0}\n'.format(gpuver)) + 
mkf.write('DATA_DIR = {0}\n'.format(self.prefix.share.data)) @property def build_directory(self): diff --git a/var/spack/repos/builtin/packages/cpio/package.py b/var/spack/repos/builtin/packages/cpio/package.py index 2381a824997..ecaf2531ea8 100644 --- a/var/spack/repos/builtin/packages/cpio/package.py +++ b/var/spack/repos/builtin/packages/cpio/package.py @@ -15,6 +15,8 @@ class Cpio(AutotoolsPackage, GNUMirrorPackage): version('2.13', sha256='e87470d9c984317f658567c03bfefb6b0c829ff17dbf6b0de48d71a4c8f3db88') + patch('https://src.fedoraproject.org/rpms/cpio/raw/dfe64c466d3ea2c8dfbd99700d9006f610064167/f/cpio-2.13-mutiple-definition.patch', sha256='d22633c368b8aedf4c08b23b6fbaa81a52404c8943ab04926404083ac10f1a4b', when='%gcc@10:') + build_directory = 'spack-build' def flag_handler(self, name, flags): diff --git a/var/spack/repos/builtin/packages/cpp-httplib/package.py b/var/spack/repos/builtin/packages/cpp-httplib/package.py new file mode 100644 index 00000000000..2774a95bd1c --- /dev/null +++ b/var/spack/repos/builtin/packages/cpp-httplib/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class CppHttplib(CMakePackage): + """A C++ header-only HTTP/HTTPS server and client library.""" + + homepage = "https://github.com/yhirose/cpp-httplib/" + url = "https://github.com/yhirose/cpp-httplib/archive/v0.5.10.tar.gz" + + version('0.5.9', sha256='c9e7aef3b0d4e80ee533d10413508d8a6e09a67d0d59646c43111f3993de006e') + version('0.5.8', sha256='184d4fe79fc836ee26aa8635b3240879af4c6f17257fc7063d0b77a0cf856dfc') + version('0.5.7', sha256='27b7f6346bdeb1ead9d17bd7cea89d9ad491f50f0479081053cc6e5742a89e64') + version('0.5.6', sha256='06ebc94edcdf23d66692bf1d128f6c65bb0ec36ce5e2f8ee61990bc74e838868') + version('0.5.5', sha256='e18dab82b3b395290514baf3804c7b74892beb654bd8020600a9d9dfdc49c32a') + version('0.5.4', sha256='40dcce66ec002e2631ef918e1b3bfc9ec1662d02007291ea4743e17ac9c7d43f') + version('0.5.3', sha256='d9d62ae15d5a2f4404286d5f6ec48daef27e24b5aab98d0505e24ee2b187d3f5') + version('0.5.2', sha256='a28cc74d3b46e2ba60311b9229375599b513151e39a7d8df6fe1fb797fc1be3a') + version('0.5.1', sha256='e079d1803e4fdbaf8bed5b414f6045c78273082eec7ac0d4802029175f2a1448') + version('0.4.2', sha256='ceaf50e2a9fce48910b244d33c6824e55aef688ad5bc181f4b9504242c2447ff') + version('0.3.3', sha256='476471c6fcd4b39fc79a5dd6ad343a2428cb69b4d528557abb6a0b7bf8186e34') + version('0.2.6', sha256='8678afc0e69bc198edcb8fe0066e46a87373221232ebabde2d78c237f31d3c3d') + version('0.2.1', sha256='94a6ddd25088b66b7b9e57b9d0ea138c984967e91b21395401642027bf279438') diff --git a/var/spack/repos/builtin/packages/cray-libsci/package.py b/var/spack/repos/builtin/packages/cray-libsci/package.py index c0313a1e39e..d391f471c3d 100755 --- a/var/spack/repos/builtin/packages/cray-libsci/package.py +++ b/var/spack/repos/builtin/packages/cray-libsci/package.py @@ -2,10 +2,9 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from llnl.util.filesystem import LibraryList -from spack import * -import os +from spack.concretize import NoBuildError +from spack.util.module_cmd import module +from spack.util.module_cmd import get_path_args_from_module_line class CrayLibsci(Package): @@ -22,14 +21,53 @@ class CrayLibsci(Package): version("16.06.1") version("16.03.1") + variant("shared", default=True, description="enable shared libs") + variant("openmp", default=False, description="link with openmp") + variant("mpi", default=False, description="link with mpi libs") + provides("blas") provides("lapack") provides("scalapack") - # NOTE: Cray compiler wrappers already include linking for the following + canonical_names = { + 'gcc': 'GNU', + 'cce': 'CRAY', + 'intel': 'INTEL', + } + + @property + def modname(self): + return "cray-libsci/{0}".format(self.version) + + @property + def external_prefix(self): + libsci_module = module("show", self.modname).splitlines() + + for line in libsci_module: + if "CRAY_LIBSCI_PREFIX_DIR" in line: + return get_path_args_from_module_line(line)[0] + @property def blas_libs(self): - return LibraryList(os.path.join(self.prefix.lib, 'libsci.so')) + shared = True if "+shared" in self.spec else False + compiler = self.spec.compiler.name + + if "+openmp" in self.spec and "+mpi" in self.spec: + lib = "libsci_{0}_mpi_mp" + elif "+openmp" in self.spec: + lib = "libsci_{0}_mp" + elif "+mpi" in self.spec: + lib = "libsci_{0}_mpi" + else: + lib = "libsci_{0}" + + libname = lib.format(self.canonical_names[compiler].lower()) + + return find_libraries( + libname, + root=self.prefix.lib, + shared=shared, + recursive=False) @property def lapack_libs(self): diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py index a96e358d2a8..fa586eca4b0 100644 --- a/var/spack/repos/builtin/packages/cryptopp/package.py +++ b/var/spack/repos/builtin/packages/cryptopp/package.py @@ 
-25,11 +25,30 @@ class Cryptopp(MakefilePackage): version('5.6.2', sha256='5cbfd2fcb4a6b3aab35902e2e0f3b59d9171fee12b3fc2b363e1801dfec53574') version('5.6.1', sha256='98e74d8cb17a38033354519ac8ba9c5d98a6dc00bf5d1ec3c533c2e8ec86f268') + variant('shared', default=True, description="Build shared object versions of libraries.") + depends_on('gmake', type='build') def url_for_version(self, version): url = '{0}/{1}{2}.zip' return url.format(self.homepage, self.name, version.joined) + def build(self, spec, prefix): + cxx_flags = [] + + if '+shared' in spec: + cxx_flags.append(self.compiler.cxx_pic_flag) + + target = self.spec.target + if 'sse4.1' not in target: + cxx_flags.append('-DCRYPTOPP_DISABLE_SSE4') + if 'ssse3' not in target: + cxx_flags.append('-DCRYPTOPP_DISABLE_SSSE3') + if 'sse2' not in target: + cxx_flags.append('-DCRYPTOPP_DISABLE_SSE2') + + make_target = 'dynamic' if '+shared' in spec else 'static' + make(make_target, 'CXXFLAGS={0}'.format(' '.join(cxx_flags))) + def install(self, spec, prefix): make('install', 'PREFIX={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/cscope/package.py b/var/spack/repos/builtin/packages/cscope/package.py index 2682ffea704..a829b1e1e51 100644 --- a/var/spack/repos/builtin/packages/cscope/package.py +++ b/var/spack/repos/builtin/packages/cscope/package.py @@ -21,7 +21,7 @@ class Cscope(AutotoolsPackage): depends_on('bison', type='build') depends_on('pkgconfig', type='build') - build_targets = ['CURSES_LIBS=-lncursesw -ltinfo'] + build_targets = ['CURSES_LIBS=-lncursesw -ltinfow'] def url_for_version(self, version): url = "https://sourceforge.net/projects/cscope/files/cscope/{0}{1}/cscope-{1}.tar.gz" diff --git a/var/spack/repos/builtin/packages/ctre/package.py b/var/spack/repos/builtin/packages/ctre/package.py new file mode 100644 index 00000000000..2dc188b7756 --- /dev/null +++ b/var/spack/repos/builtin/packages/ctre/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC 
and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack import * + + +class Ctre(CMakePackage): + """Compile time regular expressions for C++""" + + homepage = 'https://compile-time.re/' + url = 'https://github.com/hanickadot/compile-time-regular-expressions/archive/v2.8.3.tar.gz' + git = 'https://github.com/hanickadot/compile-time-regular-expressions.git' + + version('master', branch='master') + version('2.8.4', sha256='99b981857f1b66cab5e71161ae74deca268ed39a96ec6507def92d4f445cadd6') + version('2.8.3', sha256='5833a9d0fbce39ee39bd6e29df2f7fcafc82e41c373e8675ed0774bcf76fdc7a') + version('2.8.2', sha256='f89494f52ec31e5854fff3d2c5825474201476636c5d82a9365dad5188396314') + version('2.8.1', sha256='a6153629751ba0adc039551d8ff8d7018972ce362d20c0f70135496d4e7721df') + version('2.8', sha256='44ccdaa299dd43c351f208c5906422eb000e7cdcb53e4f3b7c7c094d0461ab2c') + version('2.7', sha256='ccbf42515b27d542cd36104eb9548f288b0c1989cb584a518900ba1ca3619e12') + version('2.6.4', sha256='ce216cfae0e7e1e8c7d7531cfcf81fa18f9bdbfcb800a3119788ca323bedbdac') + version('2.6.3', sha256='bdf668b02f0b986dfc0fbc6066f446e2d0a9faa3347f00f53b19131297c84c4a') + version('2.6.2', sha256='e82c87aeb0fc3f21ae8a2d3ffce2b1ef970fbea9c3e846ef1a6e5f81790f2946') + version('2.6.1', sha256='58c623d9ea1cb7890aaa63c1a87f1a60a8acf31dbd4061ab672bea287ed689ac') diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py index d3bdd366173..d2eeb203c00 100644 --- a/var/spack/repos/builtin/packages/cube/package.py +++ b/var/spack/repos/builtin/packages/cube/package.py @@ -17,6 +17,7 @@ class Cube(AutotoolsPackage): homepage = "http://www.scalasca.org/software/cube-4.x/download.html" url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubegui-4.4.2.tar.gz" + version('4.5', sha256='ffe84108adce0adf06dca80820d941b1a60a5580a8bacc8f7c1b6989c8ab1bfa') version('4.4.4', 
sha256='9b7b96d5a64b558a9017cc3599bba93a42095534e018e3de9b1f80ab6d04cc34') version('4.4.3', sha256='bf4b0f2ff68507ff82ba24eb4895aed961710dae16d783c222a12f152440cf36') version('4.4.2', sha256='29b6479616a524f8325f5031a883963bf965fb92569de33271a020f08650ec7b') diff --git a/var/spack/repos/builtin/packages/cubelib/package.py b/var/spack/repos/builtin/packages/cubelib/package.py index 42b756648d8..7d6181c7c12 100644 --- a/var/spack/repos/builtin/packages/cubelib/package.py +++ b/var/spack/repos/builtin/packages/cubelib/package.py @@ -12,6 +12,7 @@ class Cubelib(AutotoolsPackage): homepage = "http://www.scalasca.org/software/cube-4.x/download.html" url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubelib-4.4.tar.gz" + version('4.5', sha256='98f66837b4a834b1aacbcd4480a242d7a8c4a1b8dd44e02e836b8c7a4f0ffd98') version('4.4.4', sha256='adb8216ee3b7701383884417374e7ff946edb30e56640307c65465187dca7512') version('4.4.3', sha256='bcd4fa81a5ba37194e590a5d7c3e6c44b448f5e156a175837b77c21206847a8d') version('4.4.2', sha256='843335c7d238493f1b4cb8e07555ccfe99a3fa521bf162e9d8eaa6733aa1f949') diff --git a/var/spack/repos/builtin/packages/cubew/package.py b/var/spack/repos/builtin/packages/cubew/package.py index d761f102f3b..c21310601b9 100644 --- a/var/spack/repos/builtin/packages/cubew/package.py +++ b/var/spack/repos/builtin/packages/cubew/package.py @@ -12,6 +12,7 @@ class Cubew(AutotoolsPackage): homepage = "http://www.scalasca.org/software/cube-4.x/download.html" url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.4/dist/cubew-4.4.tar.gz" + version('4.5', sha256='16bd8fd864197a74ca65f7325761ad75d73d555072326e95e1338cff39f28a5c') version('4.4.3', sha256='93fff6cc1e8b0780f0171ef5302a2e1a257f99b6383fbfc1b9b82f925ceff501') version('4.4.2', sha256='31a71e9a05e6523de2b86b4026821bbb75fb411eb5b18ae38b27c1f44158014a') version('4.4.1', sha256='c09e3f5a3533ebedee2cc7dfaacd7bac4680c14c3fa540669466583a23f04b67') diff --git 
a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py index 13675fb7744..ce1fdf69b9b 100644 --- a/var/spack/repos/builtin/packages/cuda/package.py +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -8,6 +8,7 @@ from llnl.util.filesystem import LibraryList import os import platform +import llnl.util.tty as tty # FIXME Remove hack for polymorphic versions # This package uses a ugly hack to be able to dispatch, given the same @@ -85,6 +86,14 @@ def setup_run_environment(self, env): env.set('CUDA_HOME', self.prefix) def install(self, spec, prefix): + if os.path.exists('/tmp/cuda-installer.log'): + try: + os.remove('/tmp/cuda-installer.log') + except OSError: + if spec.satisfies('@10.1:'): + tty.die("The cuda installer will segfault due to the " + "presence of /tmp/cuda-installer.log " + "please remove the file and try again ") runfile = glob(join_path(self.stage.source_path, 'cuda*_linux*'))[0] chmod = which('chmod') chmod('+x', runfile) @@ -110,6 +119,10 @@ def install(self, spec, prefix): arguments.append('--toolkitpath=%s' % prefix) # Where to install runfile(*arguments) + try: + os.remove('/tmp/cuda-installer.log') + except OSError: + pass @property def libs(self): diff --git a/var/spack/repos/builtin/packages/cudnn/package.py b/var/spack/repos/builtin/packages/cudnn/package.py index 4491af18a88..f0e8904e346 100644 --- a/var/spack/repos/builtin/packages/cudnn/package.py +++ b/var/spack/repos/builtin/packages/cudnn/package.py @@ -22,6 +22,16 @@ class Cudnn(Package): maintainers = ['adamjstewart'] + # cuDNN 8.0 + version('8.0.0.180-11.0-linux-x64', + sha256='9e75ea70280a77de815e0bdc85d08b67e081bc99a708b574092142344d2ba07e') + version('8.0.0.180-11.0-linux-ppc64le', + sha256='1229e94731bbca63ee7f5a239f4e1838a51a301d896f3097fbf7377d74704060') + version('8.0.0.180-10.2-linux-x64', + sha256='0c87c12358ee2b99d57c2a8c7560e3bb93e54bb929f5f8bec4964a72a2bb261d') + version('8.0.0.180-10.2-linux-ppc64le', + 
sha256='59e4ad6db15fcc374976e8052fe39e3f30f34079710fb3c7751a64c853d9243f') + # cuDNN 7.6.5 version('7.6.5.32-10.2-linux-x64', sha256='600267f2caaed2fd58eb214ba669d8ea35f396a7d19b94822e6b36f9f7088c20', @@ -190,6 +200,11 @@ def url_for_version(self, version): return url.format(directory, cuda, ver) + def setup_run_environment(self, env): + if 'target=ppc64le: platform=linux' in self.spec: + env.set('cuDNN_ROOT', os.path.join( + self.prefix, 'targets', 'ppc64le-linux')) + def install(self, spec, prefix): install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/cyrus-sasl/package.py b/var/spack/repos/builtin/packages/cyrus-sasl/package.py new file mode 100644 index 00000000000..98848ccff98 --- /dev/null +++ b/var/spack/repos/builtin/packages/cyrus-sasl/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class CyrusSasl(AutotoolsPackage): + """This is the Cyrus SASL API implementation. 
It can be used on the + client or server side to provide authentication and authorization + services.""" + + homepage = "https://github.com/cyrusimap/cyrus-sasl" + url = "https://github.com/cyrusimap/cyrus-sasl/archive/cyrus-sasl-2.1.27.tar.gz" + + version('2.1.27', sha256='b564d773803dc4cff42d2bdc04c80f2b105897a724c247817d4e4a99dd6b9976') + version('2.1.26', sha256='7c14d1b5bd1434adf2dd79f70538617e6aa2a7bde447454b90b84ac5c4d034ba') + version('2.1.25', sha256='8bfd4fa4def54c760e5061f2a74c278384c3b9807f02c4b07dab68b5894cc7c1') + version('2.1.24', sha256='1df15c492f7ecb90be49531a347b3df21b041c2e0325dcc4fc5a6e98384c40dd') + version('2.1.23', sha256='b1ec43f62d68446a6a5879925c63d94e26089c5a46cd83e061dd685d014c7d1f') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py index 3789612f129..c58bc9edd5f 100644 --- a/var/spack/repos/builtin/packages/darshan-runtime/package.py +++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py @@ -21,6 +21,8 @@ class DarshanRuntime(Package): maintainers = ['shanedsnyder', 'carns'] version('develop', branch='master') + version('3.2.1', sha256='d63048b7a3d1c4de939875943e3e7a2468a9034fcb68585edbc87f57f622e7f7') + version('3.2.0', sha256='4035435bdc0fa2a678247fbf8d5a31dfeb3a133baf06577786b1fe8d00a31b7e') version('3.1.8', sha256='3ed51c8d5d93b4a8cbb7d53d13052140a9dffe0bc1a3e1ebfc44a36a184b5c82') version('3.1.7', sha256='9ba535df292727ac1e8025bdf2dc42942715205cad8319d925723fd88709e8d6') version('3.1.6', sha256='21cb24e2a971c45e04476e00441b7fbea63d2afa727a5cf8b7a4a9d9004dd856') diff --git a/var/spack/repos/builtin/packages/darshan-util/package.py b/var/spack/repos/builtin/packages/darshan-util/package.py index 8d25fa99650..af5502a7fbf 100644 --- a/var/spack/repos/builtin/packages/darshan-util/package.py 
+++ b/var/spack/repos/builtin/packages/darshan-util/package.py @@ -19,6 +19,8 @@ class DarshanUtil(Package): maintainers = ['shanedsnyder', 'carns'] version('develop', branch='master') + version('3.2.1', sha256='d63048b7a3d1c4de939875943e3e7a2468a9034fcb68585edbc87f57f622e7f7') + version('3.2.0', sha256='4035435bdc0fa2a678247fbf8d5a31dfeb3a133baf06577786b1fe8d00a31b7e') version('3.1.8', sha256='3ed51c8d5d93b4a8cbb7d53d13052140a9dffe0bc1a3e1ebfc44a36a184b5c82') version('3.1.7', sha256='9ba535df292727ac1e8025bdf2dc42942715205cad8319d925723fd88709e8d6') version('3.1.6', sha256='21cb24e2a971c45e04476e00441b7fbea63d2afa727a5cf8b7a4a9d9004dd856') diff --git a/var/spack/repos/builtin/packages/dateutils/package.py b/var/spack/repos/builtin/packages/dateutils/package.py index 39e616876b7..89277319424 100644 --- a/var/spack/repos/builtin/packages/dateutils/package.py +++ b/var/spack/repos/builtin/packages/dateutils/package.py @@ -13,6 +13,7 @@ class Dateutils(AutotoolsPackage): homepage = "http://www.fresse.org/dateutils/" url = "https://github.com/hroptatyr/dateutils/releases/download/v0.4.6/dateutils-0.4.6.tar.xz" + version('0.4.7', sha256='49725457f5bef45ea424baade8999a6e54496e357f64280474ff7134a54f599a') version('0.4.6', sha256='26a071317ae5710f226a3e6ba9a54d3764cd9efe3965aecc18e75372088757cd') version('0.4.5', sha256='16d6a0fe7b7d49ddbb303f33538dd7304a0d4af5a0369bcbf275db6a5060cbde') diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 6fd63f76c68..76de6090c5c 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -16,37 +16,67 @@ class Dd4hep(CMakePackage): It distributed under the LGPLv3 License.""" homepage = "https://dd4hep.web.cern.ch/dd4hep/" + url = "https://github.com/AIDASoft/DD4hep/archive/v01-12-01.tar.gz" git = "https://github.com/AIDASoft/DD4hep.git" + maintainers = ['vvolkl', 'drbenmorgan'] + version('master', 
branch='master') - version('1.11.0', commit='280c7d748d56a704699408ac8e57815d029b169a') - version('1.10.0', commit='9835d1813c07d9d5850d1e68276c0171d1726801') + version('1.12.1', sha256='85e8c775ec03c499ce10911e228342e757c81ce9ef2a9195cb253b85175a2e93') + version('1.12.0', sha256='133a1fb8ce0466d2482f3ebb03e60b3bebb9b2d3e33d14ba15c8fbb91706b398') + version('1.11.2', sha256='96a53dd26cb8df11c6dae54669fbc9cc3c90dd47c67e07b24be9a1341c95abc4') + version('1.11.1', sha256='d7902dd7f6744bbda92f6e303ad5a3410eec4a0d2195cdc86f6c1167e72893f0') + version('1.11.0', sha256='25643296f15f9d11ad4ad550b7c3b92e8974fc56f1ee8e4455501010789ae7b6') + version('1.10.0', sha256='1d6b5d1c368dc8bcedd9c61b7c7e1a44bad427f8bd34932516aff47c88a31d95') # Workarounds for various TBB issues in DD4hep v1.11 # See https://github.com/AIDASoft/DD4hep/pull/613 . patch('tbb-workarounds.patch', when='@1.11.0') + patch('tbb2.patch', when='@1.12.1') variant('xercesc', default=False, description="Enable 'Detector Builders' based on XercesC") variant('geant4', default=False, description="Enable the simulation part based on Geant4") - variant('testing', default=False, description="Enable and build tests") + variant('assimp', default=False, description="Enable CAD interface based on Assimp") depends_on('cmake @3.12:', type='build') depends_on('boost @1.49:') depends_on('root @6.08: +gdml +math +opengl +python +x') - depends_on('python') + extends('python') depends_on('xerces-c', when='+xercesc') depends_on('geant4@10.2.2:', when='+geant4') + depends_on('assimp', when='+assimp') def cmake_args(self): spec = self.spec cxxstd = spec['root'].variants['cxxstd'].value + # root can be built with cxxstd=11, but dd4hep requires 14 + if cxxstd == "11": + cxxstd = "14" args = [ "-DCMAKE_CXX_STANDARD={0}".format(cxxstd), "-DDD4HEP_USE_XERCESC={0}".format(spec.satisfies('+xercesc')), "-DDD4HEP_USE_GEANT4={0}".format(spec.satisfies('+geant4')), - "-DBUILD_TESTING={0}".format(spec.satisfies('+testing')), + 
"-DDD4HEP_LOAD_ASSIMP={0}".format(spec.satisfies('+assimp')), + "-DBUILD_TESTING={0}".format(self.run_tests), "-DBOOST_ROOT={0}".format(spec['boost'].prefix), "-DBoost_NO_BOOST_CMAKE=ON", "-DPYTHON_EXECUTABLE={0}".format(spec['python'].command.path), ] return args + + def setup_run_environment(self, env): + # used p.ex. in ddsim to find DDDetectors dir + env.set("DD4hepINSTALL", self.prefix) + + def url_for_version(self, version): + # dd4hep releases are dashes and padded with a leading zero + # the patch version is omitted when 0 + # so for example v01-12-01, v01-12 ... + major = (str(version[0]).zfill(2)) + minor = (str(version[1]).zfill(2)) + patch = (str(version[2]).zfill(2)) + if version[2] == 0: + url = "https://github.com/AIDASoft/DD4hep/archive/v%s-%s.tar.gz" % (major, minor) + else: + url = "https://github.com/AIDASoft/DD4hep/archive/v%s-%s-%s.tar.gz" % (major, minor, patch) + return url diff --git a/var/spack/repos/builtin/packages/dd4hep/tbb2.patch b/var/spack/repos/builtin/packages/dd4hep/tbb2.patch new file mode 100644 index 00000000000..932458001c2 --- /dev/null +++ b/var/spack/repos/builtin/packages/dd4hep/tbb2.patch @@ -0,0 +1,14 @@ +diff --git a/DDDigi/src/DigiKernel.cpp b/DDDigi/src/DigiKernel.cpp +index f2c2e86c..f168ef9b 100644 +--- a/DDDigi/src/DigiKernel.cpp ++++ b/DDDigi/src/DigiKernel.cpp +@@ -273,7 +273,7 @@ void DigiKernel::submit(const DigiAction::Actors& actions, Digi + if ( parallel ) { + tbb::task_group que; + for ( auto* i : actions ) +- que.run(Wrapper(context, *i)); ++ que.run(Wrapper(context, i)); + que.wait(); + goto print_stamp; + } + diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index d3a58cb835f..0f46c318f6d 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -16,13 +16,14 @@ class Dealii(CMakePackage, CudaPackage): url = 
"https://github.com/dealii/dealii/releases/download/v8.4.1/dealii-8.4.1.tar.gz" git = "https://github.com/dealii/dealii.git" - maintainers = ['davydden', 'jppelteret'] + maintainers = ['davydden', 'jppelteret', 'luca-heltai'] # Don't add RPATHs to this package for the full build DAG. # only add for immediate deps. transitive_rpaths = False - version('develop', branch='master') + version('master', branch='master') + version('9.2.0', sha256='d05a82fb40f1f1e24407451814b5a6004e39366a44c81208b1ae9d65f3efa43a') version('9.1.1', sha256='fc5b483f7fe58dfeb52d05054011280f115498e337af3e085bf272fd1fd81276') version('9.1.0', sha256='5b070112403f8afbb72345c1bb24d2a38d11ce58891217e353aab97957a04600') version('9.0.1', sha256='df2f0d666f2224be07e3741c0e8e02132fd67ea4579cd16a2429f7416146ee64') @@ -139,7 +140,8 @@ class Dealii(CMakePackage, CudaPackage): depends_on('metis@5:+int64', when='+metis+int64') depends_on('metis@5:~int64', when='+metis~int64') depends_on('muparser', when='+muparser') - depends_on('nanoflann', when='@9.0:+nanoflann') + # Nanoflann support has been removed after 9.2.0 + depends_on('nanoflann', when='@9.0:9.2+nanoflann') depends_on('netcdf-c+mpi', when='+netcdf+mpi') depends_on('netcdf-cxx', when='+netcdf+mpi') depends_on('oce', when='+oce') @@ -210,6 +212,10 @@ class Dealii(CMakePackage, CudaPackage): 'onwards. Please explicitly disable this variant ' 'via ~{0}'.format(p)) + conflicts('+nanoflann', when='@9.3.0:', + msg='The interface to nanoflann was removed from version 9.3.0. 
' + 'Please explicitly disable this variant via ~nanoflann') + conflicts('+slepc', when='~petsc', msg='It is not possible to enable slepc interfaces ' 'without petsc.') diff --git a/var/spack/repos/builtin/packages/delphes/package.py b/var/spack/repos/builtin/packages/delphes/package.py new file mode 100644 index 00000000000..b3b511a00fa --- /dev/null +++ b/var/spack/repos/builtin/packages/delphes/package.py @@ -0,0 +1,64 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Delphes(CMakePackage): + """A high energy physics framework for fast simulation + of a generic collider experiment. + """ + + homepage = "https://cp3.irmp.ucl.ac.be/projects/delphes" + git = "https://github.com/delphes/delphes.git" + url = "http://cp3.irmp.ucl.ac.be/downloads/Delphes-3.4.2.tar.gz" + + maintainers = ['drbenmorgan', 'vvolkl', 'selvaggi'] + + version('master', branch='master') + version('3.4.2', sha256='d46a7c5474de650befdb89377115feee31f1743107ceb3d8da699be9d48c097b') + version('3.4.1', sha256='4b5a2aeac326643f45b6d45c39ba2302e323eeb86d8cb58843c6e73949b1208a') + version('3.4.0', sha256='c0f9500663a0c3a5c1eddcee598a67b5bcfc9318303195c6cacc0590b4023fa1') + version('3.3.3', sha256='404de818a6b7852b01187ccf598d8ac19d308b9361f128751ef003cde248ff00') + version('3.3.2', sha256='b8dc066e480678bb50ea0b68d157c391d47f66c084bda602d3d498538e682622') + version('3.3.1', sha256='d8fcaa9711b5892ba24b2c7be38158dedbe552b159961f9d29887b2cc7eb2e83') + version('3.3.0', sha256='3fcdcd31827227ff3d0d56df908b12289c67aa6d01c5725a2a9441c200f3966f') + version('3.2.0', sha256='3510b0852c750120425f9b014cada25d48b90b29c512b974a9ffbd7aa80ccde4') + version('3.1.2', sha256='edfccc47f7666d3607e86db82c6c79cfb10716423b496f0c0bdd4060b717ea1d') + version('3.1.1', sha256='c4128575b6103239ca13de392f47da2eaedfd93c3789b1ecb32eea09da3408e4') + 
version('3.1.0', sha256='c37b07aea3e57b39d34bf07f8afd209e36b278cf3792cd6e970d96a2c3b114eb') + version('3.0.12', sha256='55b4cf25f681c75457e33ad4ee615b9ab66317216675ca5f466ab256aa85cd16') + version('3.0.11', sha256='870921c8070762dc56aa0b8e0e07d1756584399e8740c848d330fc0fcb5e0604') + version('3.0.10', sha256='872a386baf298cab14e42aac198dbf7184a2ab7c28ee712448060e1dec078d34') + version('3.0.9', sha256='d12592fe66062a51e513a8d070fe1f49b672a4328bad2aa5cdb682937391a639') + version('3.0.8', sha256='8ab146ca3c163932ab21df9168d8ca86dbb1c3494b7bdc3e143743d769803c23') + version('3.0.7', sha256='7f43c84bca38fb8a41d7840dd2d7fab52456182babaa1e528791d0f4e517aba8') + version('3.0.6', sha256='9e225731d57d2a76d35886841f8eff121bb3a45560b16077bd8c351151581d88') + version('3.0.5', sha256='ab64ec6d2476fbfa40562e7edb510a8ab4c4fe5be77a4353ebf315c2af181a80') + + depends_on('cmake', type='build') + depends_on('root cxxstd=14', when='cxxstd=14') + depends_on('root cxxstd=17', when='cxxstd=17') + + variant('build_type', default='Release', + description='The build type to build', + values=('Debug', 'Release')) + + variant('cxxstd', + default='17', + values=('14', '17'), + multi=False, + description='Use the specified C++ standard when building.') + + def cmake_args(self): + args = [] + # C++ Standard + args.append('-DCMAKE_CXX_STANDARD=%s' + % self.spec.variants['cxxstd'].value) + return args + + def setup_run_environment(self, env): + # make the cards distributed with delphes more easily accessible + env.set('DELPHESCARDS', self.prefix.cards) diff --git a/var/spack/repos/builtin/packages/dnsmasq/package.py b/var/spack/repos/builtin/packages/dnsmasq/package.py new file mode 100644 index 00000000000..8f0ff1bcf89 --- /dev/null +++ b/var/spack/repos/builtin/packages/dnsmasq/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Dnsmasq(MakefilePackage): + """A lightweight, caching DNS proxy with integrated DHCP server.""" + + homepage = "http://www.thekelleys.org.uk/dnsmasq/doc.html" + url = "http://www.thekelleys.org.uk/dnsmasq/dnsmasq-2.70.tar.gz" + + version('2.81', sha256='3c28c68c6c2967c3a96e9b432c0c046a5df17a426d3a43cffe9e693cf05804d0') + version('2.80', sha256='9e4a58f816ce0033ce383c549b7d4058ad9b823968d352d2b76614f83ea39adc') + version('2.79', sha256='77512dd6f31ffd96718e8dcbbf54f02c083f051d4cca709bd32540aea269f789') + version('2.78', sha256='c92e5d78aa6353354d02aabf74590d08980bb1385d8a00b80ef9bc80430aa1dc') + version('2.77', sha256='ae97a68c4e64f07633f31249eb03190d673bdb444a05796a3a2d3f521bfe9d38') + version('2.76', sha256='777c4762d2fee3738a0380401f2d087b47faa41db2317c60660d69ad10a76c32') + version('2.75', sha256='f8252c0a0ba162c2cd45f81140c7c17cc40a5fca2b869d1a420835b74acad294') + version('2.74', sha256='27b95a8b933d7eb88e93a4c405b808d09268246d4e108606e423ac518aede78f') + version('2.73', sha256='9f350f74ae2c7990b1c7c6c8591d274c37b674aa987f54dfee7ca856fae0d02d') + version('2.72', sha256='635f1b47417d17cf32e45cfcfd0213ac39fd09918479a25373ba9b2ce4adc05d') + version('2.71', sha256='7d8c64f66a396442e01b639df3ea6b4e02ba88cbe206c80be8de68b6841634c4') + version('2.70', sha256='8eb7bf53688d6aaede5c90cfd2afcce04803a4efbddfbeecc6297180749e98af') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('./src/dnsmasq', prefix.bin) diff --git a/var/spack/repos/builtin/packages/dock/package.py b/var/spack/repos/builtin/packages/dock/package.py new file mode 100644 index 00000000000..2493be97f1c --- /dev/null +++ b/var/spack/repos/builtin/packages/dock/package.py @@ -0,0 +1,59 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import os + + +class Dock(Package): + """DOCK is a molecular docking program used in drug discovery. + + This program, given a protein binding site and a small molecule, tries + to predict the correct binding mode of the small molecule in the binding + site, and the associated binding energy.""" + + homepage = "http://dock.compbio.ucsf.edu/DOCK_6/index.htm" + url = "file://{0}/dock.6.9_source.tar.gz".format(os.getcwd()) + + version('6.9', sha256='c2caef9b4bb47bb0cb437f6dc21f4c605fd3d0d9cc817fa13748c050dc87a5a8') + + variant('mpi', default=True, description='Enable mpi') + + depends_on('bison', type='build') + depends_on('mpi', when='+mpi') + + def setup_build_environment(self, env): + if '+mpi' in self.spec: + env.set('MPICH_HOME', self.spec['mpi'].prefix) + + def install(self, spec, prefix): + compiler_targets = { + 'gcc': 'gnu', + 'intel': 'intel', + 'pgi': 'pgi', + 'sgi': 'sgi', + } + + if self.compiler.name not in compiler_targets: + template = 'Unsupported compiler {0}! 
Supported compilers: {1}' + err = template.format(self.compiler.name, + ', '.join(list(compiler_targets.keys()))) + + raise InstallError(err) + + if self.compiler.name == 'pgi' and '+mpi' in spec: + raise InstallError('Parallel output is not supported with pgi.') + + with working_dir('install'): + sh_args = ['./configure', compiler_targets[self.compiler.name]] + + if '+mpi' in spec: + sh_args.append('parallel') + + which('sh')(*sh_args) + which('make')('YACC=bison -o y.tab.c') + + mkdirp(prefix.bin) + install_tree('bin', prefix.bin) diff --git a/var/spack/repos/builtin/packages/dos2unix/package.py b/var/spack/repos/builtin/packages/dos2unix/package.py index 926bc5f65f5..8630b8369aa 100644 --- a/var/spack/repos/builtin/packages/dos2unix/package.py +++ b/var/spack/repos/builtin/packages/dos2unix/package.py @@ -14,5 +14,7 @@ class Dos2unix(MakefilePackage): version('7.3.4', sha256='8ccda7bbc5a2f903dafd95900abb5bf5e77a769b572ef25150fde4056c5f30c5') + depends_on('gettext', type='build') + def install(self, spec, prefix): make('prefix={0}'.format(prefix), 'install') diff --git a/var/spack/repos/builtin/packages/dramsim2/package.py b/var/spack/repos/builtin/packages/dramsim2/package.py new file mode 100644 index 00000000000..df2d42af437 --- /dev/null +++ b/var/spack/repos/builtin/packages/dramsim2/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Dramsim2(MakefilePackage): + """ + DRAMsim is a hardware-validated, cycle-accurate + C based simulator for DRAM devices such as DDR3 + """ + + homepage = "https://github.com/umd-memsys/DRAMSim2" + git = "https://github.com/umd-memsys/DRAMSim2" + url = "https://github.com/dramninjasUMD/DRAMSim2/archive/v2.2.2.tar.gz" + + maintainers = ['jjwilke'] + + version('2.2.2', sha256="96d0257eafb41e38ffa4f13e3ef3759567bdde7fa3329403f324abd0ddf8d015") + + def build(self, spec, prefix): + if spec.satisfies("platform=darwin"): + make("libdramsim.dylib") + else: + make("libdramsim.so") + + def install(self, spec, prefix): + install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/ecflow/package.py b/var/spack/repos/builtin/packages/ecflow/package.py index 467076382f2..14b3f2033df 100644 --- a/var/spack/repos/builtin/packages/ecflow/package.py +++ b/var/spack/repos/builtin/packages/ecflow/package.py @@ -18,22 +18,29 @@ class Ecflow(CMakePackage): homepage = 'https://confluence.ecmwf.int/display/ECFLOW/' url = 'https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-4.11.1-Source.tar.gz' + version('4.13.0', sha256='c743896e0ec1d705edd2abf2ee5a47f4b6f7b1818d8c159b521bdff50a403e39') + version('4.12.0', sha256='566b797e8d78e3eb93946b923ef540ac61f50d4a17c9203d263c4fd5c39ab1d1') version('4.11.1', sha256='b3bcc1255939f87b9ba18d802940e08c0cf6379ca6aeec1fef7bd169b0085d6c') variant('static_boost', default=False, description='Use also static boost libraries when compiling') - depends_on('boost+python') - depends_on('boost+pic', when='+static_boost') - depends_on('qt') - depends_on('cmake@2.8.11:', type='build') + variant('ui', default=False, description='Enable ecflow_ui') + + # Boost-1.7X release not working well on serialization + depends_on('boost@1.53:1.69+python') + depends_on('boost@1.53:1.69+pic', when='+static_boost') + depends_on('qt@5:', when='+ui') + 
depends_on('cmake@2.12.11:', type='build') def cmake_args(self): boost_lib = self.spec['boost'].prefix.lib args = ['-DBoost_PYTHON_LIBRARY_RELEASE=' + boost_lib] + ecflow_ui = 'ON' if '+ui' in self.spec else 'OFF' # https://jira.ecmwf.int/browse/SUP-2641#comment-208943 use_static_boost = 'ON' if '+static_boost' in self.spec else 'OFF' args.append('-DENABLE_STATIC_BOOST_LIBS=' + use_static_boost) + args.extend(['-DENABLE_UI=' + ecflow_ui, '-DENABLE_GUI=' + ecflow_ui]) return args diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py index e32c68c60d9..651caf3d76d 100644 --- a/var/spack/repos/builtin/packages/elfutils/package.py +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -17,10 +17,11 @@ class Elfutils(AutotoolsPackage, SourcewarePackage): version of elfutils.""" homepage = "https://fedorahosted.org/elfutils/" - sourceware_mirror_path = "elfutils/0.178/elfutils-0.178.tar.bz2" + sourceware_mirror_path = "elfutils/0.179/elfutils-0.179.tar.bz2" list_url = "https://sourceware.org/elfutils/ftp" list_depth = 1 + version('0.179', sha256='25a545566cbacaa37ae6222e58f1c48ea4570f53ba991886e2f5ce96e22a23a2') version('0.178', sha256='31e7a00e96d4e9c4bda452e1f2cdac4daf8abd24f5e154dee232131899f3a0f2') version('0.177', sha256='fa489deccbcae7d8c920f60d85906124c1989c591196d90e0fd668e3dc05042e') version('0.176', sha256='eb5747c371b0af0f71e86215a5ebb88728533c3a104a43d4231963f308cd1023') diff --git a/var/spack/repos/builtin/packages/enca/package.py b/var/spack/repos/builtin/packages/enca/package.py new file mode 100644 index 00000000000..9324a22652f --- /dev/null +++ b/var/spack/repos/builtin/packages/enca/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Enca(AutotoolsPackage): + """Extremely Naive Charset Analyser.""" + + homepage = "https://cihar.com/software/enca/" + url = "https://github.com/nijel/enca/archive/1.19.tar.gz" + + version('1.19', sha256='c4fd9a3d7c086803138842b18eed6072ec8810859b0e1ef091f1e1138d283f25') + version('1.18', sha256='b87c8d1bffc7d06ba74f82ae86eb21a921e94629203b2a971c966064c7eadab2') + version('1.17', sha256='b20372440c500e6463bd61dab0e68131cdfe857c6b7ca139b5c6cbf01e24fdc7') + version('1.16', sha256='14457b185c77b947ca2f8e09a2c3ec66940d97a2ccea28b8e61a6e0f3a0033f6') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./autogen.sh') diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py index cb6699cf3ad..3a756a0d8c9 100644 --- a/var/spack/repos/builtin/packages/environment-modules/package.py +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ -12,10 +12,11 @@ class EnvironmentModules(Package): """ homepage = 'https://cea-hpc.github.io/modules/' - url = 'https://github.com/cea-hpc/modules/releases/download/v4.5.0/modules-4.5.0.tar.gz' + url = 'https://github.com/cea-hpc/modules/releases/download/v4.5.1/modules-4.5.1.tar.gz' maintainers = ['xdelaruelle'] + version('4.5.1', sha256='7d4bcc8559e7fbbc52e526fc86a15b161ff4422aa49eee37897ee7a48eb64ac2') version('4.5.0', sha256='5f46336f612553af5553d99347f387f733de0aaa0d80d4572e67615289382ec8') version('4.4.1', sha256='3c20cfb2ff8a4d74ac6d566e7b5fa9dd220d96d17e6d8a4ae29b1ec0107ee407') version('4.4.0', sha256='4dd55ad6cc684905e891ad1ba9e3c542e79eea0a9cd9a0e99cd77abe6ed63fab') diff --git a/var/spack/repos/builtin/packages/eospac/package.py b/var/spack/repos/builtin/packages/eospac/package.py 
index f73274ca464..0d9221e7dfc 100644 --- a/var/spack/repos/builtin/packages/eospac/package.py +++ b/var/spack/repos/builtin/packages/eospac/package.py @@ -15,7 +15,7 @@ class Eospac(Package): homepage = "http://laws.lanl.gov/projects/data/eos.html" list_url = "http://laws.lanl.gov/projects/data/eos/eospacReleases.php" - version('6.4.0', sha256='15a953beac735c68431afe86ffe33323d540d0fbbbec03ba79438dd29736051d', + version('6.4.0', sha256='15a953beac735c68431afe86ffe33323d540d0fbbbec03ba79438dd29736051d', preferred=True, url="http://laws.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0_612ea8c9b8ffa6d9175d9118955571d9107f1e3c.tgz") version('6.4.0beta.4', sha256='0ebfd8badff575ea77444aa978629dbdca3135a0b5eb373b8daba058773d4635', url="http://laws.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.4_aff6429bb6868de31a980278bafa13487c2ce83f.tgz") @@ -25,7 +25,7 @@ class Eospac(Package): url="http://laws.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.2_69196eadbc77506561eef711f19d2f03b4ab0ffa.tgz") version('6.4.0beta.1', sha256='14c5c804e5f628f41e8ed80bcee5a80adeb6c6f3d130715421ca99a30c7eb7e2', url="http://laws.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz") - version('6.3.1', sha256='aa1112c4251c9c3c2883a7ab2c7f2abff2c339f29dbbf8421ef88b0c9df904f8', preferred=True, + version('6.3.1', sha256='aa1112c4251c9c3c2883a7ab2c7f2abff2c339f29dbbf8421ef88b0c9df904f8', url="http://laws.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.3.1_r20161202150449.tgz") # This patch allows the use of spack's compile wrapper 'flang' diff --git a/var/spack/repos/builtin/packages/erlang/package.py b/var/spack/repos/builtin/packages/erlang/package.py index 1101bee787d..f1fc027ae09 100644 --- a/var/spack/repos/builtin/packages/erlang/package.py +++ b/var/spack/repos/builtin/packages/erlang/package.py @@ -16,6 +16,7 @@ class 
Erlang(AutotoolsPackage): homepage = "https://erlang.org/" url = "https://erlang.org/download/otp_src_22.2.tar.gz" + version('23.0', sha256='42dcf3c721f4de59fe74ae7b65950c2174c46dc8d1dd4e27c0594d86f606a635') version('22.2', sha256='89c2480cdac566065577c82704a48e10f89cf2e6ca5ab99e1cf80027784c678f') version('22.1', sha256='cd33a102cbac6dd1c7b1e7a9a0d82d13587771fac4e96e8fff92e403d15e32c8') version('22.0', sha256='042e168d74055a501c75911694758a30597446accd8c82ec569552b9e9fcd272') diff --git a/var/spack/repos/builtin/packages/esmf/package.py b/var/spack/repos/builtin/packages/esmf/package.py index d63af514ff3..b533851228f 100644 --- a/var/spack/repos/builtin/packages/esmf/package.py +++ b/var/spack/repos/builtin/packages/esmf/package.py @@ -146,6 +146,9 @@ def edit(self, spec, prefix): # Build an optimized version of the library. os.environ['ESMF_BOPT'] = 'O' + if self.spec.satisfies('%gcc@10:'): + os.environ['ESMF_F90COMPILEOPTS'] = '-fallow-argument-mismatch' + ####### # MPI # ####### diff --git a/var/spack/repos/builtin/packages/etcd/package.py b/var/spack/repos/builtin/packages/etcd/package.py new file mode 100644 index 00000000000..fa21e6965b5 --- /dev/null +++ b/var/spack/repos/builtin/packages/etcd/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import platform + + +class Etcd(Package): + """etcd is a distributed reliable key-value store for the most + critical data of a distributed system""" + + homepage = "https://etcd.io/" + url = "https://github.com/etcd-io/etcd/archive/v3.4.7.tar.gz" + + version('3.4.7', sha256='858f5ad8c830a66f6bd0cd19386deea64d374185b32f40650ba979e0a70b8b97') + version('3.4.6', sha256='e9ebd003f5545a05017a8dbdde236d6c9d25f98ee35f8ba237e57b75330664f9') + version('3.4.5', sha256='2888f73dc52ba89da470d9bd40b1348ffe8b3da51cd8fe8bff5a1a8db2e50d46') + version('3.4.4', sha256='46bcd0d034fe9cc6ae86a9f2a72bdc78761ca99bfd5ae4b96b24e4ad93fc627e') + version('3.3.20', sha256='a9fcd2a3343f7f5b99acae956dd7c4fe12f16772b660f16fa9c24368df002477') + + depends_on('go@:1.13.9') + + def setup_run_environment(self, env): + if platform.machine() == 'aarch64': + env.set('ETCD_UNSUPPORTED_ARCH', 'arm64') + + def setup_build_environment(self, env): + if platform.machine() == 'aarch64': + env.set('ETCD_UNSUPPORTED_ARCH', 'arm64') + + def install(self, spec, prefix): + make() + install_tree('bin', prefix.bin) diff --git a/var/spack/repos/builtin/packages/evtgen/g2c.patch b/var/spack/repos/builtin/packages/evtgen/g2c.patch new file mode 100644 index 00000000000..0cce1d54014 --- /dev/null +++ b/var/spack/repos/builtin/packages/evtgen/g2c.patch @@ -0,0 +1,22 @@ +diff --git a/configure b/configure +index 375e3b7..e47deac 100755 +--- a/configure ++++ b/configure +@@ -196,7 +196,7 @@ echo "Platform is $ARCH" + FFLAGS="${FFLAGS_OPT}" + CFLAGS="${CFLAGS_OPT}" + CXXFLAGS="${CXXFLAGS_OPT}" +-FLIBS="-lfrtbegin -lg2c" ++#FLIBS="-lfrtbegin -lg2c" + SOFLAGS="-soname" + + if [ ${COMPMODE} = OPT ]; then +@@ -213,7 +213,7 @@ if [ $ARCH = Linux ]; then + FFLAGS="${FFLAGS_OPT} -Wno-globals" + CFLAGS="${CFLAGS_OPT}" + CXXFLAGS="${CXXFLAGS_OPT}" +- FLIBS="-lfrtbegin -lg2c" ++ #FLIBS="-lfrtbegin -lg2c" + if [ ${COMPMODE} = OPT ]; then + FFLAGS="${FFLAGS_OPT}" 
+ CFLAGS="${CFLAGS_OPT}" diff --git a/var/spack/repos/builtin/packages/evtgen/package.py b/var/spack/repos/builtin/packages/evtgen/package.py new file mode 100644 index 00000000000..514fe026265 --- /dev/null +++ b/var/spack/repos/builtin/packages/evtgen/package.py @@ -0,0 +1,55 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Evtgen(AutotoolsPackage): + """ EvtGen is a Monte Carlo event generator that simulates + the decays of heavy flavour particles, primarily B and D mesons. """ + + homepage = "https://evtgen.hepforge.org/" + url = "http://lcgpackages.web.cern.ch/lcgpackages/tarFiles/sources/MCGeneratorsTarFiles/evtgen-R01-07-00.tar.gz" + + maintainers = ['vvolkl'] + + version('02-00-00', sha256='02372308e1261b8369d10538a3aa65fe60728ab343fcb64b224dac7313deb719') + version('01-07-00', sha256='2648f1e2be5f11568d589d2079f22f589c283a2960390bbdb8d9d7f71bc9c014', preferred=True) + + variant('pythia8', default=True, description='Build with pythia8') + variant('tauola', default=False, description='Build with tauola') + variant('photos', default=False, description='Build with photos') + + patch("g2c.patch") + + depends_on('hepmc@:2.99.99') + depends_on("pythia8", when="+pythia8") + depends_on("tauola", when="+tauola") + depends_on("photos", when="+photos") + + conflicts("^pythia8+evtgen", when="+pythia8", + msg="Building pythia with evtgen bindings and " + "evtgen with pythia bindings results in a circular dependency " + "that cannot be resolved at the moment! 
" + "Use evtgen+pythia8^pythia8~evtgen.") + + def configure_args(self): + args = [] + + args.append('--hepmcdir=%s' % self.spec["hepmc"].prefix) + if '+pythia8' in self.spec: + args.append('--pythiadir=%s' % self.spec['pythia8'].prefix) + if '+photos' in self.spec: + args.append('--photosdir=%s' % self.spec['photos'].prefix) + if '+tauola' in self.spec: + args.append('--tauoladir=%s' % self.spec['tauola'].prefix) + + return args + + def build(self, spec, prefix): + # avoid parallel compilation errors + # due to libext_shared depending on lib_shared + make('lib_shared') + make('all') diff --git a/var/spack/repos/builtin/packages/exa/package.py b/var/spack/repos/builtin/packages/exa/package.py new file mode 100644 index 00000000000..e6a0fe39a4f --- /dev/null +++ b/var/spack/repos/builtin/packages/exa/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Exa(Package): + """exa is a replacement for ls written in Rust.""" + + homepage = 'https://the.exa.website' + url = 'https://github.com/ogham/exa/archive/v0.9.0.tar.gz' + + version('0.9.0', sha256='96e743ffac0512a278de9ca3277183536ee8b691a46ff200ec27e28108fef783') + + depends_on('rust') + + def install(self, spec, prefix): + cargo = which('cargo') + cargo('install', '--root', prefix, '--path', '.') diff --git a/var/spack/repos/builtin/packages/examinimd/package.py b/var/spack/repos/builtin/packages/examinimd/package.py index a6250b0dcb4..c8afa1c2232 100644 --- a/var/spack/repos/builtin/packages/examinimd/package.py +++ b/var/spack/repos/builtin/packages/examinimd/package.py @@ -34,14 +34,15 @@ class Examinimd(MakefilePackage): conflicts('+openmp', when='+pthreads') - depends_on('kokkos') + depends_on('kokkos-legacy') depends_on('mpi', when='+mpi') @property def build_targets(self): targets = [] # Append Kokkos - 
targets.append('KOKKOS_PATH={0}'.format(self.spec['kokkos'].prefix)) + targets.append('KOKKOS_PATH={0}'.format( + self.spec['kokkos-legacy'].prefix)) # Set kokkos device if 'openmp' in self.spec: targets.append('KOKKOS_DEVICES=OpenMP') diff --git a/var/spack/repos/builtin/packages/exampm/package.py b/var/spack/repos/builtin/packages/exampm/package.py index bf1ae7f545e..64732c2eda5 100644 --- a/var/spack/repos/builtin/packages/exampm/package.py +++ b/var/spack/repos/builtin/packages/exampm/package.py @@ -12,6 +12,21 @@ class Exampm(CMakePackage): homepage = "https://github.com/ECP-copa/ExaMPM" git = "https://github.com/ECP-copa/ExaMPM.git" - version('develop', branch='master') + version('master', branch='master') tags = ['proxy-app'] + + variant('shared', default=True, description='Build shared libraries') + + depends_on('mpi') + depends_on('kokkos@3.0:') + depends_on('silo') + depends_on('cabana+mpi@master') + + def cmake_args(self): + options = [ + '-DBUILD_SHARED_LIBS=%s' % ( + 'On' if '+shared' in self.spec else 'Off') + ] + + return options diff --git a/var/spack/repos/builtin/packages/exonerate-gff3/package.py b/var/spack/repos/builtin/packages/exonerate-gff3/package.py new file mode 100644 index 00000000000..87bc23e071f --- /dev/null +++ b/var/spack/repos/builtin/packages/exonerate-gff3/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class ExonerateGff3(AutotoolsPackage): + """This is an exonerate fork with added gff3 support. 
+ Original website with user guides: + http://www.ebi.ac.uk/~guy/exonerate/""" + + homepage = "https://github.com/hotdogee/exonerate-gff3/" + url = "https://github.com/hotdogee/exonerate-gff3/archive/2.3.0.tar.gz" + + version('2.3.0', sha256='eeab7ea8bc815fc4a37d4c3b89c625167a9a60a4a833b5cc96e32dc313eafd1f') + + depends_on('glib') + + # parallel builds fail occasionally + parallel = False diff --git a/var/spack/repos/builtin/packages/faodel/lambda-capture-f0267fc.patch b/var/spack/repos/builtin/packages/faodel/lambda-capture-f0267fc.patch new file mode 100644 index 00000000000..2dd2c32d118 --- /dev/null +++ b/var/spack/repos/builtin/packages/faodel/lambda-capture-f0267fc.patch @@ -0,0 +1,26 @@ +From f0267fc728d0f49ad396b83e8e62fba54027f31f Mon Sep 17 00:00:00 2001 +From: Craig Ulmer +Date: Fri, 29 May 2020 23:08:29 -0700 +Subject: [PATCH] FIX: Removes variable from lambda capture that conflicted + with args + +--- + src/kelpie/pools/DHTPool/DHTPool.cpp | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/kelpie/pools/DHTPool/DHTPool.cpp b/src/kelpie/pools/DHTPool/DHTPool.cpp +index df9c1d3..d24aa89 100644 +--- a/src/kelpie/pools/DHTPool/DHTPool.cpp ++++ b/src/kelpie/pools/DHTPool/DHTPool.cpp +@@ -229,7 +229,7 @@ rc_t DHTPool::Need(const Key &key, size_t expected_ldo_user_bytes, lunasa::DataO + bool is_found=false; + + rc_t rc = Want(key, expected_ldo_user_bytes, +- [&key, &returned_ldo, &cv, &is_found] (bool success, Key key, lunasa::DataObject result_ldo, ++ [&returned_ldo, &cv, &is_found] (bool success, Key key, lunasa::DataObject result_ldo, + const kv_row_info_t &ri, const kv_col_info_t &c) { + if(success) { + *returned_ldo = result_ldo; +-- +2.24.2 (Apple Git-127) + diff --git a/var/spack/repos/builtin/packages/faodel/package.py b/var/spack/repos/builtin/packages/faodel/package.py index 63f5798a024..255f0396fa4 100644 --- a/var/spack/repos/builtin/packages/faodel/package.py +++ b/var/spack/repos/builtin/packages/faodel/package.py @@ -55,6 
+55,7 @@ class Faodel(CMakePackage): patch('faodel_mpi.patch', when='@1.1811.1 ~mpi') # FAODEL Github issue #5 patch('faodel_sbl.patch', when='@1.1811.1 logging=sbl') + patch('lambda-capture-f0267fc.patch', when='@1.1906.1') def cmake_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/fastjet/package.py b/var/spack/repos/builtin/packages/fastjet/package.py new file mode 100644 index 00000000000..fcd854f9788 --- /dev/null +++ b/var/spack/repos/builtin/packages/fastjet/package.py @@ -0,0 +1,49 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Fastjet(AutotoolsPackage): + """ + A high energy physics software package for jet finding in pp + and e+e- collisions. + """ + + homepage = "http://fastjet.fr/" + url = "http://fastjet.fr/repo/fastjet-3.3.3.tar.gz" + + maintainers = ['drbenmorgan', 'vvolkl'] + + version('3.3.3', sha256='30b0a0282ce5aeac9e45862314f5966f0be941ce118a83ee4805d39b827d732b') + version('3.3.2', sha256='3f59af13bfc54182c6bb0b0a6a8541b409c6fda5d105f17e03c4cce8db9963c2') + version('3.3.1', sha256='76bfed9b87e5efdb93bcd0f7779e27427fbe38e05fe908c2a2e80a9ca0876c53') + version('3.3.0', sha256='e9da5b9840cbbec6d05c9223f73c97af1d955c166826638e0255706a6b2da70f') + version('3.2.2', sha256='3a70cb6ba64071db49a7eecad821679e1a0dadd84e8abca83e518802b3d876e5') + version('3.2.1', sha256='c858b6c4f348c3676afa173251bb16d987674e64679a84306510e3963f858d5b') + version('3.2.0', sha256='96a927f1a336ad93cff30f07e2dc137a4de8ff7d74d5cd43eb455f42cf5275e3') + version('3.1.3', sha256='9809c2a0c89aec30890397d01eda56621e036589b66d7b3cd196cf087c65e40d') + version('3.1.2', sha256='dcc834e53da821cbac459c00249d5d18aee6ac866f37551d6a0c60690d3c170b') + version('3.1.1', sha256='38303789390726803bd3e7b3a245933273e86342d080b82754df44f5168634eb') + version('3.1.0', 
sha256='f8dc701dfdb124f009b7614010b911e8cc552655c2a966a7f2608a6caa062263') + version('3.0.6', sha256='9718f1d014afe4433bc0612a67a050d720c486fcfa7ad9c9b96bf087b0f3da0b') + version('3.0.5', sha256='0781a5528a0374b3189190abc8e8a2bdfbeaab7ed64e8c74ec0389a86bbabff9') + version('3.0.4', sha256='8161ea18087cea97de37bd9df2a49895ca1ef72732f5766af7c62738b21ed2c9') + version('3.0.3', sha256='6a3e5869cf43b325c7222a925e195b2bd624db922958a926cb4211c00882a50d') + version('3.0.2', sha256='6035a3295253bcd6dd68408985dbedc4a7c5aec13ed1dfa5fdb3cb9229dc6d31') + version('3.0.1', sha256='4f17c235e73a6fcbc8ee39c15a00f166b701e732033e623625f55fe93220a4ed') + version('3.0.0', sha256='f63252e3e9d27553c65642ff35d82913b804dfd569d2446c01166882dbf2577f') + version('2.4.5', sha256='a175849393a3a251b8f92ea9f747b74236dfc83d2786ef5dd92b39c57316a727') + version('2.4.4', sha256='4d97a8494e9aae7e5738e97d224f5aafb44ae8c5d5021f836d5c8c20fc5030fc') + version('2.4.3', sha256='0560622140f9f2dfd9e316bfba6a7582c4aac68fbe06f333bd442363f54a3e40') + version('2.4.2', sha256='504714b8d4895b41c6399347a873bbcf515037d9f5cf3cd5413c9d7aac67f16f') + version('2.4.1', sha256='764de6c3b9ff3e6d1f48022eb0d536054e7321e73c9f71f7eb1e93f90b6e8ad0') + version('2.4.0', sha256='96af9b21076be779e686c83a921d4598d93329eb69f9789fe619e27cbad6034a') + version('2.3.4', sha256='8bd1d9c12866cc768974e9c05c95e00c2fec3c65854ee91b7fb11709db9c5c12') + version('2.3.3', sha256='c7eadb8ddd956815f3387ed611faae746c05b69b7550de8ae802a00342b159b0') + version('2.3.2', sha256='ba8b17fcc8edae16faa74608e8da53e87a8c574aa21a28c985ea0dfedcb95210') + version('2.3.1', sha256='16c32b420e1aa7d0b6fecddd980ea0f2b7e3c2c66585e06f0eb3142677ab6ccf') + version('2.3.0', sha256='e452fe4a9716627bcdb726cfb0917f46a7ac31f6006330a6ccc1abc43d9c2d53') + # older version use .tar instead of .tar.gz extension, to be added diff --git a/var/spack/repos/builtin/packages/fca/package.py b/var/spack/repos/builtin/packages/fca/package.py new file mode 100644 index 00000000000..ca91a43b4ec 
--- /dev/null +++ b/var/spack/repos/builtin/packages/fca/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Fca(Package): + """Legacy interface for Mellanox Fabric Collective Accelerator (FCA). FCA + is a MPI-integrated software package that utilizes CORE-Direct technology + for implementing the MPI collective communications.""" + + homepage = 'https://www.mellanox.com/products/fca' + has_code = False + + version('2.5.2431') + + # FCA needs to be added as an external package to SPACK. For this, the + # config file packages.yaml needs to be adjusted: + # + # fca: + # version: [2.5.2431] + # paths: + # fca@2.5.2431: /opt/mellanox/fca (path to your FCA installation) + # buildable: False + + def install(self, spec, prefix): + raise InstallError( + self.spec.format('{name} is not installable, you need to specify ' + 'it as an external package in packages.yaml')) diff --git a/var/spack/repos/builtin/packages/ffb/fj_compiler.patch b/var/spack/repos/builtin/packages/ffb/fj_compiler.patch new file mode 100644 index 00000000000..2f733ced38f --- /dev/null +++ b/var/spack/repos/builtin/packages/ffb/fj_compiler.patch @@ -0,0 +1,10 @@ +--- FFB8.org/util/xvx2gf/XVX.h 2013-03-26 10:09:49.000000000 +0900 ++++ FFB8.new/util/xvx2gf/XVX.h 2020-06-16 16:00:06.408500236 +0900 +@@ -5,6 +5,7 @@ + #include + #include + #include ++#include + + #define MAX_LEVEL 32 + diff --git a/var/spack/repos/builtin/packages/ffb/package.py b/var/spack/repos/builtin/packages/ffb/package.py new file mode 100644 index 00000000000..c7d10955832 --- /dev/null +++ b/var/spack/repos/builtin/packages/ffb/package.py @@ -0,0 +1,168 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +from spack import * + + +class Ffb(MakefilePackage): + """Computational Fluid Dynamics Software for aeroacoustics""" + + homepage = "http://www.ciss.iis.u-tokyo.ac.jp/dl/index.php" + url = "file://{0}/FrontFlow_blue.8.1.tar.gz".format(os.getcwd()) + version('8.1', sha256='1ad008c909152b6c27668bafbad820da3e6ec3309c7e858ddb785f0a3d6e43ae') + + patch('revocap_refiner.patch') + patch('fj_compiler.patch', when='%fj') + + depends_on('mpi') + depends_on('blas') + depends_on('scalapack') + + parallel = False + + def flag_handler(self, name, flags): + opt_flag_found = any(f in self.compiler.opt_flags for f in flags) + if name == 'cflags': + if not opt_flag_found: + flags.append('-O3') + elif name == 'cxxflags': + if not opt_flag_found: + flags.append('-O2') + flags.append(self.compiler.cxx_pic_flag) + if name == 'fflags': + if not opt_flag_found: + flags.append('-O3') + flags.append('-mcmodel=large') + if name in ('cflags', 'cxxflags', 'fflags'): + return (None, flags, None) + else: + return (flags, None, flags) + + def edit(self, spec, prefix): + workdir = os.getcwd() + cflags = env['CFLAGS'] + cxxflags = env['CXXFLAGS'] + fflags = env['FFLAGS'] + + make = join_path('make', 'makefile') + m = FileFilter(make) + m.filter( + r'#LES3DHOME =', 'LES3DHOME= {0}\n'.format(workdir)) + make = join_path('make', 'OPTION') + m = FileFilter(make) + m.filter(r'CPP\s*=.*$', 'CPP = /usr/bin/cpp') + m.filter(r'CCOM\s*=.*$', 'CCOM = {0}'.format(spack_cc)) + m.filter(r'COPT\s*=.*$', 'COPT = {0}'.format(cflags)) + m.filter(r'FCOM\s*=.*$', 'FCOM = {0}\n'.format(spack_fc)) + m.filter(r'FOPT\s*=.*$', 'FOPT = {0}\n'.format(fflags)) + m.filter(r'INCDIR\s*=.*$', 'INCDIR = {0}\n' + .format(spec['mpi'].headers.directories[0])) + m.filter(r'LIBDIR\s*=.*$', 'LIBDIR = {0}\n' + .format(spec['mpi'].libs.directories[0])) + + srcdir = join_path('lib', 'src') + utildir = join_path(workdir, 'util') + with open(join_path('make', 'Makeall'), 'w') 
as m: + m.write('#!/bin/csh -f\n') + m.write('setenv LES3DHOME {0}\n'.format(workdir)) + m.write('cd {0}\n'.format(srcdir)) + m.write('./Makeall\n') + m.write('cd {0}\n'.format(utildir)) + m.write('./Makeall\n') + + makeall = join_path('lib', 'src', 'dd_mpi', 'Makeall') + dd_mpi_dir = join_path('lib', 'src', 'dd_mpi') + with open(makeall, 'w') as m: + m.write('#!/bin/csh -f\n') + m.write('setenv LES3DHOME {0}\n'.format(workdir)) + m.write('cd {0}\n'.format(dd_mpi_dir)) + m.write('make lib\n') + os.chmod(makeall, 0o755) + + makeall = join_path('.', 'Makeall.les') + les3d_dir = join_path('util', 'les3d.mpi') + les3c_dir = join_path('util', 'les3c.mpi') + les3ct_dir = join_path('util', 'les3ct.mpi') + les3x_dir = join_path('util', 'les3x.mpi') + with open(makeall, 'w') as m: + m.write('#!/bin/csh -f\n') + m.write('setenv LES3DHOME {0}\n'.format(workdir)) + m.write('cd {0}\n'.format(join_path(workdir, les3d_dir))) + m.write('make CCOM={0}'.format(spec['mpi'].mpicc)) + m.write(' FCOM={0}\n'.format(spec['mpi'].mpifc)) + m.write('cd {0}\n'.format(join_path(workdir, les3c_dir))) + m.write('make CCOM={0}'.format(spec['mpi'].mpicc)) + m.write(' FCOM={0}\n'.format(spec['mpi'].mpifc)) + m.write('cd {0}\n'.format(join_path(workdir, les3ct_dir))) + m.write('make CCOM={0}'.format(spec['mpi'].mpicc)) + m.write(' FCOM={0}\n'.format(spec['mpi'].mpifc)) + m.write('cd {0}\n'.format(join_path(workdir, les3x_dir))) + m.write('make CCOM={0}'.format(spec['mpi'].mpicc)) + m.write(' FCOM={0}\n'.format(spec['mpi'].mpifc)) + + for d in [les3c_dir, les3ct_dir, les3d_dir]: + editfile = join_path(d, 'FILES') + m = FileFilter(editfile) + m.filter(r'-lmpi_f77', '') + os.chmod(makeall, 0o755) + + editfile = join_path('lib', 'src', 'Makeall') + m = FileFilter(editfile) + m.filter(r'x86_64-linux', '{0}-linux'.format(spec.target.family)) + + editfile = join_path('lib', 'src', 'REVOCAP_Refiner-0.4.3', 'OPTIONS') + m = FileFilter(editfile) + m.filter(r'ARCH\s*=.*$', 'ARCH= $(shell arch)-linux') + 
m.filter(r'CC\s*=.*$', 'CC={0}'.format(spack_cc)) + m.filter(r'CFLAGS\s*=.*$', 'CFLAGS={0}'.format(cflags)) + m.filter(r'CXX\s*=.*$', 'CXX={0}'.format(spack_cxx)) + m.filter(r'CXXFLAGS\s*=.*$', + 'CXXFLAGS={0}'.format(cxxflags)) + m.filter(r'F90\s*=.*$', 'CC={0}'.format(spack_fc)) + m.filter(r'LD\s*=.*$', 'LD={0}'.format(spack_fc)) + m.filter(r'LIBPATH\s*=.*$', 'LIBPATH= ') + m.filter(r'FFLAGS\s*=.*$', 'FFLAGS={0}'.format(fflags)) + m.filter(r'LDFLAGS\s*=.*$', 'LDFLAGS={0}'.format(fflags)) + + editfile = join_path('lib', 'src', 'ParMetis-3.1', 'Makefile.in') + m = FileFilter(editfile) + m.filter(r'CC \s*=.*$', 'CC ={0}'.format(spack_cc)) + m.filter(r'INCDIR\s*=.*$', 'INCDIR = \n') + + editfile = join_path('util', 'xvx2gf', 'Makefile') + m = FileFilter(editfile) + m.filter( + r'#LES3DHOME =', 'LES3DHOME= {0}\n'.format(workdir)) + m.filter(r'g\+\+', (spack_cxx)) + + editfile = join_path('util', 'les3x.mpi', 'FILES') + m = FileFilter(editfile) + m.filter(r'LIBS = -lfort -lgf2 -ldd_mpi -lmpi_f77', + 'LIBS = -lfort -lgf2 -ldd_mpi') + + if spec.satisfies('%gcc'): + editfile = join_path('util', 'xvx2gf', 'FILES') + m = FileFilter(editfile) + m.filter(r'LIBS = -lgf2 -lz -lifcore -limf -ldl', + 'LIBS = -lgf2 -lz -ldl') + elif spec.satisfies('%fj'): + editfile = join_path('util', 'xvx2gf', 'FILES') + m = FileFilter(editfile) + m.filter(r'LIBS = -lgf2 -lz -lifcore -limf -ldl', + 'LIBS = -lgf2 -lz -ldl -linkfortran') + + def build(self, spec, prefix): + for m in [join_path('make', 'Makeall'), + join_path('lib', 'src', 'dd_mpi', 'Makeall'), + join_path('.', 'Makeall.les')]: + Executable(m)() + + def install(self, spec, prefix): + install_tree('bin', prefix.bin) + install_tree('macro', prefix.macro) + + def setup_run_environment(self, env): + env.prepend_path('PATH', prefix.macro) diff --git a/var/spack/repos/builtin/packages/ffb/revocap_refiner.patch b/var/spack/repos/builtin/packages/ffb/revocap_refiner.patch new file mode 100644 index 00000000000..13d34bacd3a --- /dev/null 
+++ b/var/spack/repos/builtin/packages/ffb/revocap_refiner.patch @@ -0,0 +1,38 @@ +diff -uprN FFB8.org/lib/src/REVOCAP_Refiner-0.4.3/Geometry/kmb_Bucket.h FFB8.new/lib/src/REVOCAP_Refiner-0.4.3/Geometry/kmb_Bucket.h +--- FFB8.org/lib/src/REVOCAP_Refiner-0.4.3/Geometry/kmb_Bucket.h 2013-03-26 10:09:31.000000000 +0900 ++++ FFB8.new/lib/src/REVOCAP_Refiner-0.4.3/Geometry/kmb_Bucket.h 2020-06-16 11:33:05.765408337 +0900 +@@ -64,7 +64,7 @@ public: + numY = this->ynum; + numZ = this->znum; + }; +- void getSize(void) const{ ++ int getSize(void) const{ + return this->xnum * this->ynum * this->znum; + }; + +@@ -81,12 +81,6 @@ public: + + int getIndex() const{ return it->first; }; + +- void getIndices(int &i,int &j,int &k) const{ +- i = it->first / (ynum*znum); +- j = (it->first - i*ynum*znum) / znum; +- k = it->first - i*ynum*znum - j*znum; +- }; +- + iterator& operator++(void){ ++it; return *this; }; + + iterator operator++(int n){ +@@ -124,12 +118,6 @@ public: + + int getIndex() const{ return it->first; }; + +- void getIndices(int &i,int &j,int &k) const{ +- i = it->first / (ynum*znum); +- j = (it->first - i*ynum*znum) / znum; +- k = it->first - i*ynum*znum - j*znum; +- }; +- + const_iterator& operator++(void){ ++it; return *this; }; + + const_iterator operator++(int n){ diff --git a/var/spack/repos/builtin/packages/ffmpeg/package.py b/var/spack/repos/builtin/packages/ffmpeg/package.py index eda88445ee6..cc3a0216e86 100644 --- a/var/spack/repos/builtin/packages/ffmpeg/package.py +++ b/var/spack/repos/builtin/packages/ffmpeg/package.py @@ -13,10 +13,14 @@ class Ffmpeg(AutotoolsPackage): homepage = "https://ffmpeg.org" url = "http://ffmpeg.org/releases/ffmpeg-4.1.1.tar.bz2" - version('4.2.2', sha256='b620d187c26f76ca19e74210a0336c3b8380b97730df5cdf45f3e69e89000e5c') - version('4.1.1', sha256='0cb40e3b8acaccd0ecb38aa863f66f0c6e02406246556c2992f67bf650fab058') - version('4.1', sha256='b684fb43244a5c4caae652af9022ed5d85ce15210835bce054a33fb26033a1a5') - version('3.2.4', 
sha256='c0fa3593a2e9e96ace3c1757900094437ad96d1d6ca19f057c378b5f394496a4') + maintainers = ['xjrc'] + + version('4.2.2', sha256='b620d187c26f76ca19e74210a0336c3b8380b97730df5cdf45f3e69e89000e5c') + version('4.1.1', sha256='0cb40e3b8acaccd0ecb38aa863f66f0c6e02406246556c2992f67bf650fab058') + version('4.1', sha256='b684fb43244a5c4caae652af9022ed5d85ce15210835bce054a33fb26033a1a5') + version('3.2.4', sha256='c0fa3593a2e9e96ace3c1757900094437ad96d1d6ca19f057c378b5f394496a4') + version('2.8.15', sha256='35647f6c1f6d4a1719bc20b76bf4c26e4ccd665f46b5676c0e91c5a04622ee21') + version('1.0.10', sha256='1dbde434c3b5c573d3b2ffc1babe3814f781c10c4bc66193a4132a44c9715176') # Licensing variant('gpl', default=True, @@ -53,9 +57,10 @@ class Ffmpeg(AutotoolsPackage): # variant('libxml2', default=False, # description='XML parsing, needed for dash demuxing support') variant('libzmq', default=False, description='message passing via libzmq') - variant('lzma', default=True, description='lzma support') + variant('lzma', default=False, description='lzma support') + variant('avresample', default=False, description='AV reasmpling component') variant('openssl', default=False, description='needed for https support') - variant('sdl2', default=True, description='sdl2 support') + variant('sdl2', default=False, description='sdl2 support') variant('shared', default=True, description='build shared libraries') depends_on('alsa-lib') @@ -84,58 +89,92 @@ class Ffmpeg(AutotoolsPackage): depends_on('speex', when='+libspeex') depends_on('xz', when='+lzma') + # TODO: enable when libxml2 header issue is resolved + # conflicts('+libxml2', when='@:3.999') + # See: https://www.ffmpeg.org/index.html#news (search AV1) + conflicts('+libaom', when='@:3.999') + # All of the following constraints were sourced from the official 'ffmpeg' + # change log, which can be found here: + # https://raw.githubusercontent.com/FFmpeg/FFmpeg/release/4.0/Changelog + conflicts('+sdl2', when='@:3.1.999') + conflicts('+libsnappy', 
when='@:2.7.999') + conflicts('+X', when='@:2.4.999') + conflicts('+lzma', when='@2.3.999:') + conflicts('+libwebp', when='@2.1.999:') + conflicts('+libssh', when='@2.0.999:') + conflicts('+libzmq', when='@:1.999.999') + + def enable_or_disable_meta(self, variant, options): + switch = 'enable' if '+{0}'.format(variant) in self.spec else 'disable' + return ['--{0}-{1}'.format(switch, option) for option in options] + def configure_args(self): spec = self.spec - config_args = ['--enable-pic'] + config_args = [ + '--enable-pic', + '--cc={0}'.format(spack_cc), + '--cxx={0}'.format(spack_cxx) + ] - if '+X' in spec: - config_args.extend([ - '--enable-libxcb', - '--enable-libxcb-shape', - '--enable-libxcb-shm', - '--enable-libxcb-xfixes', - '--enable-xlib', - ]) - else: - config_args.extend([ - '--disable-libxcb', - '--disable-libxcb-shape', - '--disable-libxcb-shm', - '--disable-libxcb-xfixes', - '--disable-xlib', + # '+X' meta variant # + + xlib_opts = [] + + if spec.satisfies('@2.5:'): + xlib_opts.extend([ + 'libxcb', + 'libxcb-shape', + 'libxcb-shm', + 'libxcb-xfixes', + 'xlib', ]) - if '+drawtext' in spec: - config_args.extend([ - '--enable-libfontconfig', - '--enable-libfreetype', - '--enable-libfribidi', - ]) - else: - config_args.extend([ - '--disable-libfontconfig', - '--disable-libfreetype', - '--disable-libfribidi', - ]) - for variant in [ + config_args += self.enable_or_disable_meta('X', xlib_opts) + + # '+drawtext' meta variant # + + drawtext_opts = [ + '{0}fontconfig'.format('lib' if spec.satisfies('@3:') else ''), + 'libfreetype', + ] + + if spec.satisfies('@2.3:'): + drawtext_opts.append('libfribidi') + + config_args += self.enable_or_disable_meta('drawtext', drawtext_opts) + + # other variants # + + variant_opts = [ 'bzlib', - 'libaom', 'libmp3lame', 'libopenjpeg', 'libopus', - 'libsnappy', 'libspeex', - 'libssh', 'libvorbis', - 'libwebp', - # TODO: enable when libxml2 header issue is resolved - # 'libxml2', - 'libzmq', - 'lzma', + 'avresample', 'openssl', 
- 'sdl2', 'shared', - ]: - config_args += self.enable_or_disable(variant) + ] + + if spec.satisfies('@2.0:'): + variant_opts.append('libzmq') + if spec.satisfies('@2.1:'): + variant_opts.append('libssh') + if spec.satisfies('@2.2:'): + variant_opts.append('libwebp') + if spec.satisfies('@2.4:'): + variant_opts.append('lzma') + if spec.satisfies('@2.8:'): + variant_opts.append('libsnappy') + if spec.satisfies('@3.2:'): + variant_opts.append('sdl2') + if spec.satisfies('@4:'): + variant_opts.append('libaom') + # TODO: enable when libxml2 header issue is resolved + # variant_opts.append('libxml2') + + for variant_opt in variant_opts: + config_args += self.enable_or_disable(variant_opt) return config_args diff --git a/var/spack/repos/builtin/packages/ffr/package.py b/var/spack/repos/builtin/packages/ffr/package.py index 8cd10fe5380..35ac20f9f6a 100644 --- a/var/spack/repos/builtin/packages/ffr/package.py +++ b/var/spack/repos/builtin/packages/ffr/package.py @@ -39,8 +39,11 @@ def edit(self, spec, prefix): fflags = flags[:] if spec.satisfies('%gcc'): fflags.append('-ffixed-line-length-none') + elif spec.satisfies('%fj'): + fflags.append('-Fwide') d = find('.', 'src_main', recursive=True) - root_dir = os.path.dirname(d[0]) + src_main = d[0] + root_dir = os.path.dirname(src_main) make = join_path(root_dir, 'src_pre', 'src', 'Makefile') os.chmod(make, 0o644) filter_file('#CSRCS =.*$', 'CSRCS = kmetis_main.c io.c', make) @@ -49,9 +52,17 @@ def edit(self, spec, prefix): 'LIBPRE = ' + spec['metis'].libs.ld_flags, make ) + + make = join_path(src_main, 'src', 'Makefile') + os.chmod(make, 0o644) + with open(make, 'a') as m: + m.write('module_hpc.o: module_hpc.f\n') + m.write('\t$(MPI_F90) $(FFLAGS) -c $<\n') + m.write('\n') + m.write('hpc.o: hpc.f\n') + m.write('\t$(MPI_F90) $(FFLAGS) -c $<\n') + if spec.satisfies('@3.0_000'): - d = find('.', 'src_main', recursive=True) - root_dir = os.path.dirname(d[0]) for d in ['src_pre', 'FFR2VIZ']: workdir = join_path(root_dir, d, 'src') 
make = join_path(workdir, 'Makefile') diff --git a/var/spack/repos/builtin/packages/ffsb/package.py b/var/spack/repos/builtin/packages/ffsb/package.py new file mode 100644 index 00000000000..5745ccb8214 --- /dev/null +++ b/var/spack/repos/builtin/packages/ffsb/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ffsb(AutotoolsPackage): + """The Flexible Filesystem Benchmark (FFSB) is a cross-platform + filesystem performance measurement tool.""" + + homepage = "https://sourceforge.net/projects/ffsb/" + url = "https://sourceforge.net/projects/ffsb/files/ffsb/5.2.1/ffsb-5.2.1.tar.gz" + + version('5.2.1', sha256='36ccda8ff04f837e20bb8b2cc9edb8c6fc923fdcdbb8060d9448dc49234b968d') + version('5.1.1', sha256='e25aef255d8bfe54f29ac88c7af8237fa5a8c2e1716fdef1946cf0ecd9166d1f') + version('5.1', sha256='4d7da7eba46c824ebdc23b3d32532b006aeb5b6697a3ada314c75785ab25cb97') diff --git a/var/spack/repos/builtin/packages/figlet/package.py b/var/spack/repos/builtin/packages/figlet/package.py new file mode 100644 index 00000000000..8c6028d2463 --- /dev/null +++ b/var/spack/repos/builtin/packages/figlet/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Figlet(MakefilePackage): + """FIGlet is a program that creates large characters out of ordinary + screen characters.""" + + homepage = "http://www.figlet.org/" + url = "https://github.com/cmatsuoka/figlet/archive/2.2.5.tar.gz" + + version('2.2.5', sha256='4d366c4a618ecdd6fdb81cde90edc54dbff9764efb635b3be47a929473f13930') + version('2.2.4', sha256='970a18a2a32cca736ff11a5b77e26a54f31a0e08606b85d21d3d5c666937e03d') + version('2.2.3', sha256='168fa3c7a5888d6f796708780d3006f0e1871d83f32c4a10a84596b90ac35999') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + bins = ['figlet', 'chkfont', 'figlist', 'showfigfonts'] + for f in bins: + install(f, prefix.bin) + + mkdirp(prefix.man6) + manuals = ['figlet.6', 'chkfont.6', 'figlist.6', 'showfigfonts.6'] + for f in manuals: + install(f, prefix.man6) + + install_tree('./fonts', prefix.share.figlet) diff --git a/var/spack/repos/builtin/packages/fio/package.py b/var/spack/repos/builtin/packages/fio/package.py index 90064be1d52..4fdc44fae2b 100644 --- a/var/spack/repos/builtin/packages/fio/package.py +++ b/var/spack/repos/builtin/packages/fio/package.py @@ -16,6 +16,7 @@ class Fio(AutotoolsPackage): homepage = "https://github.com/axboe/fio" url = "https://github.com/axboe/fio/archive/fio-2.19.tar.gz" + version('3.19', sha256='809963b1d023dbc9ac7065557af8129aee17b6895e0e8c5ca671b0b14285f404') version('3.16', sha256='c7731a9e831581bab7104da9ea60c9f44e594438dbe95dff26726ca0285e7b93') version('2.19', sha256='61fb03a18703269b781aaf195cb0d7931493bbb5bfcc8eb746d5d66d04ed77f7') @@ -32,6 +33,9 @@ class Fio(AutotoolsPackage): conflicts('+libaio', when='platform=darwin', msg='libaio does not support Darwin') + conflicts('@:3.18', when='%gcc@10:', + msg='gcc@10: sets -fno-common by default') + def configure_args(self): config_args = [] diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py 
index 3c9f51a0eb0..83214099f2c 100644 --- a/var/spack/repos/builtin/packages/flex/package.py +++ b/var/spack/repos/builtin/packages/flex/package.py @@ -43,6 +43,11 @@ class Flex(AutotoolsPackage): # - https://github.com/westes/flex/issues/241 patch('https://github.com/westes/flex/commit/24fd0551333e7eded87b64dd36062da3df2f6380.patch', sha256='09c22e5c6fef327d3e48eb23f0d610dcd3a35ab9207f12e0f875701c677978d3', when='@2.6.4') + @when('@:2.6.0,2.6.4') + def autoreconf(self, spec, prefix): + autogen = Executable('./autogen.sh') + autogen() + @property def force_autoreconf(self): # The patch for 2.6.4 touches configure diff --git a/var/spack/repos/builtin/packages/flux-core/package.py b/var/spack/repos/builtin/packages/flux-core/package.py index 04ce1eceebf..cb53c0f8ad9 100644 --- a/var/spack/repos/builtin/packages/flux-core/package.py +++ b/var/spack/repos/builtin/packages/flux-core/package.py @@ -15,6 +15,8 @@ class FluxCore(AutotoolsPackage): git = "https://github.com/flux-framework/flux-core.git" version('master', branch='master') + version('0.16.0', sha256='1582f7fb4d2313127418c34de7c9ce4f5fef00622d19cedca7bed929f4709f10') + version('0.15.0', sha256='51bc2eae69501f802459fc82f191eb5e8ae0b4f7e9e77ac18543a850cc8445f5') version('0.11.3', sha256='91b5d7dca8fc28a77777c4e4cb8717fc3dc2c174e70611740689a71901c6de7e') version('0.11.2', sha256='ab8637428cd9b74b2dff4842d10e0fc4acc8213c4e51f31d32a4cbfbdf730412') version('0.11.1', sha256='3c8495db0f3b701f6dfe3e2a75aed794fc561e9f28284e8c02ac67693bfe890e') @@ -43,9 +45,13 @@ class FluxCore(AutotoolsPackage): depends_on("lua@5.1:5.2.99", when="@0.10.0:,master") depends_on("lua-luaposix") depends_on("munge", when="@0.1.0:0.10.0") - depends_on("python", type=('build', 'run')) - depends_on("python@2.7:2.99", when="@0.1.0:0.11.0") - depends_on("python@2.7:", when="@0.11.1:") + # `link` dependency on python due to Flux's `pymod` module + depends_on("python", type=('build', 'run', 'link')) + depends_on("python@2.7:2.99", + 
when="@0.1.0:0.11.0", + type=('build', 'run', 'link')) + depends_on("python@2.7:", when="@0.11.1:", type=('build', 'run', 'link')) + depends_on("python@3.6:", when="@0.17.0:,master", type=('build', 'run', 'link')) depends_on("py-cffi", type=('build', 'run')) depends_on("py-six", type=('build', 'run'), when="@0.11.0:") depends_on("py-pyyaml", type=('build', 'run'), when="@0.11.0:") diff --git a/var/spack/repos/builtin/packages/flux-sched/package.py b/var/spack/repos/builtin/packages/flux-sched/package.py index 60001e7b896..5c33d39a0df 100644 --- a/var/spack/repos/builtin/packages/flux-sched/package.py +++ b/var/spack/repos/builtin/packages/flux-sched/package.py @@ -15,6 +15,7 @@ class FluxSched(AutotoolsPackage): git = "https://github.com/flux-framework/flux-sched.git" version('master', branch='master') + version('0.8.0', sha256='45bc3cefb453d19c0cb289f03692fba600a39045846568d258e4b896ca19ca0d') version('0.7.1', sha256='a35e555a353feed6b7b814ae83d05362356f9ee33ffa75d7dfb7e2fe86c21294') version('0.7.0', sha256='69267a3aaacaedd9896fd90cfe17aef266cba4fb28c77f8123d95a31ce739a7b') version('0.6.0', sha256='3301d4c10810414228e5969b84b75fe1285abb97453070eb5a77f386d8184f8d') @@ -35,13 +36,14 @@ class FluxSched(AutotoolsPackage): depends_on("pkgconfig") depends_on("flux-core", type=('build', 'link', 'run')) - depends_on("flux-core+cuda", when='+cuda') - depends_on("flux-core@0.8.0", when='@0.4.0') - depends_on("flux-core@0.9.0", when='@0.5.0') - depends_on("flux-core@0.10.0", when='@0.6.0') - depends_on("flux-core@0.11.0", when='@0.7.0') - depends_on("flux-core@0.11.2:0.11.99", when='@0.7.1') - depends_on("flux-core@master", when='@master') + depends_on("flux-core+cuda", when='+cuda', type=('build', 'run', 'link')) + depends_on("flux-core@0.8.0", when='@0.4.0', type=('build', 'run', 'link')) + depends_on("flux-core@0.9.0", when='@0.5.0', type=('build', 'run', 'link')) + depends_on("flux-core@0.10.0", when='@0.6.0', type=('build', 'run', 'link')) + 
depends_on("flux-core@0.11.0", when='@0.7.0', type=('build', 'run', 'link')) + depends_on("flux-core@0.11.2:0.11.99", when='@0.7.1', type=('build', 'run', 'link')) + depends_on("flux-core@0.16.0:0.16.99", when='@0.8.0', type=('build', 'run', 'link')) + depends_on("flux-core@master", when='@master', type=('build', 'run', 'link')) # Need autotools when building on master: depends_on("autoconf", type='build', when='@master') diff --git a/var/spack/repos/builtin/packages/fox/no_rexdebug.patch b/var/spack/repos/builtin/packages/fox/no_rexdebug.patch new file mode 100644 index 00000000000..3d0c0cf754c --- /dev/null +++ b/var/spack/repos/builtin/packages/fox/no_rexdebug.patch @@ -0,0 +1,11 @@ +--- a/lib/FXRex.cpp 2019-08-22 21:46:13.000000000 -0500 ++++ b/lib/FXRex.cpp 2020-04-13 16:46:31.718701955 -0500 +@@ -503,7 +503,7 @@ + */ + + // Debugging regex code +-#define REXDEBUG 1 ++// #define REXDEBUG 1 + + // As close to infinity as we're going to get; this seems big enough. We can not make + // it too large as this may wrap around when added to something else! diff --git a/var/spack/repos/builtin/packages/fox/package.py b/var/spack/repos/builtin/packages/fox/package.py new file mode 100644 index 00000000000..28e6e9b7b31 --- /dev/null +++ b/var/spack/repos/builtin/packages/fox/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Fox(AutotoolsPackage): + """FOX is a C++ based Toolkit for developing Graphical User Interfaces + easily and effectively. It offers a wide, and growing, collection of + Controls, and provides state of the art facilities such as drag and drop, + selection, as well as OpenGL widgets for 3D graphical manipulation. FOX + also implements icons, images, and user-convenience features such as status + line help, and tooltips. 
Tooltips may even be used for 3D objects!""" + + homepage = "http://fox-toolkit.org/" + url = "http://fox-toolkit.org/ftp/fox-1.7.67.tar.gz" + + version('1.7.67', sha256='7e511685119ef096fa90d334da46f0e50cfed8d414df32d80a7850442052f57d') + version('1.6.57', preferred=True, sha256='65ef15de9e0f3a396dc36d9ea29c158b78fad47f7184780357b929c94d458923') + + patch('no_rexdebug.patch', when='@1.7.67') + + variant('opengl', default=False, description='opengl support') + + depends_on('bzip2') + depends_on('jpeg') + depends_on('libpng') + depends_on('libtiff') + depends_on('zlib') + depends_on('libx11') + depends_on('libsm') + depends_on('libxft') + depends_on('libxext') + depends_on('libxcursor') + depends_on('libxi') + depends_on('libxrandr') + depends_on('gl', when='+opengl') + + def configure_args(self): + # Make the png link flags explicit or it will try to pick up libpng15 + # from system + args = ['LDFLAGS={0}'.format(self.spec['libpng'].libs.search_flags)] + args += self.with_or_without('opengl') + return args diff --git a/var/spack/repos/builtin/packages/fping/package.py b/var/spack/repos/builtin/packages/fping/package.py new file mode 100644 index 00000000000..59c2127dad1 --- /dev/null +++ b/var/spack/repos/builtin/packages/fping/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Fping(AutotoolsPackage): + """High performance ping tool.""" + + homepage = "https://fping.org/" + url = "https://github.com/schweikert/fping/archive/v4.2.tar.gz" + + version('4.2', sha256='49b0ac77fd67c1ed45c9587ffab0737a3bebcfa5968174329f418732dbf655d4') + version('4.1', sha256='1da45b1d8c2d38b52bebd4f8b1617ddfae678e9f6436dafa6f62e97b8ecfc93c') + version('4.0', sha256='8c9eac7aeadb5be0daa978cdac5f68ae44b749af0f643e8252b5e3dd4ce32e6a') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/fraggenescan/package.py b/var/spack/repos/builtin/packages/fraggenescan/package.py new file mode 100644 index 00000000000..c1af71a829b --- /dev/null +++ b/var/spack/repos/builtin/packages/fraggenescan/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Fraggenescan(MakefilePackage): + """FragGeneScan is an application for finding (fragmented) genes in short + reads. 
It can also be applied to predict prokaryotic genes in + incomplete assemblies or complete genomes.""" + + homepage = "https://sourceforge.net/projects/fraggenescan/" + url = "https://downloads.sourceforge.net/project/fraggenescan/FragGeneScan1.31.tar.gz" + + version('1.31', sha256='cd3212d0f148218eb3b17d24fcd1fc897fb9fee9b2c902682edde29f895f426c') + + build_targets = ['fgs'] + + def edit(self, spec, prefix): + filter_file('gcc', spack_cc, 'Makefile', string=True) + + def install(self, spec, prefix): + install_tree('.', prefix.bin) diff --git a/var/spack/repos/builtin/packages/frontistr/package.py b/var/spack/repos/builtin/packages/frontistr/package.py new file mode 100644 index 00000000000..ebdfa0a3e31 --- /dev/null +++ b/var/spack/repos/builtin/packages/frontistr/package.py @@ -0,0 +1,40 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Frontistr(CMakePackage): + """Open-Source Large-Scale Parallel FEM Program for + Nonlinear Structural Analysis""" + + homepage = "https://www.frontistr.com/" + git = "https://gitlab.com/FrontISTR-Commons/FrontISTR.git" + maintainers = ['hiroshi.okuda', 'kgoto', 'morita', 'inagaki', 'michioga'] + + version('5.0', tag='v5.0') + version('master', tag='master') + + variant('build_type', default='RELEASE', + description='CMake build type', + values=('DEBUG', 'RELEASE')) + + depends_on('mpi') + depends_on('blas') + depends_on('lapack') + depends_on('scalapack') + depends_on('revocap-refiner') + # depends_on('revocap-coupler') + depends_on('metis') + depends_on('mumps') + depends_on('trilinos') + + def cmake_args(self): + define = CMakePackage.define + cmake_args = [ + define('WITH_ML', True), + ] + return cmake_args diff --git a/var/spack/repos/builtin/packages/fujitsu-mpi/package.py b/var/spack/repos/builtin/packages/fujitsu-mpi/package.py index 
62da3aab01e..c0a882ec4c7 100644 --- a/var/spack/repos/builtin/packages/fujitsu-mpi/package.py +++ b/var/spack/repos/builtin/packages/fujitsu-mpi/package.py @@ -34,6 +34,11 @@ def setup_dependent_package(self, module, dependent_spec): self.spec.mpifc = self.prefix.bin.mpifrt def setup_dependent_build_environment(self, env, dependent_spec): + self.setup_run_environment(env) + + def setup_run_environment(self, env): + # Because MPI are both compilers and runtimes, we set up the compilers + # as part of run environment env.set('MPICC', self.prefix.bin.mpifcc) env.set('MPICXX', self.prefix.bin.mpiFCC) env.set('MPIF77', self.prefix.bin.mpifrt) diff --git a/var/spack/repos/builtin/packages/g4emlow/package.py b/var/spack/repos/builtin/packages/g4emlow/package.py index 713b9da6311..e7b34000cdd 100644 --- a/var/spack/repos/builtin/packages/g4emlow/package.py +++ b/var/spack/repos/builtin/packages/g4emlow/package.py @@ -15,6 +15,7 @@ class G4emlow(Package): maintainers = ['drbenmorgan'] # Only versions relevant to Geant4 releases built by spack are added + version('7.9.1', sha256='820c106e501c64c617df6c9e33a0f0a3822ffad059871930f74b8cc37f043ccb') version('7.9', sha256='4abf9aa6cda91e4612676ce4d2d8a73b91184533aa66f9aad19a53a8c4dc3aff') version('7.7', sha256='16dec6adda6477a97424d749688d73e9bd7d0b84d0137a67cf341f1960984663') version('7.3', sha256='583aa7f34f67b09db7d566f904c54b21e95a9ac05b60e2bfb794efb569dba14e') diff --git a/var/spack/repos/builtin/packages/ganglia/package.py b/var/spack/repos/builtin/packages/ganglia/package.py new file mode 100644 index 00000000000..ffccc3c14c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/ganglia/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ganglia(AutotoolsPackage): + """Ganglia is a scalable distributed monitoring system for high-performance + computing systems such as clusters and Grids. It is based on a hierarchical + design targeted at federations of clusters. Supports clusters up to 2000 + nodes in size.""" + + homepage = "http://ganglia.sourceforge.net/" + url = "http://jaist.dl.sourceforge.net/project/ganglia/ganglia%20monitoring%20core/3.7.2/ganglia-3.7.2.tar.gz" + list_url = "http://jaist.dl.sourceforge.net/project/ganglia/ganglia%20monitoring%20core" + list_depth = 1 + + version('3.7.2', sha256='042dbcaf580a661b55ae4d9f9b3566230b2232169a0898e91a797a4c61888409') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('apr') + depends_on('libconfuse') + depends_on('python@:2.7.999') + depends_on('pcre') + depends_on('libtirpc') + depends_on('expat') + + def setup_build_environment(self, env): + env.prepend_path('CPATH', self.spec['libtirpc'].prefix.include.tirpc) + env.append_flags('LDFLAGS', '-ltirpc') diff --git a/var/spack/repos/builtin/packages/gaudi/build_testing.patch b/var/spack/repos/builtin/packages/gaudi/build_testing.patch new file mode 100644 index 00000000000..ee736fd6dd4 --- /dev/null +++ b/var/spack/repos/builtin/packages/gaudi/build_testing.patch @@ -0,0 +1,13 @@ +diff --git a/GaudiExamples/CMakeLists.txt b/GaudiExamples/CMakeLists.txt +index ef6f9fcff..672b76d26 100644 +--- a/GaudiExamples/CMakeLists.txt ++++ b/GaudiExamples/CMakeLists.txt +@@ -0,0 +1,7 @@ ++# GaudiExamples subdirectory ++if(NOT BUILD_TESTING) ++ # Ignore examples if not building tests ++ # see https://gitlab.cern.ch/atlas/atlasexternals/-/merge_requests/664#note_3395313 ++ return() ++endif() ++ + diff --git a/var/spack/repos/builtin/packages/gaudi/link_target_fixes.patch 
b/var/spack/repos/builtin/packages/gaudi/link_target_fixes.patch new file mode 100644 index 00000000000..468c117b517 --- /dev/null +++ b/var/spack/repos/builtin/packages/gaudi/link_target_fixes.patch @@ -0,0 +1,106 @@ +diff --git a/cmake/GaudiProjectConfig.cmake b/cmake/GaudiProjectConfig.cmake +index d7049233e..a9ef71e9a 100644 +--- a/cmake/GaudiProjectConfig.cmake ++++ b/cmake/GaudiProjectConfig.cmake +@@ -356,6 +356,8 @@ macro(gaudi_project project version) + # Make sure we select the version of Python provided by LCG (if we are building in that context) + if(Python_config_version) + set(Python_config_version ${Python_config_version} CACHE STRING "LCG version of Python") ++ # Prevent special LCG versions (like 2.7.9.p1) to confuse CMake ++ string(REGEX REPLACE "([0-9]+\\.[0-9]+\\.[0-9]+).*" "\\1" Python_config_version "${Python_config_version}") + find_package(PythonInterp ${Python_config_version} QUIET) + find_package(PythonLibs ${Python_config_version} QUIET) + if(CMAKE_VERSION VERSION_GREATER 3.12) +@@ -373,10 +375,10 @@ macro(gaudi_project project version) + #-- Set up the boost_python_version variable for the project + find_package(PythonInterp) + find_package(Boost) +- if((Boost_VERSION GREATER 106700) OR (Boost_VERSION EQUAL 106700)) +- set(boost_python_version "${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR}") ++ if((Boost_VERSION LESS 106700) OR (Boost_VERSION GREATER 1069000)) ++ set(boost_python_version "") + else() +- set(boost_python_version "") ++ set(boost_python_version "${Python_VERSION_MAJOR}${Python_VERSION_MINOR}") + endif() + + #--- Allow installation on failed builds +@@ -1620,9 +1622,24 @@ function(gaudi_resolve_link_libraries variable) + set(collected) + foreach(package ${ARGN}) + # check if it is an actual library or a target first ++ if(NOT TARGET ${package}) ++ if(package MATCHES "^Boost::(.*)$") ++ # special handling of Boost imported targets ++ find_package(Boost COMPONENTS ${CMAKE_MATCH_1} QUIET) ++ else() ++ # the target might be in 
a project namespace ++ foreach(_p IN LISTS used_gaudi_projects) ++ if(TARGET ${_p}::${package}) ++ #message(STATUS "using ${_p}::${package} for ${package}") ++ set(package ${_p}::${package}) ++ break() ++ endif() ++ endforeach() ++ endif() ++ endif() + if(TARGET ${package}) + get_property(target_type TARGET ${package} PROPERTY TYPE) +- if(NOT target_type MATCHES "(SHARED|STATIC)_LIBRARY") ++ if(NOT target_type MATCHES "(SHARED|STATIC|UNKNOWN)_LIBRARY") + message(FATAL_ERROR "${package} is a ${target_type}: you cannot link against it") + endif() + #message(STATUS "${package} is a TARGET") +@@ -1670,6 +1687,19 @@ function(gaudi_resolve_link_libraries variable) + endforeach() + #message(STATUS "gaudi_resolve_link_libraries collected: ${collected}") + _gaudi_strip_build_type_libs(collected) ++ # resolve missing Boost::* targets, if needed ++ set(boost_components ${collected}) ++ list(FILTER boost_components INCLUDE REGEX "^Boost::") ++ list(TRANSFORM boost_components REPLACE "^Boost::" "") ++ set(missing_components) ++ foreach(comp IN LISTS boost_components) ++ if(NOT TARGET Boost::${comp}) ++ list(APPEND missing_components ${comp}) ++ endif() ++ endforeach() ++ if(missing_components) ++ find_package(Boost COMPONENTS ${missing_components} QUIET) ++ endif() + #message(STATUS "gaudi_resolve_link_libraries output: ${collected}") + set(${variable} ${collected} PARENT_SCOPE) + endfunction() +@@ -3277,6 +3307,14 @@ macro(gaudi_external_project_environment) + list(FIND used_gaudi_projects ${pack} gaudi_project_idx) + if((NOT pack STREQUAL GaudiProject) AND (gaudi_project_idx EQUAL -1)) + message(STATUS " ${pack}") ++ if(pack STREQUAL Boost) ++ if(NOT TARGET Boost::headers) ++ # this is needed to get the non-cache variables for the packages ++ # but we do not need to call it if we do not use FindBoost.cmake (Boost >= 1.70) ++ find_package(${pack} QUIET) ++ endif() ++ endif() ++ + if(NOT pack MATCHES "^Python(Interp|Libs)?$") + # this is needed to get the non-cache variables 
for the packages + find_package(${pack} QUIET) +@@ -3325,6 +3363,17 @@ macro(gaudi_external_project_environment) + list(APPEND environment SET QT_XKB_CONFIG_ROOT "/usr/share/X11/xkb") + endif() + endif() ++ elseif(pack MATCHES "^boost_(.*)$") ++ # We are using BoostConfig.cmake (>=1.70) and not FindBoost.cmake ++ if(TARGET "Boost::${CMAKE_MATCH_1}") ++ set(tgt_name "Boost::${CMAKE_MATCH_1}") ++ get_property(target_type TARGET ${tgt_name} PROPERTY TYPE) ++ if(target_type MATCHES "(SHARED|UNKNOWN)_LIBRARY") ++ # FIXME: I'm not sure it's good to rely on the "_RELEASE" suffix ++ get_property(lib_path TARGET ${tgt_name} PROPERTY IMPORTED_LOCATION_RELEASE) ++ get_filename_component(${pack}_LIBRARY_DIR "${lib_path}" PATH) ++ endif() ++ endif() + endif() + + list(APPEND binary_path ${${pack}_BINARY_PATH}) diff --git a/var/spack/repos/builtin/packages/gaudi/link_target_fixes32.patch b/var/spack/repos/builtin/packages/gaudi/link_target_fixes32.patch new file mode 100644 index 00000000000..95339a09318 --- /dev/null +++ b/var/spack/repos/builtin/packages/gaudi/link_target_fixes32.patch @@ -0,0 +1,107 @@ +diff --git a/cmake/GaudiProjectConfig.cmake b/cmake/GaudiProjectConfig.cmake +index 3da52a9c6..08c11c863 100644 +--- a/cmake/GaudiProjectConfig.cmake ++++ b/cmake/GaudiProjectConfig.cmake +@@ -346,6 +346,8 @@ macro(gaudi_project project version) + # Make sure we select the version of Python provided by LCG (if we are building in that context) + if(Python_config_version) + set(Python_config_version ${Python_config_version} CACHE STRING "LCG version of Python") ++ # Prevent special LCG versions (like 2.7.9.p1) to confuse CMake ++ string(REGEX REPLACE "([0-9]+\\.[0-9]+\\.[0-9]+).*" "\\1" Python_config_version "${Python_config_version}") + find_package(PythonInterp ${Python_config_version} QUIET) + find_package(PythonLibs ${Python_config_version} QUIET) + if(CMAKE_VERSION VERSION_GREATER 3.12) +@@ -363,10 +365,10 @@ macro(gaudi_project project version) + #-- Set up the 
boost_python_version variable for the project + find_package(PythonInterp) + find_package(Boost) +- if((Boost_VERSION GREATER 106700) OR (Boost_VERSION EQUAL 106700)) +- set(boost_python_version "${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR}") ++ if((Boost_VERSION LESS 106700) OR (Boost_VERSION GREATER 1069000)) ++ set(boost_python_version "") + else() +- set(boost_python_version "") ++ set(boost_python_version "${Python_VERSION_MAJOR}${Python_VERSION_MINOR}") + endif() + + #--- Allow installation on failed builds +@@ -1607,9 +1609,25 @@ function(gaudi_resolve_link_libraries variable) + set(collected) + foreach(package ${ARGN}) + # check if it is an actual library or a target first ++ if(NOT TARGET ${package}) ++ if(package MATCHES "^Boost::(.*)$") ++ # special handling of Boost imported targets ++ find_package(Boost COMPONENTS ${CMAKE_MATCH_1} QUIET) ++ else() ++ # the target might be in a project namespace ++ foreach(_p IN LISTS used_gaudi_projects) ++ if(TARGET ${_p}::${package}) ++ #message(STATUS "using ${_p}::${package} for ${package}") ++ set(package ${_p}::${package}) ++ break() ++ endif() ++ endforeach() ++ endif() ++ endif() ++ + if(TARGET ${package}) + get_property(target_type TARGET ${package} PROPERTY TYPE) +- if(NOT target_type MATCHES "(SHARED|STATIC)_LIBRARY") ++ if(NOT target_type MATCHES "(SHARED|STATIC|UNKNOWN)_LIBRARY") + message(FATAL_ERROR "${package} is a ${target_type}: you cannot link against it") + endif() + #message(STATUS "${package} is a TARGET") +@@ -1657,6 +1675,19 @@ function(gaudi_resolve_link_libraries variable) + endforeach() + #message(STATUS "gaudi_resolve_link_libraries collected: ${collected}") + _gaudi_strip_build_type_libs(collected) ++ # resolve missing Boost::* targets, if needed ++ set(boost_components ${collected}) ++ list(FILTER boost_components INCLUDE REGEX "^Boost::") ++ list(TRANSFORM boost_components REPLACE "^Boost::" "") ++ set(missing_components) ++ foreach(comp IN LISTS boost_components) ++ if(NOT TARGET 
Boost::${comp}) ++ list(APPEND missing_components ${comp}) ++ endif() ++ endforeach() ++ if(missing_components) ++ find_package(Boost COMPONENTS ${missing_components} QUIET) ++ endif() + #message(STATUS "gaudi_resolve_link_libraries output: ${collected}") + set(${variable} ${collected} PARENT_SCOPE) + endfunction() +@@ -3262,6 +3293,14 @@ macro(gaudi_external_project_environment) + list(FIND used_gaudi_projects ${pack} gaudi_project_idx) + if((NOT pack STREQUAL GaudiProject) AND (gaudi_project_idx EQUAL -1)) + message(STATUS " ${pack}") ++ if(pack STREQUAL Boost) ++ if(NOT TARGET Boost::headers) ++ # this is needed to get the non-cache variables for the packages ++ # but we do not need to call it if we do not use FindBoost.cmake (Boost >= 1.70) ++ find_package(${pack} QUIET) ++ endif() ++ endif() ++ + if(NOT pack MATCHES "^Python(Interp|Libs)?$") + # this is needed to get the non-cache variables for the packages + find_package(${pack} QUIET) +@@ -3310,6 +3349,17 @@ macro(gaudi_external_project_environment) + list(APPEND environment SET QT_XKB_CONFIG_ROOT "/usr/share/X11/xkb") + endif() + endif() ++ elseif(pack MATCHES "^boost_(.*)$") ++ # We are using BoostConfig.cmake (>=1.70) and not FindBoost.cmake ++ if(TARGET "Boost::${CMAKE_MATCH_1}") ++ set(tgt_name "Boost::${CMAKE_MATCH_1}") ++ get_property(target_type TARGET ${tgt_name} PROPERTY TYPE) ++ if(target_type MATCHES "(SHARED|UNKNOWN)_LIBRARY") ++ # FIXME: I'm not sure it's good to rely on the "_RELEASE" suffix ++ get_property(lib_path TARGET ${tgt_name} PROPERTY IMPORTED_LOCATION_RELEASE) ++ get_filename_component(${pack}_LIBRARY_DIR "${lib_path}" PATH) ++ endif() ++ endif() + endif() + + list(APPEND binary_path ${${pack}_BINARY_PATH}) diff --git a/var/spack/repos/builtin/packages/gaudi/package.py b/var/spack/repos/builtin/packages/gaudi/package.py index 20f7c0f28b4..f529366a321 100644 --- a/var/spack/repos/builtin/packages/gaudi/package.py +++ b/var/spack/repos/builtin/packages/gaudi/package.py @@ -11,54 +11,71 
@@ class Gaudi(CMakePackage): homepage = "http://gaudi.web.cern.ch/gaudi/" git = "https://gitlab.cern.ch/gaudi/Gaudi.git" + url = "https://gitlab.cern.ch/gaudi/Gaudi/-/archive/v33r1/Gaudi-v33r1.tar.gz" - version('develop', branch='master') - version('30.5', commit='2c70e73ee5b543b26197b90dd59ea4e4d359d230') + version('master', branch='master') + # major cmake config overhaul already in use by some + version('develop', git='https://gitlab.cern.ch/clemenci/Gaudi.git', branch='cmake-modernisation') + version('33.1', sha256='7eb6b2af64aeb965228d4b6ea66c7f9f57f832f93d5b8ad55c9105235af5b042') + version('33.0', sha256='76a967c41f579acc432593d498875dd4dc1f8afd5061e692741a355a9cf233c8') + version('32.2', sha256='e9ef3eb57fd9ac7b9d5647e278a84b2e6263f29f0b14dbe1321667d44d969d2e') version('31.0', commit='aeb156f0c40571b5753a9e1dab31e331491b2f3e') + version('30.5', commit='2c70e73ee5b543b26197b90dd59ea4e4d359d230') + + maintainers = ['drbenmorgan', "vvolkl"] - variant('tests', default=False, - description='Prepare to run the test suite') variant('optional', default=False, - description='Build most optional components') + description='Build most optional components and tests') + variant('docs', default=False, + description='Build documentation with Doxygen') variant('vtune', default=False, description='Build with Intel VTune profiler support') + # only build subdirectory GaudiExamples when +optional + patch("build_testing.patch", when="@:33.1") + # fix for the new cmake config, should be merged in branch + patch('python2.patch', when="@develop") + # fixes for the cmake config which could not find newer boost versions + patch("link_target_fixes.patch", when="@33.0:33.1") + patch("link_target_fixes32.patch", when="@:32.2") + # These dependencies are needed for a minimal Gaudi build depends_on('boost@1.67.0: +python') depends_on('cmake', type='build') + depends_on('cppgsl') depends_on('intel-tbb') depends_on('libuuid') - depends_on('python@:2.99.99') - 
depends_on('py-xenv@develop_2018-12-20:') + # some bugs with python 3.8 + depends_on('python@:3.7.99', when='@32.2:', type=('build', 'run')) + depends_on('python@:2.99.99', when='@:32.1', type=('build', 'run')) + depends_on('py-setuptools@:45.99.99', when='^python@:2.7.99', type='build') + depends_on('py-six', type=('build', 'run')) + depends_on('py-xenv@1:', type=('build', 'run')) depends_on('range-v3') depends_on('root +python +root7 +ssl +tbb +threads') depends_on('zlib') - # These dependencies are required by the Gaudi test suite - depends_on('aida', when='+tests') - depends_on('clhep', when='+tests') - depends_on('cppunit', when='+tests') - depends_on('gdb', when='+tests') - depends_on('gperftools', when='+tests') - depends_on('heppdt@:2.99.99', when='+tests') - depends_on('py-networkx', when='+tests') - depends_on('py-nose', when='+tests') - depends_on('py-setuptools', when='+tests') - depends_on('relax', when='+tests') - depends_on('xerces-c', when='+tests') + # todo: this should be a test dependency only, + # should be fixed in the cmake-modernisation branch + depends_on('py-nose', when="@develop", type=('build', 'run')) # Adding these dependencies triggers the build of most optional components depends_on('aida', when='+optional') depends_on('clhep', when='+optional') depends_on('cppgsl', when='+optional') depends_on('cppunit', when='+optional') - depends_on('doxygen +graphviz', when='+optional') + depends_on('doxygen +graphviz', when='+docs') depends_on('gperftools', when='+optional') + depends_on('gdb', when='+optional') depends_on('gsl', when='+optional') depends_on('heppdt@:2.99.99', when='+optional') depends_on('jemalloc', when='+optional') depends_on('libpng', when='+optional') depends_on('libunwind', when='+optional') + depends_on('py-networkx@:2.2', when='+optional ^python@:2.7.99') + depends_on('py-networkx', when='+optional ^python@3.0.0:') + depends_on('py-setuptools', when='+optional') + depends_on('py-nose', when='+optional') 
depends_on('relax', when='+optional') depends_on('xerces-c', when='+optional') # NOTE: pocl cannot be added as a minimal OpenCL implementation because @@ -66,3 +83,33 @@ class Gaudi(CMakePackage): # The Intel VTune dependency is taken aside because it requires a license depends_on('intel-parallel-studio -mpi +vtune', when='+vtune') + + def cmake_args(self): + args = [ + self.define_from_variant("BUILD_TESTING", "optional"), + self.define_from_variant("GAUDI_USE_AIDA", "optional"), + self.define_from_variant("GAUDI_USE_XERCESC", "optional"), + self.define_from_variant("GAUDI_USE_CLHEP", "optional"), + self.define_from_variant("GAUDI_USE_HEPPDT", "optional"), + self.define_from_variant("GAUDI_USE_CPPUNIT", "optional"), + self.define_from_variant("GAUDI_USE_UNWIND", "optional"), + self.define_from_variant("GAUDI_USE_GPERFTOOLS", "optional"), + self.define_from_variant("GAUDI_USE_DOXYGEN", "docs"), + self.define_from_variant("GAUDI_USE_INTELAMPLIFIER", "optional"), + self.define_from_variant("GAUDI_USE_JEMALLOC", "optional"), + # this is not really used in spack builds, but needs to be set + "-DHOST_BINARY_TAG=x86_64-linux-gcc9-opt", + ] + return args + + def setup_run_environment(self, env): + # environment as in Gaudi.xenv + env.prepend_path('PATH', self.prefix.scripts) + env.prepend_path('PYTHONPATH', self.prefix.python) + env.prepend_path('ROOT_INCLUDE_PATH', self.prefix.include) + + def url_for_version(self, version): + major = str(version[0]) + minor = str(version[1]) + url = "https://gitlab.cern.ch/gaudi/Gaudi/-/archive/v{0}r{1}/Gaudi-v{0}r{1}.tar.gz".format(major, minor) + return url diff --git a/var/spack/repos/builtin/packages/gaudi/python2.patch b/var/spack/repos/builtin/packages/gaudi/python2.patch new file mode 100644 index 00000000000..8afcebc9291 --- /dev/null +++ b/var/spack/repos/builtin/packages/gaudi/python2.patch @@ -0,0 +1,14 @@ +diff --git a/cmake/GaudiDependencies.cmake b/cmake/GaudiDependencies.cmake +index 7fc224cef..0323f8e5c 100644 +--- 
a/cmake/GaudiDependencies.cmake ++++ b/cmake/GaudiDependencies.cmake +@@ -97,7 +97,7 @@ foreach(component IN ITEMS system filesystem regex thread python unit_test_frame + endforeach() + + set(Python_FIND_STRATEGY VERSION) # Find latest version available +-find_package(Python 2.7.15 ${__quiet} COMPONENTS Interpreter Development) ++find_package(Python ${__quiet} COMPONENTS Interpreter Development) + set_package_properties(Python PROPERTIES TYPE REQUIRED) + + find_package(ROOT 6.18 ${__quiet} CONFIG COMPONENTS Core RIO Hist Thread Matrix +` diff --git a/var/spack/repos/builtin/packages/gaussian-view/package.py b/var/spack/repos/builtin/packages/gaussian-view/package.py new file mode 100644 index 00000000000..4de3c9e02be --- /dev/null +++ b/var/spack/repos/builtin/packages/gaussian-view/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +# ---------------------------------------------------------------------------- + +from spack import * +import os + + +class GaussianView(Package): + """GaussView 6 is the latest iteration of a graphical interface used with + Gaussian. 
It aids in the creation of Gaussian input files, enables the + user to run Gaussian calculations from a graphical interface without the + need for using a command line instruction, and helps in the interpretation + of Gaussian output""" + + homepage = "https://gaussian.com/gaussview6/" + manual_download = True + + version('6016', + '5dd6a8df8c81763e43a308b3a18d2d3b825d3597e9628dcf43e563d1867b9638', + extension='tbz') + + depends_on('gaussian@16-B.01', type='run') + + def url_for_version(self, version): + return "file://{0}/gv-{1}-Linux-x86_64.tbz".format(os.getcwd(), + version) + + def install(self, spec, prefix): + install_tree(os.getcwd(), self.prefix) + + def setup_run_environment(self, env): + env.set('GV_DIR', self.prefix) + env.prepend_path('PATH', self.prefix) + env.set('GV_LIB_PATH', self.prefix.lib) + env.prepend_path('GV_LIB_PATH', self.prefix.lib.MesaGL) + env.prepend_path('LD_LIBRARY_PATH', self.prefix.lib.MesaGL) + env.set('ALLOWINDIRECT', '1') + env.prepend_path('QT_PLUGIN_PATH', self.prefix.plugins) diff --git a/var/spack/repos/builtin/packages/gaussian/package.py b/var/spack/repos/builtin/packages/gaussian/package.py index f32b9b2b385..7a1f7e68216 100644 --- a/var/spack/repos/builtin/packages/gaussian/package.py +++ b/var/spack/repos/builtin/packages/gaussian/package.py @@ -2,50 +2,58 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - from spack import * import os class Gaussian(Package): - """Gaussian is a computer program for computational chemistry""" + """Gaussian is a computer program for computational chemistry""" homepage = "http://www.gaussian.com/" - url = "file://{0}/g09.tgz".format(os.getcwd()) manual_download = True - version('09', '7d4c95b535e68e48af183920df427e4e') + maintainers = ['antoniokaust'] + + version('16-B.01', sha256='0b2cf60aa85d2c8c8e7547446e60e8e8cb67eec20e5f13c4a3e4e7616dcdf122') + version('09-D.01', sha256='ef14885b5e334b6ec44a93bfd7225c634247dc946416af3087ab055bf05f54cd') + + @property + def ver(self): + return self.version.string.split('-')[0] + + @property + def g_root(self): + return join_path(self.prefix, 'g' + self.ver) + + @property + def g_bsd(self): + return join_path(self.g_root, 'bsd') + + def url_for_version(self, version): + return "file://{0}/g{1}.tgz".format(os.getcwd(), version) def install(self, spec, prefix): - install_tree('.', prefix.bin) - patch_install_files = ['flc', - 'linda8.2/opteron-linux/bin/flc', - 'linda8.2/opteron-linux/bin/LindaLauncher', - 'linda8.2/opteron-linux/bin/ntsnet', - 'linda8.2/opteron-linux/bin/pmbuild', - 'linda8.2/opteron-linux/bin/vntsnet', - 'ntsnet' - ] - for filename in patch_install_files: - if os.path.isfile(filename): - filter_file('/mf/frisch/g09', prefix.bin, join_path(prefix.bin, - filename), string='True') - patch_install_files = ['linda8.2/opteron-linux/bin/ntsnet', - 'linda8.2/opteron-linux/bin/vntsnet', - ] - for filename in patch_install_files: - if os.path.isfile(filename): - filter_file('/usr/bin/linda', prefix.bin, join_path(prefix.bin, - filename), string='True') + install_tree('.', self.g_root) + + @run_after('install') + def bsd_install(self): + with working_dir(self.g_root): + bsd_install = Executable(join_path('bsd', 'install')) + bsd_install() def setup_run_environment(self, env): - env.set('g09root', self.prefix) - env.set('GAUSSIANHOME', 
self.prefix) - env.set('GAUSS_EXEDIR', self.prefix.bin) - env.set('G09_BASIS', self.prefix.bin.basis) - env.set('GAUSS_LEXEDIR', join_path(self.prefix.bin, 'linda-exe')) - env.set('GAUSS_ARCHDIR', self.prefix.bin.arch) - env.set('GAUSS_BSDDIR', self.prefix.bin.bsd) - env.prepend_path('LD_LIBRARY_PATH', join_path(self.prefix.bin, - 'linda8.2', 'opteron-linux', 'lib')) - env.prepend_path('LD_LIBRARY_PATH', self.prefix.bin) + env.set('g' + self.ver + 'root', self.prefix) + + env.prepend_path('GAUSS_EXEDIR', self.g_root) + env.prepend_path('GAUSS_EXEDIR', self.g_bsd) + + env.prepend_path('PATH', self.g_root) + env.prepend_path('PATH', self.g_bsd) + + env.set('GAUSS_LEXEDIR', join_path(self.g_root, 'linda-exe')) + env.set('GAUSS_ARCHDIR', join_path(self.g_root, 'arch')) + env.set('GAUSS_BSDDIR', self.g_bsd) + env.set('G' + self.ver + 'BASIS', join_path(self.g_root, 'basis')) + + env.prepend_path('LD_LIBRARY_PATH', self.g_root) + env.prepend_path('LD_LIBRARY_PATH', self.g_bsd) diff --git a/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch new file mode 100644 index 00000000000..4187b812d5d --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch @@ -0,0 +1,39 @@ +From ce9568e9e9cf6094be30e748821421e703754ffc Mon Sep 17 00:00:00 2001 +From: Jakub Jelinek +Date: Fri, 8 Nov 2019 19:53:18 +0100 +Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm + bootstrap due to libsanitizer) + + Backported from mainline + 2019-10-22 Tamar Christina + Backported for version 5.3.0 <= gcc <= 6.5.0 + 2020-06-05 John L. Jolly + + PR sanitizer/92154 + * sanitizer_common/sanitizer_platform_limits_posix.cc: + Cherry-pick compiler-rt revision r375220. 
+ +From-SVN: r277981 +--- + libsanitizer/ChangeLog | 9 +++++++++ + .../sanitizer_common/sanitizer_platform_limits_posix.cc | 6 +++++- + 2 files changed, 14 insertions(+), 1 deletion(-) + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +index 6cd4a5bac8b0..06a605ff4670 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +@@ -1130,8 +1130,12 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); + #ifndef __GLIBC_PREREQ + #define __GLIBC_PREREQ(x, y) 0 + #endif +-#if !defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21) ++#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ ++ !defined(__arm__) + /* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ ++/* On Arm glibc 2.31 and later provide a different mode field, this field is ++ never used by libsanitizer so we can simply ignore this assert for all glibc ++ versions. */ + CHECK_SIZE_AND_OFFSET(ipc_perm, mode); + #endif + diff --git a/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch new file mode 100644 index 00000000000..755db173ffb --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch @@ -0,0 +1,69 @@ +From 75003cdd23c310ec385344e8040d490e8dd6d2be Mon Sep 17 00:00:00 2001 +From: Jakub Jelinek +Date: Fri, 20 Dec 2019 17:58:35 +0100 +Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm + bootstrap due to libsanitizer) + + Backported from mainline + 2019-11-26 Jakub Jelinek + Backported for version 5.3.0 <= gcc <= 6.5.0 + 2020-06-05 John L. Jolly + + PR sanitizer/92154 + * sanitizer_common/sanitizer_platform_limits_posix.h: Cherry-pick + llvm-project revision 947f9692440836dcb8d88b74b69dd379d85974ce. 
+ * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. + +From-SVN: r279653 +--- + libsanitizer/ChangeLog | 10 ++++++++++ + .../sanitizer_platform_limits_posix.cc | 9 +++------ + .../sanitizer_platform_limits_posix.h | 15 +-------------- + 3 files changed, 14 insertions(+), 20 deletions(-) + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +index 06a605ff4670..d823a12190c0 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +@@ -1130,12 +1130,9 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); + #ifndef __GLIBC_PREREQ + #define __GLIBC_PREREQ(x, y) 0 + #endif +-#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ +- !defined(__arm__) +-/* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ +-/* On Arm glibc 2.31 and later provide a different mode field, this field is +- never used by libsanitizer so we can simply ignore this assert for all glibc +- versions. */ ++#if !SANITIZER_LINUX || __GLIBC_PREREQ (2, 31) ++/* glibc 2.30 and earlier provided 16-bit mode field instead of 32-bit ++ on many architectures. 
*/ + CHECK_SIZE_AND_OFFSET(ipc_perm, mode); + #endif + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h +index 73af92af1e8f..6a673a7c9959 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h +@@ -211,20 +211,13 @@ namespace __sanitizer { + unsigned long __unused1; + unsigned long __unused2; + #elif defined(__sparc__) +-# if defined(__arch64__) + unsigned mode; +- unsigned short __pad1; +-# else +- unsigned short __pad1; +- unsigned short mode; + unsigned short __pad2; +-# endif + unsigned short __seq; + unsigned long long __unused1; + unsigned long long __unused2; + #else +- unsigned short mode; +- unsigned short __pad1; ++ unsigned int mode; + unsigned short __seq; + unsigned short __pad2; + #if defined(__x86_64__) && !defined(_LP64) diff --git a/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch new file mode 100644 index 00000000000..07cbb3fdb49 --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch @@ -0,0 +1,69 @@ +From 75003cdd23c310ec385344e8040d490e8dd6d2be Mon Sep 17 00:00:00 2001 +From: Jakub Jelinek +Date: Fri, 20 Dec 2019 17:58:35 +0100 +Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm + bootstrap due to libsanitizer) + + Backported from mainline + 2019-11-26 Jakub Jelinek + Backported for version 7.1.0 <= gcc <= 7.4.0 + 2020-06-05 John L. Jolly + + PR sanitizer/92154 + * sanitizer_common/sanitizer_platform_limits_posix.h: Cherry-pick + llvm-project revision 947f9692440836dcb8d88b74b69dd379d85974ce. + * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. 
+ +From-SVN: r279653 +--- + libsanitizer/ChangeLog | 10 ++++++++++ + .../sanitizer_platform_limits_posix.cc | 9 +++------ + .../sanitizer_platform_limits_posix.h | 15 +-------------- + 3 files changed, 14 insertions(+), 20 deletions(-) + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +index 06a605ff4670..d823a12190c0 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc +@@ -1156,12 +1156,9 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); + CHECK_SIZE_AND_OFFSET(ipc_perm, gid); + CHECK_SIZE_AND_OFFSET(ipc_perm, cuid); + CHECK_SIZE_AND_OFFSET(ipc_perm, cgid); +-#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ +- !defined(__arm__) +-/* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ +-/* On Arm glibc 2.31 and later provide a different mode field, this field is +- never used by libsanitizer so we can simply ignore this assert for all glibc +- versions. */ ++#if !SANITIZER_LINUX || __GLIBC_PREREQ (2, 31) ++/* glibc 2.30 and earlier provided 16-bit mode field instead of 32-bit ++ on many architectures. 
*/ + CHECK_SIZE_AND_OFFSET(ipc_perm, mode); + #endif + +diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h +index 73af92af1e8f..6a673a7c9959 100644 +--- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h ++++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h +@@ -211,20 +211,13 @@ namespace __sanitizer { + unsigned long __unused1; + unsigned long __unused2; + #elif defined(__sparc__) +-# if defined(__arch64__) + unsigned mode; +- unsigned short __pad1; +-# else +- unsigned short __pad1; +- unsigned short mode; + unsigned short __pad2; +-# endif + unsigned short __seq; + unsigned long long __unused1; + unsigned long long __unused2; + #else +- unsigned short mode; +- unsigned short __pad1; ++ unsigned int mode; + unsigned short __seq; + unsigned short __pad2; + #if defined(__x86_64__) && !defined(_LP64) diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index cf16f1bc8f7..04b2181b626 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -19,11 +19,15 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): homepage = 'https://gcc.gnu.org' gnu_mirror_path = 'gcc/gcc-9.2.0/gcc-9.2.0.tar.xz' - svn = 'svn://gcc.gnu.org/svn/gcc/' + git = 'git://gcc.gnu.org/git/gcc.git' list_url = 'http://ftp.gnu.org/gnu/gcc/' list_depth = 1 - version('develop', svn=svn + 'trunk') + maintainers = ['michaelkuhn'] + + version('master', branch='master') + + version('10.1.0', sha256='b6898a23844b656f1b68691c5c012036c2e694ac4b53a8918d4712ad876e7ea2') version('9.3.0', sha256='71e197867611f6054aa1119b13a0c0abac12834765fe2d81f35ac57f84f742d1') version('9.2.0', sha256='ea6ef08f121239da5695f76c9b33637a118dcf63e24164422231917fa61fb206') @@ -34,6 +38,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): version('8.2.0', 
sha256='196c3c04ba2613f893283977e6011b2345d1cd1af9abeac58e916b1aab3e0080') version('8.1.0', sha256='1d1866f992626e61349a1ccd0b8d5253816222cdc13390dcfaa74b093aa2b153') + version('7.5.0', sha256='b81946e7f01f90528a1f7352ab08cc602b9ccc05d4e44da4bd501c5a189ee661') version('7.4.0', sha256='eddde28d04f334aec1604456e536416549e9b1aa137fc69204e65eb0c009fe51') version('7.3.0', sha256='832ca6ae04636adbb430e865a1451adf6979ab44ca1c8374f61fba65645ce15c') version('7.2.0', sha256='1cf7adf8ff4b5aa49041c8734bbcf1ad18cc4c94d0029aae0f4e48841088479a') @@ -88,12 +93,15 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): default=False, description='Target nvptx offloading to NVIDIA GPUs') + depends_on('flex', type='build', when='@master') + # https://gcc.gnu.org/install/prerequisites.html depends_on('gmp@4.3.2:') # GCC 7.3 does not compile with newer releases on some platforms, see # https://github.com/spack/spack/issues/6902#issuecomment-433030376 - depends_on('mpfr@2.4.2:3.1.6') - depends_on('mpc@0.8.1:', when='@4.5:') + depends_on('mpfr@2.4.2:3.1.6', when='@:9.9') + depends_on('mpfr@3.1.0:', when='@10:') + depends_on('mpc@1.0.1:', when='@4.5:') # Already released GCC versions do not support any newer version of ISL # GCC 5.4 https://github.com/spack/spack/issues/6902#issuecomment-433072097 # GCC 7.3 https://github.com/spack/spack/issues/6902#issuecomment-433030376 @@ -101,8 +109,10 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): depends_on('isl@0.14', when='@5.0:5.2') depends_on('isl@0.15', when='@5.3:5.9') depends_on('isl@0.15:0.18', when='@6:8.9') - depends_on('isl@0.15:0.20', when='@9:') + depends_on('isl@0.15:0.20', when='@9:9.9') + depends_on('isl@0.15:', when='@10:') depends_on('zlib', when='@6:') + depends_on('zstd', when='@10:') depends_on('iconv', when='platform=darwin') depends_on('gnat', when='languages=ada') depends_on('binutils~libiberty', when='+binutils') @@ -112,14 +122,12 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): # The server is sometimes a bit slow to respond 
timeout = {'timeout': 60} - resource( - name='newlib', + resource(name='newlib', url='ftp://sourceware.org/pub/newlib/newlib-3.0.0.20180831.tar.gz', sha256='3ad3664f227357df15ff34e954bfd9f501009a647667cd307bf0658aefd6eb5b', destination='newlibsource', when='+nvptx', - fetch_options=timeout - ) + fetch_options=timeout) # nvptx-tools does not seem to work as a dependency, # but does fine when the source is inside the gcc build directory @@ -235,10 +243,13 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): patch('piclibs.patch', when='+piclibs') patch('gcc-backport.patch', when='@4.7:4.9.2,5:5.3') - # Backport libsanitizer patch for glibc >= 2.31 and 8.1.0 <= gcc <= 9.2.0 + # Backport libsanitizer patch for glibc >= 2.31 and 5.3.0 <= gcc <= 9.2.0 # https://bugs.gentoo.org/708346 - patch('glibc-2.31-libsanitizer-1.patch', when='@8.1.0:8.3.99,9.0.0:9.2.0') - patch('glibc-2.31-libsanitizer-2.patch', when='@8.1.0:8.3.99,9.0.0:9.2.0') + patch('glibc-2.31-libsanitizer-1.patch', when='@7.1.0:7.5.0,8.1.0:8.3.0,9.0.0:9.2.0') + patch('glibc-2.31-libsanitizer-1-gcc-6.patch', when='@5.3.0:5.5.0,6.1.0:6.5.0') + patch('glibc-2.31-libsanitizer-2.patch', when='@8.1.0:8.3.0,9.0.0:9.2.0') + patch('glibc-2.31-libsanitizer-2-gcc-6.patch', when='@5.3.0:5.5.0,6.1.0:6.5.0') + patch('glibc-2.31-libsanitizer-2-gcc-7.patch', when='@7.1.0:7.5.0') # Older versions do not compile with newer versions of glibc # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81712 patch('ucontext_t.patch', when='@4.9,5.1:5.4,6.1:6.4,7.1') @@ -252,6 +263,9 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage): patch('sys_ustat.h.patch', when='@5.0:6.4,7.0:7.3,8.1') patch('sys_ustat-4.9.patch', when='@4.9') + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95005 + patch('zstd.patch', when='@10:') + build_directory = 'spack-build' def url_for_version(self, version): @@ -259,7 +273,7 @@ def url_for_version(self, version): # mirrors are tried. 
It takes care of modifying the suffix of gnu # mirror path so that Spack will also look for the correct file in # the mirrors - if (version < Version('6.4.0')and version != Version('5.5.0')) \ + if (version < Version('6.4.0') and version != Version('5.5.0')) \ or version == Version('7.1.0'): self.gnu_mirror_path = self.gnu_mirror_path.replace('xz', 'bz2') return super(Gcc, self).url_for_version(version) @@ -312,6 +326,9 @@ def configure_args(self): if self.version >= Version('6'): options.append('--with-system-zlib') + if 'zstd' in spec: + options.append('--with-zstd={0}'.format(spec['zstd'].prefix)) + # Enabling language "jit" requires --enable-host-shared. if 'languages=jit' in spec: options.append('--enable-host-shared') diff --git a/var/spack/repos/builtin/packages/gcc/zstd.patch b/var/spack/repos/builtin/packages/gcc/zstd.patch new file mode 100644 index 00000000000..8fb7583a0c2 --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/zstd.patch @@ -0,0 +1,43 @@ +--- a/gcc/Makefile.in ++++ b/gcc/Makefile.in +@@ -1075,7 +1075,8 @@ GNATMAKE = @GNATMAKE@ + # Libs needed (at present) just for jcf-dump. + LDEXP_LIB = @LDEXP_LIB@ + +-ZSTD_LIB = @ZSTD_LIB@ ++ZSTD_INC = @ZSTD_CPPFLAGS@ ++ZSTD_LIB = @ZSTD_LDFLAGS@ @ZSTD_LIB@ + + # Likewise, for use in the tools that must run on this machine + # even if we are cross-building GCC. +@@ -2275,7 +2276,7 @@ CFLAGS-version.o += -DBASEVER=$(BASEVER_s) -DDATESTAMP=$(DATESTAMP_s) \ + version.o: $(REVISION) $(DATESTAMP) $(BASEVER) $(DEVPHASE) + + # lto-compress.o needs $(ZLIBINC) added to the include flags. 
+-CFLAGS-lto-compress.o += $(ZLIBINC) ++CFLAGS-lto-compress.o += $(ZLIBINC) $(ZSTD_INC) + + CFLAGS-lto-streamer-in.o += -DTARGET_MACHINE=\"$(target_noncanonical)\" + +--- a/gcc/configure ++++ b/gcc/configure +@@ -786,6 +786,8 @@ LTLIBICONV + LIBICONV + ZSTD_LIB + ZSTD_INCLUDE ++ZSTD_LDFLAGS ++ZSTD_CPPFLAGS + DL_LIB + LDEXP_LIB + EXTRA_GCC_LIBS +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -1339,6 +1339,8 @@ AC_SUBST(ZSTD_INCLUDE) + AC_SUBST(ZSTD_LIB) + ZSTD_CPPFLAGS= + ZSTD_LDFLAGS= ++AC_SUBST(ZSTD_CPPFLAGS) ++AC_SUBST(ZSTD_LDFLAGS) + AC_ARG_WITH(zstd, + [AS_HELP_STRING([--with-zstd=PATH], + [specify prefix directory for installed zstd library. diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index af3165db1f7..5aafdb44a1a 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -29,6 +29,7 @@ class Gdal(AutotoolsPackage): 'osgeo.gdal_array', 'osgeo.gdalconst' ] + version('3.1.0', sha256='e754a22242ccbec731aacdb2333b567d4c95b9b02d3ba1ea12f70508d244fcda') version('3.0.4', sha256='5569a4daa1abcbba47a9d535172fc335194d9214fdb96cd0f139bb57329ae277') version('3.0.3', sha256='e20add5802265159366f197a8bb354899e1693eab8dbba2208de14a457566109') version('3.0.2', sha256='c3765371ce391715c8f28bd6defbc70b57aa43341f6e94605f04fe3c92468983') @@ -296,8 +297,8 @@ def configure_args(self): if '+hdf4' in spec: args.append('--with-hdf4={0}'.format(spec['hdf'].prefix)) hdf4 = self.spec['hdf'] - if '+libtirpc' in hdf4: - libs.append('-ltirpc') + if '+external-xdr' in hdf4 and hdf4['rpc'].name != 'libc': + libs.append(hdf4['rpc'].libs.link_flags) else: args.append('--with-hdf4=no') diff --git a/var/spack/repos/builtin/packages/gdbm/gdbm_gcc_10.patch b/var/spack/repos/builtin/packages/gdbm/gdbm_gcc_10.patch new file mode 100644 index 00000000000..6f04bfcb329 --- /dev/null +++ b/var/spack/repos/builtin/packages/gdbm/gdbm_gcc_10.patch @@ -0,0 +1,11 @@ +--- 
gdbm-1.18.1/src/parseopt.c 2018-05-30 03:39:15.000000000 -0600 ++++ gdbm-1.18.1_new/src/parseopt.c 2020-04-30 10:29:52.869582500 -0600 +@@ -255,8 +255,6 @@ + } + + char *parseopt_program_name; +-char *parseopt_program_doc; +-char *parseopt_program_args; + const char *program_bug_address = "<" PACKAGE_BUGREPORT ">"; + void (*parseopt_help_hook) (FILE *stream); + diff --git a/var/spack/repos/builtin/packages/gdbm/package.py b/var/spack/repos/builtin/packages/gdbm/package.py index 8469cc4fb5c..ae89387f242 100644 --- a/var/spack/repos/builtin/packages/gdbm/package.py +++ b/var/spack/repos/builtin/packages/gdbm/package.py @@ -26,6 +26,7 @@ class Gdbm(AutotoolsPackage, GNUMirrorPackage): version('1.9', sha256='f85324d7de3777db167581fd5d3493d2daa3e85e195a8ae9afc05b34551b6e57') depends_on("readline") + patch('gdbm_gcc_10.patch', when='%gcc@10:') def configure_args(self): diff --git a/var/spack/repos/builtin/packages/geant4-data/package.py b/var/spack/repos/builtin/packages/geant4-data/package.py index 4157fb76c6e..57a60ededd9 100644 --- a/var/spack/repos/builtin/packages/geant4-data/package.py +++ b/var/spack/repos/builtin/packages/geant4-data/package.py @@ -15,6 +15,8 @@ class Geant4Data(BundlePackage): maintainers = ['drbenmorgan'] + version('10.6.2') + version('10.6.1') version('10.6.0') version('10.5.1') version('10.4.3') @@ -28,17 +30,18 @@ class Geant4Data(BundlePackage): # they generally don't change on the patch level # Can move to declaring on a dataset basis if needed # geant4@10.6.X - depends_on("g4ndl@4.6", when='@10.6.0') + depends_on("g4ndl@4.6", when='@10.6.0:10.6.9999') depends_on("g4emlow@7.9", when='@10.6.0') - depends_on("g4photonevaporation@5.5", when='@10.6.0') - depends_on("g4radioactivedecay@5.4", when='@10.6.0') - depends_on("g4particlexs@2.1", when='@10.6.0') - depends_on("g4pii@1.3", when='@10.6.0') - depends_on("g4realsurface@2.1.1", when='@10.6.0') - depends_on("g4saiddata@2.0", when='@10.6.0') - depends_on("g4abla@3.1", when='@10.6.0') - 
depends_on("g4incl@1.0", when='@10.6.0') - depends_on("g4ensdfstate@2.2", when='@10.6.0') + depends_on("g4emlow@7.9.1", when='@10.6.1:10.6.9999') + depends_on("g4photonevaporation@5.5", when='@10.6.0:10.6.9999') + depends_on("g4radioactivedecay@5.4", when='@10.6.0:10.6.9999') + depends_on("g4particlexs@2.1", when='@10.6.0:10.6.9999') + depends_on("g4pii@1.3", when='@10.6.0:10.6.9999') + depends_on("g4realsurface@2.1.1", when='@10.6.0:10.6.9999') + depends_on("g4saiddata@2.0", when='@10.6.0:10.6.9999') + depends_on("g4abla@3.1", when='@10.6.0:10.6.9999') + depends_on("g4incl@1.0", when='@10.6.0:10.6.9999') + depends_on("g4ensdfstate@2.2", when='@10.6.0:10.6.9999') # geant4@10.5.X depends_on("g4ndl@4.5", when='@10.5.0:10.5.9999') diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 4bf35792aac..b559557340b 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -17,6 +17,8 @@ class Geant4(CMakePackage): maintainers = ['drbenmorgan'] + version('10.6.2', sha256='e381e04c02aeade1ed8cdd9fdbe7dcf5d6f0f9b3837a417976b839318a005dbd') + version('10.6.1', sha256='4fd64149ae26952672a81ce5579d3806fda4bd251d486897093ac57633a42b7e') version('10.6.0', sha256='eebe6a170546064ff81ab3b00f513ccd1d4122a026514982368d503ac55a4ee4') version('10.5.1', sha256='2397eb859dc4de095ff66059d8bda9f060fdc42e10469dd7890946293eeb0e39') version('10.4.3', sha256='67f3bb6405a2c77e573936c2b933f5a4a33915aa379626a2eb3012009b91e1da') @@ -36,10 +38,13 @@ class Geant4(CMakePackage): variant('x11', default=False, description='Optional X11 support') variant('motif', default=False, description='Optional motif support') variant('qt', default=False, description='Enable Qt support') + variant('python', default=False, description='Enable Python bindings') depends_on('cmake@3.5:', type='build') depends_on('cmake@3.8:', type='build', when='@10.6.0:') + 
depends_on('geant4-data@10.6.2', when='@10.6.2') + depends_on('geant4-data@10.6.1', when='@10.6.1') depends_on('geant4-data@10.6.0', when='@10.6.0') depends_on('geant4-data@10.5.1', when='@10.5.1') depends_on('geant4-data@10.4.3', when='@10.4.3') @@ -49,6 +54,12 @@ class Geant4(CMakePackage): depends_on("expat") depends_on("zlib") + # Python, with boost requirement dealt with in cxxstd section + depends_on('python@3:', when='+python') + extends('python', when='+python') + conflicts('+python', when='@:10.6.1', + msg='Geant4 <= 10.6.1 cannot be built with Python bindings') + for std in _cxxstd_values: # CLHEP version requirements to be reviewed depends_on('clhep@2.3.3.0: cxxstd=' + std, @@ -67,7 +78,11 @@ class Geant4(CMakePackage): depends_on('vecgeom@0.3rc cxxstd=' + std, when='@10.3.0:10.3.99 +vecgeom cxxstd=' + std) - # Visualization driver ependencies + # Boost.python, conflict handled earlier + depends_on('boost@1.70: +python cxxstd=' + std, + when='+python cxxstd=' + std) + + # Visualization driver dependencies depends_on("gl", when='+opengl') depends_on("glx", when='+opengl+x11') depends_on("libx11", when='+x11') @@ -103,7 +118,8 @@ def cmake_args(self): options.append(self.define_from_variant('GEANT4_BUILD_MULTITHREADED', 'threads')) if '+threads' in spec: - # This should be a variant + # Locked at global-dynamic to allow use cases that load the + # geant4 libs at application runtime options.append('-DGEANT4_BUILD_TLS_MODEL=global-dynamic') # install the data with geant4 @@ -134,4 +150,9 @@ def cmake_args(self): '-DQT_QMAKE_EXECUTABLE=%s' % spec['qt'].prefix.bin.qmake) + # Python + if spec.version > Version('10.6.1'): + options.append(self.define_from_variant('GEANT4_USE_PYTHON', + 'python')) + return options diff --git a/var/spack/repos/builtin/packages/genometools/package.py b/var/spack/repos/builtin/packages/genometools/package.py index ac7ef818053..ca42145a3e1 100644 --- a/var/spack/repos/builtin/packages/genometools/package.py +++ 
b/var/spack/repos/builtin/packages/genometools/package.py @@ -17,7 +17,7 @@ class Genometools(MakefilePackage): version('1.5.9', sha256='bba8e043f097e7c72e823f73cb0efbd20bbd60f1ce797a0e4c0ab632b170c909') depends_on('perl', type=('build', 'run')) - depends_on('cairo') + depends_on('cairo+pdf') depends_on('pango') # build fails with gcc 7" diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py index 7c0ae0ea2e1..d34a88be9c1 100644 --- a/var/spack/repos/builtin/packages/geos/package.py +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -14,10 +14,11 @@ class Geos(AutotoolsPackage): operators, as well as specific JTS enhanced topology functions.""" homepage = "http://trac.osgeo.org/geos/" - url = "https://download.osgeo.org/geos/geos-3.7.2.tar.bz2" + url = "https://download.osgeo.org/geos/geos-3.8.1.tar.bz2" maintainers = ['adamjstewart'] + version('3.8.1', sha256='4258af4308deb9dbb5047379026b4cd9838513627cb943a44e16c40e42ae17f7') version('3.7.2', sha256='2166e65be6d612317115bfec07827c11b403c3f303e0a7420a2106bc999d7707') version('3.6.2', sha256='045a13df84d605a866602f6020fc6cbf8bf4c42fb50de237a08926e1d7d7652a') version('3.6.1', sha256='4a2e4e3a7a09a7cfda3211d0f4a235d9fd3176ddf64bd8db14b4ead266189fc5') diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py index 8eb0ad6f099..2d39c28e477 100644 --- a/var/spack/repos/builtin/packages/gettext/package.py +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -12,7 +12,8 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): homepage = "https://www.gnu.org/software/gettext/" gnu_mirror_path = "gettext/gettext-0.20.1.tar.xz" - version('0.20.1', sha256='53f02fbbec9e798b0faaf7c73272f83608e835c6288dd58be6c9bb54624a3800') + version('0.20.2', sha256='b22b818e644c37f6e3d1643a1943c32c3a9bff726d601e53047d2682019ceaba') + version('0.20.1', 
sha256='53f02fbbec9e798b0faaf7c73272f83608e835c6288dd58be6c9bb54624a3800') version('0.19.8.1', sha256='105556dbc5c3fbbc2aa0edb46d22d055748b6f5c7cd7a8d99f8e7eb84e938be4') version('0.19.7', sha256='378fa86a091cec3acdece3c961bb8d8c0689906287809a8daa79dc0c6398d934') @@ -27,6 +28,7 @@ class Gettext(AutotoolsPackage, GNUMirrorPackage): # Optional variants variant('libunistring', default=False, description='Use libunistring') + depends_on('iconv') # Recommended dependencies depends_on('ncurses', when='+curses') depends_on('libxml2', when='+libxml2') @@ -51,6 +53,7 @@ def configure_args(self): config_args = [ '--disable-java', '--disable-csharp', + '--with-libiconv-prefix={0}'.format(spec['iconv'].prefix), '--with-included-glib', '--with-included-gettext', '--with-included-libcroco', diff --git a/var/spack/repos/builtin/packages/git-lfs/package.py b/var/spack/repos/builtin/packages/git-lfs/package.py index 870c98fa0ab..56c7240400f 100644 --- a/var/spack/repos/builtin/packages/git-lfs/package.py +++ b/var/spack/repos/builtin/packages/git-lfs/package.py @@ -16,6 +16,8 @@ class GitLfs(MakefilePackage): homepage = "https://git-lfs.github.com" url = "https://github.com/git-lfs/git-lfs/archive/v2.6.1.tar.gz" + version('2.11.0', sha256='8183c4cbef8cf9c2e86b0c0a9822451e2df272f89ceb357c498bfdf0ff1b36c7') + version('2.10.0', sha256='07fd5c57a1039d5717dc192affbe3268ec2fd03accdca462cb504c0b4194cd23') version('2.9.0', sha256='f1963ad88747577ffeeb854649aeacaa741c59be74683da4d46b129a72d111b7') version('2.8.0', sha256='10b476bb8862ebceddc6f0a55f5fb63e2c1e5bed6554f6e3b207dd0155a196ad') version('2.7.2', sha256='e65659f12ec557ae8c778c01ca62d921413221864b68bd93cfa41399028ae67f') diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 7b92fb65454..1094e3373d6 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -22,8 +22,12 @@ class Git(AutotoolsPackage): # * 
sha256_manpages: the sha256sum of the corresponding manpage from # https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz # You can find the source here: https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc - releases = [ + { + 'version': '2.27.0', + 'sha256': '77ded85cbe42b1ffdc2578b460a1ef5d23bcbc6683eabcafbb0d394dffe2e787', + 'sha256_manpages': '414e4b17133e54d846f6bfa2479f9757c50e16c013eb76167a492ae5409b8947' + }, { 'version': '2.26.0', 'sha256': 'aa168c2318e7187cd295a645f7370cc6d71a324aafc932f80f00c780b6a26bed', diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py index b7cf326e858..6426286b355 100644 --- a/var/spack/repos/builtin/packages/glib/package.py +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -21,6 +21,8 @@ class Glib(Package): homepage = "https://developer.gnome.org/glib/" url = "https://ftp.gnome.org/pub/gnome/sources/glib/2.53/glib-2.53.1.tar.xz" + version('2.64.3', sha256='fe9cbc97925d14c804935f067a3ad77ef55c0bbe9befe68962318f5a767ceb22') + version('2.64.2', sha256='9a2f21ed8f13b9303399de13a0252b7cbcede593d26971378ec6cb90e87f2277') version('2.64.1', sha256='17967603bcb44b6dbaac47988d80c29a3d28519210b28157c2bd10997595bbc7') version('2.62.6', sha256='104fa26fbefae8024ff898330c671ec23ad075c1c0bce45c325c6d5657d58b9c') version('2.60.7', sha256='8b12c0af569afd3b71200556ad751bad4cf4bf7bc4b5f880638459a42ca86310') @@ -139,10 +141,18 @@ def configure_args(self): args.append('--with-libiconv=maybe') else: args.append('--with-libiconv=gnu') - if 'tracing=dtrace' in self.spec or 'tracing=systemtap' in self.spec: - args.append('--enable-tracing') + if self.spec.satisfies('@2.56:'): + for value in ('dtrace', 'systemtap'): + if ('tracing=' + value) in self.spec: + args.append('--enable-' + value) + else: + args.append('--disable-' + value) else: - args.append('--disable-tracing') + if ('tracing=dtrace' in self.spec + or 'tracing=systemtap' in self.spec): + 
args.append('--enable-tracing') + else: + args.append('--disable-tracing') # SELinux is not available in Spack, so glib should not use it. args.append('--disable-selinux') # glib should not use the globally installed gtk-doc. Otherwise, @@ -159,6 +169,11 @@ def configure_args(self): args.append('GTKDOC_REBASE={0}'.format(true)) return args + def setup_build_environment(self, env): + if self.spec.satisfies('platform=darwin'): + # https://github.com/pybind/pybind11/issues/595 + env.set('STRIP', 'strip -x') + @when('@:2.57.99') def install(self, spec, prefix): configure('--prefix={0}'.format(prefix), *self.configure_args()) diff --git a/var/spack/repos/builtin/packages/gmap-gsnap/package.py b/var/spack/repos/builtin/packages/gmap-gsnap/package.py index f87f4121c76..03d1edcd237 100644 --- a/var/spack/repos/builtin/packages/gmap-gsnap/package.py +++ b/var/spack/repos/builtin/packages/gmap-gsnap/package.py @@ -14,6 +14,7 @@ class GmapGsnap(AutotoolsPackage): homepage = "http://research-pub.gene.com/gmap/" url = "http://research-pub.gene.com/gmap/src/gmap-gsnap-2017-06-16.tar.gz" + version('2020-06-01', sha256='7917f9f78570943f419445e371f2cc948c6741e73c3cbb063391756f4479d365') version('2019-05-12', sha256='3dc1b6ee4f6c049c07bcf4a5aba30eb2d732997241cdcad818dab571719f8008') version('2019-02-15', sha256='7e82b9867a1e561b4816fb2f2fb916294077c384c6a88bb94cce39bfe71ab3ac') version('2018-07-04', sha256='a9f8c1f0810df65b2a089dc10be79611026f4c95e4681dba98fea3d55d598d24') diff --git a/var/spack/repos/builtin/packages/gnuplot/package.py b/var/spack/repos/builtin/packages/gnuplot/package.py index 9fb79e67466..59e527fcde4 100644 --- a/var/spack/repos/builtin/packages/gnuplot/package.py +++ b/var/spack/repos/builtin/packages/gnuplot/package.py @@ -122,7 +122,7 @@ def configure_args(self): options.append('--with-qt=no') if '+wx' in spec: - options.append('--with-wx=%s' % spec['wx'].prefix) + options.append('--with-wx=%s' % spec['wxwidgets'].prefix) else: 
options.append('--disable-wxwidgets') diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py index 600d426a7ae..d0f515e94c7 100644 --- a/var/spack/repos/builtin/packages/go/package.py +++ b/var/spack/repos/builtin/packages/go/package.py @@ -6,6 +6,7 @@ import os import llnl.util.tty as tty from spack import * +import platform # - vanilla CentOS 7, and possibly other systems, fail a test: # TestCloneNEWUSERAndRemapRootDisableSetgroups @@ -35,9 +36,13 @@ class Go(Package): extendable = True + version('1.14.4', sha256='7011af3bbc2ac108d1b82ea8abb87b2e63f78844f0259be20cde4d42c5c40584') + version('1.14.3', sha256='93023778d4d1797b7bc6a53e86c3a9b150c923953225f8a48a2d5fabc971af56') version('1.14.2', sha256='98de84e69726a66da7b4e58eac41b99cbe274d7e8906eeb8a5b7eb0aadee7f7c') version('1.14.1', sha256='2ad2572115b0d1b4cb4c138e6b3a31cee6294cb48af75ee86bec3dca04507676') version('1.14', sha256='6d643e46ad565058c7a39dac01144172ef9bd476521f42148be59249e4b74389') + version('1.13.12', sha256='17ba2c4de4d78793a21cc659d9907f4356cd9c8de8b7d0899cdedcef712eba34') + version('1.13.11', sha256='89ed1abce25ad003521c125d6583c93c1280de200ad221f961085200a6c00679') version('1.13.10', sha256='eb9ccc8bf59ed068e7eff73e154e4f5ee7eec0a47a610fb864e3332a2fdc8b8c') version('1.13.9', sha256='34bb19d806e0bc4ad8f508ae24bade5e9fedfa53d09be63b488a9314d2d4f31d') version('1.13.8', sha256='b13bf04633d4d8cf53226ebeaace8d4d2fd07ae6fa676d0844a688339debec34') @@ -87,7 +92,10 @@ class Go(Package): depends_on('git', type=('build', 'link', 'run')) # TODO: Make non-c self-hosting compilers feasible without backflips # should be a dep on external go compiler - depends_on('go-bootstrap', type='build') + if platform.machine() == 'aarch64': + depends_on('gcc languages=go', type='build') + else: + depends_on('go-bootstrap', type='build') # https://github.com/golang/go/issues/17545 patch('time_test.patch', when='@1.6.4:1.7.4') diff --git 
a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py index 294d1ba46e7..20823fd77fe 100644 --- a/var/spack/repos/builtin/packages/gobject-introspection/package.py +++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py @@ -73,3 +73,7 @@ def install(self, spec, prefix): def setup_build_environment(self, env): env.set('SPACK_SBANG', "%s/bin/sbang" % spack_root) + + @property + def parallel(self): + return not self.spec.satisfies('%fj') diff --git a/var/spack/repos/builtin/packages/goblin-hmc-sim/package.py b/var/spack/repos/builtin/packages/goblin-hmc-sim/package.py new file mode 100644 index 00000000000..8b5e9ab9bdd --- /dev/null +++ b/var/spack/repos/builtin/packages/goblin-hmc-sim/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class GoblinHmcSim(MakefilePackage): + """ + The Goblin HMC-Sim is a Hybrid Memory Cube + Functional Simulation Environment + """ + + homepage = "https://github.com/tactcomplabs/gc64-hmcsim" + git = "https://github.com/tactcomplabs/gc64-hmcsim" + # The version numbers track the SST they were released with + url = "https://github.com/tactcomplabs/gc64-hmcsim/archive/sst-8.0.0-release.tar.gz" + # This works with parallel builds outside Spack + # For some reason .o files get thrashed inside Spack + parallel = False + + maintainers = ['jjwilke'] + + version('8.0.0', sha256="8a5e6b701865a581f15965d3ddd8c7d301b15f4b63543c444058e9c3688fd2c8") + + def install(self, spec, prefix): + install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/gotcha/package.py b/var/spack/repos/builtin/packages/gotcha/package.py index 006b74a49af..f0cb3a7233d 100644 --- a/var/spack/repos/builtin/packages/gotcha/package.py +++ 
b/var/spack/repos/builtin/packages/gotcha/package.py @@ -15,6 +15,7 @@ class Gotcha(CMakePackage): version('develop', branch='develop') version('master', branch='master') + version('1.0.3', tag='1.0.3') version('1.0.2', tag='1.0.2') version('0.0.2', tag='0.0.2') diff --git a/var/spack/repos/builtin/packages/grafana/package.py b/var/spack/repos/builtin/packages/grafana/package.py new file mode 100644 index 00000000000..c323b942525 --- /dev/null +++ b/var/spack/repos/builtin/packages/grafana/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Grafana(Package): + """The tool for beautiful monitoring and metric analytics & dashboards + for Graphite, InfluxDB & Prometheus & More""" + + homepage = "https://grafana.com" + url = "https://github.com/grafana/grafana/archive/v6.7.3.tar.gz" + + version('6.7.3', sha256='2477b70bfc8770ab844ee683f72b5efe8a47324b9779663d8e5259ffb9ddb8d8') + version('6.7.2', sha256='dc81cdb77c1c0ae99ae3302a0ef8b3d577f4a717208a90df65da8fcb282122fc') + version('6.7.1', sha256='5750d286273069a195679d5586e810b0ca8cdd08ee07dcdd9b52cfaac8c62b89') + version('6.7.0', sha256='7f4e3f0d42b8188a334e97062c3bf63ff43af273095ba10147b299e3c1c5a7b7') + version('6.6.2', sha256='e11e5971d08e45e277b55e060c0ce3cf25ca0ba144367c53b4836f2d133ed9b8') + + depends_on('go', type='build') + + def install(self, spec, prefix): + go = which('go') + go('run', 'build.go', 'build') + install_tree('bin', prefix.bin) diff --git a/var/spack/repos/builtin/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py index 5e9032b3881..4f8e772f19d 100644 --- a/var/spack/repos/builtin/packages/graphviz/package.py +++ b/var/spack/repos/builtin/packages/graphviz/package.py @@ -55,6 +55,8 @@ class Graphviz(AutotoolsPackage): description='Build with libgd support 
(more output formats)') variant('pangocairo', default=False, description='Build with pango+cairo support (more output formats)') + variant('poppler', default=False, + description='Build with poppler support (pdf formats)') variant('qt', default=False, description='Build with Qt support') variant('quartz', default=(MACOS_VERSION is not None), @@ -93,12 +95,13 @@ class Graphviz(AutotoolsPackage): depends_on('ghostscript', when='+ghostscript') depends_on('gtkplus', when='+gtkplus') depends_on('gts', when='+gts') - depends_on('cairo', when='+pangocairo') + depends_on('cairo+pdf+png+svg', when='+pangocairo') depends_on('fontconfig', when='+pangocairo') depends_on('freetype', when='+pangocairo') depends_on('glib', when='+pangocairo') depends_on('libpng', when='+pangocairo') depends_on('pango', when='+pangocairo') + depends_on('poppler+glib', when='+poppler') depends_on('zlib') depends_on('qt', when='+qt') depends_on('libx11', when="+x") @@ -147,7 +150,7 @@ def configure_args(self): args.append('--{0}-swig'.format('enable' if use_swig else 'disable')) for var in ["expat", "gts", "ghostscript", "libgd", "pangocairo", - "qt", "quartz", "x"]: + "poppler", "qt", "quartz", "x"]: args += self.with_or_without(var) args.append('--{0}-gtk'.format( diff --git a/var/spack/repos/builtin/packages/grass/package.py b/var/spack/repos/builtin/packages/grass/package.py index cb83a5cc2ab..64352394320 100644 --- a/var/spack/repos/builtin/packages/grass/package.py +++ b/var/spack/repos/builtin/packages/grass/package.py @@ -15,9 +15,11 @@ class Grass(AutotoolsPackage): homepage = "https://grass.osgeo.org" url = "https://grass.osgeo.org/grass78/source/grass-7.8.2.tar.gz" list_url = "https://grass.osgeo.org/download/software/sources/" + git = "https://github.com/OSGeo/grass.git" maintainers = ['adamjstewart'] + version('master', branch='master') version('7.8.2', sha256='33576f7078f805b39ca20c2fa416ac79c64260c0581072a6dc7d813f53aa9abb') version('7.8.1', 
sha256='6ae578fd67afcce7abec4ba4505dcc55b3d2dfe0ca46b99d966cb148c654abb3') version('7.8.0', sha256='4b1192294e959ffd962282344e4ff325c4472f73abe605e246a1da3beda7ccfa') @@ -66,7 +68,10 @@ class Grass(AutotoolsPackage): depends_on('proj@:4', when='@:7.5') # GRASS 7.8.0 was supposed to support PROJ 6, but it still checks for # share/proj/epsg, which was removed in PROJ 6 - depends_on('proj@:5', when='@:7.8') + depends_on('proj@:5', when='@:7.8.0') + # PROJ6 support released in GRASS 7.8.1 + # https://courses.neteler.org/grass-gis-7-8-1-released-with-proj-6-and-gdal-3-support/ + depends_on('proj@6:', when='@7.8.1:') depends_on('python@2.7:', type=('build', 'run')) depends_on('python@2.7:2.8', when='@:7.6', type=('build', 'run')) depends_on('py-six', when='@7.8:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/graylog2-server/package.py b/var/spack/repos/builtin/packages/graylog2-server/package.py new file mode 100644 index 00000000000..b34fd87a1b5 --- /dev/null +++ b/var/spack/repos/builtin/packages/graylog2-server/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Graylog2Server(Package): + """Free and open source log management.""" + + homepage = "https://www.graylog.org/" + url = "https://github.com/Graylog2/graylog2-server/archive/3.2.4.tar.gz" + + version('3.2.4', sha256='d34cc9fd42b2ee0b872c0f644fe53ef9b2e9790029c5d2182f782f66f1e1d99d') + version('3.2.3', sha256='6da5ba1da897a371a490a6ba7c9d017a479a22e3c16a39280a49e61f551280c0') + version('3.2.2', sha256='dc7baa5c0e451b0927b28320c4d9ca19810f4690eb2c521ed8a8272c99fb3bc3') + version('3.2.1', sha256='f570dbb557888ca4dbc932fb6ed840dbb616b9ed50e034d17de69a69f08d1aec') + version('3.2.0', sha256='094eed607d0d0a7c380825d6507c1e40a53c4493b5f9fe8ae5a3ddd86521711e') + + depends_on('java@8', type=("build", "run")) + + def install(self, spec, prefix): + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/gridlab-d/package.py b/var/spack/repos/builtin/packages/gridlab-d/package.py new file mode 100644 index 00000000000..d7942d2d21e --- /dev/null +++ b/var/spack/repos/builtin/packages/gridlab-d/package.py @@ -0,0 +1,59 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class GridlabD(AutotoolsPackage): + """ + Autotools package for Gridlab-D, a new power distribution system simulation + and analysis tool that provides valuable information to users who design + and operate distribution systems, and to utilities that wish to take + advantage of the latest energy technologies. Gridlab-D is a flexible + simulation environment that can be integrated with a variety of third-party + data management and analysis tools. 
+ """ + + homepage = "https://www.gridlabd.org/" + git = "https://github.com/gridlab-d/gridlab-d" + + maintainers = ['0t1s1', 'yee29', 'afisher1'] + + # Using only develop as other branches and releases did not build properly. + version('develop', branch='develop') + + variant("mysql", + default=False, + description="Enable MySQL support for Gridlab-D.") + variant('helics', + default=False, + description='Enable Helics support for Gridlab-D.') + + # Add dependencies. + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + depends_on("xerces-c") + depends_on("superlu-mt") + depends_on('helics', when='+helics') + + def configure_args(self): + args = [] + + if '+helics' in self.spec: + # Taken from + # https://github.com/GMLC-TDC/HELICS-Tutorial/tree/master/setup + args.append('--with-helics=' + self.spec['helics'].prefix) + args.append('CFLAGS=-g -O0 -w') + args.append('CXXFLAGS=-g -O0 -w -std=c++14') + args.append('LDFLAGS=-g -O0 -w') + + return args + + def setup_run_environment(self, env): + # Need to add GLPATH otherwise Gridlab-D will not run. 
+ env.set('GLPATH', join_path(self.prefix, 'lib', 'gridlabd')) + env.prepend_path('GLPATH', join_path(self.prefix, 'share', 'gridlabd')) diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py index 2175d7a6110..1aec4807239 100644 --- a/var/spack/repos/builtin/packages/gromacs/package.py +++ b/var/spack/repos/builtin/packages/gromacs/package.py @@ -24,6 +24,7 @@ class Gromacs(CMakePackage): maintainers = ['junghans', 'marvinbernhardt'] version('master', branch='master') + version('2020.2', sha256='7465e4cd616359d84489d919ec9e4b1aaf51f0a4296e693c249e83411b7bd2f3') version('2020.1', sha256='e1666558831a3951c02b81000842223698016922806a8ce152e8f616e29899cf') version('2020', sha256='477e56142b3dcd9cb61b8f67b24a55760b04d1655e8684f979a75a5eec40ba01') version('2019.6', sha256='bebe396dc0db11a9d4cc205abc13b50d88225617642508168a2195324f06a358') diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py index 7996a6865b9..a7eff3e07f4 100644 --- a/var/spack/repos/builtin/packages/grpc/package.py +++ b/var/spack/repos/builtin/packages/grpc/package.py @@ -19,6 +19,8 @@ class Grpc(CMakePackage): version('1.24.3', sha256='c84b3fa140fcd6cce79b3f9de6357c5733a0071e04ca4e65ba5f8d306f10f033') version('1.23.1', sha256='dd7da002b15641e4841f20a1f3eb1e359edb69d5ccf8ac64c362823b05f523d9') + variant('shared', default=False, + description='Build shared instead of static libraries') variant('codegen', default=True, description='Builds code generation plugins for protobuf ' 'compiler (protoc)') @@ -31,6 +33,8 @@ class Grpc(CMakePackage): def cmake_args(self): args = [ + '-DBUILD_SHARED_LIBS:Bool={0}'.format( + 'ON' if '+shared' in self.spec else 'OFF'), '-DgRPC_BUILD_CODEGEN:Bool={0}'.format( 'ON' if '+codegen' in self.spec else 'OFF'), '-DgRPC_BUILD_CSHARP_EXT:Bool=OFF', diff --git a/var/spack/repos/builtin/packages/guacamole-server/package.py 
b/var/spack/repos/builtin/packages/guacamole-server/package.py new file mode 100644 index 00000000000..ea08a63639f --- /dev/null +++ b/var/spack/repos/builtin/packages/guacamole-server/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class GuacamoleServer(AutotoolsPackage): + """The guacamole-server package is a set of software which forms the + basis of the Guacamole stack. It consists of guacd, libguac, and + several protocol support libraries.""" + + homepage = "http://guacamole.apache.org/" + url = "https://github.com/apache/guacamole-server/archive/1.1.0.tar.gz" + + version('1.1.0', sha256='d0f0c66ebfa7a4fd6689ae5240f21797b5177945a042388b691b15b8bd5c81a8') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + depends_on('cairo') + depends_on('libjpeg') + depends_on('libpng') + depends_on('uuid') diff --git a/var/spack/repos/builtin/packages/hbm-dramsim2/package.py b/var/spack/repos/builtin/packages/hbm-dramsim2/package.py new file mode 100644 index 00000000000..6ecd8de5145 --- /dev/null +++ b/var/spack/repos/builtin/packages/hbm-dramsim2/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class HbmDramsim2(MakefilePackage): + """ + HBM Simulator based on DRAMSim2 + """ + + homepage = "https://github.com/tactcomplabs/HBM" + git = "https://github.com/tactcomplabs/HBM" + url = "https://github.com/tactcomplabs/HBM/archive/hbm-1.0.0-release.tar.gz" + + maintainers = ['jjwilke'] + + version('1.0.0', sha256="0efad11c58197edb47ad1359f8f93fb45d882c6bebcf9f2143e0df7a719689a0") + + def install(self, spec, prefix): + install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/hcoll/package.py b/var/spack/repos/builtin/packages/hcoll/package.py new file mode 100644 index 00000000000..f39e2e75960 --- /dev/null +++ b/var/spack/repos/builtin/packages/hcoll/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Hcoll(Package): + """Modern interface for Mellanox Fabric Collective Accelerator (FCA). FCA + is a MPI-integrated software package that utilizes CORE-Direct technology + for implementing the MPI collective communications.""" + + homepage = 'https://www.mellanox.com/products/fca' + has_code = False + + version('3.9.1927') + + # HCOLL needs to be added as an external package to SPACK. 
For this, the + # config file packages.yaml needs to be adjusted: + # + # hcoll: + # version: [3.9.1927] + # paths: + # hcoll@3.9.1927: /opt/mellanox/hcoll (path to your HCOLL installation) + # buildable: False + + def install(self, spec, prefix): + raise InstallError( + self.spec.format('{name} is not installable, you need to specify ' + 'it as an external package in packages.yaml')) diff --git a/var/spack/repos/builtin/packages/hdf/disable_doclint.patch b/var/spack/repos/builtin/packages/hdf/disable_doclint.patch new file mode 100644 index 00000000000..221d8c77c6c --- /dev/null +++ b/var/spack/repos/builtin/packages/hdf/disable_doclint.patch @@ -0,0 +1,11 @@ +--- a/java/src/Makefile.in ++++ b/java/src/Makefile.in +@@ -818,7 +818,7 @@ $(jarfile): classhdf_java.stamp classes docs + .PHONY: docs classes + + docs: +- $(JAVADOC) -sourcepath $(srcdir) -d javadoc -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdflib ++ $(JAVADOC) -sourcepath $(srcdir) -d javadoc -use -splitIndex -windowtitle $(WINDOWTITLE) -doctitle $(DOCTITLE) -J-Xmx180m -verbose -overview $(top_srcdir)/java/src/hdf/overview.html -classpath $(CLASSPATH_ENV) hdf.hdflib -Xdoclint:none + + clean: + rm -rf $(JAVAROOT)/* diff --git a/var/spack/repos/builtin/packages/hdf/package.py b/var/spack/repos/builtin/packages/hdf/package.py index 67c2744470c..d3076b35f9a 100644 --- a/var/spack/repos/builtin/packages/hdf/package.py +++ b/var/spack/repos/builtin/packages/hdf/package.py @@ -15,45 +15,138 @@ class Hdf(AutotoolsPackage): list_url = "https://support.hdfgroup.org/ftp/HDF/releases/" list_depth = 2 + version('4.2.15', sha256='dbeeef525af7c2d01539906c28953f0fdab7dba603d1bc1ec4a5af60d002c459') version('4.2.14', sha256='2d383e87c8a0ca6a5352adbd1d5546e6cc43dc21ff7d90f93efa644d85c0b14a') version('4.2.13', sha256='be9813c1dc3712c2df977d4960e1f13f20f447dfa8c3ce53331d610c1f470483') 
version('4.2.12', sha256='dd419c55e85d1a0e13f3ea5ed35d00710033ccb16c85df088eb7925d486e040c') version('4.2.11', sha256='c3f7753b2fb9b27d09eced4d2164605f111f270c9a60b37a578f7de02de86d24') variant('szip', default=False, description="Enable szip support") - variant('libtirpc', default=False, description="Use xdr library from libtirpc package; if false, will use system or hdf internal") + variant('external-xdr', default=True, + description="Use an external XDR backend") + variant('netcdf', default=False, + description='Build NetCDF API (version 2.3.2)') + variant('fortran', default=False, + description='Enable Fortran interface') + variant('java', default=False, + description='Enable Java JNI interface') + variant('shared', default=False, description='Enable shared library') + variant('pic', default=True, + description='Produce position-independent code') - depends_on('jpeg@6b:') - depends_on('szip', when='+szip') - depends_on('libtirpc', when='+libtirpc') depends_on('zlib@1.1.4:') + depends_on('jpeg') + depends_on('szip', when='+szip') + depends_on('rpc', when='+external-xdr') depends_on('bison', type='build') depends_on('flex', type='build') + depends_on('java@7:', when='+java', type=('build', 'run')) + + # https://forum.hdfgroup.org/t/cant-build-hdf-4-2-14-with-jdk-11-and-enable-java/5702 + patch('disable_doclint.patch', when='@:4.2.14^java@9:') + + conflicts('^libjpeg@:6a') + + # configure: error: Cannot build shared fortran libraries. + # Please configure with --disable-fortran flag. 
+ conflicts('+fortran', when='+shared') + + # configure: error: Java requires shared libraries to be built + conflicts('+java', when='~shared') + + # configure: WARNING: unrecognized options: --enable-java + conflicts('+java', when='@:4.2.11') + + # The Java interface library uses netcdf-related macro definitions even + # when netcdf is disabled and the macros are not defined, e.g.: + # hdfsdsImp.c:158:30: error: 'MAX_NC_NAME' undeclared + conflicts('+java', when='@4.2.12:4.2.13~netcdf') + + # TODO: '@:4.2.14 ~external-xdr' and the fact that we compile for 64 bit + # architecture should be in conflict + + @property + def libs(self): + """HDF can be queried for the following parameters: + + - "shared": shared libraries (default if '+shared') + - "static": static libraries (default if '~shared') + - "transitive": append transitive dependencies to the list of static + libraries (the argument is ignored if shared libraries are + requested) + + :return: list of matching libraries + """ + libraries = ['libmfhdf', 'libdf'] + + query_parameters = self.spec.last_query.extra_parameters + + if 'shared' in query_parameters: + shared = True + elif 'static' in query_parameters: + shared = False + else: + shared = '+shared' in self.spec + + libs = find_libraries( + libraries, root=self.prefix, shared=shared, recursive=True + ) + + if not libs: + msg = 'Unable to recursively locate {0} {1} libraries in {2}' + raise spack.error.NoLibrariesError( + msg.format('shared' if shared else 'static', + self.spec.name, + self.spec.prefix)) + + if not shared and 'transitive' in query_parameters: + libs += self.spec['jpeg:transitive'].libs + libs += self.spec['zlib:transitive'].libs + if '+szip' in self.spec: + libs += self.spec['szip:transitive'].libs + if ('+external-xdr' in self.spec and + self.spec['rpc'].name != 'libc'): + libs += self.spec['rpc:transitive'].libs + + return libs + + def flag_handler(self, name, flags): + if '+pic' in self.spec: + if name == 'cflags': + 
flags.append(self.compiler.cc_pic_flag) + elif name == 'fflags': + flags.append(self.compiler.f77_pic_flag) + + return flags, None, None def configure_args(self): - spec = self.spec + config_args = ['--enable-production', + '--enable-static', + '--with-zlib=%s' % self.spec['zlib'].prefix, + '--with-jpeg=%s' % self.spec['jpeg'].prefix] - config_args = [ - 'CFLAGS={0}'.format(self.compiler.cc_pic_flag), - '--with-jpeg={0}'.format(spec['jpeg'].prefix), - '--with-zlib={0}'.format(spec['zlib'].prefix), - '--disable-netcdf', # must be disabled to build NetCDF with HDF4 - '--enable-fortran', - '--disable-shared', # fortran and shared libs are not compatible - '--enable-static', - '--enable-production' - ] + config_args += self.enable_or_disable('shared') + config_args += self.enable_or_disable('netcdf') + config_args += self.enable_or_disable('fortran') + config_args += self.enable_or_disable('java') - # Szip support - if '+szip' in spec: - config_args.append('--with-szlib={0}'.format(spec['szip'].prefix)) + if '+szip' in self.spec: + config_args.append('--with-szlib=%s' % self.spec['szip'].prefix) else: config_args.append('--without-szlib') - if '+libtirpc' in spec: - config_args.append('LIBS=-ltirpc') - config_args.append('CPPFLAGS=-I{0}/include/tirpc'.format( - spec['libtirpc'].prefix)) - + if '~external-xdr' in self.spec: + config_args.append('--enable-hdf4-xdr') + elif self.spec['rpc'].name != 'libc': + # We should not specify '--disable-hdf4-xdr' due to a bug in the + # configure script. 
+ config_args.append('LIBS=%s' % self.spec['rpc'].libs.link_flags) return config_args + + # Otherwise, we randomly get: + # SDgetfilename: + # incorrect file being opened - expected , retrieved + def check(self): + with working_dir(self.build_directory): + make('check', parallel=False) diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index b4f427ff7b5..c81ffb74dcd 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -20,6 +20,7 @@ class Hdf5(AutotoolsPackage): list_url = "https://support.hdfgroup.org/ftp/HDF5/releases" list_depth = 3 git = "https://bitbucket.hdfgroup.org/scm/hdffv/hdf5.git" + maintainers = ['lrknox'] version('develop', branch='develop') @@ -63,6 +64,13 @@ class Hdf5(AutotoolsPackage): variant('szip', default=False, description='Enable szip support') variant('pic', default=True, description='Produce position-independent code (for shared libs)') + # Build HDF5 with API compaitibility. 
+ variant('api', default='none', description='choose api compatibility', values=('v114', 'v112', 'v110', 'v18', 'v16'), multi=False) + + conflicts('api=v114', when='@1.6:1.12.99', msg='v114 is not compatible with this release') + conflicts('api=v112', when='@1.6:1.10.99', msg='v112 is not compatible with this release') + conflicts('api=v110', when='@1.6:1.8.99', msg='v110 is not compatible with this release') + conflicts('api=v18', when='@1.6:1.6.99', msg='v18 is not compatible with this release') depends_on('autoconf', type='build', when='@develop') depends_on('automake', type='build', when='@develop') @@ -230,8 +238,24 @@ def configure_args(self): extra_args += self.enable_or_disable('hl') extra_args += self.enable_or_disable('fortran') + api = self.spec.variants['api'].value + if api != 'none': + extra_args.append('--with-default-api-version=' + api) + if '+szip' in self.spec: - extra_args.append('--with-szlib=%s' % self.spec['szip'].prefix) + szip_spec = self.spec['szip'] + # The configure script of HDF5 accepts a comma-separated tuple of + # two paths: the first one points to the directory with include + # files, the second one points to the directory with library files. + # If the second path is not specified, the configure script assumes + # that it equals to prefix/lib. However, the correct directory + # might be prefix/lib64. It is not a problem when the building is + # done with Spack's compiler wrapper but it makes the Libtool + # files (*.la) invalid, which makes it problematic to use the + # installed library outside of Spack environment. 
+ extra_args.append('--with-szlib=%s,%s' % + (szip_spec.headers.directories[0], + szip_spec.libs.directories[0])) else: extra_args.append('--without-szlib') diff --git a/var/spack/repos/builtin/packages/healpix-cxx/package.py b/var/spack/repos/builtin/packages/healpix-cxx/package.py new file mode 100644 index 00000000000..da3f1119d28 --- /dev/null +++ b/var/spack/repos/builtin/packages/healpix-cxx/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class HealpixCxx(AutotoolsPackage): + """Healpix-CXX is a C/C++ library for calculating + Hierarchical Equal Area isoLatitude Pixelation of a sphere.""" + + homepage = "https://healpix.sourceforge.io" + url = "https://ayera.dl.sourceforge.net/project/healpix/Healpix_3.50/healpix_cxx-3.50.0.tar.gz" + + version('3.50.0', sha256='6538ee160423e8a0c0f92cf2b2001e1a2afd9567d026a86ff6e2287c1580cb4c') + + depends_on('cfitsio') + depends_on('libsharp', type='build') + + def patch(self): + spec = self.spec + configure_fix = FileFilter('configure') + # Link libsharp static libs + configure_fix.filter( + r'^SHARP_LIBS=.*$', + 'SHARP_LIBS="-L{0} -lsharp -lc_utils -lfftpack -lm"' + .format(spec['libsharp'].prefix.lib) + ) diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index 4a3fec0f339..45f9b1ee8c7 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -10,23 +10,43 @@ class Heffte(CMakePackage): """Highly Efficient FFT for Exascale""" homepage = "https://bitbucket.org/icl/heffte" - url = "https://bitbucket.org/icl/heffte/get/v0.1.tar.gz" + url = "https://bitbucket.org/icl/heffte/get/v1.0.tar.gz" git = "https://bitbucket.org/icl/heffte.git" - version('master', branch='master') + maintainers = 
['mkstoyanov'] + + version('develop', branch='master') + version('1.0', sha256='0902479fb5b1bad01438ca0a72efd577a3529c3d8bad0028f3c18d3a4935ca74') version('0.2', sha256='4e76ae60982b316c2e873b2e5735669b22620fefa1fc82f325cdb6989bec78d1') version('0.1', sha256='d279a03298d2dc76574b1ae1031acb4ea964348cf359273d1afa4668b5bfe748') + patch('threads10.patch', when='@1.0') + + variant('shared', default=True, description='Builds with shared libraries') + variant('fftw', default=False, description='Builds with support for FFTW backend') + variant('mkl', default=False, description='Builds with support for MKL backend') variant('cuda', default=False, description='Builds with support for GPUs via CUDA') - depends_on('fftw') - depends_on('mpi') - depends_on('cuda', when="+cuda") + conflicts('~fftw', when='~mkl~cuda') # requires at least one backend + conflicts('+fftw', when='+mkl') # old API supports at most one CPU backend + conflicts('openmpi~cuda', when='+cuda') # +cuda requires CUDA enabled OpenMPI + + depends_on('mpi', type=('build', 'run')) + + depends_on('fftw@3.3.8:', when="+fftw", type=('build', 'run')) + depends_on('intel@16.0:', when="+mkl", type=('build', 'run')) + depends_on('cuda@8.0:', when="+cuda", type=('build', 'run')) def cmake_args(self): - args = ['-DBUILD_SHARED=ON'] - if '+cuda' in self.spec: - args.append('-DBUILD_GPU=ON') - else: - args.append('-DBUILD_GPU=OFF') - return args + return [ + '-DBUILD_SHARED_LIBS={0:1s}'.format( + 'ON' if '+shared' in self.spec else 'OFF'), + '-DBUILD_GPU={0:1s}'.format( + 'ON' if ('+cuda' in self.spec and + '+fftw' in self.spec) else 'OFF'), + '-DHeffte_ENABLE_CUDA={0:1s}'.format( + 'ON' if '+cuda' in self.spec else 'OFF'), + '-DHeffte_ENABLE_FFTW={0:1s}'.format( + 'ON' if '+fftw' in self.spec else 'OFF'), + '-DHeffte_ENABLE_MKL={0:1s}'.format( + 'ON' if '+mkl' in self.spec else 'OFF'), ] diff --git a/var/spack/repos/builtin/packages/heffte/threads10.patch b/var/spack/repos/builtin/packages/heffte/threads10.patch new file 
mode 100644 index 00000000000..41d55d9bb80 --- /dev/null +++ b/var/spack/repos/builtin/packages/heffte/threads10.patch @@ -0,0 +1,13 @@ +diff --git a/cmake/HeffteConfig.cmake b/cmake/HeffteConfig.cmake +index bd67de9..ca06086 100644 +--- a/cmake/HeffteConfig.cmake ++++ b/cmake/HeffteConfig.cmake +@@ -19,6 +19,8 @@ if (NOT TARGET MPI::MPI_CXX) + find_package(MPI REQUIRED) + endif() + ++find_package(Threads) ++ + if ("@BUILD_SHARED_LIBS@") + set(Heffte_SHARED_FOUND "ON") + else() diff --git a/var/spack/repos/builtin/packages/helics/package.py b/var/spack/repos/builtin/packages/helics/package.py index 7aa4a06c466..50bfe67fa81 100644 --- a/var/spack/repos/builtin/packages/helics/package.py +++ b/var/spack/repos/builtin/packages/helics/package.py @@ -19,6 +19,9 @@ class Helics(CMakePackage): version('develop', branch='develop', submodules=True) version('master', branch='master', submodules=True) + version('2.5.2', sha256='81928f7e30233a07ae2bfe6c5489fdd958364c0549b2a3e6fdc6163d4b390311') + version('2.5.1', sha256='3fc3507f7c074ff8b6a17fe54676334158fb2ff7cc8e7f4df011938f28fdbbca') + version('2.5.0', sha256='6f4f9308ebb59d82d71cf068e0d9d66b6edfa7792d61d54f0a61bf20dd2a7428') version('2.4.2', sha256='957856f06ed6d622f05dfe53df7768bba8fe2336d841252f5fac8345070fa5cb') version('2.4.1', sha256='ac077e9efe466881ea366721cb31fb37ea0e72a881a717323ba4f3cdda338be4') @@ -38,15 +41,17 @@ class Helics(CMakePackage): variant('asio', default=True, description="Compile with ASIO libraries") variant('swig', default=False, description="Build language bindings with SWIG") variant('webserver', default=True, description="Enable the integrated webserver in the HELICS broker server") + variant('python', default=False, description="Enable Python interface") # Build dependency depends_on('git', type='build', when='@master:') depends_on('cmake@3.4:', type='build') - depends_on('boost@1.70: ~atomic ~chrono ~date_time ~exception ~filesystem ~graph ~iostreams ~locale ~log ~math ~program_options ~random 
~regex ~serialization ~signals ~system ~test ~thread ~timer ~wave', type='build', when='+boost') + depends_on('boost@1.70:', type='build', when='+boost') depends_on('swig@3.0:', type='build', when='+swig') depends_on('libzmq@4.3:', when='+zmq') depends_on('mpi@2', when='+mpi') + depends_on('python@3:', when='+python') # OpenMPI doesn't work with HELICS <=2.4.1 conflicts('^openmpi', when='@:2.4.1 +mpi') @@ -59,6 +64,8 @@ class Helics(CMakePackage): conflicts('+tcp', when='~asio') conflicts('+udp', when='~asio') + extends('python', when='+python') + def cmake_args(self): spec = self.spec args = [ @@ -102,4 +109,13 @@ def cmake_args(self): args.append('-DHELICS_ENABLE_SWIG={0}'.format( 'ON' if '+swig' in spec else 'OFF')) + # Python + args.append('-DBUILD_PYTHON_INTERFACE={0}'.format( + 'ON' if '+python' in spec else 'OFF')) + return args + + def setup_run_environment(self, env): + spec = self.spec + if '+python' in spec: + env.prepend_path('PYTHONPATH', self.prefix.python) diff --git a/var/spack/repos/builtin/packages/hepmc/package.py b/var/spack/repos/builtin/packages/hepmc/package.py index d208e7ecde3..6581615a2be 100644 --- a/var/spack/repos/builtin/packages/hepmc/package.py +++ b/var/spack/repos/builtin/packages/hepmc/package.py @@ -14,11 +14,6 @@ class Hepmc(CMakePackage): homepage = "http://hepmc.web.cern.ch/hepmc/" url = "http://hepmc.web.cern.ch/hepmc/releases/hepmc2.06.09.tgz" - version('3.2.0', sha256='f132387763d170f25a7cc9f0bd586b83373c09acf0c3daa5504063ba460f89fc') - version('3.1.2', sha256='4133074b3928252877982f3d4b4c6c750bb7a324eb6c7bb2afc6fa256da3ecc7') - version('3.1.1', sha256='2fcbc9964d6f9f7776289d65f9c73033f85c15bf5f0df00c429a6a1d8b8248bb') - version('3.1.0', sha256='cd37eed619d58369041018b8627274ad790020a4714b54ac05ad1ebc1a6e7f8a') - version('3.0.0', sha256='7ac3c939a857a5ad67bea1e77e3eb16e80d38cfdf825252ac57160634c26d9ec') version('2.06.10', sha256='5adedd9e3f7447e1e5fc01b72f745ab87da2c1611df89208bb3d7c6ea94c11a4') version('2.06.09', 
sha256='e0f8fddd38472c5615210894444686ac5d72df3be682f7d151b562b236d9b422') version('2.06.08', sha256='8be6c1793e0a045f07ddb88bb64b46de7e66a52e75fb72b3f82f9a3e3ba8a8ce') @@ -26,52 +21,13 @@ class Hepmc(CMakePackage): version('2.06.06', sha256='8cdff26c10783ed4248220a84a43b7e1f9b59cc2c9a29bd634d024ca469db125') version('2.06.05', sha256='4c411077cc97522c03b74f973264b8d9fd2b6ccec0efc7ceced2645371c73618') - variant('python', default=False, description='Enable Python bindings') - variant('rootio', default=False, description='Enable ROOT I/O') - variant('interfaces', default=False, description='Install interfaces for some Monte-Carlo Event Gens') - depends_on('cmake@2.8.9:', type='build') - depends_on('python', when='+python') - depends_on('root', when='+rootio') - conflicts('+python', when='@:3.1') - conflicts('+rootio', when='@:2') - conflicts('+interfaces', when='@:2') - - @when('@:2') def cmake_args(self): return ['-Dmomentum:STRING=GEV', '-Dlength:STRING=MM'] - @when('@3:') - def cmake_args(self): - spec = self.spec - args = [ - '-Dmomentum:STRING=GEV', - '-Dlength:STRING=MM', - '-DHEPMC3_ENABLE_PYTHON={0}'.format(spec.satisfies('+python')), - '-DHEPMC3_ENABLE_ROOTIO={0}'.format(spec.satisfies('+rootio')), - '-DHEPMC3_INSTALL_INTERFACES={0}'.format( - spec.satisfies('+interfaces')), - ] - - if self.spec.satisfies('+python'): - py_ver = spec['python'].version.up_to(2) - py_sitepkg = join_path(self.prefix, site_packages_dir) - args.extend([ - '-DHEPMC3_PYTHON_VERSIONS={0}'.format(py_ver), - '-DHEPMC3_Python_SITEARCH{0}={1}'.format( - py_ver.joined, py_sitepkg) - ]) - - if self.spec.satisfies('+rootio'): - args.append('-DROOT_DIR={0}'.format(self.spec['root'].prefix)) - - return args - def url_for_version(self, version): - if version > Version("3.0.0"): - url = "http://hepmc.web.cern.ch/hepmc/releases/HepMC3-{0}.tar.gz" - elif version <= Version("2.06.08"): + if version <= Version("2.06.08"): url = "http://lcgapp.cern.ch/project/simu/HepMC/download/HepMC-{0}.tar.gz" 
else: url = "http://hepmc.web.cern.ch/hepmc/releases/hepmc{0}.tgz" diff --git a/var/spack/repos/builtin/packages/hepmc3/ba38f14d8f56c16cc4105d98f6d4540c928c6150.patch b/var/spack/repos/builtin/packages/hepmc3/ba38f14d8f56c16cc4105d98f6d4540c928c6150.patch new file mode 100644 index 00000000000..b09058104fe --- /dev/null +++ b/var/spack/repos/builtin/packages/hepmc3/ba38f14d8f56c16cc4105d98f6d4540c928c6150.patch @@ -0,0 +1,12 @@ +diff --git a/search/include/HepMC3/Feature.h b/search/include/HepMC3/Feature.h +index 0bd4f92b059d315c4a17ab03d49ba5b20af75d8e..e7717516a99c5c3d2553be2aca21378fb1ae7977 100644 +--- a/search/include/HepMC3/Feature.h ++++ b/search/include/HepMC3/Feature.h +@@ -293,7 +293,7 @@ public: + EvaluatorPtr functor = m_internal; + return [value, functor](ConstGenParticlePtr input)->bool{ + Feature_type local = (*functor)(input); +- return fabs(local - value) <= ((fabs(local) < fabs(value))? fabs(value) : fabs(local)) * std::numeric_limits::epsilon(); ++ return std::less_equal{}(fabs(local - value) , std::numeric_limits::epsilon()); + }; + } diff --git a/var/spack/repos/builtin/packages/hepmc3/package.py b/var/spack/repos/builtin/packages/hepmc3/package.py new file mode 100644 index 00000000000..361d3826f5c --- /dev/null +++ b/var/spack/repos/builtin/packages/hepmc3/package.py @@ -0,0 +1,61 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Hepmc3(CMakePackage): + """The HepMC package is an object oriented, C++ event record for + High Energy Physics Monte Carlo generators and simulation.""" + + homepage = "https://cern.ch/hepmc" + url = "https://gitlab.cern.ch/hepmc/HepMC3/-/archive/3.2.1/HepMC3-3.2.1.tar.gz" + git = "https://gitlab.cern.ch/hepmc/HepMC3.git" + + maintainers = ['vvolkl'] + + version('3.2.2', sha256='0e8cb4f78f804e38f7d29875db66f65e4c77896749d723548cc70fb7965e2d41') + version('3.2.1', sha256='6e4e4bb5708af105d4bf74efc2745e6efe704e942d46a8042f7dcae37a4739fe') + version('3.2.0', sha256='f132387763d170f25a7cc9f0bd586b83373c09acf0c3daa5504063ba460f89fc') + version('3.1.2', sha256='4133074b3928252877982f3d4b4c6c750bb7a324eb6c7bb2afc6fa256da3ecc7') + version('3.1.1', sha256='2fcbc9964d6f9f7776289d65f9c73033f85c15bf5f0df00c429a6a1d8b8248bb') + version('3.1.0', sha256='cd37eed619d58369041018b8627274ad790020a4714b54ac05ad1ebc1a6e7f8a') + # note that version 3.0.0 is not supported + # conflicts with cmake configuration + + variant('python', default=False, description='Enable Python bindings') + variant('rootio', default=False, description='Enable ROOT I/O') + variant('interfaces', default=False, description='Install interfaces for some Monte-Carlo Event Gens') + + depends_on('cmake@2.8.9:', type='build') + depends_on('root', when='+rootio') + depends_on('python', when="+python") + + conflicts('%gcc@9.3.0', when="@:3.1.1") + patch('ba38f14d8f56c16cc4105d98f6d4540c928c6150.patch', when="@3.1.2:3.2.1 %gcc@9.3.0") + + def cmake_args(self): + spec = self.spec + args = [ + '-Dmomentum:STRING=GEV', + '-Dlength:STRING=MM', + '-DHEPMC3_ENABLE_PYTHON={0}'.format(spec.satisfies('+python')), + '-DHEPMC3_ENABLE_ROOTIO={0}'.format(spec.satisfies('+rootio')), + '-DHEPMC3_INSTALL_INTERFACES={0}'.format( + spec.satisfies('+interfaces')), + ] + + if self.spec.satisfies('+python'): + py_ver = spec['python'].version.up_to(2) + 
py_sitepkg = join_path(self.prefix, site_packages_dir) + args.extend([ + '-DHEPMC3_PYTHON_VERSIONS={0}'.format(py_ver), + '-DHEPMC3_Python_SITEARCH{0}={1}'.format( + py_ver.joined, py_sitepkg) + ]) + + if self.spec.satisfies('+rootio'): + args.append('-DROOT_DIR={0}'.format(self.spec['root'].prefix)) + return args diff --git a/var/spack/repos/builtin/packages/hiredis/package.py b/var/spack/repos/builtin/packages/hiredis/package.py new file mode 100644 index 00000000000..74e4552b2c8 --- /dev/null +++ b/var/spack/repos/builtin/packages/hiredis/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Hiredis(MakefilePackage): + """Hiredis is a minimalistic C client library for the Redis database.""" + + homepage = "https://github.com/redis/hiredis" + url = "https://github.com/redis/hiredis/archive/v0.14.1.tar.gz" + + version('0.14.1', sha256='2663b2aed9fd430507e30fc5e63274ee40cdd1a296026e22eafd7d99b01c8913') + version('0.14.0', sha256='042f965e182b80693015839a9d0278ae73fae5d5d09d8bf6d0e6a39a8c4393bd') + version('0.13.3', sha256='717e6fc8dc2819bef522deaca516de9e51b9dfa68fe393b7db5c3b6079196f78') + version('0.13.2', sha256='b0cf73ebe039fe25ecaaa881acdda8bdc393ed997e049b04fc20865835953694') + + def install(self, spec, prefix): + make('PREFIX={0}'.format(prefix), 'install') diff --git a/var/spack/repos/builtin/packages/hpccg/package.py b/var/spack/repos/builtin/packages/hpccg/package.py index 67eb4c19418..a93c5852666 100644 --- a/var/spack/repos/builtin/packages/hpccg/package.py +++ b/var/spack/repos/builtin/packages/hpccg/package.py @@ -13,7 +13,7 @@ class Hpccg(MakefilePackage): """ homepage = "https://mantevo.org/about/applications/" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/HPCCG-1.0.tar.gz" + url = 
"http://downloads.mantevo.org/releaseTarballs/miniapps/HPCCG/HPCCG-1.0.tar.gz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py index d64ce78fadc..9de2184d92f 100644 --- a/var/spack/repos/builtin/packages/hpctoolkit/package.py +++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py @@ -18,7 +18,9 @@ class Hpctoolkit(AutotoolsPackage): git = "https://github.com/HPCToolkit/hpctoolkit.git" maintainers = ['mwkrentel'] - version('master', branch='master') + version('develop', branch='develop') + version('master', branch='master') + version('2020.06.12', commit='ac6ae1156e77d35596fea743ed8ae768f7222f19') version('2020.03.01', commit='94ede4e6fa1e05e6f080be8dc388240ea027f769') version('2019.12.28', commit='b4e1877ff96069fd8ed0fdf0e36283a5b4b62240') version('2019.08.14', commit='6ea44ed3f93ede2d0a48937f288a2d41188a277c') @@ -44,7 +46,7 @@ class Hpctoolkit(AutotoolsPackage): # We can't build with both PAPI and perfmon for risk of segfault # from mismatched header files (unless PAPI installs the perfmon # headers). 
- variant('papi', default=False, + variant('papi', default=True, description='Use PAPI instead of perfmon for access to ' 'the hardware performance counters.') @@ -60,12 +62,13 @@ class Hpctoolkit(AutotoolsPackage): ' +graph +regex +shared +multithreaded visibility=global' ) - depends_on('binutils+libiberty~nls', type='link', when='@master') + depends_on('binutils+libiberty~nls', type='link', when='@2020.04.00:') depends_on('binutils@:2.33.1+libiberty~nls', type='link', when='@:2020.03.99') depends_on('boost' + boost_libs) depends_on('bzip2+shared', type='link') depends_on('dyninst@9.3.2:') depends_on('elfutils+bzip2+xz~nls', type='link') + depends_on('gotcha@1.0.3:') depends_on('intel-tbb+shared') depends_on('libdwarf') depends_on('libmonitor+hpctoolkit+bgq', when='+bgq') @@ -105,6 +108,7 @@ def configure_args(self): '--with-bzip=%s' % spec['bzip2'].prefix, '--with-dyninst=%s' % spec['dyninst'].prefix, '--with-elfutils=%s' % spec['elfutils'].prefix, + '--with-gotcha=%s' % spec['gotcha'].prefix, '--with-tbb=%s' % spec['intel-tbb'].prefix, '--with-libdwarf=%s' % spec['libdwarf'].prefix, '--with-libmonitor=%s' % spec['libmonitor'].prefix, diff --git a/var/spack/repos/builtin/packages/hpcviewer/package.py b/var/spack/repos/builtin/packages/hpcviewer/package.py index 97688963cfe..f14f55d7876 100644 --- a/var/spack/repos/builtin/packages/hpcviewer/package.py +++ b/var/spack/repos/builtin/packages/hpcviewer/package.py @@ -37,6 +37,12 @@ class Hpcviewer(Package): maintainers = ['mwkrentel'] viewer_sha = { + ('2020.05', 'x86_64'): '27f99c94a69abd005303fb58360b0d1b3eb7d223cab81c38ae6ccdd83ec15106', + ('2020.05', 'ppc64'): '469bce07a75476c132d3791ca49e38db015917c9c36b4810e477bc1c54a13d68', + ('2020.05', 'ppc64le'): 'fc4491bf6d9eaf2b7f2d39b722c978597a881ece557fb05a4cf27caabb9e0b99', + ('2020.04', 'x86_64'): '5944c7b1e518b25d143df72b06a69cffb0bfc92186eb5efee2178fc2814a0b8b', + ('2020.04', 'ppc64'): 'ba60615a550aa77a17eb94272b62365a22298cebc6dc2cb7463686741e58d874', + 
('2020.04', 'ppc64le'): '128494077979b447875ed730f1e8c5470fafcd52ae6debe61625031248d91f7c', ('2020.02', 'x86_64'): 'af1f514547a9325aee30eb891b31e38c7ea3f33d2d1978b44f83e7daa3d5de6b', ('2020.02', 'ppc64'): '7bb4926202db663aedd5a6830778c5f73f6b08a65d56861824ea95ba83b1f59c', ('2020.02', 'ppc64le'): 'cfcebb7ba301affd6d21d2afd43c540e6dd4c5bc39b0d20e8bd1e4fed6aa3481', @@ -64,6 +70,12 @@ class Hpcviewer(Package): } trace_sha = { + ('2020.05', 'x86_64'): 'a0b925099a00c10fcb38e937068e50937175fd46dc086121525e546a63a7fd83', + ('2020.05', 'ppc64'): '40526f62f36e5b6438021c2b557256638d41a6b8f4e101534b5230ac644a9b85', + ('2020.05', 'ppc64le'): 'c16e83b59362adcebecd4231374916a2b3a3c016f75a45b24e8398f777a24f89', + ('2020.04', 'x86_64'): '695f7a06479c2b6958a6ebc3985b7ed777e7e126c04424ce980b224690f769f3', + ('2020.04', 'ppc64'): '78cfadaf7bc6130cc4257241499b36f4f1c47f22d0daa29f5e733ca824a87b3c', + ('2020.04', 'ppc64le'): '28c225023accbc85a19c6d8fdcc14dae64a475ed5de2b94f18e58aab4edd2c09', ('2020.02', 'x86_64'): 'b7b634e91108aa50a2e8647ac6bac87df775ae38aff078545efaa84735e0a666', ('2020.02', 'ppc64'): 'a3e845901689e1b32bc6ab2826c6ac6ed352df4839090fa530b20f747e6e0957', ('2020.02', 'ppc64le'): 'a64a283f61e706d988952a7cede9fac0328b09d2d0b64e4c08acc54e38781c98', diff --git a/var/spack/repos/builtin/packages/hpl/package.py b/var/spack/repos/builtin/packages/hpl/package.py index 05909653c09..579abe76188 100644 --- a/var/spack/repos/builtin/packages/hpl/package.py +++ b/var/spack/repos/builtin/packages/hpl/package.py @@ -101,9 +101,15 @@ def configure(self, spec, prefix): @when('@2.3:') def configure_args(self): - config = [ - 'CFLAGS=-O3' - ] + filter_file( + r"^libs10=.*", "libs10=%s" % self.spec["blas"].libs.ld_flags, + "configure" + ) + + if '+openmp' in self.spec: + config = ['CFLAGS=-O3 ' + self.compiler.openmp_flag] + else: + config = ['CFLAGS=-O3'] if (self.spec.satisfies('^intel-mkl') or self.spec.satisfies('^intel-parallel-studio+mkl')): diff --git 
a/var/spack/repos/builtin/packages/hsf-cmaketools/package.py b/var/spack/repos/builtin/packages/hsf-cmaketools/package.py new file mode 100644 index 00000000000..78053515098 --- /dev/null +++ b/var/spack/repos/builtin/packages/hsf-cmaketools/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class HsfCmaketools(Package): + """CMake 'Find' modules for commonly used HEP Packages""" + + homepage = "https://github.com/HSF/cmaketools/" + url = "https://github.com/HSF/cmaketools/archive/1.8.tar.gz" + git = "https://github.com/HSF/cmaketools.git" + + maintainers = ['vvolkl'] + + version('master', branch='master') + version('1.8', sha256='91af30f5701dadf80a5d7e0d808c224c934f0784a3aff2d3b69aff24f7e1db41') + + # this package only needs to be installed in CMAKE_PREFIX_PATH + # which is set by spack + def install(self, spec, prefix): + mkdir(prefix.modules) + install_tree('modules', prefix.modules) + install("CMakeToolsConfig.cmake", prefix) diff --git a/var/spack/repos/builtin/packages/http-get/package.py b/var/spack/repos/builtin/packages/http-get/package.py new file mode 100644 index 00000000000..aa05b9f355d --- /dev/null +++ b/var/spack/repos/builtin/packages/http-get/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import datetime + + +class HttpGet(MakefilePackage): + """Http_get fetches an HTTP URL and dumps the contents to stdout. + It does not do gopher, ftp, file, news, or any other type of URL, + only HTTP. 
It can be configured to do HTTPS fetches as well.""" + + homepage = "http://www.acme.com/software/http_get/" + url = "http://www.acme.com/software/http_get/http_get_23May2018.tar.gz" + + version('2018-05-23', sha256='7d46ce25e53b6d3e27a99c1853c3054a046cc97d5e30a713a7ec986cfe7c4fe0') + + def url_for_version(self, version): + ver = datetime.datetime.strptime(str(version), '%Y-%m-%d').date() + verstr = datetime.datetime.strftime(ver, '%d%b%Y') + return "http://www.acme.com/software/http_get/http_get_{0}.tar.gz".format(verstr) + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("BINDIR =\t/usr/local/bin", + "BINDIR = {0}/bin".format(self.prefix)) + makefile.filter("MANDIR =\t/usr/local/man/man1", + "MANDIR={0}/man/man1".format(self.prefix)) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + mkdirp(prefix.man.man1) + make('install') diff --git a/var/spack/repos/builtin/packages/http-load/package.py b/var/spack/repos/builtin/packages/http-load/package.py new file mode 100644 index 00000000000..4b2f61d18d4 --- /dev/null +++ b/var/spack/repos/builtin/packages/http-load/package.py @@ -0,0 +1,33 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import datetime + + +class HttpLoad(MakefilePackage): + """http_load - multiprocessing http test client""" + + homepage = "http://www.acme.com/software/http_load/" + url = "http://www.acme.com/software/http_load/http_load-09Mar2016.tar.gz" + + version('2016-03-09', sha256='a2bf118d88f6acd2a082cbf275d141538149caaa0a4ce1bcc66680668e781306') + + def url_for_version(self, version): + ver = datetime.datetime.strptime(str(version), '%Y-%m-%d').date() + verstr = datetime.datetime.strftime(ver, '%d%b%Y') + return "http://www.acme.com/software/http_load/http_load-{0}.tar.gz".format(verstr) + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("BINDIR =\t/usr/local/bin", + "BINDIR = {0}/bin".format(self.prefix)) + makefile.filter("MANDIR =\t/usr/local/man/man1", + "MANDIR={0}/man/man1".format(self.prefix)) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + mkdirp(prefix.man.man1) + make('install') diff --git a/var/spack/repos/builtin/packages/http-ping/package.py b/var/spack/repos/builtin/packages/http-ping/package.py new file mode 100644 index 00000000000..7cf731e019f --- /dev/null +++ b/var/spack/repos/builtin/packages/http-ping/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import datetime + + +class HttpPing(MakefilePackage): + """http_ping is like the regular ping command, except that it sends + HTTP requests instead of ICMP echo requests.""" + + homepage = "http://www.acme.com/software/http_ping/" + url = "http://www.acme.com/software/http_ping/http_ping_09Mar2016.tar.gz" + + version('2016-03-09', sha256='6bdc570c776a760d2c08f7d18e00b0edd74cf603400929c66c512801b6bb5871') + + def url_for_version(self, version): + ver = datetime.datetime.strptime(str(version), '%Y-%m-%d').date() + verstr = datetime.datetime.strftime(ver, '%d%b%Y') + return "http://www.acme.com/software/http_ping/http_ping_{0}.tar.gz".format(verstr) + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("BINDIR =\t/usr/local/bin", + "BINDIR = {0}/bin".format(self.prefix)) + makefile.filter("MANDIR =\t/usr/local/man/man1", + "MANDIR={0}/man/man1".format(self.prefix)) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + mkdirp(prefix.man.man1) + make('install') diff --git a/var/spack/repos/builtin/packages/http-post/package.py b/var/spack/repos/builtin/packages/http-post/package.py new file mode 100644 index 00000000000..bb9c0f25931 --- /dev/null +++ b/var/spack/repos/builtin/packages/http-post/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import datetime + + +class HttpPost(MakefilePackage): + """Http_post does a POST operation to an HTTP URL and dumps the results + to stdout. It does not do gopher, ftp, file, news, or any other type of + URL, only HTTP. 
It can be configured to do HTTPS POSTs as well.""" + + homepage = "http://www.acme.com/software/http_post/" + url = "http://www.acme.com/software/http_post/http_post_18May2018.tar.gz" + + version('2018-05-18', sha256='981c62bcc5cd12b8531f887b3e3779a63a7b7f370062575cded412865a20ea2c') + + def url_for_version(self, version): + ver = datetime.datetime.strptime(str(version), '%Y-%m-%d').date() + verstr = datetime.datetime.strftime(ver, '%d%b%Y') + return "http://www.acme.com/software/http_post/http_post_{0}.tar.gz".format(verstr) + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("BINDIR =\t/usr/local/bin", + "BINDIR = {0}/bin".format(self.prefix)) + makefile.filter("MANDIR =\t/usr/local/man/man1", + "MANDIR={0}/man/man1".format(self.prefix)) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + mkdirp(prefix.man.man1) + make('install') diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py index 2b7968a37a5..4edabfbf4ad 100644 --- a/var/spack/repos/builtin/packages/hwloc/package.py +++ b/var/spack/repos/builtin/packages/hwloc/package.py @@ -27,7 +27,10 @@ class Hwloc(AutotoolsPackage): list_depth = 2 git = 'https://github.com/open-mpi/hwloc.git' + maintainers = ['bgoglin'] + version('master', branch='master') + version('2.2.0', sha256='2defba03ddd91761b858cbbdc2e3a6e27b44e94696dbfa21380191328485a433') version('2.1.0', sha256='1fb8cc1438de548e16ec3bb9e4b2abb9f7ce5656f71c0906583819fcfa8c2031') version('2.0.2', sha256='27dcfe42e3fb3422b72ce48b48bf601c0a3e46e850ee72d9bdd17b5863b6e42c') version('2.0.1', sha256='f1156df22fc2365a31a3dc5f752c53aad49e34a5e22d75ed231cd97eaa437f9d') @@ -49,6 +52,7 @@ class Hwloc(AutotoolsPackage): variant('gl', default=False, description="Support GL device discovery") variant('cuda', default=False, description="Support CUDA devices") variant('libxml2', default=True, description="Build with libxml2") + variant('libudev', default=False, 
description="Build with libudev") variant('pci', default=(sys.platform != 'darwin'), description="Support analyzing devices on PCI bus") variant('shared', default=True, description="Build shared libraries") @@ -57,6 +61,17 @@ class Hwloc(AutotoolsPackage): default=False, description='Enable the Cairo back-end of hwloc\'s lstopo command' ) + variant( + 'netloc', + default=False, + description="Enable netloc [requires MPI]" + ) + + # netloc isn't available until version 2.0.0 + conflicts('+netloc', when="@:1.99.99") + + # libudev isn't available until version 1.11.0 + conflicts('+libudev', when="@:1.10") depends_on('pkgconfig', type='build') depends_on('m4', type='build', when='@master') @@ -71,6 +86,10 @@ class Hwloc(AutotoolsPackage): depends_on('cairo', when='+cairo') depends_on('numactl', when='@:1.11.11 platform=linux') + # When mpi=openmpi, this introduces an unresolvable dependency. + # See https://github.com/spack/spack/issues/15836 for details + depends_on('mpi', when='+netloc') + def url_for_version(self, version): return "http://www.open-mpi.org/software/hwloc/v%s/downloads/hwloc-%s.tar.gz" % (version.up_to(2), version) @@ -81,7 +100,7 @@ def configure_args(self): # (Alternatively, we could require OpenCL as dependency.) 
"--disable-opencl", ] - if '@2.0.0:' in self.spec: + if '+netloc' in self.spec: args.append('--enable-netloc') args.extend(self.enable_or_disable('cairo')) @@ -89,6 +108,7 @@ def configure_args(self): args.extend(self.enable_or_disable('gl')) args.extend(self.enable_or_disable('cuda')) args.extend(self.enable_or_disable('libxml2')) + args.extend(self.enable_or_disable('libudev')) args.extend(self.enable_or_disable('pci')) args.extend(self.enable_or_disable('shared')) diff --git a/var/spack/repos/builtin/packages/hybridsim/makefile.patch b/var/spack/repos/builtin/packages/hybridsim/makefile.patch new file mode 100644 index 00000000000..4512bc530fd --- /dev/null +++ b/var/spack/repos/builtin/packages/hybridsim/makefile.patch @@ -0,0 +1,20 @@ +diff --git a/Makefile b/Makefile +index c43f1e2..8b3bd86 100644 +--- a/Makefile ++++ b/Makefile +@@ -14,12 +14,11 @@ endif + CXXFLAGS+=$(OPTFLAGS) + + CUR_DIRECTORY=$(shell pwd) +-DRAM_LIB=$(CUR_DIRECTORY)/../DRAMSim2 +-NV_LIB=$(CUR_DIRECTORY)/../NVDIMMSim/src +-#NV_LIB=$(CUR_DIRECTORY)/../FNVSim ++DRAM_LIB=$(CUR_DIRECTORY)/DRAMSim2 ++NV_LIB=$(CUR_DIRECTORY)/NVDIMMSim + + INCLUDES=-I$(DRAM_LIB) -I$(NV_LIB) +-LIBS=-L${DRAM_LIB} -L${NV_LIB} -ldramsim -lnvdsim -Wl,-rpath ${DRAM_LIB} -Wl,-rpath ${NV_LIB} ++LIBS=-L${DRAM_LIB} -L${NV_LIB} -ldramsim -lnvdsim -Wl,-rpath -Wl,${DRAM_LIB} -Wl,-rpath -Wl,${NV_LIB} + + EXE_NAME=HybridSim + LIB_NAME=libhybridsim.so diff --git a/var/spack/repos/builtin/packages/hybridsim/package.py b/var/spack/repos/builtin/packages/hybridsim/package.py new file mode 100644 index 00000000000..089181ca0bd --- /dev/null +++ b/var/spack/repos/builtin/packages/hybridsim/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Hybridsim(MakefilePackage): + """ + HybridSim provides cycle-accurate simulation of a non-volatile + memory system augmented with a DRAM based cache. It uses DRAMSim2 + for the DRAM model and NVDIMMSim for the non-volatile memory model + """ + + homepage = "https://github.com/jimstevens2001/HybridSim" + git = "https://github.com/jimstevens2001/HybridSim" + url = "https://github.com/jimstevens2001/HybridSim/archive/v2.0.1.tar.gz" + + maintainers = ['jjwilke'] + + version('2.0.1', sha256="57b82ac929acd36de84525e4d61358f1ab6532f5b635ca3f560e563479921937") + + depends_on("dramsim2") + depends_on("nvdimmsim") + patch("makefile.patch", when="@2.0.1") + + def build(self, spec, prefix): + symlink(spec["dramsim2"].prefix, "DRAMSim2") + symlink(spec["nvdimmsim"].prefix, "NVDIMMSim") + if spec.satisfies("platform=darwin"): + make("libhybridsim.dylib") + else: + make("libhybridsim.so") + + def install(self, spec, prefix): + install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/hydrogen/package.py b/var/spack/repos/builtin/packages/hydrogen/package.py index d0aef797e79..449fe046dc5 100644 --- a/var/spack/repos/builtin/packages/hydrogen/package.py +++ b/var/spack/repos/builtin/packages/hydrogen/package.py @@ -18,6 +18,7 @@ class Hydrogen(CMakePackage): maintainers = ['bvanessen'] version('develop', branch='hydrogen') + version('1.3.4', sha256='7979f6656f698f0bbad6798b39d4b569835b3013ff548d98089fce7c283c6741') version('1.3.3', sha256='a51a1cfd40ac74d10923dfce35c2c04a3082477683f6b35e7b558ea9f4bb6d51') version('1.3.2', sha256='50bc5e87955f8130003d04dfd9dcad63107e92b82704f8107baf95b0ccf98ed6') version('1.3.1', sha256='a8b8521458e9e747f2b24af87c4c2749a06e500019c383e0cefb33e5df6aaa1d') @@ -116,9 +117,6 @@ def cmake_args(self): args = [ '-DCMAKE_INSTALL_MESSAGE:STRING=LAZY', - '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, - '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, - 
'-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc, '-DBUILD_SHARED_LIBS:BOOL=%s' % ('+shared' in spec), '-DHydrogen_ENABLE_OPENMP:BOOL=%s' % ('+hybrid' in spec), '-DHydrogen_ENABLE_QUADMATH:BOOL=%s' % ('+quad' in spec), diff --git a/var/spack/repos/builtin/packages/hyperscan/package.py b/var/spack/repos/builtin/packages/hyperscan/package.py new file mode 100644 index 00000000000..1fd057c7c6e --- /dev/null +++ b/var/spack/repos/builtin/packages/hyperscan/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import platform + +_versions = { + 'v5.2.1': { + 'Linux-aarch64': ('d2ac1669154ec27b794b64d026ad09caecee6e5e17fd35107595a7517711d2b9', 'https://github.com/kunpengcompute/hyperscan/archive/v5.2.1.aarch64.tar.gz'), + 'Linux-x86_64': ('fd879e4ee5ecdd125e3a79ef040886978ae8f1203832d5a3f050c48f17eec867', 'https://github.com/intel/hyperscan/archive/v5.2.1.tar.gz') + } +} + + +class Hyperscan(CMakePackage): + """High-performance regular expression matching library.""" + + homepage = "https://www.hyperscan.io/" + url = "https://github.com/intel/hyperscan/archive/v5.2.1.tar.gz" + + for ver, packages in _versions.items(): + key = "{0}-{1}".format(platform.system(), platform.machine()) + pkg = packages.get(key) + if pkg: + version(ver, sha256=pkg[0], url=pkg[1]) + + depends_on('boost') + depends_on('pcre') + depends_on('ragel', type='build') diff --git a/var/spack/repos/builtin/packages/ibm-java/package.py b/var/spack/repos/builtin/packages/ibm-java/package.py index df4b9d9c4e5..72739240d5d 100644 --- a/var/spack/repos/builtin/packages/ibm-java/package.py +++ b/var/spack/repos/builtin/packages/ibm-java/package.py @@ -10,15 +10,22 @@ class IbmJava(Package): """Binary distribution of the IBM Java Software Development Kit - for big and little-endian powerpc (power7, 8 and 9).""" + 
for big and little-endian powerpc (power7, 8 and 9). Note: IBM + is fairly aggressive about taking down old versions, so old + (and even current) versions may not be available.""" homepage = "https://developer.ibm.com/javasdk/" + maintainers = ['mwkrentel'] # Note: IBM is fairly aggressive about taking down old versions, # so we may need to update this frequently. Also, old revs may # not be available for download. version_list = [ + ('8.0.6.10', 'ppc64', 'ff5151ead88f891624eefe33d80d56c325ca0aa4b93bd96c135cad326993eda2'), + ('8.0.6.10', 'ppc64le', 'ea99ab28dd300b08940882d178247e99aafe5a998b1621cf288dfb247394e067'), + ('8.0.6.7', 'ppc64', 'a1accb461a039af4587ea86511e317fea1d423e7f781459a17ed3947afed2982'), + ('8.0.6.7', 'ppc64le', '9ede76a597af28c7f10c6f8a68788cc2dcd39fdab178c9bac56df8b3766ac717'), ('8.0.6.0', 'ppc64', 'e142746a83e47ab91d71839d5776f112ed154ae180d0628e3f10886151dad710'), ('8.0.6.0', 'ppc64le', '18c2eccf99225e6e7643141d8da4110cacc39f2fa00149fc26341d2272cc0102'), ('8.0.5.30', 'ppc64', 'd39ce321bdadd2b2b829637cacf9c1c0d90235a83ff6e7dcfa7078faca2f212f'), diff --git a/var/spack/repos/builtin/packages/iegenlib/package.py b/var/spack/repos/builtin/packages/iegenlib/package.py index be61f120c77..ee198fd588f 100644 --- a/var/spack/repos/builtin/packages/iegenlib/package.py +++ b/var/spack/repos/builtin/packages/iegenlib/package.py @@ -18,11 +18,13 @@ class Iegenlib(CMakePackage): version('master', branch='master') version('2018-07-03', - url="https://github.com/CompOpt4Apps/IEGenLib/archive/fc479ee6ff01dba26beffc1dc6bacdba03262138.zip", - sha256='b4c0b368363fcc1e34b388057cc0940bb87fc336cebb0772fd6055f45009b12b') + url="https://github.com/CompOpt4Apps/IEGenLib/archive/fc479ee6ff01dba26beffc1dc6bacdba03262138.zip", + sha256='b4c0b368363fcc1e34b388057cc0940bb87fc336cebb0772fd6055f45009b12b') depends_on('cmake@2.6:', type='build') depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') 
depends_on('texinfo', type='build') depends_on('isl') diff --git a/var/spack/repos/builtin/packages/intel-mpi/package.py b/var/spack/repos/builtin/packages/intel-mpi/package.py index 15cb84150ef..52e93e66728 100644 --- a/var/spack/repos/builtin/packages/intel-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-mpi/package.py @@ -63,3 +63,9 @@ def setup_dependent_build_environment(self, *args): 'F90': spack_fc, 'FC': spack_fc, }) + + def setup_run_environment(self, env): + # super() takes (type, instance); the reversed order raises TypeError. + super(IntelMpi, self).setup_run_environment(env) + + # IntelPackage exposes the wrapper map as the attribute + # 'mpi_compiler_wrappers' (as the intel-parallel-studio hunk below + # already uses); 'self.mpi_compiler.wrappers' is an AttributeError. + for name, value in self.mpi_compiler_wrappers.items(): + env.set(name, value) diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 410cd4045e4..f59aa2b7ea6 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -225,3 +225,9 @@ def setup_dependent_build_environment(self, *args): 'F90': spack_fc, 'FC': spack_fc, }) + + def setup_run_environment(self, env): + # super() takes (type, instance); the reversed order raises TypeError. + super(IntelParallelStudio, self).setup_run_environment(env) + + for name, value in self.mpi_compiler_wrappers.items(): + env.set(name, value) diff --git a/var/spack/repos/builtin/packages/intel-pin/package.py b/var/spack/repos/builtin/packages/intel-pin/package.py index e886ba0784c..4378d8e7dd3 100644 --- a/var/spack/repos/builtin/packages/intel-pin/package.py +++ b/var/spack/repos/builtin/packages/intel-pin/package.py @@ -19,6 +19,7 @@ class IntelPin(Package): version('3.11', sha256='aa5abca475a6e106a75e6ed4ba518fb75a57549a59f00681e6bd6e3f221bd23a', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.11-97998-g7ecce2dac-gcc-linux.tar.gz') version('3.10', sha256='7c8f14c3a0654bab662b58aba460403138fa44517bd40052501e8e0075b2702a', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.10-97971-gc5e41af74-gcc-linux.tar.gz') version('3.7',
sha256='4730328795be61f1addb0e505a3792a4b4ca80b1b9405acf217beec6b5b90fb8', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.7-97619-g0d0c92f4f-gcc-linux.tar.gz') + version('2.14', sha256="1c29f589515772411a699a82fc4a3156cad95863a29741dfa6522865d4d281a1", url="https://software.intel.com/sites/landingpage/pintool/downloads/pin-2.14-71313-gcc.4.4.7-linux.tar.gz") def install(self, spec, prefix): install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 94eec8a06e6..c470e8933b3 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -132,22 +132,22 @@ def coerce_to_spack(self, tbb_build_subdir): for f in fs: lines = open(f).readlines() of = open(f, "w") - for l in lines: - if l.strip().startswith("CPLUS ="): + for lin in lines: + if lin.strip().startswith("CPLUS ="): of.write("# coerced to spack\n") of.write("CPLUS = $(CXX)\n") - elif l.strip().startswith("CONLY ="): + elif lin.strip().startswith("CONLY ="): of.write("# coerced to spack\n") of.write("CONLY = $(CC)\n") else: - of.write(l) + of.write(lin) def install(self, spec, prefix): # Deactivate use of RTM with GCC when on an OS with a very old # assembler. 
if (spec.satisfies('%gcc@4.8.0: os=rhel6') - or spec.satisfies('%gcc@4.8.0: os=centos6') - or spec.satisfies('%gcc@4.8.0: os=scientific6')): + or spec.satisfies('%gcc@4.8.0: os=centos6') + or spec.satisfies('%gcc@4.8.0: os=scientific6')): filter_file(r'RTM_KEY.*=.*rtm.*', 'RTM_KEY =', join_path('build', 'linux.gcc.inc')) @@ -224,3 +224,9 @@ def darwin_fix(self): # Replace @rpath in ids with full path if sys.platform == 'darwin': fix_darwin_install_name(self.prefix.lib) + + @property + def libs(self): + shared = True if '+shared' in self.spec else False + return find_libraries( + 'libtbb*', root=self.prefix, shared=shared, recursive=True) diff --git a/var/spack/repos/builtin/packages/isaac-server/package.py b/var/spack/repos/builtin/packages/isaac-server/package.py index 24fae8c32f1..af693a339c5 100644 --- a/var/spack/repos/builtin/packages/isaac-server/package.py +++ b/var/spack/repos/builtin/packages/isaac-server/package.py @@ -17,6 +17,7 @@ class IsaacServer(CMakePackage): version('develop', branch='dev') version('master', branch='master') + version('1.5.0', sha256='4d5a150dfe064289d760da368102172f84e9e8851a177c8125a56e151db58dce') version('1.4.0', sha256='6cbd4cc54a22de5e5a3427e44141db6e7b80b33fe7a0c707390a113655bf344e') version('1.3.3', sha256='92a972d05d315ad66546671c047b7edf8ed0e05d64d2b8d77ababb5bb9b93d8e') version('1.3.2', sha256='e6eedc641de5b0a7c5ea5cda6b11e9b6d4a78dfac8be90302147b26d09859a68') diff --git a/var/spack/repos/builtin/packages/isaac/package.py b/var/spack/repos/builtin/packages/isaac/package.py index 2c0badc88c9..ddc62f3482c 100644 --- a/var/spack/repos/builtin/packages/isaac/package.py +++ b/var/spack/repos/builtin/packages/isaac/package.py @@ -17,6 +17,7 @@ class Isaac(CMakePackage): version('develop', branch='dev') version('master', branch='master') + version('1.5.0', sha256='4d5a150dfe064289d760da368102172f84e9e8851a177c8125a56e151db58dce') version('1.4.0', sha256='6cbd4cc54a22de5e5a3427e44141db6e7b80b33fe7a0c707390a113655bf344e') 
version('1.3.3', sha256='92a972d05d315ad66546671c047b7edf8ed0e05d64d2b8d77ababb5bb9b93d8e') version('1.3.2', sha256='e6eedc641de5b0a7c5ea5cda6b11e9b6d4a78dfac8be90302147b26d09859a68') diff --git a/var/spack/repos/builtin/packages/isc-dhcp/package.py b/var/spack/repos/builtin/packages/isc-dhcp/package.py new file mode 100644 index 00000000000..9a5ab764556 --- /dev/null +++ b/var/spack/repos/builtin/packages/isc-dhcp/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class IscDhcp(AutotoolsPackage): + """ISC DHCP offers a complete open source solution for + implementing DHCP servers, relay agents, and clients. ISC + DHCP supports both IPv4 and IPv6, and is suitable for use + in high-volume and high-reliability applications.""" + + homepage = "https://www.isc.org/dhcp/" + url = "https://downloads.isc.org/isc/dhcp/4.4.2/dhcp-4.4.2.tar.gz" + list_url = "https://downloads.isc.org/isc/dhcp" + list_depth = 1 + + parallel = False + + version('4.4.2', sha256='1a7ccd64a16e5e68f7b5e0f527fd07240a2892ea53fe245620f4f5f607004521') + version('4.4.1', sha256='2a22508922ab367b4af4664a0472dc220cc9603482cf3c16d9aff14f3a76b608') + version('4.4.0', sha256='4a90be0f22ad81c987f5584661b60a594f1b21c581b82bfba3ae60f89ae44397') + version('4.3.6', sha256='a41eaf6364f1377fe065d35671d9cf82bbbc8f21207819b2b9f33f652aec6f1b') + version('4.3.5', sha256='eb95936bf15d2393c55dd505bc527d1d4408289cec5a9fa8abb99f7577e7f954') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('bind9', type='build') diff --git a/var/spack/repos/builtin/packages/iwyu/package.py b/var/spack/repos/builtin/packages/iwyu/package.py index a05ff073b7b..eaf0a57913f 100644 --- 
a/var/spack/repos/builtin/packages/iwyu/package.py +++ b/var/spack/repos/builtin/packages/iwyu/package.py @@ -16,12 +16,19 @@ class Iwyu(CMakePackage): maintainers = ['sethrj'] + version('0.14', sha256='43184397db57660c32e3298a6b1fd5ab82e808a1f5ab0591d6745f8d256200ef') version('0.13', sha256='49294270aa64e8c04182369212cd919f3b3e0e47601b1f935f038c761c265bc9') version('0.12', sha256='a5892fb0abccb820c394e4e245c00ef30fc94e4ae58a048b23f94047c0816025') version('0.11', sha256='2d2877726c4aed9518cbb37673ffbc2b7da9c239bf8fe29432da35c1c0ec367a') - patch('iwyu-013-cmake.patch', when='@0.13') + patch('iwyu-013-cmake.patch', when='@0.13:0.14') + depends_on('llvm+clang@10.0:10.999', when='@0.14') depends_on('llvm+clang@9.0:9.999', when='@0.13') depends_on('llvm+clang@8.0:8.999', when='@0.12') depends_on('llvm+clang@7.0:7.999', when='@0.11') + + @when('@0.14:') + def cmake_args(self): + return [self.define('CMAKE_CXX_STANDARD', 14), + self.define('CMAKE_CXX_EXTENSIONS', False)] diff --git a/var/spack/repos/builtin/packages/jali/package.py b/var/spack/repos/builtin/packages/jali/package.py index cc2e025c889..e75dc7fb92f 100644 --- a/var/spack/repos/builtin/packages/jali/package.py +++ b/var/spack/repos/builtin/packages/jali/package.py @@ -13,11 +13,12 @@ class Jali(CMakePackage): homepage = "https://github.com/lanl/jali" git = "https://github.com/lanl/jali" - url = "https://github.com/lanl/jali/archive/1.1.1.tar.gz" + url = "https://github.com/lanl/jali/archive/1.1.4.tar.gz" maintainers = ['raovgarimella'] version('master', branch='master') + version('1.1.4', sha256='135ab02be1487fcdfb039613cbed630bce336d581a66468c66209db0a9d8a104') version('1.1.1', sha256='c96c000b3893ea7f15bbc886524476dd466ae145e77deedc27e412fcc3541207') version('1.1.0', sha256='783dfcd6a9284af83bb380ed257fa8b0757dc2f7f9196d935eb974fb6523c644') version('1.0.5', sha256='979170615d33a7bf20c96bd4d0285e05a2bbd901164e377a8bccbd9af9463801') @@ -31,7 +32,7 @@ class Jali(CMakePackage): depends_on('boost') - 
depends_on('mstk@3.3.0: +exodusii+parallel~use_markers partitioner=all', when='+mstk') + depends_on('mstk@3.3.5: +exodusii+parallel~use_markers partitioner=all', when='+mstk') depends_on('zoltan -fortran') depends_on('metis') diff --git a/var/spack/repos/builtin/packages/jansson/package.py b/var/spack/repos/builtin/packages/jansson/package.py index 6647b3d4e80..43982b42ccb 100644 --- a/var/spack/repos/builtin/packages/jansson/package.py +++ b/var/spack/repos/builtin/packages/jansson/package.py @@ -14,6 +14,11 @@ class Jansson(CMakePackage): url = "https://github.com/akheron/jansson/archive/v2.9.tar.gz" maintainers = ['ax3l'] + version('2.13.1', sha256='f22901582138e3203959c9257cf83eba9929ac41d7be4a42557213a22ebcc7a0') + version('2.13', sha256='beb47da10cb27668de3012cc193a1873a898ad5710a1126be9e6d3357beb5b30') + version('2.12', sha256='76260d30e9bbd0ef392798525e8cd7fe59a6450c54ca6135672e3cd6a1642941') + version('2.11', sha256='6ff0eab3a8baf64d21cae25f88a0311fb282006eb992080722a9099469c32881') + version('2.10', sha256='b0a899f90ade82e42da0ecabc8af1fa296d69691e7c0786c4994fb79d4833ebb') version('2.9', sha256='952fa714b399e71c1c3aa020e32e899f290c82126ca4d0d14cff5d10af457656') variant('shared', default=True, diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py index 025d12be659..92af25ae9b6 100644 --- a/var/spack/repos/builtin/packages/jdk/package.py +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -32,6 +32,8 @@ class Jdk(Package): # found in a link above. The build number can be deciphered from the URL. # Alternatively, run `bin/java -version` after extracting. 
Replace '+' # symbol in version with '_', otherwise it will be interpreted as a variant + version('14_36', sha256='4639bbaecc9cc606f1a4b99fda1efcaefcbf57a7025b3828b095093a6c866afd', + url='https://download.oracle.com/otn-pub/java/jdk/14+36/076bab302c7b4508975440c56f6cc26a/jdk-14_linux-x64_bin.tar.gz') version('12.0.2_10', sha256='2dde6fda89a4ec6e6560ed464e917861c9e40bf576e7a64856dafc55abaaff51', url='https://download.oracle.com/otn-pub/java/jdk/12.0.2+10/e482c34c86bd4bf8b56c0b35558996b9/jdk-12.0.2_linux-x64_bin.tar.gz') version('12.0.1_12', sha256='9fd6dcdaf2cfca7da59e39b009a0f5bcd53bec2fb16105f7ca8d689cdab68d75', @@ -57,6 +59,7 @@ class Jdk(Package): version('1.8.0_131-b11', sha256='62b215bdfb48bace523723cdbb2157c665e6a25429c73828a32f00e587301236', url='https://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.tar.gz') + provides('java@14', when='@14.0:14.999') provides('java@12', when='@12.0:12.999') provides('java@11', when='@11.0:11.999') provides('java@10', when='@10.0:10.999') diff --git a/var/spack/repos/builtin/packages/jhpcn-df/package.py b/var/spack/repos/builtin/packages/jhpcn-df/package.py new file mode 100644 index 00000000000..3bb3132844d --- /dev/null +++ b/var/spack/repos/builtin/packages/jhpcn-df/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class JhpcnDf(CMakePackage): + """ + Data compression library based on Jointed Hierarchical Precision + Compression Number - Data Format + + JHPCN-DF is a novel lossy compression algorithm taylored for floating + point dataset. 
The algorithm enhances the effect of employing standard + compression algorithms like deflate because this approach makes + the occurence rate of the same byte pattern in data stream higher owing + to truncating some lower bits of significand. + """ + + homepage = "http://avr-aics-riken.github.io/JHPCN-DF/" + url = "https://github.com/avr-aics-riken/JHPCN-DF/archive/1.1.0.tar.gz" + + version('1.1.0', sha256='106d99cc4faac5c76e51e8bfe3193c1d3dc91648072cf418d868ed830592b04b') + + variant('lz4', default=False, description='Enable lz4') + variant('fortran', default=False, description='Enable Fortran Interface') + + depends_on('zlib', type='link') + depends_on('lz4@:1.7', when='+lz4', type='link') + + def cmake_args(self): + args = [ + self.define_from_variant('with_Fortran_interface', 'fortran'), + self.define_from_variant('with_lz4', 'lz4') + ] + return args diff --git a/var/spack/repos/builtin/packages/jsoncpp/package.py b/var/spack/repos/builtin/packages/jsoncpp/package.py index 546ea104f22..2593c9ae0fe 100644 --- a/var/spack/repos/builtin/packages/jsoncpp/package.py +++ b/var/spack/repos/builtin/packages/jsoncpp/package.py @@ -15,6 +15,7 @@ class Jsoncpp(CMakePackage): homepage = "https://github.com/open-source-parsers/jsoncpp" url = "https://github.com/open-source-parsers/jsoncpp/archive/1.7.3.tar.gz" + version('1.9.2', sha256='77a402fb577b2e0e5d0bdc1cf9c65278915cdb25171e3452c68b6da8a561f8f0') version('1.9.1', sha256='c7b40f5605dd972108f503f031b20186f5e5bca2b65cd4b8bd6c3e4ba8126697') version('1.9.0', sha256='bdd3ba9ed1f110b3eb57474d9094e90ab239b93b4803b4f9b1722c281e85a4ac') version('1.8.4', sha256='c49deac9e0933bcb7044f08516861a2d560988540b23de2ac1ad443b219afdb6') @@ -42,6 +43,17 @@ class Jsoncpp(CMakePackage): depends_on('cmake@3.1:', type='build') depends_on('python', type='test') + # Ref: https://github.com/open-source-parsers/jsoncpp/pull/1023 + # Released in 1.9.2, patch does not apply cleanly across releases. + # May apply to more compilers in the future. 
+ @when('@:1.9.1 %clang@10.0.0:') + def patch(self): + filter_file( + 'return d >= min && d <= max;', + 'return d >= static_cast(min) && ' + 'd <= static_cast(max);', + 'src/lib_json/json_value.cpp') + def cmake_args(self): args = ['-DBUILD_SHARED_LIBS=ON'] cxxstd = self.spec.variants['cxxstd'].value diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index ec86c04886f..cd401dd9ea7 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -18,6 +18,7 @@ class Julia(Package): maintainers = ['glennpj'] version('master', branch='master') + version('1.4.1', sha256='b21585db55673ac0668c163678fcf2aad11eb7c64bb2aa03a43046115fab1553') version('1.4.0', sha256='880c73a08296ce8d94ad9605149f2a2b2b028e7202a700ef725da899300b8be9') version('1.3.1', sha256='053908ec2706eb76cfdc998c077de123ecb1c60c945b4b5057aa3be19147b723') version('1.2.0', sha256='2419b268fc5c3666dd9aeb554815fe7cf9e0e7265bc9b94a43957c31a68d9184') @@ -46,7 +47,8 @@ class Julia(Package): # Python only needed to build LLVM? depends_on('python@2.7:2.8', type='build', when='@:1.1') depends_on('python@2.7:', type='build', when='@1.2:') - depends_on('cmake @2.8:', type='build', when='@1.0:') + depends_on('cmake@2.8:', type='build', when='@1.0:') + depends_on('cmake@:3.11', type='build', when='@:1.4') depends_on('git', type='build', when='@master') # Combined build-time and run-time dependencies: diff --git a/var/spack/repos/builtin/packages/kea/package.py b/var/spack/repos/builtin/packages/kea/package.py new file mode 100644 index 00000000000..acb6516f397 --- /dev/null +++ b/var/spack/repos/builtin/packages/kea/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Kea(AutotoolsPackage): + """Modern, open source DHCPv4 & DHCPv6 server.""" + + homepage = "https://www.isc.org/kea/" + url = "https://downloads.isc.org/isc/kea/1.6.2/kea-1.6.2.tar.gz" + + version('1.6.2', sha256='2af7336027143c3e98d8d1d44165b2c2cbb0252a92bd88f6dd4d2c6adb69d7b5') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('log4cplus') + depends_on('boost') diff --git a/var/spack/repos/builtin/packages/knem/package.py b/var/spack/repos/builtin/packages/knem/package.py new file mode 100644 index 00000000000..473a1783193 --- /dev/null +++ b/var/spack/repos/builtin/packages/knem/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Knem(AutotoolsPackage): + """KNEM is a Linux kernel module enabling high-performance intra-node MPI + communication for large messages.""" + + homepage = "http://knem.gforge.inria.fr" + url = "http://gforge.inria.fr/frs/download.php/37186/knem-1.1.3.tar.gz" + list_url = "http://knem.gforge.inria.fr/download" + + maintainers = ['skosukhin'] + + version('1.1.3', sha256='50d3c4a20c140108b8ce47aaafd0ade0927d6f507e1b5cc690dd6bddeef30f60') + + variant('hwloc', default=True, + description='Enable hwloc in the user-space tools') + + depends_on('hwloc', when='+hwloc') + depends_on('pkgconfig', type='build', when='+hwloc') + + # The support for hwloc was added in 0.9.1: + conflicts('+hwloc', when='@:0.9.0') + + # Ideally, we should list all non-Linux-based platforms here: + conflicts('platform=darwin') + + # All compilers except for gcc are in conflict: + for __compiler in spack.compilers.supported_compilers(): + if __compiler != 'gcc': + 
conflicts('%{0}'.format(__compiler), + msg='Linux kernel module must be compiled with gcc') + + @run_before('build') + def override_kernel_compiler(self): + # Override the compiler for kernel module source files. We need + # this additional argument for all installation phases. + make.add_default_arg('CC={0}'.format(spack_cc)) + + def configure_args(self): + return self.enable_or_disable('hwloc') diff --git a/var/spack/repos/builtin/packages/kokkos-kernels/makefile.patch b/var/spack/repos/builtin/packages/kokkos-kernels-legacy/makefile.patch similarity index 100% rename from var/spack/repos/builtin/packages/kokkos-kernels/makefile.patch rename to var/spack/repos/builtin/packages/kokkos-kernels-legacy/makefile.patch diff --git a/var/spack/repos/builtin/packages/kokkos-kernels-legacy/package.py b/var/spack/repos/builtin/packages/kokkos-kernels-legacy/package.py new file mode 100644 index 00000000000..8507f90bf4c --- /dev/null +++ b/var/spack/repos/builtin/packages/kokkos-kernels-legacy/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class KokkosKernelsLegacy(MakefilePackage): + """Kokkos C++ Performance Portability Programming EcoSystem: Math Kernels - + Provides BLAS, Sparse BLAS and Graph Kernels.""" + + homepage = "https://github.com/kokkos/kokkos-kernels" + url = "https://github.com/kokkos/kokkos-kernels/archive/2.7.00.tar.gz" + + version('2.7.00', sha256='adf4af44eadbdfbeb9ec69dd5fae4e2852bd1fbe4a69213efd199e49f4098254') + version('2.6.00', sha256='14ebf806f66b9ca73949a478b8d959be7fa1165a640935760a724d7cc0a66335') + version('2.5.00', sha256='2c2289da3a41dafd97726e90507debafbb9f5e49ca5b0f5c8d1e044a5796f000') + + # make sure kokkos kernels version matches kokkos + depends_on('kokkos-legacy@2.5.00', when='@2.5.00') + depends_on('kokkos-legacy@2.6.00', when='@2.6.00') + depends_on('kokkos-legacy@2.7.00', when='@2.7.00') + + patch('makefile.patch') + + def edit(self, spec, prefix): + makefile = FileFilter("src/Makefile") + makefile.filter('CXX = .*', 'CXX = ' + env['CXX']) + + def build(self, spec, prefix): + with working_dir('build', create=True): + makefile_path = '%s%s' % (self.stage.source_path, '/src/Makefile') + copy(makefile_path, 'Makefile') + make_args = [ + 'KOKKOSKERNELS_INSTALL_PATH=%s' % prefix, + 'KOKKOSKERNELS_PATH=%s' % self.stage.source_path, + 'KOKKOS_PATH=%s' % spec['kokkos'].prefix + ] + + make('build', *make_args) + + def install(self, spec, prefix): + with working_dir('build', create=False): + make_args = [ + 'KOKKOSKERNELS_INSTALL_PATH=%s' % prefix, + 'KOKKOSKERNELS_PATH=%s' % self.stage.source_path, + 'KOKKOS_PATH=%s' % spec['kokkos'].prefix + ] + make('install', *make_args) diff --git a/var/spack/repos/builtin/packages/kokkos-kernels/package.py b/var/spack/repos/builtin/packages/kokkos-kernels/package.py index 614fa54196a..1613c204b80 100644 --- a/var/spack/repos/builtin/packages/kokkos-kernels/package.py +++ b/var/spack/repos/builtin/packages/kokkos-kernels/package.py @@ -2,52 +2,140 @@ # 
Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - from spack import * -class KokkosKernels(MakefilePackage): - """Kokkos C++ Performance Portability Programming EcoSystem: Math Kernels - - Provides BLAS, Sparse BLAS and Graph Kernels.""" +class KokkosKernels(CMakePackage, CudaPackage): + """Kokkos Kernels provides math kernels, often BLAS or LAPACK + for small matrices, that can be used in larger Kokkos parallel routines""" homepage = "https://github.com/kokkos/kokkos-kernels" - url = "https://github.com/kokkos/kokkos-kernels/archive/2.7.00.tar.gz" + git = "https://github.com/kokkos/kokkos-kernels.git" + url = "https://github.com/kokkos/kokkos-kernels/archive/3.1.00.tar.gz" - version('2.7.00', sha256='adf4af44eadbdfbeb9ec69dd5fae4e2852bd1fbe4a69213efd199e49f4098254') - version('2.6.00', sha256='14ebf806f66b9ca73949a478b8d959be7fa1165a640935760a724d7cc0a66335') - version('2.5.00', sha256='2c2289da3a41dafd97726e90507debafbb9f5e49ca5b0f5c8d1e044a5796f000') - version('develop', git='https://github.com/kokkos/kokkos-kernels', - branch='develop') + version('develop', branch='develop') + version('master', branch='master') + version('3.1.00', sha256="27fea241ae92f41bd5b070b1a590ba3a56a06aca750207a98bea2f64a4a40c89") + version('3.0.00', sha256="e4b832aed3f8e785de24298f312af71217a26067aea2de51531e8c1e597ef0e6") - # make sure kokkos kernels version matches kokkos - depends_on('kokkos@2.5.00', when='@2.5.00') - depends_on('kokkos@2.6.00', when='@2.6.00') - depends_on('kokkos@2.7.00', when='@2.7.00') - depends_on('kokkos@develop', when='@develop') + depends_on("kokkos") + depends_on("kokkos@develop", when="@develop") + depends_on("cmake@3.10:", type='build') - patch('makefile.patch') + backends = { + 'serial': (False, "enable Serial backend (default)"), + 'cuda': (False, "enable Cuda backend"), + 'openmp': (False, "enable OpenMP backend"), + } - def edit(self, spec, prefix): - makefile = 
FileFilter("src/Makefile") - makefile.filter('CXX = .*', 'CXX = ' + env['CXX']) + for backend in backends: + deflt, descr = backends[backend] + variant(backend.lower(), default=deflt, description=descr) + depends_on("kokkos+%s" % backend.lower(), when="+%s" % backend.lower()) - def build(self, spec, prefix): - with working_dir('build', create=True): - makefile_path = '%s%s' % (self.stage.source_path, '/src/Makefile') - copy(makefile_path, 'Makefile') - make_args = [ - 'KOKKOSKERNELS_INSTALL_PATH=%s' % prefix, - 'KOKKOSKERNELS_PATH=%s' % self.stage.source_path, - 'KOKKOS_PATH=%s' % spec['kokkos'].prefix - ] + space_etis = { + "execspace_cuda": ('auto', "", "cuda"), + "execspace_openmp": ('auto', "", "openmp"), + "execspace_threads": ('auto', "", "pthread"), + "execspace_serial": ('auto', "", "serial"), + "memspace_cudauvmspace": ('auto', "", "cuda"), + "memspace_cudaspace": ('auto', "", "cuda"), + } + for eti in space_etis: + deflt, descr, backend_required = space_etis[eti] + variant(eti, default=deflt, description=descr) + depends_on("kokkos+%s" % backend_required, when="+%s" % eti) - make('build', *make_args) + numeric_etis = { + "ordinals": ("int", "ORDINAL_", # default, cmake name + ["int", "int64_t"]), # allowed values + "offsets": ("int,size_t", "OFFSET_", + ["int", "size_t"]), + "layouts": ("left", "LAYOUT", + ["left", "right"]), + "scalars": ("double", "", + ["float", "double", "complex_float", "complex_double"]) + } + for eti in numeric_etis: + deflt, cmake_name, vals = numeric_etis[eti] + variant(eti, default=deflt, values=vals, multi=True) - def install(self, spec, prefix): - with working_dir('build', create=False): - make_args = [ - 'KOKKOSKERNELS_INSTALL_PATH=%s' % prefix, - 'KOKKOSKERNELS_PATH=%s' % self.stage.source_path, - 'KOKKOS_PATH=%s' % spec['kokkos'].prefix - ] - make('install', *make_args) + tpls = { + # variant name #deflt #spack name #root var name #docstring + "blas": (False, "blas", "BLAS", "Link to system BLAS"), + "lapack": (False, 
"lapack", "LAPACK", "Link to system LAPACK"), + "mkl": (False, "mkl", "MKL", "Link to system MKL"), + "cublas": (False, "cuda", None, "Link to CUDA BLAS library"), + "cusparse": (False, "cuda", None, "Link to CUDA sparse library"), + "superlu": (False, "superlu", "SUPERLU", "Link to SuperLU library"), + "cblas": (False, "cblas", "CBLAS", "Link to CBLAS library"), + "lapacke": (False, "clapack", "LAPACKE", "Link to LAPACKE library"), + } + + for tpl in tpls: + deflt, spackname, rootname, descr = tpls[tpl] + variant(tpl, default=deflt, description=descr) + depends_on(spackname, when="+%s" % tpl) + + def cmake_args(self): + spec = self.spec + options = [] + + isdiy = "+diy" in spec + if isdiy: + options.append("-DSpack_WORKAROUND=On") + + options.append("-DKokkos_ROOT=%s" % spec["kokkos"].prefix) + # Compiler weirdness due to nvcc_wrapper + options.append("-DCMAKE_CXX_COMPILER=%s" % spec["kokkos"].kokkos_cxx) + + if self.run_tests: + options.append("-DKokkosKernels_ENABLE_TESTS=ON") + + for tpl in self.tpls: + on_flag = "+%s" % tpl + off_flag = "~%s" % tpl + dflt, spackname, rootname, descr = self.tpls[tpl] + if on_flag in self.spec: + options.append("-DKokkosKernels_ENABLE_TPL_%s=ON" % + tpl.upper()) + if rootname: + options.append("-D%s_ROOT=%s" % + (rootname, spec[spackname].prefix)) + else: + pass # this should get picked up automatically, we hope + elif off_flag in self.spec: + options.append( + "-DKokkosKernels_ENABLE_TPL_%s=OFF" % tpl.upper()) + + for eti in self.numeric_etis: + deflt, cmake_name, vals = self.numeric_etis[eti] + for val in vals: + keyval = "%s=%s" % (eti, val) + cmake_option = "KokkosKernels_INST_%s%s" % ( + cmake_name.upper(), val.upper()) + if keyval in spec: + options.append("-D%s=ON" % cmake_option) + else: + options.append("-D%s=OFF" % cmake_option) + + for eti in self.space_etis: + deflt, descr, _ = self.space_etis[eti] + if deflt == "auto": + value = spec.variants[eti].value + # spack does these as strings, not reg booleans + if 
str(value) == "True": + options.append("-DKokkosKernels_INST_%s=ON" % eti.upper()) + elif str(value) == "False": + options.append("-DKokkosKernels_INST_%s=OFF" % eti.upper()) + else: + pass # don't pass anything, let CMake decide + else: # simple option + on_flag = "+%s" % eti + off_flag = "~%s" % eti + if on_flag in self.spec: + options.append("-DKokkosKernels_INST_%s=ON" % eti.upper()) + elif off_flag in self.spec: + options.append("-DKokkosKernels_INST_%s=OFF" % eti.upper()) + + return options diff --git a/var/spack/repos/builtin/packages/kokkos-legacy/package.py b/var/spack/repos/builtin/packages/kokkos-legacy/package.py new file mode 100644 index 00000000000..2adfa0c9b5b --- /dev/null +++ b/var/spack/repos/builtin/packages/kokkos-legacy/package.py @@ -0,0 +1,229 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class KokkosLegacy(Package): + """Kokkos implements a programming model in C++ for writing performance + portable applications targeting all major HPC platforms.""" + + homepage = "https://github.com/kokkos/kokkos" + url = "https://github.com/kokkos/kokkos/archive/2.03.00.tar.gz" + git = "https://github.com/kokkos/kokkos.git" + + version('2.9.00', sha256='e0621197791ed3a381b4f02c78fa529f3cff3abb74d52157b4add17e8aa04bc4') + version('2.8.00', sha256='1c72661f2d770517bff98837001b42b9c677d1df29f7493a1d7c008549aff630') + version('2.7.24', sha256='a308a80ea1488f4c18884b828ce7ae9f5210b9a6b2f61b208d875084d8da8cb0') + version('2.7.00', sha256='01595996e612ef7410aa42fa265a23101cfe1b6993fa9810ca844db5c89ad765') + version('2.5.00', sha256='ea232594bf746abb99ae2aafaeef5d07adc089968010a62a88aaa892106d9476') + version('2.04.11', sha256='f2680aee0169f6cbbec38410f9c80bf8a160435f6a07769c1e9112da8b9349a0') + version('2.04.04', 
sha256='5bac8ddc2fac9bc6e01dd40f92ca6cbbb346a25deca5be2fec71acf712d0d0c7') + version('2.04.00', sha256='b04658d368986df207662a7a37c1ad974c321447bc2c2b5b696d7e9ee4481f34') + version('2.03.13', sha256='002748bdd0319d5ab82606cf92dc210fc1c05d0607a2e1d5538f60512b029056') + version('2.03.05', sha256='b18ddaa1496130ff3f675ea9ddbc6df9cdf378d53edf96df89e70ff189e10e1d') + version('2.03.00', sha256='722bea558d8986efee765ac912febb3c1ce289a8e9bdfef77cd0145df0ea8a3d') + version('2.02.15', sha256='6b4a7f189f0341f378f950f3c798f520d2e473b13435b137ff3b666e799a076d') + version('2.02.07', sha256='7b4ac81021d6868f4eb8e2a1cb92ba76bad9c3f197403b8b1eac0f11c983247c') + + variant('debug', default=False, description="Build debug version of Kokkos") + + variant('serial', default=True, description="enable Serial backend (default)") + variant('pthreads', default=False, description="enable Pthreads backend") + variant('qthreads', default=False, description="enable Qthreads backend") + variant('cuda', default=False, description="enable Cuda backend") + variant('openmp', default=False, description="enable OpenMP backend") + + # Compilation options + variant('pic', default=False, + description="enable position independent code (-fPIC flag)") + + # Kokkos options + variant('aggressive_vectorization', default=False, + description="set aggressive_vectorization Kokkos option") + variant('disable_profiling', default=False, + description="set disable_profiling Kokkos option") + variant('disable_dualview_modify_check', default=False, + description="set disable_dualview_modify_check Kokkos option") + variant('enable_profile_load_print', default=False, + description="set enable_profile_load_print Kokkos option") + variant('compiler_warnings', default=False, + description="set compiler_warnings Kokkos option") + variant('disable_deprecated_code', default=False, + description="set disable_deprecated_code Kokkos option") + variant('enable_eti', default=False, + description="set enable_eti Kokkos option") + + # 
CUDA options + variant('force_uvm', default=False, + description="set force_uvm Kokkos CUDA option") + variant('use_ldg', default=False, + description="set use_ldg Kokkos CUDA option") + variant('rdc', default=False, + description="set rdc Kokkos CUDA option") + variant('enable_lambda', default=False, + description="set enable_lambda Kokkos CUDA option") + + host_values = ('AMDAVX', 'ARMv80', 'ARMv81', 'ARMv8-ThunderX', + 'Power7', 'Power8', 'Power9', + 'WSM', 'SNB', 'HSW', 'BDW', 'SKX', + 'KNC', 'KNL') + + gpu_values = ('Kepler30', 'Kepler32', 'Kepler35', 'Kepler37', + 'Maxwell50', 'Maxwell52', 'Maxwell53', + 'Pascal60', 'Pascal61', + 'Volta70', 'Volta72') + + # C++ standard variant + variant('cxxstd', default='none', + values=('c++11', 'c++14', 'c++17', 'c++1y', 'c++1z', 'c++2a'), + multi=False, + description='set cxxstandard Kokkos option') + + # Host architecture variant + variant( + 'host_arch', + default='none', + values=host_values, + description='Set the host architecture to use' + ) + + # GPU architecture variant + variant( + 'gpu_arch', + default='none', + values=gpu_values, + description='Set the GPU architecture to use' + ) + + # Checks on Kokkos version and Kokkos options + conflicts('+aggressive_vectorization', when='@:2.0.99',) + conflicts('+disable_profiling', when='@:2.0.99',) + conflicts('+disable_dualview_modify_check', when='@:2.03.04',) + conflicts('+enable_profile_load_print', when='@:2.03.04',) + conflicts('+compiler_warnings', when='@:2.03.14',) + conflicts('+disable_deprecated_code', when='@:2.5.99',) + conflicts('+enable_eti', when='@:2.6.99',) + + # Check that we haven't specified a gpu architecture + # without specifying CUDA + for p in gpu_values: + conflicts('gpu_arch={0}'.format(p), when='~cuda', + msg='Must specify CUDA backend to use a GPU architecture.') + + # Check that we haven't specified a Kokkos CUDA option + # without specifying CUDA + conflicts('+force_uvm', when='~cuda', + msg='Must enable CUDA to use force_uvm.') + 
conflicts('+use_ldg', when='~cuda', + msg='Must enable CUDA to use use_ldg.') + conflicts('+rdc', when='~cuda', + msg='Must enable CUDA to use rdc.') + conflicts('+enable_lambda', when='~cuda', + msg='Must enable CUDA to use enable_lambda.') + + # Check that we haven't asked for a GPU architecture that + # the revision of kokkos does not support + conflicts('gpu_arch=Volta70', when='@:2.5.99') + conflicts('gpu_arch=Volta72', when='@:2.5.99') + + # conflicts on kokkos version and cuda enabled + # see kokkos issue #1296 + # https://github.com/kokkos/kokkos/issues/1296 + conflicts('+cuda', when='@2.5.00:2.7.00', + msg='Kokkos build system has issue (#1296) when CUDA enabled' + ' in version 2.5.00 through 2.7.00.') + + # Specify that v1.x is required as v2.x has API changes + depends_on('hwloc@:1') + depends_on('qthreads', when='+qthreads') + depends_on('cuda', when='+cuda') + + # generate_makefile.bash calls cmake + depends_on('cmake@3.10:', type='build') + + def install(self, spec, prefix): + generate = which(join_path(self.stage.source_path, + 'generate_makefile.bash')) + with working_dir('build', create=True): + g_args = [ + '--prefix=%s' % prefix, + '--with-hwloc=%s' % spec['hwloc'].prefix, + ] + arch_args = [] + kokkos_options_args = [] + cuda_options_args = [] + + # PIC + if '+pic' in spec: + g_args.append('--cxxflags=-fPIC') + + # C++ standard + cxxstandard = spec.variants['cxxstd'].value + if cxxstandard != 'none': + g_args.append('--cxxstandard=%s' % cxxstandard) + + # Build Debug + if '+debug' in spec: + g_args.append('--debug') + + # Backends + if '+serial' in spec: + g_args.append('--with-serial') + if '+openmp' in spec: + g_args.append('--with-openmp') + if '+pthreads' in spec: + g_args.append('--with-pthread') + if '+qthreads' in spec: + g_args.append('--with-qthreads=%s' % spec['qthreads'].prefix) + if '+cuda' in spec: + g_args.append('--with-cuda=%s' % spec['cuda'].prefix) + # Host architectures + host_arch = spec.variants['host_arch'].value + # GPU 
architectures + gpu_arch = spec.variants['gpu_arch'].value + if host_arch != 'none': + arch_args.append(host_arch) + if gpu_arch != 'none': + arch_args.append(gpu_arch) + # Combined architecture flags + if arch_args: + g_args.append('--arch={0}'.format(','.join(arch_args))) + + # CUDA options + if '+force_uvm' in spec: + cuda_options_args.append('force_uvm') + if '+use_ldg' in spec: + cuda_options_args.append('use_ldg') + if '+rdc' in spec: + cuda_options_args.append('rdc') + if '+enable_lambda' in spec: + cuda_options_args.append('enable_lambda') + if cuda_options_args: + g_args.append('--with-cuda-options={0}' + .format(','.join(cuda_options_args))) + + # Kokkos options + if '+aggressive_vectorization' in spec: + kokkos_options_args.append('aggressive_vectorization') + if '+disable_profiling' in spec: + kokkos_options_args.append('disable_profiling') + if '+disable_dualview_modify_check' in spec: + kokkos_options_args.append('disable_dualview_modify_check') + if '+enable_profile_load_print' in spec: + kokkos_options_args.append('enable_profile_load_print') + if '+compiler_warnings' in spec: + kokkos_options_args.append('compiler_warnings') + if '+disable_deprecated_code' in spec: + kokkos_options_args.append('disable_deprecated_code') + if '+enable_eti' in spec: + kokkos_options_args.append('enable_eti') + if kokkos_options_args: + g_args.append('--with-options={0}' + .format(','.join(kokkos_options_args))) + + generate(*g_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py new file mode 100644 index 00000000000..8bb09f2759f --- /dev/null +++ b/var/spack/repos/builtin/packages/kokkos-nvcc-wrapper/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack import * +import os + + +class KokkosNvccWrapper(CMakePackage): + """The NVCC wrapper provides a wrapper around NVCC to make it a + 'full' C++ compiler that accepts all flags""" + + homepage = "https://github.com/kokkos/kokkos" + git = "https://github.com/kokkos/nvcc_wrapper.git" + + version('3.1', url='https://github.com/kokkos/nvcc_wrapper/archive/3.1.tar.gz', + sha256="5171530763bb2845aa70651f3a8e5dc95b6eed76379323fde3205f97ed28ec8c", + default=True) + version('master', branch='master') + + variant("mpi", default=True, + description="use with MPI as the underlying compiler") + depends_on("cuda") + depends_on("mpi", when="+mpi") + depends_on("cmake@3.10:", type='build') + + def cmake_args(self): + options = [ + "-DCMAKE_CXX_COMPILER=%s" % os.environ["SPACK_CXX"], + "-DCMAKE_CUDA_HOST_COMPILER=%s" % os.environ["SPACK_CXX"], + "-DCMAKE_C_COMPILER=%s" % os.environ["SPACK_CC"], + ] + return options + + def setup_dependent_build_environment(self, env, dependent_spec): + wrapper = join_path(self.prefix.bin, "nvcc_wrapper") + env.set('MPICH_CXX', wrapper) + env.set('OMPI_CXX', wrapper) + env.set('KOKKOS_CXX', spack_cxx) + + def setup_dependent_package(self, module, dependent_spec): + wrapper = join_path(self.prefix.bin, "nvcc_wrapper") + self.spec.kokkos_cxx = wrapper diff --git a/var/spack/repos/builtin/packages/kokkos/package.py b/var/spack/repos/builtin/packages/kokkos/package.py index 4b8e02e4955..b5277e40a8e 100644 --- a/var/spack/repos/builtin/packages/kokkos/package.py +++ b/var/spack/repos/builtin/packages/kokkos/package.py @@ -2,226 +2,252 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) - from spack import * -class Kokkos(Package): +class Kokkos(CMakePackage, CudaPackage): """Kokkos implements a programming model in C++ for writing performance portable applications targeting all major HPC platforms.""" homepage = "https://github.com/kokkos/kokkos" - url = "https://github.com/kokkos/kokkos/archive/2.03.00.tar.gz" - git = "https://github.com/kokkos/kokkos.git" + git = "https://github.com/kokkos/kokkos.git" + url = "https://github.com/kokkos/kokkos/archive/3.1.01.tar.gz" version('develop', branch='develop') - version('2.9.00', sha256='e0621197791ed3a381b4f02c78fa529f3cff3abb74d52157b4add17e8aa04bc4') - version('2.8.00', sha256='1c72661f2d770517bff98837001b42b9c677d1df29f7493a1d7c008549aff630') - version('2.7.24', sha256='a308a80ea1488f4c18884b828ce7ae9f5210b9a6b2f61b208d875084d8da8cb0') - version('2.7.00', sha256='01595996e612ef7410aa42fa265a23101cfe1b6993fa9810ca844db5c89ad765') - version('2.5.00', sha256='ea232594bf746abb99ae2aafaeef5d07adc089968010a62a88aaa892106d9476') - version('2.04.11', sha256='f2680aee0169f6cbbec38410f9c80bf8a160435f6a07769c1e9112da8b9349a0') - version('2.04.04', sha256='5bac8ddc2fac9bc6e01dd40f92ca6cbbb346a25deca5be2fec71acf712d0d0c7') - version('2.04.00', sha256='b04658d368986df207662a7a37c1ad974c321447bc2c2b5b696d7e9ee4481f34') - version('2.03.13', sha256='002748bdd0319d5ab82606cf92dc210fc1c05d0607a2e1d5538f60512b029056') - version('2.03.05', sha256='b18ddaa1496130ff3f675ea9ddbc6df9cdf378d53edf96df89e70ff189e10e1d') - version('2.03.00', sha256='722bea558d8986efee765ac912febb3c1ce289a8e9bdfef77cd0145df0ea8a3d') - version('2.02.15', sha256='6b4a7f189f0341f378f950f3c798f520d2e473b13435b137ff3b666e799a076d') - version('2.02.07', sha256='7b4ac81021d6868f4eb8e2a1cb92ba76bad9c3f197403b8b1eac0f11c983247c') + version('master', branch='master') + version('3.1.01', sha256='ff5024ebe8570887d00246e2793667e0d796b08c77a8227fe271127d36eec9dd') + version('3.1.00', 
sha256="b935c9b780e7330bcb80809992caa2b66fd387e3a1c261c955d622dae857d878") + version('3.0.00', sha256="c00613d0194a4fbd0726719bbed8b0404ed06275f310189b3493f5739042a92b") - variant('debug', default=False, description="Build debug version of Kokkos") + depends_on("cmake@3.10:", type='build') - variant('serial', default=True, description="enable Serial backend (default)") - variant('pthreads', default=False, description="enable Pthreads backend") - variant('qthreads', default=False, description="enable Qthreads backend") - variant('cuda', default=False, description="enable Cuda backend") - variant('openmp', default=False, description="enable OpenMP backend") + devices_variants = { + 'cuda': [False, 'Whether to build CUDA backend'], + 'openmp': [False, 'Whether to build OpenMP backend'], + 'pthread': [False, 'Whether to build Pthread backend'], + 'serial': [True, 'Whether to build serial backend'], + 'hip': [False, 'Whether to build HIP backend'], + } + conflicts("+hip", when="@:3.0") - # Compilation options - variant('pic', default=False, - description="enable position independent code (-fPIC flag)") + tpls_variants = { + 'hpx': [False, 'Whether to enable the HPX library'], + 'hwloc': [False, 'Whether to enable the HWLOC library'], + 'numactl': [False, 'Whether to enable the LIBNUMA library'], + 'memkind': [False, 'Whether to enable the MEMKIND library'], + } - # Kokkos options - variant('aggressive_vectorization', default=False, - description="set aggressive_vectorization Kokkos option") - variant('disable_profiling', default=False, - description="set disable_profiling Kokkos option") - variant('disable_dualview_modify_check', default=False, - description="set disable_dualview_modify_check Kokkos option") - variant('enable_profile_load_print', default=False, - description="set enable_profile_load_print Kokkos option") - variant('compiler_warnings', default=False, - description="set compiler_warnings Kokkos option") - variant('disable_deprecated_code', default=False, 
- description="set disable_deprecated_code Kokkos option") - variant('enable_eti', default=False, - description="set enable_eti Kokkos option") + options_variants = { + 'aggressive_vectorization': [False, + 'Aggressively vectorize loops'], + 'compiler_warnings': [False, + 'Print all compiler warnings'], + 'cuda_lambda': [False, + 'Activate experimental lambda features'], + 'cuda_ldg_intrinsic': [False, + 'Use CUDA LDG intrinsics'], + 'cuda_relocatable_device_code': [False, + 'Enable RDC for CUDA'], + 'cuda_uvm': [False, + 'Enable unified virtual memory (UVM) for CUDA'], + 'debug': [False, + 'Activate extra debug features - may increase compiletimes'], + 'debug_bounds_check': [False, + 'Use bounds checking - will increase runtime'], + 'debug_dualview_modify_check': [False, 'Debug check on dual views'], + 'deprecated_code': [False, 'Whether to enable deprecated code'], + 'examples': [False, 'Whether to build examples'], + 'explicit_instantiation': [False, + 'Explicitly instantiate template types'], + 'hpx_async_dispatch': [False, + 'Whether HPX supports asynchronous dispatch'], + 'profiling': [True, + 'Create bindings for profiling tools'], + 'profiling_load_print': [False, + 'Print which profiling tools got loaded'], + 'qthread': [False, 'Enable the QTHREAD library'], + 'tests': [False, 'Build for tests'], + } - # CUDA options - variant('force_uvm', default=False, - description="set force_uvm Kokkos CUDA option") - variant('use_ldg', default=False, - description="set use_ldg Kokkos CUDA option") - variant('rdc', default=False, - description="set rdc Kokkos CUDA option") - variant('enable_lambda', default=False, - description="set enable_lambda Kokkos CUDA option") + amd_gpu_arches = [ + 'fiji', + 'gfx901', + 'vega900', + 'vega906', + ] + variant("amd_gpu_arch", default='none', values=amd_gpu_arches, + description="AMD GPU architecture") + conflicts("+hip", when="amd_gpu_arch=none") - host_values = ('AMDAVX', 'ARMv80', 'ARMv81', 'ARMv8-ThunderX', - 'Power7', 
'Power8', 'Power9', - 'WSM', 'SNB', 'HSW', 'BDW', 'SKX', - 'KNC', 'KNL') + spack_micro_arch_map = { + "aarch64": "", + "arm": "", + "ppc": "", + "ppc64": "", + "ppc64le": "", + "ppcle": "", + "sparc": None, + "sparc64": None, + "x86": "", + "x86_64": "", + "thunderx2": "THUNDERX2", + "k10": None, + "zen": "ZEN", + "bulldozer": "", + "piledriver": "", + "zen2": "ZEN2", + "steamroller": "KAVERI", + "excavator": "CARIZO", + "a64fx": "", + "power7": "POWER7", + "power8": "POWER8", + "power9": "POWER9", + "power8le": "POWER8", + "power9le": "POWER9", + "i686": None, + "pentium2": None, + "pentium3": None, + "pentium4": None, + "prescott": None, + "nocona": None, + "nehalem": None, + "sandybridge": "SNB", + "haswell": "HSW", + "mic_knl": "KNL", + "cannonlake": "SKX", + "cascadelake": "SKX", + "westmere": "WSM", + "core2": None, + "ivybridge": "SNB", + "broadwell": "BDW", + # @AndrewGaspar: Kokkos does not have an arch for plain-skylake - only + # for Skylake-X (i.e. Xeon). For now, I'm mapping this to Broadwell + # until Kokkos learns to optimize for SkyLake without the AVX-512 + # extensions. SkyLake with AVX-512 will still be optimized using the + # separate `skylake_avx512` arch. 
+ "skylake": "BDW", + "icelake": "SKX", + "skylake_avx512": "SKX", + } - gpu_values = ('Kepler30', 'Kepler32', 'Kepler35', 'Kepler37', - 'Maxwell50', 'Maxwell52', 'Maxwell53', - 'Pascal60', 'Pascal61', - 'Volta70', 'Volta72') + spack_cuda_arch_map = { + "30": 'kepler30', + "32": 'kepler32', + "35": 'kepler35', + "37": 'kepler37', + "50": 'maxwell50', + "52": 'maxwell52', + "53": 'maxwell53', + "60": 'pascal60', + "61": 'pascal61', + "70": 'volta70', + "72": 'volta72', + "75": 'turing75', + } + cuda_arches = spack_cuda_arch_map.values() + conflicts("+cuda", when="cuda_arch=none") - # C++ standard variant - variant('cxxstd', default='none', - values=('c++11', 'c++14', 'c++17', 'c++1y', 'c++1z', 'c++2a'), - multi=False, - description='set cxxstandard Kokkos option') + devices_values = list(devices_variants.keys()) + for dev in devices_variants: + dflt, desc = devices_variants[dev] + variant(dev, default=dflt, description=desc) - # Host architecture variant - variant( - 'host_arch', - default='none', - values=host_values, - description='Set the host architecture to use' - ) + options_values = list(options_variants.keys()) + for opt in options_values: + if "cuda" in opt: + conflicts('+%s' % opt, when="~cuda", + msg="Must enable CUDA to use %s" % opt) + dflt, desc = options_variants[opt] + variant(opt, default=dflt, description=desc) - # GPU architecture variant - variant( - 'gpu_arch', - default='none', - values=gpu_values, - description='Set the GPU architecture to use' - ) + tpls_values = list(tpls_variants.keys()) + for tpl in tpls_values: + dflt, desc = tpls_variants[tpl] + variant(tpl, default=dflt, description=desc) + depends_on(tpl, when="+%s" % tpl) - # Checks on Kokkos version and Kokkos options - conflicts('+aggressive_vectorization', when='@:2.0.99',) - conflicts('+disable_profiling', when='@:2.0.99',) - conflicts('+disable_dualview_modify_check', when='@:2.03.04',) - conflicts('+enable_profile_load_print', when='@:2.03.04',) - conflicts('+compiler_warnings', 
when='@:2.03.14',) - conflicts('+disable_deprecated_code', when='@:2.5.99',) - conflicts('+enable_eti', when='@:2.6.99',) + variant("wrapper", default=False, + description="Use nvcc-wrapper for CUDA build") + depends_on("kokkos-nvcc-wrapper", when="+wrapper") + conflicts("+wrapper", when="~cuda") - # Check that we haven't specified a gpu architecture - # without specifying CUDA - for p in gpu_values: - conflicts('gpu_arch={0}'.format(p), when='~cuda', - msg='Must specify CUDA backend to use a GPU architecture.') + variant("std", default="11", values=["11", "14", "17", "20"], multi=False) + # nvcc does not currently work with C++17 or C++20 + conflicts("+cuda", when="std=17") + conflicts("+cuda", when="std=20") - # Check that we haven't specified a Kokkos CUDA option - # without specifying CUDA - conflicts('+force_uvm', when='~cuda', - msg='Must enable CUDA to use force_uvm.') - conflicts('+use_ldg', when='~cuda', - msg='Must enable CUDA to use use_ldg.') - conflicts('+rdc', when='~cuda', - msg='Must enable CUDA to use rdc.') - conflicts('+enable_lambda', when='~cuda', - msg='Must enable CUDA to use enable_lambda.') + variant('shared', default=True, description='Build shared libraries') - # Check that we haven't asked for a GPU architecture that - # the revision of kokkos does not support - conflicts('gpu_arch=Volta70', when='@:2.5.99') - conflicts('gpu_arch=Volta72', when='@:2.5.99') + def append_args(self, cmake_prefix, cmake_options, spack_options): + for opt in cmake_options: + enablestr = "+%s" % opt + optuc = opt.upper() + optname = "Kokkos_%s_%s" % (cmake_prefix, optuc) + option = None + if enablestr in self.spec: + option = "-D%s=ON" % optname + else: + # explicitly turn off if not enabled + # this avoids any confusing implicit defaults + # that come from the CMake + option = "-D%s=OFF" % optname + if option not in spack_options: + spack_options.append(option) - # conflicts on kokkos version and cuda enabled - # see kokkos issue #1296 - # 
https://github.com/kokkos/kokkos/issues/1296 - conflicts('+cuda', when='@2.5.00:2.7.00', - msg='Kokkos build system has issue (#1296) when CUDA enabled' - ' in version 2.5.00 through 2.7.00.') + def setup_dependent_package(self, module, dependent_spec): + try: + self.spec.kokkos_cxx = self.spec["kokkos-nvcc-wrapper"].kokkos_cxx + except Exception: + self.spec.kokkos_cxx = spack_cxx - # Specify that v1.x is required as v2.x has API changes - depends_on('hwloc@:1') - depends_on('qthreads', when='+qthreads') - depends_on('cuda', when='+cuda') + def cmake_args(self): + spec = self.spec + options = [] - def install(self, spec, prefix): - generate = which(join_path(self.stage.source_path, - 'generate_makefile.bash')) - with working_dir('build', create=True): - g_args = [ - '--prefix=%s' % prefix, - '--with-hwloc=%s' % spec['hwloc'].prefix, - ] - arch_args = [] - kokkos_options_args = [] - cuda_options_args = [] + isdiy = "+diy" in spec + if isdiy: + options.append("-DSpack_WORKAROUND=On") - # PIC - if '+pic' in spec: - g_args.append('--cxxflags=-fPIC') + spack_microarches = [] + if "+cuda" in spec: + # this is a list + for cuda_arch in spec.variants["cuda_arch"].value: + if not cuda_arch == "none": + kokkos_arch_name = self.spack_cuda_arch_map[cuda_arch] + spack_microarches.append(kokkos_arch_name) + kokkos_microarch_name = self.spack_micro_arch_map[spec.target.name] + if kokkos_microarch_name: + spack_microarches.append(kokkos_microarch_name) - # C++ standard - cxxstandard = spec.variants['cxxstd'].value - if cxxstandard != 'none': - g_args.append('--cxxstandard=%s' % cxxstandard) + for arch in self.amd_gpu_arches: + keyval = "amd_gpu_arch=%s" % arch + if keyval in spec: + spack_microarches.append(arch) - # Build Debug - if '+debug' in spec: - g_args.append('--debug') + for arch in spack_microarches: + options.append("-DKokkos_ARCH_%s=ON" % arch.upper()) - # Backends - if '+serial' in spec: - g_args.append('--with-serial') - if '+openmp' in spec: - 
g_args.append('--with-openmp') - if '+pthreads' in spec: - g_args.append('--with-pthread') - if '+qthreads' in spec: - g_args.append('--with-qthreads=%s' % spec['qthreads'].prefix) - if '+cuda' in spec: - g_args.append('--with-cuda=%s' % spec['cuda'].prefix) - # Host architectures - host_arch = spec.variants['host_arch'].value - # GPU architectures - gpu_arch = spec.variants['gpu_arch'].value - if host_arch != 'none': - arch_args.append(host_arch) - if gpu_arch != 'none': - arch_args.append(gpu_arch) - # Combined architecture flags - if arch_args: - g_args.append('--arch={0}'.format(','.join(arch_args))) + self.append_args("ENABLE", self.devices_values, options) + self.append_args("ENABLE", self.options_values, options) + self.append_args("ENABLE", self.tpls_values, options) - # CUDA options - if '+force_uvm' in spec: - cuda_options_args.append('force_uvm') - if '+use_ldg' in spec: - cuda_options_args.append('use_ldg') - if '+rdc' in spec: - cuda_options_args.append('rdc') - if '+enable_lambda' in spec: - cuda_options_args.append('enable_lambda') - if cuda_options_args: - g_args.append('--with-cuda-options={0}' - .format(','.join(cuda_options_args))) + for tpl in self.tpls_values: + var = "+%s" % tpl + if var in self.spec: + options.append("-D%s_DIR=%s" % (tpl, spec[tpl].prefix)) - # Kokkos options - if '+aggressive_vectorization' in spec: - kokkos_options_args.append('aggressive_vectorization') - if '+disable_profiling' in spec: - kokkos_options_args.append('disable_profiling') - if '+disable_dualview_modify_check' in spec: - kokkos_options_args.append('disable_dualview_modify_check') - if '+enable_profile_load_print' in spec: - kokkos_options_args.append('enable_profile_load_print') - if '+compiler_warnings' in spec: - kokkos_options_args.append('compiler_warnings') - if '+disable_deprecated_code' in spec: - kokkos_options_args.append('disable_deprecated_code') - if '+enable_eti' in spec: - kokkos_options_args.append('enable_eti') - if kokkos_options_args: - 
g_args.append('--with-options={0}' - .format(','.join(kokkos_options_args))) + # we do not need the compiler wrapper from Spack + # set the compiler explicitly (may be Spack wrapper or nvcc-wrapper) + try: + options.append("-DCMAKE_CXX_COMPILER=%s" % + self.spec["kokkos-nvcc-wrapper"].kokkos_cxx) + except Exception: + options.append("-DCMAKE_CXX_COMPILER=%s" % spack_cxx) - generate(*g_args) - make() - make('install') + # Set the C++ standard to use + options.append("-DKokkos_CXX_STANDARD=%s" % + self.spec.variants["std"].value) + + options.append('-DBUILD_SHARED_LIBS=%s' % ('+shared' in self.spec)) + + return options diff --git a/var/spack/repos/builtin/packages/kubernetes/package.py b/var/spack/repos/builtin/packages/kubernetes/package.py new file mode 100644 index 00000000000..5af5f6e67db --- /dev/null +++ b/var/spack/repos/builtin/packages/kubernetes/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Kubernetes(Package): + """Kubernetes is an open source system for managing containerized + applications across multiple hosts. 
It provides basic mechanisms + for deployment, maintenance, and scaling of applications.""" + + homepage = "https://kubernetes.io" + url = "https://github.com/kubernetes/kubernetes/archive/v1.19.0-alpha.0.tar.gz" + + version('1.18.1', sha256='33ca738f1f4e6ad453b80f231f71e62470b822f21d44dc5b8121b2964ae8e6f8') + version('1.18.0', sha256='6bd252b8b5401ad6f1fb34116cd5df59153beced3881b98464862a81c083f7ab') + version('1.17.4', sha256='b61a6eb3bd5251884f34853cc51aa31c6680e7e476268fe06eb33f3d95294f62') + + depends_on('go', type='build') + + def install(self, spec, prefix): + make() + install_tree('_output/bin', prefix.bin) diff --git a/var/spack/repos/builtin/packages/lammps/package.py b/var/spack/repos/builtin/packages/lammps/package.py index 9a6ef2e2b4d..b578016311a 100644 --- a/var/spack/repos/builtin/packages/lammps/package.py +++ b/var/spack/repos/builtin/packages/lammps/package.py @@ -21,6 +21,7 @@ class Lammps(CMakePackage, CudaPackage): tags = ['ecp', 'ecp-apps'] version('master', branch='master') + version('20200505', sha256='c49d77fd602d28ebd8cf10f7359b9fc4d14668c72039028ed7792453d416de73') version('20200303', sha256='9aa56dfb8673a06e6c88588505ec1dfc01dd94f9d60e719ed0c605e48cc06c58') version('20200227', sha256='1aabcf38bc72285797c710b648e906151a912c36b634a9c88ac383aacf85516e') version('20200218', sha256='73bcf146660804ced954f6a0a8dce937482677778d46018ca5a688127bf97211') @@ -55,12 +56,13 @@ def url_for_version(self, version): vdate.strftime("%d%b%Y").lstrip('0')) supported_packages = ['asphere', 'body', 'class2', 'colloid', 'compress', - 'coreshell', 'dipole', 'granular', 'kspace', 'latte', - 'manybody', 'mc', 'meam', 'misc', 'molecule', - 'mpiio', 'peri', 'poems', 'python', 'qeq', - 'replica', 'rigid', 'shock', 'snap', 'spin', 'srd', - 'user-atc', 'user-h5md', 'user-lb', 'user-misc', - 'user-netcdf', 'user-omp', 'user-reaxc', 'voronoi'] + 'coreshell', 'dipole', 'granular', 'kspace', + 'kokkos', 'latte', 'manybody', 'mc', 'meam', 'misc', + 'molecule', 'mpiio', 
'peri', 'poems', 'python', + 'qeq', 'replica', 'rigid', 'shock', 'snap', 'spin', + 'srd', 'user-atc', 'user-h5md', 'user-lb', + 'user-misc', 'user-netcdf', 'user-omp', 'user-reaxc', + 'voronoi'] for pkg in supported_packages: variant(pkg, default=False, @@ -69,8 +71,6 @@ def url_for_version(self, version): description='Build the liblammps in addition to the executable') variant('mpi', default=True, description='Build with mpi') - variant('kokkos', default=False, - description='Build with Kokkos accelerated styles') variant('jpeg', default=True, description='Build with jpeg support') variant('png', default=True, @@ -106,7 +106,8 @@ def url_for_version(self, version): depends_on('jpeg', when='+jpeg') depends_on('libpng', when='+png') depends_on('ffmpeg', when='+ffmpeg') - depends_on('kokkos', when='+kokkos') + depends_on('kokkos+deprecated_code+shared@3.0', when='@20200303+kokkos') + depends_on('kokkos+shared@3.1:', when='@20200505:+kokkos') conflicts('+cuda', when='+opencl') conflicts('+body', when='+poems@:20180628') @@ -117,7 +118,8 @@ def url_for_version(self, version): conflicts('+user-misc', when='~manybody') conflicts('+user-phonon', when='~kspace') conflicts('+user-misc', when='~manybody') - conflicts('%gcc@9:', when='+openmp') + conflicts('%gcc@9:', when='@:20200303+openmp') + conflicts('+kokkos', when='@:20200227') patch("lib.patch", when="@20170901") patch("660.patch", when="@20170922") @@ -148,7 +150,7 @@ def cmake_args(self): args.append('-DPKG_GPU=ON') args.append('-DGPU_API=cuda') cuda_arch = spec.variants['cuda_arch'].value - if cuda_arch is not None: + if cuda_arch != 'none': args.append('-DGPU_ARCH=sm_{0}'.format(cuda_arch[0])) args.append('-DCUDA_MPS_SUPPORT={0}'.format( 'ON' if '+cuda_mps' in spec else 'OFF')) @@ -176,5 +178,11 @@ def cmake_args(self): args.append('{0}=OFF'.format(opt)) if '+kspace' in spec: args.append('-DFFT=FFTW3') + if '+kokkos' in spec: + args.append('-DEXTERNAL_KOKKOS=ON') return args + + def setup_run_environment(self, 
env): + env.set('LAMMPS_POTENTIALS', + self.prefix.share.lammps.potentials) diff --git a/var/spack/repos/builtin/packages/lapackpp/package.py b/var/spack/repos/builtin/packages/lapackpp/package.py new file mode 100644 index 00000000000..8b51f233ddd --- /dev/null +++ b/var/spack/repos/builtin/packages/lapackpp/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lapackpp(CMakePackage): + """LAPACK++: C++ API for the Basic Linear Algebra Subroutines (University + of Tennessee)""" + + homepage = "https://bitbucket.org/icl/lapackpp" + hg = "https://bitbucket.org/icl/lapackpp" + maintainers = ['teonnik', 'Sely85'] + + version('develop', hg=hg, revision="7ffa486") + + variant('shared', default=True, + description='Build a shared version of the library') + + depends_on('blaspp') + + def cmake_args(self): + spec = self.spec + return [ + '-DBUILD_SHARED_LIBS:BOOL={0}'.format( + 'ON' if '+shared' in spec else 'OFF'), + '-DBUILD_LAPACKPP_TESTS:BOOL={0}'.format( + 'ON' if self.run_tests else 'OFF') + ] diff --git a/var/spack/repos/builtin/packages/legion/package.py b/var/spack/repos/builtin/packages/legion/package.py index 15effc6bea6..cac38dd0303 100644 --- a/var/spack/repos/builtin/packages/legion/package.py +++ b/var/spack/repos/builtin/packages/legion/package.py @@ -27,6 +27,7 @@ class Legion(CMakePackage): version('master', branch='master') version('ctrl-rep', branch='control_replication') + version('20.03.0', sha256='ae5feedb5ed9f357b56424b9d73cea4f224a61e291e022556f796d1ff24d1244') version('19.12.0', sha256='ea517638de7256723bb9c119796d4d9d4ef662c52d0151ad24af5288e5a72e7d') version('19.09.1', sha256='c507133fb9dce16b7fcccd7eb2933d13cce96ecf835da60a27c0f66840cabf51') version('19.09.0', sha256='a01c3e3c6698cafb64b77a66341cc06d039faed4fa31b764159f021b94ce13e8') diff 
--git a/var/spack/repos/builtin/packages/lhapdf/package.py b/var/spack/repos/builtin/packages/lhapdf/package.py new file mode 100644 index 00000000000..10a69aba755 --- /dev/null +++ b/var/spack/repos/builtin/packages/lhapdf/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lhapdf(AutotoolsPackage): + """LHAPDF is a general purpose C++ interpolator, + used for evaluating PDFs from discretised data files. """ + + homepage = "https://lhapdf.hepforge.org/" + url = "https://lhapdf.hepforge.org/downloads/?f=LHAPDF-6.2.3.tar.gz" + + version('6.2.3', sha256='d6e63addc56c57b6286dc43ffc56d901516f4779a93a0f1547e14b32cfd82dd1') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + + depends_on('python', type=('build', 'run')) + depends_on('py-cython', type='build') + depends_on('py-setuptools', type='build') + depends_on('boost', type='build') + depends_on('yaml-cpp', type='build', when='@:6.1.5') + + def configure_args(self): + args = ['--with-boost=' + self.spec['boost'].prefix, + 'FCFLAGS=-O3', 'CFLAGS=-O3', 'CXXFLAGS=-O3'] + + if self.spec.satisfies('@:6.1.5'): + args.append('--with-yaml-cpp=' + self.spec['yaml-cpp'].prefix) + return args diff --git a/var/spack/repos/builtin/packages/libaec/package.py b/var/spack/repos/builtin/packages/libaec/package.py index 5f12c653f1a..3ea9833b62c 100644 --- a/var/spack/repos/builtin/packages/libaec/package.py +++ b/var/spack/repos/builtin/packages/libaec/package.py @@ -22,3 +22,26 @@ class Libaec(CMakePackage): version('1.0.2', sha256='b9e5bbbc8bf9cbfd3b9b4ce38b3311f2c88d3d99f476edb35590eb0006aa1fc5') version('1.0.1', sha256='3668eb4ed36724441e488a7aadc197426afef4b1e8bd139af6d3e36023906459') version('1.0.0', 
sha256='849f08b08ddaaffe543d06d0ced5e4ee3e526b13a67c5f422d126b1c9cf1b546') + + @property + def libs(self): + query = self.spec.last_query + + libraries = ['libaec'] + + if 'szip' == query.name or 'szip' in query.extra_parameters: + libraries.insert(0, 'libsz') + + shared = 'static' not in query.extra_parameters + + libs = find_libraries( + libraries, root=self.prefix, shared=shared, recursive=True + ) + + if not libs: + msg = 'Unable to recursively locate {0} {1} libraries in {2}' + raise spack.error.NoLibrariesError( + msg.format('shared' if shared else 'static', + self.spec.name, + self.spec.prefix)) + return libs diff --git a/var/spack/repos/builtin/packages/libbsd/package.py b/var/spack/repos/builtin/packages/libbsd/package.py index ce99e985976..8ea91f4d5ab 100644 --- a/var/spack/repos/builtin/packages/libbsd/package.py +++ b/var/spack/repos/builtin/packages/libbsd/package.py @@ -14,7 +14,10 @@ class Libbsd(AutotoolsPackage): """ homepage = "https://libbsd.freedesktop.org/wiki/" - url = "https://libbsd.freedesktop.org/releases/libbsd-0.9.1.tar.xz" + urls = [ + "https://libbsd.freedesktop.org/releases/libbsd-0.9.1.tar.xz", + "https://mirrors.dotsrc.org/pub/mirrors/exherbo/libbsd-0.9.1.tar.xz" + ] version('0.10.0', sha256='34b8adc726883d0e85b3118fa13605e179a62b31ba51f676136ecb2d0bc1a887') version('0.9.1', sha256='56d835742327d69faccd16955a60b6dcf30684a8da518c4eca0ac713b9e0a7a4') diff --git a/var/spack/repos/builtin/packages/libbson/package.py b/var/spack/repos/builtin/packages/libbson/package.py index 9f9a1138847..e71cb2eb085 100644 --- a/var/spack/repos/builtin/packages/libbson/package.py +++ b/var/spack/repos/builtin/packages/libbson/package.py @@ -6,15 +6,19 @@ from spack import * -class Libbson(AutotoolsPackage): +class Libbson(Package): """libbson is a library providing useful routines related to building, parsing, and iterating BSON documents.""" - homepage = "https://github.com/mongodb/libbson" - url = 
"https://github.com/mongodb/libbson/releases/download/1.7.0/libbson-1.7.0.tar.gz" + homepage = "https://github.com/mongodb/mongo-c-driver" + url = "https://github.com/mongodb/mongo-c-driver/releases/download/1.16.2/mongo-c-driver-1.16.2.tar.gz" maintainers = ['michaelkuhn'] + version('1.16.2', sha256='0a722180e5b5c86c415b9256d753b2d5552901dc5d95c9f022072c3cd336887e') + version('1.9.5', sha256='6bb51b863a4641d6d7729e4b55df8f4389ed534c34eb3a1cda906a53df11072c') + version('1.9.4', sha256='c3cc230a3451bf7fedc5bb34c3191fd23d841e65ec415301f6c77e531924b769') + version('1.9.3', sha256='244e786c746fe6326433b1a6fcaadbdedc0da3d11c7b3168f0afa468f310e5f1') version('1.9.1', sha256='236d9fcec0fe419c2201481081e497f49136eda2349b61cfede6233013bf7601') version('1.8.1', sha256='9d18d14671b7890e27b2a5ce33a73a5ed5d33d39bba70209bae99c1dc7aa1ed4') version('1.8.0', sha256='63dea744b265a2e17c7b5e289f7803c679721d98e2975ea7d56bc1e7b8586bc1') @@ -23,12 +27,53 @@ class Libbson(AutotoolsPackage): version('1.6.2', sha256='aad410123e4bd8a9804c3c3d79e03344e2df104872594dc2cf19605d492944ba') version('1.6.1', sha256='5f160d44ea42ce9352a7a3607bc10d3b4b22d3271763aa3b3a12665e73e3a02d') + depends_on('cmake@3.1:', type='build', when='@1.10.0:') + depends_on('autoconf', type='build', when='@1.6.1') depends_on('automake', type='build', when='@1.6.1') depends_on('libtool', type='build', when='@1.6.1') depends_on('m4', type='build', when='@1.6.1') + def url_for_version(self, version): + if version >= Version('1.10.0'): + url = 'https://github.com/mongodb/mongo-c-driver/releases/download/{0}/mongo-c-driver-{0}.tar.gz' + else: + url = 'https://github.com/mongodb/libbson/releases/download/{0}/libbson-{0}.tar.gz' + + return url.format(version) + + def cmake_args(self): + args = [ + '-DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF', + '-DENABLE_MONGOC=OFF' + ] + + return args + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + # We cannot simply do + # cmake('..', *std_cmake_args, 
*self.cmake_args()) + # because that is not Python 2 compatible. Instead, collect + # arguments into a temporary buffer first. + args = [] + args.extend(std_cmake_args) + args.extend(self.cmake_args()) + cmake('..', *args) + make() + make('install') + @property def force_autoreconf(self): # 1.6.1 tarball is broken return self.spec.satisfies('@1.6.1') + + @when('@:1.9.99') + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix)) + make() + if self.run_tests: + make('check') + make('install') + if self.run_tests: + make('installcheck') diff --git a/var/spack/repos/builtin/packages/libc/package.py b/var/spack/repos/builtin/packages/libc/package.py index a4fa9ac3a64..e328c40ea1f 100644 --- a/var/spack/repos/builtin/packages/libc/package.py +++ b/var/spack/repos/builtin/packages/libc/package.py @@ -7,13 +7,16 @@ class Libc(Package): - """Dummy libc package to provide `iconv` virtual package""" + """Dummy package to provide interfaces available in libc.""" homepage = "https://en.wikipedia.org/wiki/C_standard_library" - url = "" has_code = False phases = [] version('1.0') # Dummy - variant('iconv', default=False, description='Set to True if libc provides iconv') + + variant('iconv', default=False, description='Provides interfaces for Localization Functions') + variant('rpc', default=False, description='Provides interfaces for RPC') + provides('iconv', when='+iconv') + provides('rpc', when='+rpc') diff --git a/var/spack/repos/builtin/packages/libcgroup/package.py b/var/spack/repos/builtin/packages/libcgroup/package.py new file mode 100644 index 00000000000..797e10d7a43 --- /dev/null +++ b/var/spack/repos/builtin/packages/libcgroup/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libcgroup(AutotoolsPackage): + """Library of control groups.""" + + homepage = "https://sourceforge.net/projects/libcg/" + url = "https://sourceforge.net/projects/libcg/files/libcgroup/v0.41/libcgroup-0.41.tar.bz2" + + version('0.41', sha256='e4e38bdc7ef70645ce33740ddcca051248d56b53283c0dc6d404e17706f6fb51') + version('0.37', sha256='15c8f3febb546530d3495af4e4904b3189c273277ca2d8553dec882cde1cd0f6') + version('0.36', sha256='8dcd2ae220435b3de736d3efb0023fdf1192d7a7f4032b439f3cf5342cff7b4c') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('bison', type='build') + depends_on('flex', type='build') + depends_on('linux-pam') diff --git a/var/spack/repos/builtin/packages/libconfuse/package.py b/var/spack/repos/builtin/packages/libconfuse/package.py new file mode 100644 index 00000000000..eedbaec83ad --- /dev/null +++ b/var/spack/repos/builtin/packages/libconfuse/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libconfuse(AutotoolsPackage): + """Small configuration file parser library for C.""" + + homepage = "https://github.com/martinh/libconfuse" + url = "https://github.com/martinh/libconfuse/archive/v3.2.2.tar.gz" + + version('3.2.2', sha256='2cf7e032980aff8f488efba61510dc3fb95e9a4b9183f985dea457a5651b0e2c') + version('3.2.1', sha256='2eff8e3c300c4ed1d67fdb13f9d31a72a68e31874b4640db15334305bc40cebd') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/libffi/clang-powerpc-3.2.1.patch b/var/spack/repos/builtin/packages/libffi/clang-powerpc-3.2.1.patch new file mode 100644 index 00000000000..b6749cc26f8 --- /dev/null +++ b/var/spack/repos/builtin/packages/libffi/clang-powerpc-3.2.1.patch @@ -0,0 +1,24 @@ +diff --git a/src/powerpc/linux64.S b/src/powerpc/linux64.S +index d2acb70..921e9cb 100644 +--- a/src/powerpc/linux64.S ++++ b/src/powerpc/linux64.S +@@ -83,6 +83,7 @@ ffi_call_LINUX64: + mr %r4, %r1 + # if defined _CALL_LINUX || _CALL_ELF == 2 + bl ffi_prep_args64 ++ nop + # else + bl .ffi_prep_args64 + # endif +diff --git a/src/powerpc/linux64_closure.S b/src/powerpc/linux64_closure.S +index 97421a4..3c62d5f 100644 +--- a/src/powerpc/linux64_closure.S ++++ b/src/powerpc/linux64_closure.S +@@ -152,6 +152,7 @@ ffi_closure_LINUX64: + # make the call + # if defined _CALL_LINUX || _CALL_ELF == 2 + bl ffi_closure_helper_LINUX64 ++ nop + # else + bl .ffi_closure_helper_LINUX64 + # endif diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py index 4add7b1785e..5e952497aa5 100644 --- a/var/spack/repos/builtin/packages/libffi/package.py +++ b/var/spack/repos/builtin/packages/libffi/package.py @@ -14,8 +14,13 @@ class Libffi(AutotoolsPackage, SourcewarePackage): homepage = 
"https://sourceware.org/libffi/" sourceware_mirror_path = "libffi/libffi-3.2.1.tar.gz" + version('3.3', sha256='72fba7922703ddfa7a028d513ac15a85c8d54c8d67f55fa5a4802885dc652056') version('3.2.1', sha256='d06ebb8e1d9a22d19e38d63fdb83954253f39bedc5d46232a05645685722ca37') + patch('clang-powerpc-3.2.1.patch', when='@3.2.1%clang platform=linux') + # ref.: https://github.com/libffi/libffi/pull/561 + patch('powerpc-3.3.patch', when='@3.3') + @property def headers(self): # The headers are probably in self.prefix.lib but we search everywhere diff --git a/var/spack/repos/builtin/packages/libffi/powerpc-3.3.patch b/var/spack/repos/builtin/packages/libffi/powerpc-3.3.patch new file mode 100644 index 00000000000..4fd32b01028 --- /dev/null +++ b/var/spack/repos/builtin/packages/libffi/powerpc-3.3.patch @@ -0,0 +1,58 @@ +From de93adfb6f48100946bba2c3abad2a77a0cfde0b Mon Sep 17 00:00:00 2001 +From: Fabrice Fontaine +Date: Sun, 24 Nov 2019 09:52:01 +0100 +Subject: [PATCH] ffi_powerpc.h: fix build failure with powerpc7 + +This is a patch pulled down from the following: +https://github.com/buildroot/buildroot/blob/78926f610b1411b03464152472fd430012deb9ac/package/libffi/0004-ffi_powerpc.h-fix-build-failure-with-powerpc7.patch + +This issue is being hit on OpenBMC code when pulling the latest +libffi tag and building on a P8 ppc64le machine. I verified this +patch fixes the issue we are seeing. 
+ +Below is the original commit message: + +Sicne commit 73dd43afc8a447ba98ea02e9aad4c6898dc77fb0, build on powerpc7 +fails on: + +In file included from ../src/powerpc/ffi.c:33:0: +../src/powerpc/ffi_powerpc.h:61:9: error: '_Float128' is not supported on this target + typedef _Float128 float128; + ^~~~~~~~~ + +Fix this build failure by checking for __HAVE_FLOAT128 before using +_Float128, as _Float128 is enabled only on specific conditions, see +output/host/powerpc64-buildroot-linux-gnu/sysroot/usr/include/bits/floatn.h: + + /* Defined to 1 if the current compiler invocation provides a + floating-point type with the IEEE 754 binary128 format, and this glibc + includes corresponding *f128 interfaces for it. */ + #if defined _ARCH_PWR8 && defined __LITTLE_ENDIAN__ && (_CALL_ELF == 2) \ + && defined __FLOAT128__ && !defined __NO_LONG_DOUBLE_MATH + # define __HAVE_FLOAT128 1 + #else + # define __HAVE_FLOAT128 0 + #endif + +Fixes: + - http://autobuild.buildroot.org/results/5c9dd8fb3b6a128882b6250f197c80232d8a3b53 + +Signed-off-by: Fabrice Fontaine +Signed-off-by: Andrew Geissler +--- + src/powerpc/ffi_powerpc.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/powerpc/ffi_powerpc.h b/src/powerpc/ffi_powerpc.h +index 8e2f2f0e..960a5c42 100644 +--- a/src/powerpc/ffi_powerpc.h ++++ b/src/powerpc/ffi_powerpc.h +@@ -57,7 +57,7 @@ typedef union + double d; + } ffi_dblfl; + +-#if defined(__FLOAT128_TYPE__) ++#if defined(__FLOAT128_TYPE__) && defined(__HAVE_FLOAT128) + typedef _Float128 float128; + #elif defined(__FLOAT128__) + typedef __float128 float128; diff --git a/var/spack/repos/builtin/packages/libflame/package.py b/var/spack/repos/builtin/packages/libflame/package.py index 1a8de4b47a4..7d7a9837d34 100644 --- a/var/spack/repos/builtin/packages/libflame/package.py +++ b/var/spack/repos/builtin/packages/libflame/package.py @@ -30,9 +30,9 @@ class Libflame(AutotoolsPackage): ' to their corresponding native C implementations' ' in libflame.') - 
variant('threads', default='no', + variant('threads', default='none', description='Multithreading support', - values=('pthreads', 'openmp', 'no'), + values=('pthreads', 'openmp', 'none'), multi=False) variant('static', default=True, @@ -70,6 +70,12 @@ def flag_handler(self, name, flags): flags.append('-std=gnu99') return (flags, None, None) + def enable_or_disable_threads(self, variant, options): + opt_val = self.spec.variants['threads'].value + if opt_val == 'none': + opt_val = 'no' + return ['--enable-multithreading={0}'.format(opt_val)] + def configure_args(self): # Libflame has a secondary dependency on BLAS, # but doesn't know which library name to expect: @@ -96,10 +102,9 @@ def configure_args(self): else: config_args.append("--disable-debug") - config_args.append('--enable-multithreading=' + self.spec.variants['threads'].value) + config_args.extend(self.enable_or_disable('threads')) - if 'no' != self.spec.variants['threads'].value: + if 'none' != self.spec.variants['threads'].value: config_args.append("--enable-supermatrix") else: config_args.append("--disable-supermatrix") diff --git a/var/spack/repos/builtin/packages/libgit2/package.py b/var/spack/repos/builtin/packages/libgit2/package.py index 6fe9022179a..dd5d55fb6f4 100644 --- a/var/spack/repos/builtin/packages/libgit2/package.py +++ b/var/spack/repos/builtin/packages/libgit2/package.py @@ -18,6 +18,7 @@ class Libgit2(CMakePackage): maintainers = ["AndrewGaspar"] + version('1.0.1', sha256='1775427a6098f441ddbaa5bd4e9b8a043c7401e450ed761e69a415530fea81d2') version('1.0.0', sha256='6a1fa16a7f6335ce8b2630fbdbb5e57c4027929ebc56fcd1ac55edb141b409b4') version('0.99.0', sha256='174024310c1563097a6613a0d3f7539d11a9a86517cd67ce533849065de08a11') version('0.28.5', sha256='2b7b68aee6f123bc84cc502a9c12738435b8054e7d628962e091cd2a25be4f42') diff --git a/var/spack/repos/builtin/packages/libgpuarray/package.py b/var/spack/repos/builtin/packages/libgpuarray/package.py index edaebac7afc..84b0ad541e7 100644 ---
a/var/spack/repos/builtin/packages/libgpuarray/package.py +++ b/var/spack/repos/builtin/packages/libgpuarray/package.py @@ -14,6 +14,7 @@ class Libgpuarray(CMakePackage): homepage = "http://deeplearning.net/software/libgpuarray/" url = "https://github.com/Theano/libgpuarray/archive/v0.6.1.tar.gz" + version('0.7.6', sha256='ad1c00dd47c3d36ee1708e5167377edbfcdb7226e837ef9c68b841afbb4a4f6a') version('0.7.5', sha256='39c4d2e743848be43c8819c736e089ae51b11aa446cc6ee05af945c2dfd63420') version('0.7.2', sha256='ef11ee6f8d62d53831277fd3dcab662aa770a5b5de2d30fe3018c4af959204da') version('0.7.1', sha256='4d0f9dd63b0595a8c04d8cee91b2619847c033b011c71d776caa784322382ed6') diff --git a/var/spack/repos/builtin/packages/libical/package.py b/var/spack/repos/builtin/packages/libical/package.py new file mode 100644 index 00000000000..6116b0b6497 --- /dev/null +++ b/var/spack/repos/builtin/packages/libical/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libical(CMakePackage): + """Libical - an implementation of iCalendar protocols and data formats.""" + + homepage = "https://github.com/libical/libical" + url = "https://github.com/libical/libical/archive/v3.0.8.tar.gz" + + version('3.0.8', sha256='09fecacaf75ba5a242159e3a9758a5446b5ce4d0ab684f98a7040864e1d1286f') + + depends_on('cmake@3.11.0:', type='build') + depends_on('perl', type='build') + depends_on('icu4c') + + def cmake_args(self): + return ['-DENABLE_GTK_DOC=OFF'] diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py index 10bddc37e03..4b81a01ef6c 100644 --- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py +++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py @@ -8,13 +8,16 @@ class LibjpegTurbo(Package): """libjpeg-turbo is a fork of the original IJG libjpeg which uses SIMD to - accelerate baseline JPEG compression and decompression. libjpeg is a - library that implements JPEG image encoding, decoding and - transcoding.""" + accelerate baseline JPEG compression and decompression. + + libjpeg is a library that implements JPEG image encoding, decoding and + transcoding. 
+ """ # https://github.com/libjpeg-turbo/libjpeg-turbo/blob/master/BUILDING.md homepage = "https://libjpeg-turbo.org/" - url = "https://github.com/libjpeg-turbo/libjpeg-turbo/archive/2.0.3.tar.gz" + url = "https://github.com/libjpeg-turbo/libjpeg-turbo/archive/2.0.3.tar.gz" + version('2.0.4', sha256='7777c3c19762940cff42b3ba4d7cd5c52d1671b39a79532050c85efb99079064') version('2.0.3', sha256='a69598bf079463b34d45ca7268462a18b6507fdaa62bb1dfd212f02041499b5d') version('2.0.2', sha256='b45255bd476c19c7c6b198c07c0487e8b8536373b82f2b38346b32b4fa7bb942') version('1.5.90', sha256='cb948ade92561d8626fd7866a4a7ba3b952f9759ea3dd642927bc687470f60b7') diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py index 49509fbe36c..5a199ee333a 100644 --- a/var/spack/repos/builtin/packages/libmonitor/package.py +++ b/var/spack/repos/builtin/packages/libmonitor/package.py @@ -13,10 +13,11 @@ class Libmonitor(AutotoolsPackage): homepage = "https://github.com/HPCToolkit/libmonitor" git = "https://github.com/HPCToolkit/libmonitor.git" + maintainers = ['mwkrentel'] version('master', branch='master') - version('2018.07.18', commit='d28cc1d3c08c02013a68a022a57a6ac73db88166', - preferred=True) + version('2019.05.31', commit='c9767087d52e58a719aa7f149136b101e499db44') + version('2018.07.18', commit='d28cc1d3c08c02013a68a022a57a6ac73db88166') version('2013.02.18', commit='4f2311e413fd90583263d6f20453bbe552ccfef3') # Configure for Rice HPCToolkit. 
diff --git a/var/spack/repos/builtin/packages/libnetworkit/package.py b/var/spack/repos/builtin/packages/libnetworkit/package.py index 5d7f58eb906..04ea7b4e9d8 100644 --- a/var/spack/repos/builtin/packages/libnetworkit/package.py +++ b/var/spack/repos/builtin/packages/libnetworkit/package.py @@ -22,6 +22,7 @@ class Libnetworkit(CMakePackage): maintainers = ['fabratu'] + version('7.0', sha256='4faf16c5fae3e14d3c1b6f30e25c6e093dcf6a3dbf021235f3161ac2a527f682') version('6.1', sha256='22c953ea1054c356663b31c77114c2f0c8fec17e0e707aeec23026241beab9b2') variant('static', default=False, description='Enables the build of shared libraries') diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py index 874db157210..4f1337db763 100644 --- a/var/spack/repos/builtin/packages/libpciaccess/package.py +++ b/var/spack/repos/builtin/packages/libpciaccess/package.py @@ -6,11 +6,11 @@ from spack import * -class Libpciaccess(AutotoolsPackage): +class Libpciaccess(AutotoolsPackage, XorgPackage): """Generic PCI access library.""" homepage = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/" - url = "http://xorg.freedesktop.org/archive/individual/lib/libpciaccess-0.13.5.tar.gz" + xorg_mirror_path = "lib/libpciaccess-0.13.5.tar.gz" version('0.13.5', sha256='fe26ec788732b4ef60b550f2d3fa51c605d27f646e18ecec878f061807a3526e') version('0.13.4', sha256='74d92bda448e6fdb64fee4e0091255f48d625d07146a121653022ed3a0ca1f2f') diff --git a/var/spack/repos/builtin/packages/libsamplerate/package.py b/var/spack/repos/builtin/packages/libsamplerate/package.py new file mode 100644 index 00000000000..fcab8be9d3d --- /dev/null +++ b/var/spack/repos/builtin/packages/libsamplerate/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libsamplerate(AutotoolsPackage): + """libsamplerate (also known as Secret Rabbit Code) is a library for + performing sample rate conversion of audio data.""" + + homepage = "http://www.mega-nerd.com/libsamplerate/history.html" + url = "http://www.mega-nerd.com/libsamplerate/libsamplerate-0.1.9.tar.gz" + + version('0.1.9', sha256='0a7eb168e2f21353fb6d84da152e4512126f7dc48ccb0be80578c565413444c1') + version('0.1.8', sha256='93b54bdf46d5e6d2354b7034395fe329c222a966790de34520702bb9642f1c06') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') diff --git a/var/spack/repos/builtin/packages/libsecret/package.py b/var/spack/repos/builtin/packages/libsecret/package.py index 8c41caf1c37..6302b9c69d3 100644 --- a/var/spack/repos/builtin/packages/libsecret/package.py +++ b/var/spack/repos/builtin/packages/libsecret/package.py @@ -25,6 +25,8 @@ class Libsecret(AutotoolsPackage): # Optional Vala support is not implemented yet # variant('vala', default=False, descript='Build with Vala support') + depends_on('pkgconfig', type='build') +# depends_on('mesa') # https://gitlab.gnome.org/GNOME/libsecret/blob/master/meson.build depends_on('glib@2.44:') depends_on('libgcrypt@1.2.2:', when='+gcrypt') diff --git a/var/spack/repos/builtin/packages/libsharp/package.py b/var/spack/repos/builtin/packages/libsharp/package.py index c78534c6c47..aa7e5184b64 100644 --- a/var/spack/repos/builtin/packages/libsharp/package.py +++ b/var/spack/repos/builtin/packages/libsharp/package.py @@ -40,5 +40,7 @@ def configure_args(self): return args def install(self, spec, prefix): - install_tree('auto/include', join_path(prefix, 'include')) - install_tree('auto/lib', join_path(prefix, 'lib')) + # Libsharp's only caller healpix include headers like 'libsharp/xxx.h' + # Install xxx.h to include/libsharp + 
install_tree('auto/include', prefix.include.libsharp) + install_tree('auto/lib', prefix.lib) diff --git a/var/spack/repos/builtin/packages/libsm/package.py b/var/spack/repos/builtin/packages/libsm/package.py index cabd2aae3d5..269f2ededbf 100644 --- a/var/spack/repos/builtin/packages/libsm/package.py +++ b/var/spack/repos/builtin/packages/libsm/package.py @@ -20,3 +20,4 @@ class Libsm(AutotoolsPackage, XorgPackage): depends_on('xtrans', type='build') depends_on('pkgconfig', type='build') depends_on('util-macros', type='build') + depends_on('libuuid') diff --git a/var/spack/repos/builtin/packages/libspatialite/package.py b/var/spack/repos/builtin/packages/libspatialite/package.py index 45fe1f6109c..2eb9b9df2f5 100644 --- a/var/spack/repos/builtin/packages/libspatialite/package.py +++ b/var/spack/repos/builtin/packages/libspatialite/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os from spack import * @@ -13,12 +14,25 @@ class Libspatialite(AutotoolsPackage): homepage = "http://www.gaia-gis.it" url = "http://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-4.3.0a.tar.gz" - version('4.3.0a', sha256='88900030a4762904a7880273f292e5e8ca6b15b7c6c3fb88ffa9e67ee8a5a499') + # Must download manually from: + # https://www.gaia-gis.it/fossil/libspatialite/info/c7f67038bf06d98d + # For instructions on the file:// below.. 
+ # https://github.com/spack/spack/issues/2489 + version('5.0.0.2.c7f67038bf', + sha256='f8100f71b769c7db066c6f938af6b00e920e4b90ac14c00a4f3ed7171565caab', + url="file://%s/SpatiaLite-c7f67038bf.tar.gz" % os.getcwd()) + + version('5.0.0-beta0', sha256='caacf5378a5cfab9b8e98bb361e2b592e714e21f5c152b795df80d0ab1da1c42') + version('4.3.0a', preferred=True, + sha256='88900030a4762904a7880273f292e5e8ca6b15b7c6c3fb88ffa9e67ee8a5a499') version('3.0.1', sha256='4983d6584069fd5ff0cfcccccee1015088dab2db177c0dc7050ce8306b68f8e6') depends_on('pkgconfig', type='build') depends_on('sqlite+rtree') - depends_on('proj@:5') + depends_on('proj@:5', when='@:4.999.999') + # PROJ.6 is OK w/ newer versions + # https://www.gaia-gis.it/fossil/libspatialite/wiki?name=PROJ.6 + depends_on('proj') depends_on('geos') depends_on('freexl') depends_on('iconv') diff --git a/var/spack/repos/builtin/packages/libszip/package.py b/var/spack/repos/builtin/packages/libszip/package.py index 00a2b9b5bcb..b72e2ba2765 100644 --- a/var/spack/repos/builtin/packages/libszip/package.py +++ b/var/spack/repos/builtin/packages/libszip/package.py @@ -24,6 +24,22 @@ class Libszip(AutotoolsPackage): version('2.1.1', sha256='21ee958b4f2d4be2c9cabfa5e1a94877043609ce86fde5f286f105f7ff84d412') version('2.1', sha256='a816d95d5662e8279625abdbea7d0e62157d7d1f028020b1075500bf483ed5ef') + @property + def libs(self): + shared = 'static' not in self.spec.last_query.extra_parameters + + libs = find_libraries( + 'libsz', root=self.prefix, shared=shared, recursive=True + ) + + if not libs: + msg = 'Unable to recursively locate {0} {1} libraries in {2}' + raise spack.error.NoLibrariesError( + msg.format('shared' if shared else 'static', + self.spec.name, + self.spec.prefix)) + return libs + def configure_args(self): return [ '--enable-production', diff --git a/var/spack/repos/builtin/packages/libtirpc/package.py b/var/spack/repos/builtin/packages/libtirpc/package.py index e6ce06e0ea0..f83c7499a09 100644 --- 
a/var/spack/repos/builtin/packages/libtirpc/package.py +++ b/var/spack/repos/builtin/packages/libtirpc/package.py @@ -15,6 +15,8 @@ class Libtirpc(AutotoolsPackage): version('1.1.4', sha256='2ca529f02292e10c158562295a1ffd95d2ce8af97820e3534fe1b0e3aec7561d') + provides('rpc') + # FIXME: build error on macOS # auth_none.c:81:9: error: unknown type name 'mutex_t' diff --git a/var/spack/repos/builtin/packages/libtree/package.py b/var/spack/repos/builtin/packages/libtree/package.py index d4c2749daa8..10ca8db31db 100644 --- a/var/spack/repos/builtin/packages/libtree/package.py +++ b/var/spack/repos/builtin/packages/libtree/package.py @@ -15,6 +15,9 @@ class Libtree(CMakePackage): maintainers = ['haampie'] + version('1.2.0', sha256='4316a52aed7c8d2f7d2736c935bbda952204be92e56948110a143283764c427c') + version('1.1.4', sha256='cfafb24c8f5e0d356c82777c338d58730ca6f3cb76dfe8a6857ee3ad65bf8be7') + version('1.1.3', sha256='7baf5aaecd3a076bf1e7a1aa86979e7b841ab3f678ca8ac0e2a22bbbccf0dd06') version('1.1.2', sha256='31641c6bf6c2980ffa7b4c57392460434f97ba66fe51fe6346867430b33a0374') version('1.1.1', sha256='3e8543145a40a94e9e2ce9fed003d2bf68294e1fce9607028a286bc132e17dc4') version('1.1.0', sha256='6cf36fb9a4c8c3af01855527d4931110732bb2d1c19be9334c689f1fd1c78536') diff --git a/var/spack/repos/builtin/packages/libunwind/package.py b/var/spack/repos/builtin/packages/libunwind/package.py index 971840f5e04..dcabc86ec55 100644 --- a/var/spack/repos/builtin/packages/libunwind/package.py +++ b/var/spack/repos/builtin/packages/libunwind/package.py @@ -16,9 +16,12 @@ class Libunwind(AutotoolsPackage): maintainers = ['mwkrentel'] version('master', branch='master') + version('1.5-head', branch='v1.5-stable') + version('1.5-rc1', sha256='3e0cbc6dee326592097ef06e97cf76ef597987eddd0df8bea49b0594e587627a') version('1.4-head', branch='v1.4-stable') + version('1.4.0', sha256='df59c931bd4d7ebfd83ee481c943edf015138089b8e50abed8d9c57ba9338435', preferred=True) version('1.4-rc1', 
sha256='1928459139f048f9b4aca4bb5010540cb7718d44220835a2980b85429007fa9f') - version('1.3.1', sha256='43997a3939b6ccdf2f669b50fdb8a4d3205374728c2923ddc2354c65260214f8', preferred=True) + version('1.3.1', sha256='43997a3939b6ccdf2f669b50fdb8a4d3205374728c2923ddc2354c65260214f8') version('1.2.1', sha256='3f3ecb90e28cbe53fba7a4a27ccce7aad188d3210bb1964a923a731a27a75acb') version('1.1', sha256='9dfe0fcae2a866de9d3942c66995e4b460230446887dbdab302d41a8aee8d09a') @@ -26,15 +29,15 @@ class Libunwind(AutotoolsPackage): description='Support xz (lzma) compressed symbol tables.') variant('zlib', default=False, - description='Support zlib compressed symbol tables (master ' - 'branch only).') + description='Support zlib compressed symbol tables ' + '(1.5 and later).') # The libunwind releases contain the autotools generated files, # but the git repo snapshots do not. - depends_on('autoconf', type='build', when='@master,1.4-head') - depends_on('automake', type='build', when='@master,1.4-head') - depends_on('libtool', type='build', when='@master,1.4-head') - depends_on('m4', type='build', when='@master,1.4-head') + depends_on('autoconf', type='build', when='@master,1.4-head,1.5-head') + depends_on('automake', type='build', when='@master,1.4-head,1.5-head') + depends_on('libtool', type='build', when='@master,1.4-head,1.5-head') + depends_on('m4', type='build', when='@master,1.4-head,1.5-head') depends_on('xz', type='link', when='+xz') depends_on('zlib', type='link', when='+zlib') @@ -44,7 +47,15 @@ class Libunwind(AutotoolsPackage): provides('unwind') - flag_handler = AutotoolsPackage.build_system_flags + def flag_handler(self, name, flags): + wrapper_flags = None + + if name == 'cflags': + # https://github.com/libunwind/libunwind/pull/166 + if self.spec.satisfies('@:1.4 %gcc@10:'): + wrapper_flags = ['-fcommon'] + + return (wrapper_flags, None, flags) def configure_args(self): spec = self.spec @@ -55,8 +66,8 @@ def configure_args(self): else: args.append('--disable-minidebuginfo') 
- # zlib support is only in the master branch (for now). - if spec.satisfies('@master'): + # zlib support is available in 1.5.x and later + if spec.satisfies('@1.5:'): if '+zlib' in spec: args.append('--enable-zlibdebuginfo') else: diff --git a/var/spack/repos/builtin/packages/libuv/package.py b/var/spack/repos/builtin/packages/libuv/package.py index 55d7dcebba0..309270a3ff6 100644 --- a/var/spack/repos/builtin/packages/libuv/package.py +++ b/var/spack/repos/builtin/packages/libuv/package.py @@ -19,6 +19,12 @@ class Libuv(AutotoolsPackage): depends_on('autoconf', type='build') depends_on('libtool', type='build') + # Tries to build an Objective-C file with GCC's C frontend + # https://github.com/libuv/libuv/issues/2805 + conflicts('%gcc platform=darwin', + msg='libuv does not compile with GCC on macOS yet, use clang. ' + 'See: https://github.com/libuv/libuv/issues/2805') + def autoreconf(self, spec, prefix): # This is needed because autogen.sh generates on-the-fly # an m4 macro needed during configuration diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index 8c024854ea1..22add8c7a31 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -14,11 +14,12 @@ class Libxml2(AutotoolsPackage): homepage = "http://xmlsoft.org" url = "http://xmlsoft.org/sources/libxml2-2.9.8.tar.gz" - version('2.9.9', sha256='94fb70890143e3c6549f265cee93ec064c80a84c42ad0f23e85ee1fd6540a871') - version('2.9.8', sha256='0b74e51595654f958148759cfef0993114ddccccbb6f31aee018f3558e8e2732') - version('2.9.4', sha256='ffb911191e509b966deb55de705387f14156e1a56b21824357cdf0053233633c') - version('2.9.2', sha256='5178c30b151d044aefb1b08bf54c3003a0ac55c59c866763997529d60770d5bc') - version('2.7.8', sha256='cda23bc9ebd26474ca8f3d67e7d1c4a1f1e7106364b690d822e009fdc3c417ec') + version('2.9.10', sha256='aafee193ffb8fe0c82d4afef6ef91972cbaf5feea100edc2f262750611b4be1f') 
+ version('2.9.9', sha256='94fb70890143e3c6549f265cee93ec064c80a84c42ad0f23e85ee1fd6540a871') + version('2.9.8', sha256='0b74e51595654f958148759cfef0993114ddccccbb6f31aee018f3558e8e2732') + version('2.9.4', sha256='ffb911191e509b966deb55de705387f14156e1a56b21824357cdf0053233633c') + version('2.9.2', sha256='5178c30b151d044aefb1b08bf54c3003a0ac55c59c866763997529d60770d5bc') + version('2.7.8', sha256='cda23bc9ebd26474ca8f3d67e7d1c4a1f1e7106364b690d822e009fdc3c417ec') variant('python', default=False, description='Enable Python support') diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py index 12cfc32dfd2..caf6681e47b 100644 --- a/var/spack/repos/builtin/packages/libxsmm/package.py +++ b/var/spack/repos/builtin/packages/libxsmm/package.py @@ -14,10 +14,11 @@ class Libxsmm(MakefilePackage): and deep learning primitives.""" homepage = 'https://github.com/hfp/libxsmm' - url = 'https://github.com/hfp/libxsmm/archive/1.15.tar.gz' + url = 'https://github.com/hfp/libxsmm/archive/1.16.tar.gz' git = 'https://github.com/hfp/libxsmm.git' version('master', branch='master') + version('1.16', sha256='4f4f2ad97815413af80821d2e306eb6f00541941ad412662da05c02361a20e07') version('1.15', sha256='499e5adfbf90cd3673309243c2b56b237d54f86db2437e1ac06c8746b55ab91c') version('1.14', sha256='9c0af4509ea341d1ee2c6c19fc6f19289318c3bd4b17844efeb9e7f9691abf76') version('1.13', sha256='47c034e169820a9633770eece0e0fdd8d4a744e09b81da2af8c2608a4625811e') diff --git a/var/spack/repos/builtin/packages/libxv/package.py b/var/spack/repos/builtin/packages/libxv/package.py index fdb3b6cf7f5..fc948f63f1a 100644 --- a/var/spack/repos/builtin/packages/libxv/package.py +++ b/var/spack/repos/builtin/packages/libxv/package.py @@ -19,6 +19,6 @@ class Libxv(AutotoolsPackage, XorgPackage): depends_on('libxext') depends_on('xextproto', type='build') - depends_on('videoproto', type='build') + depends_on('videoproto') depends_on('pkgconfig', 
type='build') depends_on('util-macros', type='build') diff --git a/var/spack/repos/builtin/packages/libyaml/package.py b/var/spack/repos/builtin/packages/libyaml/package.py index d795b2f8a6f..26686dbd524 100644 --- a/var/spack/repos/builtin/packages/libyaml/package.py +++ b/var/spack/repos/builtin/packages/libyaml/package.py @@ -10,10 +10,12 @@ class Libyaml(AutotoolsPackage): """A C library for parsing and emitting YAML.""" homepage = "https://pyyaml.org/wiki/LibYAML" - url = "https://pyyaml.org/download/libyaml/yaml-0.2.2.tar.gz" + url = "https://pyyaml.org/download/libyaml/yaml-0.2.4.tar.gz" git = "https://github.com/yaml/libyaml.git" version('master', branch='master') + version('0.2.4', sha256='d80aeda8747b7c26fbbfd87ab687786e58394a8435ae3970e79cb97882e30557') + version('0.2.3', sha256='08bbb80284d77092e68a6f69f1e480e8ed93e215c47b2ca29290e3bd5a191108') version('0.2.2', sha256='4a9100ab61047fd9bd395bcef3ce5403365cafd55c1e0d0299cde14958e47be9') version('0.2.1', sha256='78281145641a080fb32d6e7a87b9c0664d611dcb4d542e90baf731f51cbb59cd') version('0.1.7', sha256='8088e457264a98ba451a90b8661fcb4f9d6f478f7265d48322a196cec2480729') diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py index 8556fb0d8e9..8e89693ce1b 100644 --- a/var/spack/repos/builtin/packages/likwid/package.py +++ b/var/spack/repos/builtin/packages/likwid/package.py @@ -29,6 +29,7 @@ class Likwid(Package): version('4.3.1', sha256='4b40a96717da54514274d166f9b71928545468091c939c1d74109733279eaeb1') version('4.3.0', sha256='86fc5f82c80fcff1a643394627839ec79f1ca2bcfad30000eb7018da592588b4') + patch('https://github.com/RRZE-HPC/likwid/commit/e0332ace8fe8ca7dcd4b4477a25e37944f173a5c.patch', sha256='c3b8f939a46b425665577ce764d4fba080a23cab5999c53db71655fd54d7e0b1', when='@5.0.1') patch('https://github.com/RRZE-HPC/likwid/commit/d2d0ef333b5e0997d7c80fc6ac1a473b5e47d084.patch', 
sha256='636cbf40669261fdb36379d67253be2b731cfa7b6d610d232767d72fbdf08bc0', when='@4.3.4') # NOTE: There is no way to use an externally provided hwloc with Likwid. @@ -106,6 +107,12 @@ def install(self, spec, prefix): spec['lua'].prefix.bin), 'config.mk') + # https://github.com/RRZE-HPC/likwid/issues/287 + if self.spec.satisfies('@:5.0.2 %gcc@10:'): + filter_file(r'^(CFLAGS.*)', + '\\1 -fcommon', + 'make/include_GCC.mk') + env['PWD'] = os.getcwd() make() make('install') diff --git a/var/spack/repos/builtin/packages/linsys-v/package.py b/var/spack/repos/builtin/packages/linsys-v/package.py new file mode 100644 index 00000000000..545a2a8bec9 --- /dev/null +++ b/var/spack/repos/builtin/packages/linsys-v/package.py @@ -0,0 +1,69 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class LinsysV(MakefilePackage): + """LINSYS_V: Verified Solution of Linear Systems""" + + homepage = "http://www.math.twcu.ac.jp/ogita/post-k/" + url = "http://www.math.twcu.ac.jp/ogita/post-k/software/LINSYS_V/LINSYS_V_alpha.tar.gz" + + version( + "alpha", + sha256="6666bc837bb6598b7cdefb233d3d0f2c308a24fe3465e4fe9b6c9762810bb320", + ) + + depends_on("mpi") + depends_on("blas", type="link") + depends_on("lapack", type="link") + depends_on("scalapack", type="link") + + def patch(self): + math_libs = ( + self.spec["lapack"].libs + + self.spec["blas"].libs + + self.spec["scalapack"].libs + ) + makefile = FileFilter("Makefile") + if self.spec.satisfies("%gcc"): + makefile.filter(r"^ENV\s+=\sK", "#ENV=K") + makefile.filter(r"^#ENV\s+=\sGCC", "ENV=GCC") + makefile.filter(r"^MKL\s+=\s1", "MKL=0") + makefile.filter(r"^CC\s+=\smpicc", + "CC={0}".format(self.spec["mpi"].mpicc)) + makefile.filter( + r"^LIBS\s+=\s-lscalapack\s-lblacs\s-llapack\s-lblas", + "LIBS={0}".format(math_libs.ld_flags) + " -lm", + ) + elif 
self.spec.satisfies("%fj"): + makefile.filter(r"^#ENV\s+=\sK", "ENV=K") + makefile.filter(r"^ENV\s+=\sGCC", "#ENV=GCC") + makefile.filter(r"^MKL\s+=\s1", "MKL=0") + makefile.filter( + r"^CC\s+=\smpifccpx", + "CC={0}".format(self.spec["mpi"].mpicc) + ) + makefile.filter( + r"^CFLAGS\s+=\s-Kfast,openmp", + "CFLAGS=-Ofast -fstrict-aliasing {0}".format( + self.compiler.openmp_flag + ), + ) + makefile.filter( + r"^LIBS\s+=\s-SCALAPACK\s-SSL2BLAMP", + "LIBS=-SSL2BLAMP {0}".format(math_libs.ld_flags), + ) + elif self.spec.satisfies("%intel"): + makefile.filter(r"^ENV\s+=\sGCC", "#ENV=GCC") + makefile.filter(r"^#ENV\s+=\sICC", "ENV=ICC") + makefile.filter(r"^CC\s+=\smpiicc", + "CC={0}".format(self.spec["mpi"].mpicc)) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install("ex_linsys_v", prefix.bin) diff --git a/var/spack/repos/builtin/packages/linux-pam/package.py b/var/spack/repos/builtin/packages/linux-pam/package.py new file mode 100644 index 00000000000..0dff1e0654b --- /dev/null +++ b/var/spack/repos/builtin/packages/linux-pam/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details.
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class LinuxPam(AutotoolsPackage): + """Linux PAM (Pluggable Authentication Modules for Linux) project.""" + + homepage = "http://www.linux-pam.org/" + url = "https://github.com/linux-pam/linux-pam/releases/download/v1.3.1/Linux-PAM-1.3.1.tar.xz" + + version('1.3.1', sha256='eff47a4ecd833fbf18de9686632a70ee8d0794b79aecb217ebd0ce11db4cd0db') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + + def configure_args(self): + config_args = [ + '--includedir=' + self.prefix.include.security + ] + return config_args diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index f56a68403b9..53ce3e12c29 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -119,6 +119,8 @@ class Llvm(CMakePackage, CudaPackage): default=False, description="Build with OpenMP capable thread sanitizer", ) + variant('code_signing', default=False, + description="Enable code-signing on macOS") variant("python", default=False, description="Install python bindings") extends("python", when="+python") @@ -176,6 +178,20 @@ class Llvm(CMakePackage, CudaPackage): # MLIR exists in > 10.x conflicts("+mlir", when="@:9") + # code signing is only necessary on macOS", + conflicts('+code_signing', when='platform=linux') + conflicts('+code_signing', when='platform=bgq') + conflicts('+code_signing', when='platform=cray') + + conflicts( + '+code_signing', + when='~lldb platform=darwin', + msg="code signing is only necessary for building the " + "in-tree debug server on macOS. 
Turning this variant " + "off enables a build of llvm with lldb that uses the " + "system debug server", + ) + # Github issue #4986 patch("llvm_gcc7.patch", when="@4.0.0:4.0.1+lldb %gcc@7.0:") # Backport from llvm master + additional fix @@ -192,31 +208,31 @@ class Llvm(CMakePackage, CudaPackage): # https://bugs.llvm.org/show_bug.cgi?id=39696 patch("thread-p9.patch", when="@develop+libcxx") - @run_before("cmake") - def check_darwin_lldb_codesign_requirement(self): - if not self.spec.satisfies("+lldb platform=darwin"): - return - codesign = which("codesign") - mkdir("tmp") - llvm_check_file = join_path("tmp", "llvm_check") - copy("/usr/bin/false", llvm_check_file) - - try: - codesign("-f", "-s", "lldb_codesign", "--dryrun", llvm_check_file) - - except ProcessError: - # Newer LLVM versions have a simple script that sets up - # automatically - setup = Executable("./lldb/scripts/macos-setup-codesign.sh") + @run_before('cmake') + def codesign_check(self): + if self.spec.satisfies("+code_signing"): + codesign = which('codesign') + mkdir('tmp') + llvm_check_file = join_path('tmp', 'llvm_check') + copy('/usr/bin/false', llvm_check_file) try: - setup() - except Exception: - raise RuntimeError( - 'The "lldb_codesign" identity must be available to build ' - "LLVM with LLDB. See https://lldb.llvm.org/resources/" - "build.html#code-signing-on-macos for details on how to " - "create this identity." - ) + codesign('-f', '-s', 'lldb_codesign', '--dryrun', + llvm_check_file) + + except ProcessError: + # Newer LLVM versions have a simple script that sets up + # automatically when run with sudo privileges + setup = Executable("./lldb/scripts/macos-setup-codesign.sh") + try: + setup() + except Exception: + raise RuntimeError( + 'spack was unable to either find or set up ' + 'code-signing on your system. Please refer to ' + 'https://lldb.llvm.org/resources/build.html#' + 'code-signing-on-macos for details on how to ' + 'create this identity.'
+ ) def setup_build_environment(self, env): env.append_flags("CXXFLAGS", self.compiler.cxx11_flag) @@ -271,8 +287,12 @@ def cmake_args(self): if "+python" in spec and "+lldb" in spec and spec.satisfies("@5.0.0:"): cmake_args.append("-DLLDB_USE_SYSTEM_SIX:Bool=TRUE") - if "~python" in spec and "+lldb" in spec: - cmake_args.append("-DLLDB_DISABLE_PYTHON:Bool=TRUE") + if "+lldb" in spec and spec.satisfies("@:9.9.9"): + cmake_args.append("-DLLDB_DISABLE_PYTHON:Bool={0}".format( + 'ON' if '~python' in spec else 'OFF')) + if "+lldb" in spec and spec.satisfies("@10.0.0:"): + cmake_args.append("-DLLDB_ENABLE_PYTHON:Bool={0}".format( + 'ON' if '+python' in spec else 'OFF')) if "+gold" in spec: cmake_args.append( @@ -355,6 +375,9 @@ def cmake_args(self): ): cmake_args.append("-DCMAKE_BUILD_WITH_INSTALL_RPATH=1") + if self.spec.satisfies("~code_signing platform=darwin"): + cmake_args.append('-DLLDB_USE_SYSTEM_DEBUGSERVER=ON') + # Semicolon seperated list of projects to enable cmake_args.append( "-DLLVM_ENABLE_PROJECTS:STRING={0}".format(";".join(projects)) diff --git a/var/spack/repos/builtin/packages/log4c/package.py b/var/spack/repos/builtin/packages/log4c/package.py new file mode 100644 index 00000000000..afc9da054a2 --- /dev/null +++ b/var/spack/repos/builtin/packages/log4c/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Log4c(AutotoolsPackage): + """Library for writing log messages from C programs""" + + homepage = "http://log4c.sourceforge.net/" + url = "https://downloads.sourceforge.net/project/log4c/log4c/1.2.4/log4c-1.2.4.tar.gz" + + version('1.2.4', sha256='5991020192f52cc40fa852fbf6bbf5bd5db5d5d00aa9905c67f6f0eadeed48ea') + + depends_on('expat@1.95.1:') diff --git a/var/spack/repos/builtin/packages/lsf/package.py b/var/spack/repos/builtin/packages/lsf/package.py index ccb04d01acc..5c92693c4d4 100644 --- a/var/spack/repos/builtin/packages/lsf/package.py +++ b/var/spack/repos/builtin/packages/lsf/package.py @@ -10,7 +10,10 @@ class Lsf(Package): """IBM Platform LSF is a batch scheduler for HPC environments""" - homepage = "https://www.ibm.com/marketplace/hpc-workload-management" + homepage = "https://www.ibm.com/products/hpc-workload-management" + has_code = False + + version('10.1') # LSF needs to be added as an external package to SPACK. For this, the # config file packages.yaml needs to be adjusted: @@ -21,4 +24,6 @@ class Lsf(Package): # buildable: False def install(self, spec, prefix): - raise InstallError('LSF is not installable; it is vendor supplied') + raise InstallError( + self.spec.format('{name} is not installable, you need to specify ' + 'it as an external package in packages.yaml')) diff --git a/var/spack/repos/builtin/packages/ltr-retriever/package.py b/var/spack/repos/builtin/packages/ltr-retriever/package.py new file mode 100644 index 00000000000..2cc7f0a2319 --- /dev/null +++ b/var/spack/repos/builtin/packages/ltr-retriever/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +from os import symlink + + +class LtrRetriever(Package): + """LTR_retriever is a highly accurate and sensitive program for + identification of LTR retrotransposons.""" + + homepage = "https://github.com/oushujun/LTR_retriever" + url = "https://github.com/oushujun/LTR_retriever/archive/v2.8.7.tar.gz" + + version('2.8.7', sha256='29ca6f699c57b5e964aa0ee6c7d3e1e4cd5362dadd789e5f0e8c82fe0bb29369') + + depends_on('perl', type='run') + depends_on('blast-plus', type='run') + depends_on('hmmer@3.1b2:', type='run') + depends_on('cdhit', type='run') + depends_on('repeatmasker', type='run') + + def install(self, spec, prefix): + filter_file(r'BLAST\+=.*', 'BLAST+=%s' % spec['blast-plus'].prefix.bin, + 'paths') + filter_file('RepeatMasker=.*', + 'RepeatMasker=%s' % spec['repeatmasker'].prefix.bin, + 'paths') + filter_file('HMMER=.*', + 'HMMER=%s' % spec['hmmer'].prefix.bin, + 'paths') + filter_file('CDHIT=.*', + 'CDHIT=%s' % spec['cdhit'].prefix, + 'paths') + filter_file('BLAST=.*', '', 'paths') + + mkdirp(prefix.opt) + mkdirp(prefix.bin) + + install_tree('.', prefix.opt.ltr_retriever) + + symlink(prefix.opt.ltr_retriever.LTR_retriever, + prefix.bin.LTR_retriever) diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py index 1c6b152aa34..28554dae5b2 100644 --- a/var/spack/repos/builtin/packages/lua/package.py +++ b/var/spack/repos/builtin/packages/lua/package.py @@ -54,7 +54,7 @@ def install(self, spec, prefix): 'MYLDFLAGS=-L%s -L%s' % ( spec['readline'].prefix.lib, spec['ncurses'].prefix.lib), - 'MYLIBS=-lncursesw', + 'MYLIBS=-lncursesw -ltinfow', 'CC=%s -std=gnu99 %s' % (spack_cc, self.compiler.cc_pic_flag), target) diff --git a/var/spack/repos/builtin/packages/lxc/package.py b/var/spack/repos/builtin/packages/lxc/package.py new file mode 100644 index 00000000000..0555b6b642b --- /dev/null +++ b/var/spack/repos/builtin/packages/lxc/package.py @@ 
-0,0 +1,33 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lxc(AutotoolsPackage): + """LXC is the well-known and heavily tested low-level Linux container + runtime. It is in active development since 2008 and has proven itself + in critical production environments world-wide. Some of its core + contributors are the same people that helped to implement various + well-known containerization features inside the Linux kernel.""" + + homepage = "https://linuxcontainers.org/lxc/" + url = "https://github.com/lxc/lxc/archive/lxc-4.0.2.tar.gz" + + version('4.0.2', sha256='89a9f1c6c9c0c43ffc4ec4d281381d60dcf698af1578effa491be97885ab282a') + version('4.0.1', sha256='5b17c48db24d93f8a687bf4557358e252126c50a66f5756b3e0ea2cf04a60d05') + version('4.0.0', sha256='8cd36f002f656bbcd01679e6b8892f81ed036d5589aed45b36358014b32277dd') + version('3.2.1', sha256='59f46fad0a6d921c59a6768ba781295e0986989a96e2d216de2b4b3a14392e65') + version('3.2.0', sha256='5dbf25a1c15aa96e184a4e9ef580d40f08bf06818ad21614d6c79fce5447c7eb') + version('3.1.0', sha256='14c34bb3390c60331107a5fbd5c6520e4873c774de2293e9efcb3c0e860b807d') + version('3.0.4', sha256='12a126e634a8df81507fd9d3a4984bacaacf22153c11f024e215810ea78fcc4f') + version('3.0.3', sha256='e794f287755d2529cb49f01b72802abfec31f2a02259719b60a62897da6e8298') + version('2.0.11', sha256='31334ffe0e2d8e38779d80ce670a523f4f5559c2a02c9e085c2f0cf43995d0b0') + version('2.0.10', sha256='b748de0914467aafea18a568602735907fc95f4272609dba7b0f8c91d7dde776') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') diff --git a/var/spack/repos/builtin/packages/lynx/package.py b/var/spack/repos/builtin/packages/lynx/package.py new file mode 100644 index 00000000000..66c6f5408e9 --- 
/dev/null +++ b/var/spack/repos/builtin/packages/lynx/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lynx(AutotoolsPackage): + """Lynx is the text web browser.""" + + homepage = "https://lynx.invisible-island.net/" + url = "https://invisible-mirror.net/archives/lynx/tarballs/lynx2.8.9rel.1.tar.gz" + + version('2.8.9.1', sha256='a46e4167b8f02c066d2fe2eafcc5603367be0e3fe2e59e9fc4eb016f306afc8e') + + depends_on('ncurses') + + def url_for_version(self, version): + version_str = version.string + index = version_str.rfind('.') + tmp = list(version_str) + if index >= 0: + tmp.insert(index, 'rel') + version_str = ''.join(tmp) + else: + version_str = version + url = "https://invisible-mirror.net/archives/lynx/tarballs/lynx{0}.tar.gz".format(version_str) + return url diff --git a/var/spack/repos/builtin/packages/lzop/package.py b/var/spack/repos/builtin/packages/lzop/package.py new file mode 100644 index 00000000000..9a7096e01bb --- /dev/null +++ b/var/spack/repos/builtin/packages/lzop/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Lzop(CMakePackage): + """lzop is a file compressor which is very similar to gzip. 
lzop uses + the LZO data compression library for compression services, and its main + advantages over gzip are much higher compression and decompression speed + (at the cost of some compression ratio).""" + + homepage = "https://www.lzop.org" + url = "https://www.lzop.org/download/lzop-1.03.tar.gz" + + version('1.04', sha256='7e72b62a8a60aff5200a047eea0773a8fb205caf7acbe1774d95147f305a2f41') + version('1.03', sha256='c1425b8c77d49f5a679d5a126c90ea6ad99585a55e335a613cae59e909dbb2c9') + version('1.01', sha256='28acd94d933befbc3af986abcfe833173fb7563b66533fdb4ac592f38bb944c7') + + depends_on('pkgconfig', type='build') + depends_on('lzo') diff --git a/var/spack/repos/builtin/packages/magics/package.py b/var/spack/repos/builtin/packages/magics/package.py index 627454af755..c9c331eeb5e 100644 --- a/var/spack/repos/builtin/packages/magics/package.py +++ b/var/spack/repos/builtin/packages/magics/package.py @@ -14,12 +14,13 @@ class Magics(CMakePackage): to be as backwards-compatible as possible with the Fortran interface.""" homepage = "https://software.ecmwf.int/wiki/display/MAGP/Magics" - url = "https://confluence.ecmwf.int/download/attachments/3473464/Magics-4.1.0-Source.tar.gz?api=v2" + url = "https://confluence.ecmwf.int/download/attachments/3473464/Magics-4.2.4-Source.tar.gz?api=v2" list_url = "https://software.ecmwf.int/wiki/display/MAGP/Releases" # The policy on which minor releases remain available and which get deleted # after a newer version becomes available is unclear. 
- version('4.1.0', sha256='e56fb1bf82d57957a61a76284ad22024f4a7c3c989f6f796e57dfd45d88400c0') + version('4.2.4', sha256='920c7dbb1aaabe65a31c6c18010829210f8b2f8d614b6c405dc5a4530e346f07') + version('4.1.0', sha256='da626c31f53716990754dd72ab7b2f3902a8ad924b23ef3309bd14900d170541') version('2.34.3', sha256='38487562e83c0470f94d9c7fb9418cbadf92f1e643033237baba2abdc77e6238') version('2.34.1', sha256='8df27f8f262ebc32a61f8696df15a7b4a6e4203b2a8e53fe7aa13caa1c4e3fa4') version('2.33.0', sha256='32d3079749f89988715a8c3df01b712d9b989b7fd242828ec09563e47c5a3e82') @@ -92,6 +93,10 @@ class Magics(CMakePackage): depends_on('python', when='+python', type=('link', 'run')) depends_on('py-numpy', when='+python', type=('build', 'run')) depends_on('swig', when='+python', type='build') + # Versions @4: supports python 2.7 and 3.x, and require python + # and py-jinja2 for build (even though does not build python interface) + depends_on('python', when='@4.0.0:', type=('build')) + depends_on('py-jinja2', when='@4.0.0:', type=('build')) conflicts('grib=eccodes', when='@:2.29.0', msg='Eccodes is supported starting version 2.29.1') diff --git a/var/spack/repos/builtin/packages/maloc/package.py b/var/spack/repos/builtin/packages/maloc/package.py new file mode 100644 index 00000000000..058ce3f0c88 --- /dev/null +++ b/var/spack/repos/builtin/packages/maloc/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Maloc(AutotoolsPackage): + """MALOC (Minimal Abstraction Layer for Object-oriented C) + is a small, portable, abstract C environment library for + object-oriented C programming. 
MALOC is used as the + foundation layer for a number of scientific applications, + including MC, SG, and APBS.""" + + homepage = "http://fetk.org/codes/maloc/" + url = "http://www.fetk.org/codes/download/maloc-1.0.tar.gz" + + version('1.5', sha256='58e1197fcd4c74d3cbb1d39d712eb0a3c5886a1e6629f22c5c78ce2bac983fc0') + version('1.4', sha256='cba0c6730f148bf7ddb77dac07e497655642f43b632256fcebf3192b45af1833') + version('1.3', sha256='337788ac8f263487aba5b3aa5ef7f33eaac1d3951ad49349078d5ed77482ad2e') + version('1.2', sha256='e6033195a054bad7527d360e52349a4d1eb876c681a58fa373f42fd1ab26962c') + version('1.1', sha256='b5dd7923e84f13e7ed43304ed1062de24171c5a7a042a12b0d1e501d6eaedf58') + version('1.0', sha256='23f3ea3215067fd8f1ba4c407375f387b5f1d11258f29508295e651828d32cb7') + + variant('doc', default=False, description='Build documentation.') + + depends_on('graphviz', type='build', when='+doc') + depends_on('doxygen', type='build', when='+doc') + + def configure_args(self): + spec = self.spec + args = [] + + if '~doc' in spec: + args.append('--with-doxygen=no') + args.append('--with-dot=no') + + return args diff --git a/var/spack/repos/builtin/packages/margo/package.py b/var/spack/repos/builtin/packages/margo/package.py index 30e124cac64..d43d29841f2 100644 --- a/var/spack/repos/builtin/packages/margo/package.py +++ b/var/spack/repos/builtin/packages/margo/package.py @@ -28,7 +28,7 @@ class Margo(AutotoolsPackage): depends_on('pkgconfig', type='build') depends_on('mercury') - depends_on('argobots@1.0rc1:') + depends_on('argobots@1.0:') build_directory = 'spack-build' diff --git a/var/spack/repos/builtin/packages/mariadb/package.py b/var/spack/repos/builtin/packages/mariadb/package.py index 9d3f5153174..11df1fc7283 100644 --- a/var/spack/repos/builtin/packages/mariadb/package.py +++ b/var/spack/repos/builtin/packages/mariadb/package.py @@ -33,6 +33,7 @@ class Mariadb(CMakePackage): 'operations in the mariadb client library.') provides('mariadb-client') + 
provides('mysql-client') depends_on('boost') depends_on('cmake@2.6:', type='build') diff --git a/var/spack/repos/builtin/packages/memtester/package.py b/var/spack/repos/builtin/packages/memtester/package.py new file mode 100644 index 00000000000..7821d1b15df --- /dev/null +++ b/var/spack/repos/builtin/packages/memtester/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Memtester(MakefilePackage): + """A userspace utility for testing the memory subsystem for faults.""" + + homepage = "http://pyropus.ca/software/memtester/" + url = "http://pyropus.ca/software/memtester/old-versions/memtester-4.3.0.tar.gz" + + version('4.3.0', sha256='f9dfe2fd737c38fad6535bbab327da9a21f7ce4ea6f18c7b3339adef6bf5fd88') + version('4.2.2', sha256='a494569d58d642c796332a1b7f3b4b86845b52da66c15c96fbeecd74e48dae8e') + version('4.2.1', sha256='3433e1c757e56457610f5a97bf1a2d612c609290eba5183dd273e070134a21d2') + version('4.2.0', sha256='cb9d5437a0c429d18500bddef93084bb2fead0d5ccfedfd00ee28ff118e52695') + version('4.1.3', sha256='ac56f0b6d6d6e58bcf2a3fa7f2c9b29894f5177871f21115a1906c535106acf6') + + def edit(self, spec, prefix): + makefile = FileFilter("Makefile") + makefile.filter("INSTALLPATH\t= /usr/local", + "INSTALLPATH\t= {0}".format(self.prefix)) diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py index 5a2893a645c..b277a03d194 100644 --- a/var/spack/repos/builtin/packages/mesa/package.py +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -13,6 +13,7 @@ class Mesa(AutotoolsPackage): - a system for rendering interactive 3D graphics.""" homepage = "http://www.mesa3d.org" + maintainers = ['v-dobrev'] # Note that we always want to build from the git repo instead of a # tarball since the tarball has pre-generated 
files for certain versions @@ -180,7 +181,8 @@ def configure_args(self): @property def libs(self): for dir in ['lib64', 'lib']: - libs = find_libraries('libGL', join_path(self.prefix, dir), + libs = find_libraries(['libGL', 'libOSMesa'], + join_path(self.prefix, dir), shared=True, recursive=False) if libs: return libs diff --git a/var/spack/repos/builtin/packages/meson/package.py b/var/spack/repos/builtin/packages/meson/package.py index dbda0f5e780..8ff3ddf32e0 100644 --- a/var/spack/repos/builtin/packages/meson/package.py +++ b/var/spack/repos/builtin/packages/meson/package.py @@ -14,6 +14,11 @@ class Meson(PythonPackage): homepage = "http://mesonbuild.com/" url = "https://github.com/mesonbuild/meson/archive/0.49.0.tar.gz" + maintainers = ['michaelkuhn'] + + version('0.54.3', sha256='c25caff342b5368bfe33fab6108f454fcf12e2f2cef70817205872ddef669e8b') + version('0.54.2', sha256='85cafdc70ae7d1d9d506e7356b917c649c4df2077bd6a0382db37648aa4ecbdb') + version('0.54.1', sha256='854e8b94ab36e5aece813d2b2aee8a639bd52201dfea50890722ac9128e2f59e') version('0.54.0', sha256='95efdbaa7cb3e915ab9a7b26b1412475398fdc3e834842a780f1646c7764f2d9') version('0.53.2', sha256='eab4f5d5dde12d002b7ddd958a9a0658589b63622b6cea2715e0235b95917888') version('0.49.1', sha256='a944e7f25a2bc8e4ba3502ab5835d8a8b8f2530415c9d6fcffb53e0abaea2ced') diff --git a/var/spack/repos/builtin/packages/metall/package.py b/var/spack/repos/builtin/packages/metall/package.py index cec7669ad2f..ea1351fe50b 100644 --- a/var/spack/repos/builtin/packages/metall/package.py +++ b/var/spack/repos/builtin/packages/metall/package.py @@ -9,10 +9,12 @@ class Metall(CMakePackage): homepage = "https://github.com/LLNL/metall" git = "https://github.com/LLNL/metall.git" + url = "https://github.com/LLNL/metall/archive/v0.2.tar.gz" maintainers = ['KIwabuchi', 'rogerpearce', 'mayagokhale'] version('develop', branch='develop') + version('0.2', sha256='35cdf3505d2f8d0282a0d5c60b69a0ec5ec6d77ac3facce7549eb874df27be1d') 
depends_on('boost@1.64:', type=('build', 'link')) diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py index 12d1223a727..524561310b9 100644 --- a/var/spack/repos/builtin/packages/metis/package.py +++ b/var/spack/repos/builtin/packages/metis/package.py @@ -171,11 +171,10 @@ def install(self, spec, prefix): @when('@5:') def install(self, spec, prefix): source_directory = self.stage.source_path - build_directory = join_path(source_directory, 'build') + build_directory = join_path(self.stage.path, 'build') - options = std_cmake_args[:] + options = CMakePackage._std_args(self) options.append('-DGKLIB_PATH:PATH=%s/GKlib' % source_directory) - options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix) # Normally this is available via the 'CMakePackage' object, but metis # IS-A 'Package' (not a 'CMakePackage') to support non-cmake metis@:5. diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index 5f6d8b5c0a2..3a4abab49bf 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -366,6 +366,21 @@ def find_optional_library(name, prefix): cxxflags = spec.compiler_flags['cxxflags'] if cxxflags: + # Add opt/debug flags if they are not present in global cxx flags + opt_flag_found = any(f in self.compiler.opt_flags + for f in cxxflags) + debug_flag_found = any(f in self.compiler.debug_flags + for f in cxxflags) + + if '+debug' in spec: + if not debug_flag_found: + cxxflags.append('-g') + if not opt_flag_found: + cxxflags.append('-O0') + else: + if not opt_flag_found: + cxxflags.append('-O2') + cxxflags = [(xcompiler + flag) for flag in cxxflags] if '+cuda' in spec: cxxflags += [ diff --git a/var/spack/repos/builtin/packages/miniaero/package.py b/var/spack/repos/builtin/packages/miniaero/package.py index f4679477199..3588e87d28d 100644 --- 
a/var/spack/repos/builtin/packages/miniaero/package.py +++ b/var/spack/repos/builtin/packages/miniaero/package.py @@ -19,14 +19,15 @@ class Miniaero(MakefilePackage): version('2016-11-11', commit='f46d135479a5be19ec5d146ccaf0e581aeff4596') - depends_on('kokkos') + depends_on('kokkos-legacy') @property def build_targets(self): targets = [ '--directory=kokkos', 'CXX=c++', - 'KOKKOS_PATH={0}'.format(self.spec['kokkos'].prefix) + 'KOKKOS_PATH={0}'.format( + self.spec['kokkos-legacy'].prefix) ] return targets diff --git a/var/spack/repos/builtin/packages/miniconda3/package.py b/var/spack/repos/builtin/packages/miniconda3/package.py index f75ce7420c7..b3bda3dfaad 100644 --- a/var/spack/repos/builtin/packages/miniconda3/package.py +++ b/var/spack/repos/builtin/packages/miniconda3/package.py @@ -13,6 +13,7 @@ class Miniconda3(Package): homepage = "https://conda.io/miniconda.html" url = "https://repo.continuum.io/miniconda/Miniconda3-4.6.14-Linux-x86_64.sh" + version('4.8.2', sha256='5bbb193fd201ebe25f4aeb3c58ba83feced6a25982ef4afa86d5506c3656c142', url='https://repo.anaconda.com/miniconda/Miniconda3-py38_4.8.2-Linux-x86_64.sh', expand=False) version('4.7.12.1', sha256='bfe34e1fa28d6d75a7ad05fd02fa5472275673d5f5621b77380898dee1be15d2', expand=False) version('4.6.14', sha256='0d6b23895a91294a4924bd685a3a1f48e35a17970a073cd2f684ffe2c31fc4be', expand=False) version('4.5.11', sha256='ea4594241e13a2671c5b158b3b813f0794fe58d514795fbf72a1aad24db918cf', expand=False) diff --git a/var/spack/repos/builtin/packages/miniforge3/package.py b/var/spack/repos/builtin/packages/miniforge3/package.py new file mode 100644 index 00000000000..621db7b0342 --- /dev/null +++ b/var/spack/repos/builtin/packages/miniforge3/package.py @@ -0,0 +1,42 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Miniforge3(Package): + """Miniforge3 is a minimal installer for conda specific to conda-forge.""" + + homepage = "https://github.com/conda-forge/miniforge" + url = "https://github.com/conda-forge/miniforge/releases/download/4.8.3-2/Miniforge3-4.8.3-2-Linux-x86_64.sh" + + version('4.8.3-4-Linux-x86_64', + url='https://github.com/conda-forge/miniforge/releases/download/4.8.3-4/Miniforge3-4.8.3-4-Linux-x86_64.sh', + sha256='24951262a126582f5f2e1cf82c9cd0fa20e936ef3309fdb8397175f29e647646', + expand=False) + version('4.8.3-4-Linux-aarch64', + url='https://github.com/conda-forge/miniforge/releases/download/4.8.3-4/Miniforge3-4.8.3-4-Linux-aarch64.sh', + sha256='52a8dde14ecfb633800a2de26543a78315058e30f5883701da1ad2f2d5ba9ed8', + expand=False) + version('4.8.3-2-Linux-x86_64', + url='https://github.com/conda-forge/miniforge/releases/download/4.8.3-2/Miniforge3-4.8.3-2-Linux-x86_64.sh', + sha256='c8e5b894fe91ce0f86e61065d2247346af107f8d53de0ad89ec848701c4ec1f9', + expand=False) + version('4.8.3-2-Linux-aarch64', + url='https://github.com/conda-forge/miniforge/releases/download/4.8.3-2/Miniforge3-4.8.3-2-Linux-aarch64.sh', + sha256='bfefc0ede6354568978b4198607edd7f17c2f50ca4c6a47e9f22f8c257c8230a', + expand=False) + version('4.8.3-2-MacOSX-x86_64', + url='https://github.com/conda-forge/miniforge/releases/download/4.8.3-2/Miniforge3-4.8.3-2-MacOSX-x86_64.sh', + sha256='25ca082ab00a776db356f9bbc660edf6d24659e2aec1cbec5fd4ce992d4d193d', + expand=False) + + def install(self, spec, prefix): + mkdirp(prefix) + pkgname = 'Miniforge3-{0}.sh'.format(self.version) + chmod = which('chmod') + chmod('+x', pkgname) + sh = which('sh') + sh('./{0}'.format(pkgname), '-b', '-f', '-s', '-p', prefix) diff --git a/var/spack/repos/builtin/packages/minighost/package.py b/var/spack/repos/builtin/packages/minighost/package.py index f39655baf05..b1903f7e62a 100644 --- 
a/var/spack/repos/builtin/packages/minighost/package.py +++ b/var/spack/repos/builtin/packages/minighost/package.py @@ -16,7 +16,7 @@ class Minighost(MakefilePackage): """ homepage = "http://mantevo.org" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/miniGhost_1.0.1.tar.gz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/MiniGhost/miniGhost_1.0.1.tar.gz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/minimap2/package.py b/var/spack/repos/builtin/packages/minimap2/package.py index 16f7f7aec12..4c8e2a1c4c3 100644 --- a/var/spack/repos/builtin/packages/minimap2/package.py +++ b/var/spack/repos/builtin/packages/minimap2/package.py @@ -28,7 +28,7 @@ def install_minimap2(self): if self.spec.target.family == 'aarch64': make_arg.extend([ 'arm_neon=1', - 'aarch64~1' + 'aarch64=1' ]) make(*make_arg) mkdirp(prefix.bin) diff --git a/var/spack/repos/builtin/packages/minimd/package.py b/var/spack/repos/builtin/packages/minimd/package.py index d505015fb59..a4d99402b4c 100644 --- a/var/spack/repos/builtin/packages/minimd/package.py +++ b/var/spack/repos/builtin/packages/minimd/package.py @@ -16,7 +16,7 @@ class Minimd(MakefilePackage): """ homepage = "http://mantevo.org" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/miniMD_1.2.tgz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/MiniMD/miniMD_1.2.tgz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/minismac2d/package.py b/var/spack/repos/builtin/packages/minismac2d/package.py index 545176aa776..5e891562780 100644 --- a/var/spack/repos/builtin/packages/minismac2d/package.py +++ b/var/spack/repos/builtin/packages/minismac2d/package.py @@ -14,7 +14,7 @@ class Minismac2d(MakefilePackage): """ homepage = "http://mantevo.org" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/miniSMAC2D-2.0.tgz" + url = 
"http://downloads.mantevo.org/releaseTarballs/miniapps/MiniSMAC2D/miniSMAC2D-2.0.tgz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/minixyce/package.py b/var/spack/repos/builtin/packages/minixyce/package.py index ee8fe84c864..17d4a4b0322 100644 --- a/var/spack/repos/builtin/packages/minixyce/package.py +++ b/var/spack/repos/builtin/packages/minixyce/package.py @@ -13,7 +13,7 @@ class Minixyce(MakefilePackage): """ homepage = "https://mantevo.org" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/miniXyce_1.0.tar.gz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/MiniXyce/miniXyce_1.0.tar.gz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/modylas/gcc_format.patch b/var/spack/repos/builtin/packages/modylas/gcc_format.patch new file mode 100755 index 00000000000..086c4275aac --- /dev/null +++ b/var/spack/repos/builtin/packages/modylas/gcc_format.patch @@ -0,0 +1,13 @@ +diff --git a/source/src/parse_f.f b/source/src/parse_f.f +index c58f90e..37821dd 100644 +--- a/source/src/parse_f.f ++++ b/source/src/parse_f.f +@@ -360,7 +360,7 @@ + stop + endif + +- 9000 format('ERROR: the order of atoms is wrong:',i) ++ 9000 format('ERROR: the order of atoms is wrong:',i8) + + return + end diff --git a/var/spack/repos/builtin/packages/modylas/makefile.patch b/var/spack/repos/builtin/packages/modylas/makefile.patch new file mode 100755 index 00000000000..b308306ff5e --- /dev/null +++ b/var/spack/repos/builtin/packages/modylas/makefile.patch @@ -0,0 +1,418 @@ +diff --git a/source/src/Makefile.am b/source/src/Makefile.am +index 5a41380..1807821 100644 +--- a/source/src/Makefile.am ++++ b/source/src/Makefile.am +@@ -21,6 +21,7 @@ modylas_mdtrj2xyz_SOURCES = version.f fmodules.f MAP_module.f \ + parse_f.f parse_input.f mdff.f + if HAVE_MPI + bin_PROGRAMS += modylas ++FCFLAGS += -DMPIPARA + endif + FCCOMPILE = $(FC) $(AM_FCFLAGS) $(FCFLAGS) + FCLD = $(FC) +diff --git a/source/src/Makefile.in 
b/source/src/Makefile.in +index c0f5dca..7c1988f 100644 +--- a/source/src/Makefile.in ++++ b/source/src/Makefile.in +@@ -1,9 +1,8 @@ +-# Makefile.in generated by automake 1.11.1 from Makefile.am. ++# Makefile.in generated by automake 1.16.1 from Makefile.am. + # @configure_input@ + +-# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, +-# 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, +-# Inc. ++# Copyright (C) 1994-2018 Free Software Foundation, Inc. ++ + # This Makefile.in is free software; the Free Software Foundation + # gives unlimited permission to copy and/or distribute it, + # with or without modifications, as long as this notice is preserved. +@@ -16,6 +15,61 @@ + @SET_MAKE@ + + VPATH = @srcdir@ ++am__is_gnu_make = { \ ++ if test -z '$(MAKELEVEL)'; then \ ++ false; \ ++ elif test -n '$(MAKE_HOST)'; then \ ++ true; \ ++ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ ++ true; \ ++ else \ ++ false; \ ++ fi; \ ++} ++am__make_running_with_option = \ ++ case $${target_option-} in \ ++ ?) 
;; \ ++ *) echo "am__make_running_with_option: internal error: invalid" \ ++ "target option '$${target_option-}' specified" >&2; \ ++ exit 1;; \ ++ esac; \ ++ has_opt=no; \ ++ sane_makeflags=$$MAKEFLAGS; \ ++ if $(am__is_gnu_make); then \ ++ sane_makeflags=$$MFLAGS; \ ++ else \ ++ case $$MAKEFLAGS in \ ++ *\\[\ \ ]*) \ ++ bs=\\; \ ++ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ ++ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ ++ esac; \ ++ fi; \ ++ skip_next=no; \ ++ strip_trailopt () \ ++ { \ ++ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ ++ }; \ ++ for flg in $$sane_makeflags; do \ ++ test $$skip_next = yes && { skip_next=no; continue; }; \ ++ case $$flg in \ ++ *=*|--*) continue;; \ ++ -*I) strip_trailopt 'I'; skip_next=yes;; \ ++ -*I?*) strip_trailopt 'I';; \ ++ -*O) strip_trailopt 'O'; skip_next=yes;; \ ++ -*O?*) strip_trailopt 'O';; \ ++ -*l) strip_trailopt 'l'; skip_next=yes;; \ ++ -*l?*) strip_trailopt 'l';; \ ++ -[dEDm]) skip_next=yes;; \ ++ -[JT]) skip_next=yes;; \ ++ esac; \ ++ case $$flg in \ ++ *$$target_option*) has_opt=yes; break;; \ ++ esac; \ ++ done; \ ++ test $$has_opt = yes ++am__make_dryrun = (target_option=n; $(am__make_running_with_option)) ++am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) + pkgdatadir = $(datadir)/@PACKAGE@ + pkgincludedir = $(includedir)/@PACKAGE@ + pkglibdir = $(libdir)/@PACKAGE@ +@@ -35,13 +89,13 @@ POST_UNINSTALL = : + bin_PROGRAMS = modylas-text2bin$(EXEEXT) modylas-mdtrj2xyz$(EXEEXT) \ + $(am__EXEEXT_1) + @HAVE_MPI_TRUE@am__append_1 = modylas ++@HAVE_MPI_TRUE@am__append_2 = -DMPIPARA + subdir = src +-DIST_COMMON = $(srcdir)/Doxyfile.in $(srcdir)/Makefile.am \ +- $(srcdir)/Makefile.in + ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 + am__aclocal_m4_deps = $(top_srcdir)/configure.in + am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) ++DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) + mkinstalldirs = $(install_sh) -d + CONFIG_HEADER = $(top_builddir)/config.h 
+ CONFIG_CLEAN_FILES = Doxyfile +@@ -80,18 +134,62 @@ am_modylas_text2bin_OBJECTS = version.$(OBJEXT) fmodules.$(OBJEXT) \ + mdff.$(OBJEXT) + modylas_text2bin_OBJECTS = $(am_modylas_text2bin_OBJECTS) + modylas_text2bin_LDADD = $(LDADD) ++AM_V_P = $(am__v_P_@AM_V@) ++am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) ++am__v_P_0 = false ++am__v_P_1 = : ++AM_V_GEN = $(am__v_GEN_@AM_V@) ++am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) ++am__v_GEN_0 = @echo " GEN " $@; ++am__v_GEN_1 = ++AM_V_at = $(am__v_at_@AM_V@) ++am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) ++am__v_at_0 = @ ++am__v_at_1 = + DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) + F77COMPILE = $(F77) $(AM_FFLAGS) $(FFLAGS) ++AM_V_F77 = $(am__v_F77_@AM_V@) ++am__v_F77_ = $(am__v_F77_@AM_DEFAULT_V@) ++am__v_F77_0 = @echo " F77 " $@; ++am__v_F77_1 = + F77LD = $(F77) ++AM_V_F77LD = $(am__v_F77LD_@AM_V@) ++am__v_F77LD_ = $(am__v_F77LD_@AM_DEFAULT_V@) ++am__v_F77LD_0 = @echo " F77LD " $@; ++am__v_F77LD_1 = + SOURCES = $(modylas_SOURCES) $(modylas_mdtrj2xyz_SOURCES) \ + $(modylas_text2bin_SOURCES) + DIST_SOURCES = $(modylas_SOURCES) $(modylas_mdtrj2xyz_SOURCES) \ + $(modylas_text2bin_SOURCES) ++am__can_run_installinfo = \ ++ case $$AM_UPDATE_INFO_DIR in \ ++ n|no|NO) false;; \ ++ *) (install-info --version) >/dev/null 2>&1;; \ ++ esac ++am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) ++# Read a list of newline-separated strings from the standard input, ++# and print each of them once, without duplicates. Input order is ++# *not* preserved. ++am__uniquify_input = $(AWK) '\ ++ BEGIN { nonempty = 0; } \ ++ { items[$$0] = 1; nonempty = 1; } \ ++ END { if (nonempty) { for (i in items) print i; }; } \ ++' ++# Make sure the list of sources is unique. This is necessary because, ++# e.g., the same source file might be shared among _SOURCES variables ++# for different programs/libraries. 
++am__define_uniq_tagged_files = \ ++ list='$(am__tagged_files)'; \ ++ unique=`for i in $$list; do \ ++ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ ++ done | $(am__uniquify_input)` + ETAGS = etags + CTAGS = ctags ++am__DIST_COMMON = $(srcdir)/Doxyfile.in $(srcdir)/Makefile.in + DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) + ACLOCAL = @ACLOCAL@ + AMTAR = @AMTAR@ ++AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ + AUTOCONF = @AUTOCONF@ + AUTOHEADER = @AUTOHEADER@ + AUTOMAKE = @AUTOMAKE@ +@@ -109,7 +207,7 @@ ECHO_T = @ECHO_T@ + EXEEXT = @EXEEXT@ + F77 = @F77@ + FC = @FC@ +-FCFLAGS = @FCFLAGS@ ++FCFLAGS = @FCFLAGS@ $(am__append_2) + INSTALL = @INSTALL@ + INSTALL_DATA = @INSTALL_DATA@ + INSTALL_PROGRAM = @INSTALL_PROGRAM@ +@@ -127,6 +225,7 @@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ + PACKAGE_NAME = @PACKAGE_NAME@ + PACKAGE_STRING = @PACKAGE_STRING@ + PACKAGE_TARNAME = @PACKAGE_TARNAME@ ++PACKAGE_URL = @PACKAGE_URL@ + PACKAGE_VERSION = @PACKAGE_VERSION@ + PATH_SEPARATOR = @PATH_SEPARATOR@ + SET_MAKE = @SET_MAKE@ +@@ -186,7 +285,7 @@ modylas_SOURCES = version.f fmodules.f main_f90.f parse_f.f app_f90.f \ + diagonal.f matrix_inverse.f \ + comm.f comm_2.f comm_3.f comm_fmm.f \ + opt_integrate.f position_constrain.f \ +- calc_center_of_mass.f shake_rattle_com.f ++ calc_center_of_mass.f shake_rattle_com.f + + modylas_text2bin_SOURCES = version.f fmodules.f MAP_module.f \ + modylas-text2bin.f \ +@@ -218,14 +317,13 @@ $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign src/Makefile +-.PRECIOUS: Makefile + Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' 
in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ +- echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ +- cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ ++ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \ ++ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \ + esac; + + $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) +@@ -240,14 +338,18 @@ Doxyfile: $(top_builddir)/config.status $(srcdir)/Doxyfile.in + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ + install-binPROGRAMS: $(bin_PROGRAMS) + @$(NORMAL_INSTALL) +- test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ ++ if test -n "$$list"; then \ ++ echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ ++ $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ ++ fi; \ + for p in $$list; do echo "$$p $$p"; done | \ + sed 's/$(EXEEXT)$$//' | \ +- while read p p1; do if test -f $$p; \ +- then echo "$$p"; echo "$$p"; else :; fi; \ ++ while read p p1; do if test -f $$p \ ++ ; then echo "$$p"; echo "$$p"; else :; fi; \ + done | \ +- sed -e 'p;s,.*/,,;n;h' -e 's|.*|.|' \ ++ sed -e 'p;s,.*/,,;n;h' \ ++ -e 's|.*|.|' \ + -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ + sed 'N;N;N;s,\n, ,g' | \ + $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ +@@ -268,22 +370,26 @@ uninstall-binPROGRAMS: + @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + files=`for p in $$list; do echo "$$p"; done | \ + sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ +- -e 's/$$/$(EXEEXT)/' `; \ ++ -e 's/$$/$(EXEEXT)/' \ ++ `; \ + test -n "$$list" || exit 0; \ + echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ + cd "$(DESTDIR)$(bindir)" && rm -f $$files + + clean-binPROGRAMS: + -test -z "$(bin_PROGRAMS)" || 
rm -f $(bin_PROGRAMS) +-modylas$(EXEEXT): $(modylas_OBJECTS) $(modylas_DEPENDENCIES) ++ ++modylas$(EXEEXT): $(modylas_OBJECTS) $(modylas_DEPENDENCIES) $(EXTRA_modylas_DEPENDENCIES) + @rm -f modylas$(EXEEXT) +- $(F77LINK) $(modylas_OBJECTS) $(modylas_LDADD) $(LIBS) +-modylas-mdtrj2xyz$(EXEEXT): $(modylas_mdtrj2xyz_OBJECTS) $(modylas_mdtrj2xyz_DEPENDENCIES) ++ $(AM_V_F77LD)$(F77LINK) $(modylas_OBJECTS) $(modylas_LDADD) $(LIBS) ++ ++modylas-mdtrj2xyz$(EXEEXT): $(modylas_mdtrj2xyz_OBJECTS) $(modylas_mdtrj2xyz_DEPENDENCIES) $(EXTRA_modylas_mdtrj2xyz_DEPENDENCIES) + @rm -f modylas-mdtrj2xyz$(EXEEXT) +- $(F77LINK) $(modylas_mdtrj2xyz_OBJECTS) $(modylas_mdtrj2xyz_LDADD) $(LIBS) +-modylas-text2bin$(EXEEXT): $(modylas_text2bin_OBJECTS) $(modylas_text2bin_DEPENDENCIES) ++ $(AM_V_F77LD)$(F77LINK) $(modylas_mdtrj2xyz_OBJECTS) $(modylas_mdtrj2xyz_LDADD) $(LIBS) ++ ++modylas-text2bin$(EXEEXT): $(modylas_text2bin_OBJECTS) $(modylas_text2bin_DEPENDENCIES) $(EXTRA_modylas_text2bin_DEPENDENCIES) + @rm -f modylas-text2bin$(EXEEXT) +- $(F77LINK) $(modylas_text2bin_OBJECTS) $(modylas_text2bin_LDADD) $(LIBS) ++ $(AM_V_F77LD)$(F77LINK) $(modylas_text2bin_OBJECTS) $(modylas_text2bin_LDADD) $(LIBS) + + mostlyclean-compile: + -rm -f *.$(OBJEXT) +@@ -292,31 +398,20 @@ distclean-compile: + -rm -f *.tab.c + + .f.o: +- $(F77COMPILE) -c -o $@ $< ++ $(AM_V_F77)$(F77COMPILE) -c -o $@ $< + + .f.obj: +- $(F77COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` +- +-ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) +- list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ +- unique=`for i in $$list; do \ +- if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ +- done | \ +- $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ +- END { if (nonempty) { for (i in files) print i; }; }'`; \ +- mkid -fID $$unique +-tags: TAGS +- +-TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ +- $(TAGS_FILES) $(LISP) ++ $(AM_V_F77)$(F77COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` ++ ++ID: $(am__tagged_files) ++ 
$(am__define_uniq_tagged_files); mkid -fID $$unique ++tags: tags-am ++TAGS: tags ++ ++tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + set x; \ + here=`pwd`; \ +- list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ +- unique=`for i in $$list; do \ +- if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ +- done | \ +- $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ +- END { if (nonempty) { for (i in files) print i; }; }'`; \ ++ $(am__define_uniq_tagged_files); \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ +@@ -328,15 +423,11 @@ TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ + $$unique; \ + fi; \ + fi +-ctags: CTAGS +-CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ +- $(TAGS_FILES) $(LISP) +- list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ +- unique=`for i in $$list; do \ +- if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ +- done | \ +- $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ +- END { if (nonempty) { for (i in files) print i; }; }'`; \ ++ctags: ctags-am ++ ++CTAGS: ctags ++ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) ++ $(am__define_uniq_tagged_files); \ + test -z "$(CTAGS_ARGS)$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$unique +@@ -345,11 +436,29 @@ GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" ++cscopelist: cscopelist-am ++ ++cscopelist-am: $(am__tagged_files) ++ list='$(am__tagged_files)'; \ ++ case "$(srcdir)" in \ ++ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ ++ *) sdir=$(subdir)/$(srcdir) ;; \ ++ esac; \ ++ for i in $$list; do \ ++ if test -f "$$i"; then \ ++ echo "$(subdir)/$$i"; \ ++ else \ ++ echo "$$sdir/$$i"; \ ++ fi; \ ++ done >> $(top_builddir)/cscope.files + + distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + +-distdir: $(DISTFILES) ++distdir: $(BUILT_SOURCES) ++ $(MAKE) $(AM_MAKEFLAGS) distdir-am ++ ++distdir-am: 
$(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ +@@ -396,10 +505,15 @@ install-am: all-am + + installcheck: installcheck-am + install-strip: +- $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ +- install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ +- `test -z '$(STRIP)' || \ +- echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install ++ if test -z '$(STRIP)'; then \ ++ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ ++ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ ++ install; \ ++ else \ ++ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ ++ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ ++ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ ++ fi + mostlyclean-generic: + + clean-generic: +@@ -474,18 +588,21 @@ uninstall-am: uninstall-binPROGRAMS + + .MAKE: install-am install-strip + +-.PHONY: CTAGS GTAGS all all-am check check-am clean clean-binPROGRAMS \ +- clean-generic ctags distclean distclean-compile \ +- distclean-generic distclean-tags distdir dvi dvi-am html \ +- html-am info info-am install install-am install-binPROGRAMS \ +- install-data install-data-am install-dvi install-dvi-am \ +- install-exec install-exec-am install-html install-html-am \ +- install-info install-info-am install-man install-pdf \ +- install-pdf-am install-ps install-ps-am install-strip \ +- installcheck installcheck-am installdirs maintainer-clean \ +- maintainer-clean-generic mostlyclean mostlyclean-compile \ +- mostlyclean-generic pdf pdf-am ps ps-am tags uninstall \ +- uninstall-am uninstall-binPROGRAMS ++.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ ++ clean-binPROGRAMS clean-generic cscopelist-am ctags ctags-am \ ++ distclean distclean-compile distclean-generic distclean-tags \ ++ distdir dvi dvi-am html html-am 
info info-am install \ ++ install-am install-binPROGRAMS install-data install-data-am \ ++ install-dvi install-dvi-am install-exec install-exec-am \ ++ install-html install-html-am install-info install-info-am \ ++ install-man install-pdf install-pdf-am install-ps \ ++ install-ps-am install-strip installcheck installcheck-am \ ++ installdirs maintainer-clean maintainer-clean-generic \ ++ mostlyclean mostlyclean-compile mostlyclean-generic pdf pdf-am \ ++ ps ps-am tags tags-am uninstall uninstall-am \ ++ uninstall-binPROGRAMS ++ ++.PRECIOUS: Makefile + + parse_input.o: parse.o + parse.o: hash.o diff --git a/var/spack/repos/builtin/packages/modylas/package.py b/var/spack/repos/builtin/packages/modylas/package.py new file mode 100644 index 00000000000..ada4bc7fed1 --- /dev/null +++ b/var/spack/repos/builtin/packages/modylas/package.py @@ -0,0 +1,48 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import os + + +class Modylas(AutotoolsPackage): + """ + The 'MOlecular DYnamics Software for LArge Systems' (MODYLAS) is + a general-purpose, molecular dynamics simulation program suited + to the simulation of very large physical, chemical, + and biological systems. 
+ """ + + homepage = "https://www.modylas.org" + url = "file://{0}/MODYLAS_1.0.4.tar.gz".format(os.getcwd()) + + version('1.0.4', 'e0b5cccf8e363c1182eced37aa31b06b1c5b1526da7d449a6142424ac4ea6311') + + variant('mpi', default=True, description='Enable MPI support') + + # to define MPIPARA when +mpi + patch('makefile.patch') + # fix no width I in format + patch('gcc_format.patch', when='%gcc') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('mpi', when='+mpi') + + build_directory = 'source' + configure_directory = 'source' + + def setup_build_environment(self, env): + if self.spec.satisfies('+mpi'): + env.set('FC', self.spec['mpi'].mpifc, force=True) + fflags = ['-O3', self.compiler.openmp_flag] + if self.spec.satisfies('%gcc'): + fflags.append('-cpp') + elif self.spec.satisfies('%fj'): + fflags.append('-Cpp') + env.set('FCFLAGS', ' '.join(fflags)) + + def configure_args(self): + return self.enable_or_disable('mpi') diff --git a/var/spack/repos/builtin/packages/mongo-c-driver/package.py b/var/spack/repos/builtin/packages/mongo-c-driver/package.py index 27008d7561a..ee3fc581b9d 100644 --- a/var/spack/repos/builtin/packages/mongo-c-driver/package.py +++ b/var/spack/repos/builtin/packages/mongo-c-driver/package.py @@ -6,7 +6,7 @@ from spack import * -class MongoCDriver(AutotoolsPackage): +class MongoCDriver(Package): """libmongoc is a client library written in C for MongoDB.""" homepage = "https://github.com/mongodb/mongo-c-driver" @@ -14,6 +14,7 @@ class MongoCDriver(AutotoolsPackage): maintainers = ['michaelkuhn'] + version('1.16.2', sha256='0a722180e5b5c86c415b9256d753b2d5552901dc5d95c9f022072c3cd336887e') version('1.9.5', sha256='4a4bd0b0375450250a3da50c050b84b9ba8950ce32e16555714e75ebae0b8019') version('1.9.4', sha256='910c2f1b2e3df4d0ea39c2f242160028f90fcb8201f05339a730ec4ba70811fb') version('1.9.3', sha256='c2c94ef63aaa09efabcbadc4ac3c8740faa102266bdd2559d550f1955b824398') @@ -28,20 +29,73 @@ class 
MongoCDriver(AutotoolsPackage): variant('ssl', default=True, description='Enable SSL support.') variant('snappy', default=True, description='Enable Snappy support.') variant('zlib', default=True, description='Enable zlib support.') + variant('zstd', default=True, description='Enable zstd support.') patch('https://github.com/mongodb/mongo-c-driver/pull/466.patch', sha256='713a872217d11aba04a774785a2824d26b566543c270a1fa386114f5200fda20', when='@1.8.1') + depends_on('cmake@3.1:', type='build', when='@1.10.0:') + depends_on('autoconf', type='build', when='@1.8.1') depends_on('automake', type='build', when='@1.8.1') depends_on('libtool', type='build', when='@1.8.1') depends_on('m4', type='build', when='@1.8.1') + depends_on('pkgconfig', type='build') - depends_on('libbson') + # When updating mongo-c-driver, libbson has to be kept in sync. + depends_on('libbson@1.16.0:1.16.99', when='@1.16') + depends_on('libbson@1.9.0:1.9.99', when='@1.9') + depends_on('libbson@1.8.0:1.8.99', when='@1.8') + depends_on('libbson@1.7.0:1.7.99', when='@1.7') + depends_on('libbson@1.6.0:1.6.99', when='@1.6') depends_on('openssl', when='+ssl') depends_on('snappy', when='+snappy') depends_on('zlib', when='+zlib') + depends_on('zstd', when='+zstd') + + def cmake_args(self): + spec = self.spec + + args = [ + '-DENABLE_AUTOMATIC_INIT_AND_CLEANUP=OFF', + '-DENABLE_BSON=SYSTEM' + ] + + if '+ssl' in spec: + args.append('-DENABLE_SSL=OPENSSL') + else: + args.append('-DENABLE_SSL=OFF') + + if '+snappy' in spec: + args.append('-DENABLE_SNAPPY=ON') + else: + args.append('-DENABLE_SNAPPY=OFF') + + if '+zlib' in spec: + args.append('-DENABLE_ZLIB=SYSTEM') + else: + args.append('-DENABLE_ZLIB=OFF') + + if '+zstd' in spec: + args.append('-DENABLE_ZSTD=ON') + else: + args.append('-DENABLE_ZSTD=OFF') + + return args + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + # We cannot simply do + # cmake('..', *std_cmake_args, *self.cmake_args()) + # because that is not Python 2 
compatible. Instead, collect + # arguments into a temporary buffer first. + args = [] + args.extend(std_cmake_args) + args.extend(self.cmake_args()) + cmake('..', *args) + make() + make('install') @property def force_autoreconf(self): @@ -52,7 +106,8 @@ def configure_args(self): spec = self.spec args = [ - '--disable-automatic-init-and-cleanup' + '--disable-automatic-init-and-cleanup', + '--with-libbson=system' ] if '+ssl' in spec: @@ -72,8 +127,14 @@ def configure_args(self): elif spec.satisfies('@1.8.1:'): args.append('--with-zlib=system') - if spec.satisfies('@1.9.3:'): - args.append('--with-libbson=auto') - else: - args.append('--with-libbson=system') return args + + @when('@:1.9.99') + def install(self, spec, prefix): + configure('--prefix={0}'.format(prefix), *self.configure_args()) + make() + if self.run_tests: + make('check') + make('install') + if self.run_tests: + make('installcheck') diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py index b163d8a7244..fa97da76ed0 100644 --- a/var/spack/repos/builtin/packages/mpfr/package.py +++ b/var/spack/repos/builtin/packages/mpfr/package.py @@ -37,7 +37,7 @@ class Mpfr(AutotoolsPackage, GNUMirrorPackage): # Check the Bugs section of old release pages for patches. 
# https://www.mpfr.org/mpfr-X.Y.Z/#bugs patches = { - '4.0.2': '8f15fd27ab65341a60d724d594897d32f4597ddf642d0dc121995e2150181b0c', + '4.0.2': '3f80b836948aa96f8d1cb9cc7f3f55973f19285482a96f9a4e1623d460bcccf0', '4.0.1': '5230aab653fa8675fc05b5bdd3890e071e8df49a92a9d58c4284024affd27739', '3.1.6': '7a6dd71bcda4803d6b89612706a17b8816e1acd5dd9bf1bec29cf748f3b60008', '3.1.5': '1ae14fb3a54ae8e0faed20801970255b279eee9e5ac624891ab5d29727f0bc04', diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index 8360046ab43..c9c7886a088 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -144,6 +144,7 @@ class Mpich(AutotoolsPackage): conflicts('netmod=tcp', when='device=ch4') conflicts('pmi=pmi2', when='device=ch3 netmod=ofi') conflicts('pmi=pmix', when='device=ch3') + conflicts('pmi=pmix', when='+hydra') # MPICH does not require libxml2 and libpciaccess for versions before 3.3 # when ~hydra is set: prevent users from setting +libxml2 and +pci in this @@ -155,9 +156,16 @@ def setup_build_environment(self, env): env.unset('F90') env.unset('F90FLAGS') - def setup_dependent_build_environment(self, env, dependent_spec): - # On Cray, the regular compiler wrappers *are* the MPI wrappers. - if 'platform=cray' in self.spec: + # https://bugzilla.redhat.com/show_bug.cgi?id=1795817 + if self.spec.satisfies('%gcc@10:'): + env.set('FFLAGS', '-fallow-argument-mismatch') + + def setup_run_environment(self, env): + # Because MPI implementations provide compilers, they have to add to + # their run environments the code to make the compilers available. + # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. + # Cray MPIs always have cray in the module name, e.g. 
"cray-mpich" + if self.spec.external_module and 'cray' in self.spec.external_module: env.set('MPICC', spack_cc) env.set('MPICXX', spack_cxx) env.set('MPIF77', spack_fc) @@ -168,6 +176,9 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.set('MPIF77', join_path(self.prefix.bin, 'mpif77')) env.set('MPIF90', join_path(self.prefix.bin, 'mpif90')) + def setup_dependent_build_environment(self, env, dependent_spec): + self.setup_run_environment(env) + env.set('MPICH_CC', spack_cc) env.set('MPICH_CXX', spack_cxx) env.set('MPICH_F77', spack_f77) @@ -175,7 +186,9 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.set('MPICH_FC', spack_fc) def setup_dependent_package(self, module, dependent_spec): - if 'platform=cray' in self.spec: + # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. + # Cray MPIs always have cray in the module name, e.g. "cray-mpich" + if self.spec.external_module and 'cray' in self.spec.external_module: self.spec.mpicc = spack_cc self.spec.mpicxx = spack_cxx self.spec.mpifc = spack_fc diff --git a/var/spack/repos/builtin/packages/mpt/package.py b/var/spack/repos/builtin/packages/mpt/package.py index 12f7202ebc6..45dfce6ba5b 100644 --- a/var/spack/repos/builtin/packages/mpt/package.py +++ b/var/spack/repos/builtin/packages/mpt/package.py @@ -39,13 +39,20 @@ def libs(self): ) def setup_dependent_build_environment(self, env, dependent_spec): + self.setup_run_environment(env) + + # use the Spack compiler wrappers under MPI + env.set('MPICC_CC', spack_cc) + env.set('MPICXX_CXX', spack_cxx) + env.set('MPIF90_F90', spack_fc) + + def setup_run_environment(self, env): + # Because MPI is both runtime and compiler, we have to setup the mpi + # compilers as part of the run environment. 
env.set('MPICC', self.prefix.bin.mpicc) + env.set('MPICXX', self.prefix.bin.mpicxx) + env.set('MPIF77', self.prefix.bin.mpif77) + env.set('MPIF90', self.prefix.bin.mpif90) +- env.set('MPICC_CC', spack_cc) +- env.set('MPICXX_CXX', spack_cxx) +- env.set('MPIF90_F90', spack_fc) + + def setup_dependent_package(self, module, dependent_spec): + if 'platform=cray' in self.spec: diff --git a/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch b/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch new file mode 100755 index 00000000000..8eb305e5c2f --- /dev/null +++ b/var/spack/repos/builtin/packages/mt-metis/non_x8664.patch @@ -0,0 +1,14 @@ +diff --git a/domlib/dlmacros.h b/domlib/dlmacros.h +index 2cbca90..d231b75 100644 +--- a/domlib/dlmacros.h ++++ b/domlib/dlmacros.h +@@ -211,7 +211,9 @@ + #else + static inline void _mm_pause(void) + { ++#ifdef _x86_64__ + __asm__ ( "pause;" ); ++#endif + } + #endif + diff --git a/var/spack/repos/builtin/packages/mt-metis/package.py b/var/spack/repos/builtin/packages/mt-metis/package.py new file mode 100644 index 00000000000..805c86836c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/mt-metis/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class MtMetis(CMakePackage): + """ + mt-Metis is a multithreaded multilevel graph partitioning and ordering + tool. It is based on the algorithms used in Metis and ParMetis + """ + + homepage = "http://glaros.dtc.umn.edu/gkhome/views/metis" + url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/mt-metis-0.6.0.tar.gz" + + version('0.6.0', sha256='cb8fb836b630a899edbeca4e1da19ec9eb47e89903bda83e7ec62cb0ffdcc284') + + # avoid asm('pause') for non-x86_64 family. 
+ patch('non_x8664.patch') + + variant('shared', default=True, description='Enable build of shared libraries') + + def cmake_args(self): + define = CMakePackage.define + cmake_args = [ + define('DOMLIB_PATH', 'domlib'), + define('WILDRIVER_PATH', 'wildriver'), + define('METIS_PATH', 'metis'), + self.define_from_variant('SHARED', 'shared'), + ] + return cmake_args + + @property + def libs(self): + return find_libraries(['libmtmetis', 'libwildriver'], self.prefix.lib) diff --git a/var/spack/repos/builtin/packages/mummer4/package.py b/var/spack/repos/builtin/packages/mummer4/package.py new file mode 100644 index 00000000000..2ba974dca91 --- /dev/null +++ b/var/spack/repos/builtin/packages/mummer4/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Mummer4(AutotoolsPackage): + """MUMmer is a versatile alignment tool for DNA and protein sequences.""" + + homepage = "https://github.com/mummer4/mummer" + url = "https://github.com/mummer4/mummer/releases/download/v4.0.0beta2/mummer-4.0.0beta2.tar.gz" + + version('4.0.0beta2', sha256='cece76e418bf9c294f348972e5b23a0230beeba7fd7d042d5584ce075ccd1b93') + + conflicts('%gcc@:4.7') + + depends_on('perl@5.6.0:', type=('build', 'run')) + depends_on('awk', type='run') + depends_on('sed', type='run') diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py index f6301a564a8..dac9b69c496 100644 --- a/var/spack/repos/builtin/packages/mvapich2/package.py +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -10,10 +10,11 @@ class Mvapich2(AutotoolsPackage): """MVAPICH2 is an MPI implementation for Infiniband networks.""" homepage = "http://mvapich.cse.ohio-state.edu/" - url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.3.3.tar.gz" + url 
= "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.3.4.tar.gz" list_url = "http://mvapich.cse.ohio-state.edu/downloads/" # Prefer the latest stable release + version('2.3.4', sha256='7226a45c7c98333c8e5d2888119cce186199b430c13b7b1dca1769909e68ea7a') version('2.3.3', sha256='41d3261be57e5bc8aabf4e32981543c015c5443ff032a26f18205985e18c2b73') version('2.3.2', sha256='30cc0d7bcaa075d204692f76bca4d65a539e0f661c7460ffa9f835d6249e1ebf') version('2.3.1', sha256='314e12829f75f3ed83cd4779a972572d1787aac6543a3d024ea7c6080e0ee3bf') @@ -207,23 +208,48 @@ def setup_run_environment(self, env): if 'process_managers=slurm' in self.spec: env.set('SLURM_MPI_TYPE', 'pmi2') - def setup_dependent_build_environment(self, env, dependent_spec): - env.set('MPICC', os.path.join(self.prefix.bin, 'mpicc')) - env.set('MPICXX', os.path.join(self.prefix.bin, 'mpicxx')) - env.set('MPIF77', os.path.join(self.prefix.bin, 'mpif77')) - env.set('MPIF90', os.path.join(self.prefix.bin, 'mpif90')) + # Because MPI functions as a compiler, we need to treat it as one and + # add its compiler paths to the run environment. + self.setup_compiler_environment(env) + def setup_dependent_build_environment(self, env, dependent_spec): + self.setup_compiler_environment(env) + + # use the Spack compiler wrappers under MPI env.set('MPICH_CC', spack_cc) env.set('MPICH_CXX', spack_cxx) env.set('MPICH_F77', spack_f77) env.set('MPICH_F90', spack_fc) env.set('MPICH_FC', spack_fc) + def setup_compiler_environment(self, env): + # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. + # Cray MPIs always have cray in the module name, e.g. 
"cray-mvapich" + if self.spec.external_module and 'cray' in self.spec.external_module: + env.set('MPICC', spack_cc) + env.set('MPICXX', spack_cxx) + env.set('MPIF77', spack_fc) + env.set('MPIF90', spack_fc) + else: + env.set('MPICC', join_path(self.prefix.bin, 'mpicc')) + env.set('MPICXX', join_path(self.prefix.bin, 'mpicxx')) + env.set('MPIF77', join_path(self.prefix.bin, 'mpif77')) + env.set('MPIF90', join_path(self.prefix.bin, 'mpif90')) + def setup_dependent_package(self, module, dependent_spec): - self.spec.mpicc = os.path.join(self.prefix.bin, 'mpicc') - self.spec.mpicxx = os.path.join(self.prefix.bin, 'mpicxx') - self.spec.mpifc = os.path.join(self.prefix.bin, 'mpif90') - self.spec.mpif77 = os.path.join(self.prefix.bin, 'mpif77') + # For Cray MPIs, the regular compiler wrappers *are* the MPI wrappers. + # Cray MPIs always have cray in the module name, e.g. "cray-mvapich" + if self.spec.external_module and 'cray' in self.spec.external_module: + self.spec.mpicc = spack_cc + self.spec.mpicxx = spack_cxx + self.spec.mpifc = spack_fc + self.spec.mpif77 = spack_f77 + else: + self.spec.mpicc = join_path(self.prefix.bin, 'mpicc') + self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx') + self.spec.mpifc = join_path(self.prefix.bin, 'mpif90') + self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77') + self.spec.mpicxx_shared_libs = [ os.path.join(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)), os.path.join(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix)) diff --git a/var/spack/repos/builtin/packages/mxm/package.py b/var/spack/repos/builtin/packages/mxm/package.py new file mode 100644 index 00000000000..897f1c24b3e --- /dev/null +++ b/var/spack/repos/builtin/packages/mxm/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Mxm(Package): + """Mellanox Messaging Accelerator (MXM) provides enhancements to parallel + communication libraries by fully utilizing the underlying networking + infrastructure provided by Mellanox HCA/switch hardware.""" + + homepage = 'https://www.mellanox.com/products/mxm' + has_code = False + + version('3.6.3104') + + # MXM needs to be added as an external package to SPACK. For this, the + # config file packages.yaml needs to be adjusted: + # mxm: + # version: [3.6.3104] + # paths: + # mxm@3.6.3104: /opt/mellanox/mxm (path to your MXM installation) + # buildable: False + + def install(self, spec, prefix): + raise InstallError( + self.spec.format('{name} is not installable, you need to specify ' + 'it as an external package in packages.yaml')) diff --git a/var/spack/repos/builtin/packages/mxnet/makefile.opencv.patch b/var/spack/repos/builtin/packages/mxnet/makefile.opencv.patch new file mode 100644 index 00000000000..45e04ea626e --- /dev/null +++ b/var/spack/repos/builtin/packages/mxnet/makefile.opencv.patch @@ -0,0 +1,35 @@ +commit 638527ffaa35ffb0bf88a32910b809df09432406 +Author: Toyohisa Kameyama +Date: Mon May 18 11:58:00 2020 +0900 + + Makefile bug fix for opencv4. 
+ +diff --git a/Makefile b/Makefile +index 51930c782..7f5d4da39 100644 +--- a/Makefile ++++ b/Makefile +@@ -165,7 +165,23 @@ endif + ifeq ($(USE_OPENCV), 1) + CFLAGS += -DMXNET_USE_OPENCV=1 + ifneq ($(filter-out NONE, $(USE_OPENCV_INC_PATH)),) +- CFLAGS += -I$(USE_OPENCV_INC_PATH)/include ++ ifneq ($(wildcard $(USE_OPENCV_INC_PATH)/include/opencv4/opencv2/opencv*),) ++ opencv_inc = -I$(USE_OPENCV_INC_PATH)/include/opencv4 ++ endif ++ ifneq ($(wildcard $(USE_OPENCV_INC_PATH)/include/opencv2/opencv*),) ++ opencv_inc = -I$(USE_OPENCV_INC_PATH)/include ++ endif ++ ifneq ($(wildcard $(USE_OPENCV_INC_PATH)/opencv4/opencv2/opencv*),) ++ opencv_inc = -I$(USE_OPENCV_INC_PATH)/opencv4 ++ endif ++ ifneq ($(wildcard $(USE_OPENCV_INC_PATH)/opencv2/opencv*),) ++ opencv_inc = -I$(USE_OPENCV_INC_PATH) ++ endif ++ ifneq ($(filter-out NONE, $(opencv_inc)),) ++ CFLAGS += $(opencv_inc) ++ else ++$(error Cannot determine OpenCV include path) ++ endif + ifeq ($(filter-out NONE, $(USE_OPENCV_LIB_PATH)),) + $(error Please add the path of OpenCV shared library path into `USE_OPENCV_LIB_PATH`, when `USE_OPENCV_INC_PATH` is not NONE) + endif diff --git a/var/spack/repos/builtin/packages/mxnet/package.py b/var/spack/repos/builtin/packages/mxnet/package.py index 492a2573bc1..8ef5035c077 100644 --- a/var/spack/repos/builtin/packages/mxnet/package.py +++ b/var/spack/repos/builtin/packages/mxnet/package.py @@ -15,6 +15,7 @@ class Mxnet(MakefilePackage): maintainers = ['adamjstewart'] + version('1.6.0', sha256='01eb06069c90f33469c7354946261b0a94824bbaf819fd5d5a7318e8ee596def') version('1.3.0', sha256='c00d6fbb2947144ce36c835308e603f002c1eb90a9f4c5a62f4d398154eed4d2') variant('cuda', default=False, description='Enable CUDA support') @@ -33,14 +34,19 @@ class Mxnet(MakefilePackage): depends_on('cudnn', when='+cuda') depends_on('cudnn', when='+cuda') depends_on('cub', when='+cuda') + depends_on('opencv+core+imgproc+highgui+jpeg+png+tiff~eigen~ipp@3.0:3.4.99', when='@1.3.0 +opencv') 
depends_on('opencv+core+imgproc+highgui+jpeg+png+tiff~eigen~ipp@3.0:', when='+opencv') # python extensions depends_on('python@2.7:', type=('build', 'run'), when='+python') depends_on('py-setuptools', type='build', when='+python') + depends_on('py-numpy@:1.15.0', type=('build', 'run'), when='@1.3.0 +python') + depends_on('py-numpy@1.16:', type=('build', 'run'), when='@1.6.0 +python') extends('python', when='+python') patch('makefile.patch', when='@0.10:0.11') + patch('makefile.opencv.patch', when='@1.6.0') + patch('parallell_shuffle.patch', when='@1.6.0') def build(self, spec, prefix): # copy template configuration file @@ -73,13 +79,25 @@ def build(self, spec, prefix): ] if '+opencv' in spec: - filter_file('$(shell pkg-config --cflags opencv)', - '-I%s' % spec['opencv'].prefix.include, - 'Makefile', string=True) - filter_file('$(filter-out -lopencv_ts, ' - '$(shell pkg-config --libs opencv))', - '-lopencv_core -lopencv_imgproc -lopencv_imgcodecs', - 'Makefile', string=True) + if spec.satisfies('@1.3.0'): + filter_file( + '$(shell pkg-config --cflags opencv)', + spec['opencv'].headers.include_flags, + 'Makefile', string=True + ) + filter_file( + '$(filter-out -lopencv_ts, ' + '$(shell pkg-config --libs opencv))', + spec['opencv'].libs.link_flags, + 'Makefile', string=True + ) + else: + args.extend( + ['USE_OPENCV_INC_PATH=' + + spec['opencv'].headers.directories[0], + 'USE_OPENCV_LIB_PATH=' + + spec['opencv'].libs.directories[0]] + ) if 'openblas' in spec: args.extend(['USE_BLAS=openblas']) diff --git a/var/spack/repos/builtin/packages/mxnet/parallell_shuffle.patch b/var/spack/repos/builtin/packages/mxnet/parallell_shuffle.patch new file mode 100644 index 00000000000..eb286cde57a --- /dev/null +++ b/var/spack/repos/builtin/packages/mxnet/parallell_shuffle.patch @@ -0,0 +1,13 @@ +--- a/src/operator/random/shuffle_op.cc 2020-01-31 08:45:22.000000000 +0900 ++++ b/src/operator/random/shuffle_op.cc 2020-06-15 11:34:32.000000000 +0900 +@@ -22,8 +22,8 @@ + * \file 
shuffle_op.cc + * \brief Operator to shuffle elements of an NDArray + */ +-#if !defined (__ANDROID__) && ((__GNUC__ > 4 &&\ +- !defined(__clang__major__)) || (__clang_major__ > 4 && __linux__)) ++#if ((__GNUC__ > 4 && !defined(__clang__major__)) || (__clang_major__ > 4 && __linux__)) && \ ++ defined(_OPENMP) && !defined(__ANDROID__) + #define USE_GNU_PARALLEL_SHUFFLE + #endif + diff --git a/var/spack/repos/builtin/packages/mysql/package.py b/var/spack/repos/builtin/packages/mysql/package.py index 7501d57cbd0..80b49865b6f 100644 --- a/var/spack/repos/builtin/packages/mysql/package.py +++ b/var/spack/repos/builtin/packages/mysql/package.py @@ -22,6 +22,7 @@ class Mysql(CMakePackage): version('8.0.13', sha256='d85eb7f98b6aa3e2c6fe38263bf40b22acb444a4ce1f4668473e9e59fb98d62e') version('8.0.12', sha256='69f16e20834dbc60cb28d6df7351deda323330b9de685d22415f135bcedd1b20') version('8.0.11', sha256='3bde3e30d5d4afcedfc6db9eed5c984237ac7db9480a9cc3bddc026d50700bf9') + version('5.7.27', sha256='f8b65872a358d6f5957de86715c0a3ef733b60451dad8d64a8fd1a92bf091bba') version('5.7.26', sha256='5f01d579a20199e06fcbc28f0801c3cb545a54a2863ed8634f17fe526480b9f1') version('5.7.25', sha256='53751c6243806103114567c1a8b6a3ec27f23c0e132f377a13ce1eb56c63723f') version('5.7.24', sha256='05bf0c92c6a97cf85b67fff1ac83ca7b3467aea2bf306374d727fa4f18431f87') @@ -77,11 +78,16 @@ class Mysql(CMakePackage): # Each version of MySQL requires a specific version of boost # See BOOST_PACKAGE_NAME in cmake/boost.cmake - # 8.0.16+ - depends_on('boost@1.69.0 cxxstd=98', type='build', when='@8.0.16: cxxstd=98') - depends_on('boost@1.69.0 cxxstd=11', type='build', when='@8.0.16: cxxstd=11') - depends_on('boost@1.69.0 cxxstd=14', type='build', when='@8.0.16: cxxstd=14') - depends_on('boost@1.69.0 cxxstd=17', type='build', when='@8.0.16: cxxstd=17') + # 8.0.19+ + depends_on('boost@1.70.0 cxxstd=98', type='build', when='@8.0.19: cxxstd=98') + depends_on('boost@1.70.0 cxxstd=11', type='build', when='@8.0.19: 
cxxstd=11') + depends_on('boost@1.70.0 cxxstd=14', type='build', when='@8.0.19: cxxstd=14') + depends_on('boost@1.70.0 cxxstd=17', type='build', when='@8.0.19: cxxstd=17') + # 8.0.16--8.0.18 + depends_on('boost@1.69.0 cxxstd=98', type='build', when='@8.0.16:8.0.18 cxxstd=98') + depends_on('boost@1.69.0 cxxstd=11', type='build', when='@8.0.16:8.0.18 cxxstd=11') + depends_on('boost@1.69.0 cxxstd=14', type='build', when='@8.0.16:8.0.18 cxxstd=14') + depends_on('boost@1.69.0 cxxstd=17', type='build', when='@8.0.16:8.0.18 cxxstd=17') # 8.0.14--8.0.15 depends_on('boost@1.68.0 cxxstd=98', type='build', when='@8.0.14:8.0.15 cxxstd=98') depends_on('boost@1.68.0 cxxstd=11', type='build', when='@8.0.14:8.0.15 cxxstd=11') @@ -103,6 +109,7 @@ class Mysql(CMakePackage): depends_on('boost@1.59.0 cxxstd=14', when='@5.7.0:5.7.999 cxxstd=14') depends_on('boost@1.59.0 cxxstd=17', when='@5.7.0:5.7.999 cxxstd=17') + depends_on('rpcsvc-proto') depends_on('ncurses') depends_on('openssl') depends_on('libtirpc', when='@5.7.0:') diff --git a/var/spack/repos/builtin/packages/nag/package.py b/var/spack/repos/builtin/packages/nag/package.py index 1c777f39880..ae191f9c085 100644 --- a/var/spack/repos/builtin/packages/nag/package.py +++ b/var/spack/repos/builtin/packages/nag/package.py @@ -12,7 +12,7 @@ class Nag(Package): homepage = "http://www.nag.com/nagware/np.asp" maintainers = ['ThemosTsikas'] - version('7.0', sha256='328f35fc419415dad66ed3bf5c7bc7f3260d3f9977c7cf37b3e56c29abdf41f0') + version('7.0', sha256='bd53c988efe4bfd476a5f556ebd2b8924dc2a039f485e8e017581bca4e12ba01') version('6.2', sha256='9b60f6ffa4f4be631079676963e74eea25e8824512e5c864eb06758b2a3cdd2d') version('6.1', sha256='32580e0004e6798abf1fa52f0070281b28abeb0da2387530a4cc41218e813c7c') diff --git a/var/spack/repos/builtin/packages/namd/package.py b/var/spack/repos/builtin/packages/namd/package.py index dcd80e94a85..a85dd7fd42a 100644 --- a/var/spack/repos/builtin/packages/namd/package.py +++ 
b/var/spack/repos/builtin/packages/namd/package.py @@ -15,8 +15,11 @@ class Namd(MakefilePackage): homepage = "http://www.ks.uiuc.edu/Research/namd/" url = "file://{0}/NAMD_2.12_Source.tar.gz".format(os.getcwd()) + git = "https://charm.cs.illinois.edu/gerrit/namd.git" manual_download = True + version("develop", branch="master") + version('2.14b1', sha256='9407e54f5271b3d3039a5a9d2eae63c7e108ce31b7481e2197c19e1125b43919') version('2.13', '9e3323ed856e36e34d5c17a7b0341e38') version('2.12', '2a1191909b1ab03bf0205971ad4d8ee9') @@ -26,7 +29,9 @@ class Namd(MakefilePackage): variant('interface', default='none', values=('none', 'tcl', 'python'), description='Enables TCL and/or python interface') - depends_on('charmpp') + depends_on('charmpp@6.10.1:', when="@2.14b1:") + depends_on('charmpp@6.8.2', when="@2.13") + depends_on('charmpp@6.7.1', when="@2.12") depends_on('fftw@:2.99', when="fftw=2") depends_on('fftw@3:', when="fftw=3") @@ -69,21 +74,29 @@ def build_directory(self): return '{0}-spack'.format(self.arch) def edit(self, spec, prefix): + m64 = '-m64 ' if not spec.satisfies('arch=aarch64:') else '' with working_dir('arch'): with open('{0}.arch'.format(self.build_directory), 'w') as fh: # this options are take from the default provided # configuration files - optims_opts = { - 'gcc': '-m64 -O3 -fexpensive-optimizations -ffast-math', - 'intel': '-O2 -ip' - } + # https://github.com/UIUC-PPL/charm/pull/2778 + if self.spec.satisfies('^charmpp@:6.10.1'): + optims_opts = { + 'gcc': m64 + '-O3 -fexpensive-optimizations \ + -ffast-math -lpthread', + 'intel': '-O2 -ip'} + else: + optims_opts = { + 'gcc': m64 + '-O3 -fexpensive-optimizations \ + -ffast-math', + 'intel': '-O2 -ip'} optim_opts = optims_opts[self.compiler.name] \ if self.compiler.name in optims_opts else '' fh.write('\n'.join([ 'NAMD_ARCH = {0}'.format(self.arch), - 'CHARMARCH = ', + 'CHARMARCH = {0}'.format(self.spec['charmpp'].charmarch), 'CXX = {0.cxx} {0.cxx11_flag}'.format( self.compiler), 'CXXOPTS = 
{0}'.format(optim_opts), diff --git a/var/spack/repos/builtin/packages/ncbi-magicblast/package.py b/var/spack/repos/builtin/packages/ncbi-magicblast/package.py index eb93f59b9d3..54588463bd0 100644 --- a/var/spack/repos/builtin/packages/ncbi-magicblast/package.py +++ b/var/spack/repos/builtin/packages/ncbi-magicblast/package.py @@ -13,6 +13,7 @@ class NcbiMagicblast(AutotoolsPackage): homepage = "https://ncbi.github.io/magicblast/" url = "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/magicblast/1.3.0/ncbi-magicblast-1.3.0-src.tar.gz" + version('1.5.0', sha256='b261914d9f7ffc0e655079ceba3e348ba11df1a1f73c4e47a4b1ca154754985c') version('1.3.0', sha256='47b9b65d595b5cb0c4fef22bc7f7c038fb8d4a0accdbe560d7232820575aff67') depends_on('lmdb') diff --git a/var/spack/repos/builtin/packages/ncl/package.py b/var/spack/repos/builtin/packages/ncl/package.py index e6e09faae1e..4d9980ecc55 100644 --- a/var/spack/repos/builtin/packages/ncl/package.py +++ b/var/spack/repos/builtin/packages/ncl/package.py @@ -30,6 +30,9 @@ class Ncl(Package): patch('ymake-filter.patch', when="@6.4.0") # ymake additional local library and includes will be filtered improperly patch('ymake.patch', when="@6.4.0:") + # ncl does not build with gcc@10: + # https://github.com/NCAR/ncl/issues/123 + patch('https://src.fedoraproject.org/rpms/ncl/raw/12778c55142b5b1ccc26dfbd7857da37332940c2/f/ncl-boz.patch', when='%gcc@10:', sha256='64f3502c9deab48615a4cbc26073173081c0774faf75778b044d251e45d238f7') # This installation script is implemented according to this manual: # http://www.ncl.ucar.edu/Download/build_from_src.shtml @@ -144,6 +147,10 @@ def prepare_site_config(self): cc_flags.append('-fp-model precise') c2f_flags.extend(['-lifcore', '-lifport']) + if self.spec.satisfies('%gcc@10:'): + fc_flags.append('-fallow-argument-mismatch') + cc_flags.append('-fcommon') + with open('./config/Spack', 'w') as f: f.writelines([ '#define HdfDefines\n', diff --git a/var/spack/repos/builtin/packages/neovim/package.py 
b/var/spack/repos/builtin/packages/neovim/package.py index 8a574f58e99..a74d4816a86 100644 --- a/var/spack/repos/builtin/packages/neovim/package.py +++ b/var/spack/repos/builtin/packages/neovim/package.py @@ -4,15 +4,20 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os from spack import * class Neovim(CMakePackage): - """NeoVim: the future of vim""" + """Neovim: Vim-fork focused on extensibility and usability""" - homepage = "http://neovim.io" - url = "https://github.com/neovim/neovim/archive/v0.3.4.tar.gz" + homepage = "https://neovim.io" + git = "https://github.com/neovim/neovim.git" + url = "https://github.com/neovim/neovim/archive/v0.4.3.tar.gz" + version('master', branch='master') + version('stable', tag='stable') + version('0.4.3', sha256='91a0b5d32204a821bf414690e6b48cf69224d1961d37158c2b383f6a6cf854d2') version('0.3.4', sha256='a641108bdebfaf319844ed46b1bf35d6f7c30ef5aeadeb29ba06e19c3274bc0e') version('0.3.1', sha256='bc5e392d4c076407906ccecbc283e1a44b7832c2f486cad81aa04cc29973ad22') version('0.3.0', sha256='f7acb61b16d3f521907d99c486b7a9f1e505e8b2a18c9ef69a6d7f18f29f74b8') @@ -20,21 +25,37 @@ class Neovim(CMakePackage): version('0.2.1', sha256='9e2c068a8994c9023a5f84cde9eb7188d3c85996a7e42e611e3cd0996e345dd3') version('0.2.0', sha256='72e263f9d23fe60403d53a52d4c95026b0be428c1b9c02b80ab55166ea3f62b5') - depends_on('lua@5.1:5.2') - depends_on('lua-lpeg') - depends_on('lua-mpack') - depends_on('lua-bitlib') - depends_on('libuv') - depends_on('jemalloc') - depends_on('libtermkey') - depends_on('libvterm') - depends_on('unibilium') - depends_on('msgpack-c') - depends_on('gperf') + depends_on('cmake@3.0:', type='build') + + depends_on('lua@5.1:5.2', when='@:0.4.0') + depends_on('lua-lpeg', when='@:0.4.0') + depends_on('lua-mpack', when='@:0.4.0') + depends_on('lua-bitlib', when='@:0.4.0') + depends_on('libuv', when='@:0.4.0') + depends_on('jemalloc', when='@:0.4.0') + depends_on('libtermkey', when='@:0.4.0') + depends_on('libvterm', when='@:0.4.0') + 
depends_on('unibilium', when='@:0.4.0') + depends_on('msgpack-c', when='@:0.4.0') + depends_on('gperf', when='@:0.4.0') + + @run_before('cmake') + def build_dependencies(self): + if self.version < Version('0.4.0'): + return + + deps_build_dir = '.deps' + options = [ + '-G', self.generator, + os.path.join(os.path.abspath(self.root_cmakelists_dir), + 'third-party'), + ] + with working_dir(deps_build_dir, create=True): + cmake(*options) + make() def cmake_args(self): args = [] - if self.version >= Version('0.2.1'): + if Version('0.2.1') <= self.version < Version('0.4.0'): args = ['-DPREFER_LUA=ON'] - return args diff --git a/var/spack/repos/builtin/packages/netcdf-c/package.py b/var/spack/repos/builtin/packages/netcdf-c/package.py index 42c28e53215..6043e1122d7 100644 --- a/var/spack/repos/builtin/packages/netcdf-c/package.py +++ b/var/spack/repos/builtin/packages/netcdf-c/package.py @@ -61,6 +61,7 @@ def url_for_version(self, version): description='Produce position-independent code (for shared libs)') variant('shared', default=True, description='Enable shared library') variant('dap', default=False, description='Enable DAP support') + variant('jna', default=False, description='Enable JNA support') # It's unclear if cdmremote can be enabled if '--enable-netcdf-4' is passed # to the configure script. Since netcdf-4 support is mandatory we comment @@ -74,7 +75,7 @@ def url_for_version(self, version): depends_on('libtool', type='build', when='@4.7.0') depends_on("m4", type='build') - depends_on("hdf", when='+hdf4') + depends_on("hdf~netcdf", when='+hdf4') # curl 7.18.0 or later is required: # http://www.unidata.ucar.edu/software/netcdf/docs/getting_and_building_netcdf.html @@ -181,6 +182,9 @@ def configure_args(self): else: config_args.append('--disable-parallel4') + if self.spec.satisfies('@4.3.2:'): + config_args += self.enable_or_disable('jna') + # Starting version 4.1.3, --with-hdf5= and other such configure options # are removed. 
Variables CPPFLAGS, LDFLAGS, and LD_LIBRARY_PATH must be # used instead. @@ -215,6 +219,8 @@ def configure_args(self): if '+szip' in hdf4: # This should also come from hdf4.libs libs.append('-lsz') + if '+external-xdr' in hdf4 and hdf4['rpc'].name != 'libc': + libs.append(hdf4['rpc'].libs.link_flags) # Fortran support # In version 4.2+, NetCDF-C and NetCDF-Fortran have split. diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py index 3847cc48dd4..66b294992fd 100644 --- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py +++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py @@ -24,12 +24,7 @@ class NetcdfCxx4(AutotoolsPackage): variant('static', default=True, description='Enable building static libraries') variant('shared', default=True, description='Enable shared library') variant('pic', default=True, description='Produce position-independent code (for shared libs)') - variant('dap', default=False, description='Enable DAP support') - variant('jna', default=False, description='Enable JNA support') variant('doxygen', default=True, description='Enable doxygen docs') - variant('ncgen4', default=True, description='Enable generating netcdf-4 data') - variant('pnetcdf', default=True, description='Enable parallel-netcdf') - variant('netcdf4', default=False, description='Enable netcdf-4 data structure') depends_on('netcdf-c') @@ -37,6 +32,7 @@ class NetcdfCxx4(AutotoolsPackage): depends_on('autoconf', type='build') depends_on('libtool', type='build') depends_on('m4', type='build') + depends_on('doxygen', when='+doxygen', type='build') conflicts('~shared', when='~static') @@ -75,31 +71,6 @@ def configure_args(self): else: config_args.append('--without-pic') - if '+dap' in self.spec: - config_args.append('--enable-dap') - else: - config_args.append('--disable-dap') - - if '+jna' in self.spec: - config_args.append('--enable-jna') - else: - config_args.append('--disable-jna') - - if 
'+pnetcdf' in self.spec: - config_args.append('--enable-pnetcdf') - else: - config_args.append('--disable-pnetcdf') - - if '+netcdf4' in self.spec: - config_args.append('--enable-netcdf-4') - else: - config_args.append('--disable-netcdf-4') - - if '+ncgen4' in self.spec: - config_args.append('--enable-ncgen4') - else: - config_args.append('--disable-ncgen4') - if '+doxygen' in self.spec: config_args.append('--enable-doxygen') else: diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 1b55df15078..74347136ddf 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -27,13 +27,15 @@ class NetcdfFortran(AutotoolsPackage): variant('pic', default=True, description='Produce position-independent code (for shared libs)') variant('shared', default=True, description='Enable shared library') + variant('doc', default=False, description='Enable building docs') # We need to build with MPI wrappers if parallel I/O features is enabled: # https://www.unidata.ucar.edu/software/netcdf/docs/building_netcdf_fortran.html depends_on('mpi', when='+mpi') - depends_on('netcdf-c~mpi', when='~mpi') + depends_on('netcdf-c~mpi~parallel-netcdf', when='~mpi') depends_on('netcdf-c+mpi', when='+mpi') + depends_on('doxygen', when='+doc', type='build') # The default libtool.m4 is too old to handle NAG compiler properly: # https://github.com/Unidata/netcdf-fortran/issues/94 @@ -82,6 +84,11 @@ def flag_handler(self, name, flags): # building takes place outside of Spack environment, i.e. # without Spack's compiler wrappers. 
config_flags = [self.spec['netcdf-c'].libs.search_flags] + elif name == 'fflags' and self.spec.satisfies('%gcc@10:'): + # https://github.com/Unidata/netcdf-fortran/issues/212 + if config_flags is None: + config_flags = [] + config_flags.append('-fallow-argument-mismatch') return flags, None, config_flags @@ -120,6 +127,11 @@ def configure_args(self): config_args.append('FC=%s' % self.spec['mpi'].mpifc) config_args.append('F77=%s' % self.spec['mpi'].mpif77) + if '+doc' in self.spec: + config_args.append('--enable-doxygen') + else: + config_args.append('--disable-doxygen') + return config_args @when('@:4.4.5') diff --git a/var/spack/repos/builtin/packages/netdata/package.py b/var/spack/repos/builtin/packages/netdata/package.py new file mode 100644 index 00000000000..830e2334522 --- /dev/null +++ b/var/spack/repos/builtin/packages/netdata/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Netdata(AutotoolsPackage): + """Real-time performance monitoring, done right!""" + + homepage = "https://www.netdata.cloud/" + url = "https://github.com/netdata/netdata/archive/v1.22.1.tar.gz" + + version('1.22.1', sha256='6efd785eab82f98892b4b4017cadfa4ce1688985915499bc75f2f888765a3446') + + depends_on('m4', type='build') + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('libuv') + depends_on('libuuid') diff --git a/var/spack/repos/builtin/packages/netkit-ftp/package.py b/var/spack/repos/builtin/packages/netkit-ftp/package.py new file mode 100644 index 00000000000..fe76bea86b4 --- /dev/null +++ b/var/spack/repos/builtin/packages/netkit-ftp/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class NetkitFtp(AutotoolsPackage): + """netkit-ftp is the original file transfer client program for Linux.""" + + homepage = "http://ftp.uk.linux.org/pub/linux/Networking/netkit" + git = "https://github.com/mmaraya/netkit-ftp.git" + + version('master', branch='master') + + def install(self, spec, prefix): + mkdirp(prefix.bin) + mkdirp(prefix.man.man1) + make('install') diff --git a/var/spack/repos/builtin/packages/npm/package.py b/var/spack/repos/builtin/packages/npm/package.py index 43be2a20017..0403f522bd9 100644 --- a/var/spack/repos/builtin/packages/npm/package.py +++ b/var/spack/repos/builtin/packages/npm/package.py @@ -54,11 +54,11 @@ def install(self, spec, prefix): def setup_dependent_build_environment(self, env, dependent_spec): npm_config_cache_dir = "%s/npm-cache" % dependent_spec.prefix if not os.path.isdir(npm_config_cache_dir): - mkdir(npm_config_cache_dir) + mkdirp(npm_config_cache_dir) env.set('npm_config_cache', npm_config_cache_dir) def setup_dependent_run_environment(self, env, dependent_spec): npm_config_cache_dir = "%s/npm-cache" % dependent_spec.prefix if not os.path.isdir(npm_config_cache_dir): - mkdir(npm_config_cache_dir) + mkdirp(npm_config_cache_dir) env.set('npm_config_cache', npm_config_cache_dir) diff --git a/var/spack/repos/builtin/packages/ns-3-dev/package.py b/var/spack/repos/builtin/packages/ns-3-dev/package.py new file mode 100644 index 00000000000..6f1d85f6dbc --- /dev/null +++ b/var/spack/repos/builtin/packages/ns-3-dev/package.py @@ -0,0 +1,52 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ns3Dev(WafPackage): + """ + ns-3 is a discrete-event network simulator, + targeted primarily for research and educational use + """ + + homepage = "https://www.nsnam.org/" + url = "https://gitlab.com/nsnam/ns-3-dev/-/archive/ns-3.30.1/ns-3-dev-ns-3.30.1.tar.bz2" + + maintainers = ['yee29'] + + version('3.30.1', sha256='e8b3849d83a224f42c0cd2b9e692ec961455aca23f36fb86fcf6bbed2b495a3d') + version('3.30', sha256='53cefcad74fec6cc332368a05ed1f8c1a29f86295cb44b6b0509c6d2d18d90d0') + version('3.29', sha256='0254341487891421e4c6040476c6634c4c2931d4f7c6b9617a6ae494c8ee6ffd') + version('3.28', sha256='5295e1f6e2ee1ff8cd92d3937c8b3266e0d5926adffc42c7fb0ea9ce549a91b7') + version('3.27', sha256='26233011654043822b8ede525a52f8532ed181997b609a606681a0d5c8d64a26') + + variant('helics', default=False, description="Enable Helics support in ns-3") + variant('boost', default=True, description="Compile with Boost libraries") + + # Build dependency + depends_on('helics', when='+helics') + depends_on('boost', when='+boost') + depends_on('pkgconfig', type='build') + + resource(name='helics', + when='+helics', + git='https://github.com/GMLC-TDC/helics-ns3.git', + destination='contrib', placement='helics') + + def configure_args(self): + args = [] + + if '+boost' in self.spec: + args.extend([ + '--boost-includes={0}'.format( + self.spec['boost'].prefix.include), + '--boost-libs={0}'.format( + self.spec['boost'].prefix.lib) + ]) + + if '+helics' in self.spec: + args.append('--with-helics={0}'.format(self.spec['helics'].prefix)) + return args diff --git a/var/spack/repos/builtin/packages/nvdimmsim/package.py b/var/spack/repos/builtin/packages/nvdimmsim/package.py new file mode 100644 index 00000000000..0d0a0964d47 --- /dev/null +++ b/var/spack/repos/builtin/packages/nvdimmsim/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Nvdimmsim(MakefilePackage): + """ + NVDIMMSim is a cycle-accurate non-volatile memory simulator + for devices such as NAND flash + """ + + homepage = "https://github.com/slunk/NVDIMMSim" + git = "https://github.com/slunk/NVDIMMSim" + url = "https://github.com/jimstevens2001/NVDIMMSim/archive/v2.0.0.tar.gz" + + maintainers = ['jjwilke'] + + version('2.0.0', sha256="2a621ef10be5e52a1f543985d08354a2e6ee6532b5720e5f17ad6362cfd4adef") + + def build(self, spec, prefix): + with working_dir("src"): + if spec.satisfies("platform=darwin"): + make("libnvdsim.dylib") + else: + make("libnvdsim.so") + + def install(self, spec, prefix): + with working_dir("src"): + install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/ocaml/package.py b/var/spack/repos/builtin/packages/ocaml/package.py index f155c55c13d..4e642b9f567 100644 --- a/var/spack/repos/builtin/packages/ocaml/package.py +++ b/var/spack/repos/builtin/packages/ocaml/package.py @@ -29,12 +29,22 @@ class Ocaml(Package): sanity_check_file = ['bin/ocaml'] + variant( + 'force-safe-string', default=True, + description='Enforce safe (immutable) strings' + ) + def url_for_version(self, version): url = "http://caml.inria.fr/pub/distrib/ocaml-{0}/ocaml-{1}.tar.gz" return url.format(str(version)[:-2], version) def install(self, spec, prefix): - configure('-prefix', '{0}'.format(prefix)) + base_args = ['-prefix', '{0}'.format(prefix)] + + if self.spec.satisfies('~force-safe-string'): + base_args += ['--disable-force-safe-string'] + + configure(*(base_args)) make('world.opt') make('install', 'PREFIX={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/ocl-icd/package.py b/var/spack/repos/builtin/packages/ocl-icd/package.py new file mode 100644 index 00000000000..fa49bab56a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/ocl-icd/package.py @@ -0,0 +1,30 @@ +# Copyright 
2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class OclIcd(AutotoolsPackage): + """This package aims at creating an Open Source alternative to vendor specific +OpenCL ICD loaders.""" + + homepage = "https://github.com/OCL-dev/ocl-icd" + url = "https://github.com/OCL-dev/ocl-icd/archive/v2.2.12.tar.gz" + + version('2.2.12', sha256='17500e5788304eef5b52dbe784cec197bdae64e05eecf38317840d2d05484272') + version('2.2.11', sha256='c1865ef7701b8201ebc6930ed3ac757c7e5cb30f3aa4c1e742a6bc022f4f2292') + version('2.2.10', sha256='d0459fa1421e8d86aaf0a4df092185ea63bc4e1a7682d3af261ae5d3fae063c7') + version('2.2.9', sha256='88da749bc2bd75149f0bb6e72eb4a9d74401a54f4508bc730f13cc03c57a17ed') + version('2.2.8', sha256='8a8a405c7d659b905757a358dc467f4aa3d7e4dff1d1624779065764d962a246') + version('2.2.7', sha256='b8e68435904e1a95661c385f24d6924ed28f416985c6db5a3c7448698ad5fea2') + version('2.2.6', sha256='4567cae92f58c1d6ecfc771c456fa95f206d8a5c7c5d6c9010ec688a9fd83750') + version('2.2.5', sha256='50bf51f4544f83e69a5a2f564732a2adca63fbe9511430aba12f8d6f3a53ae59') + version('2.2.4', sha256='92853137ffff393cc74f829357fdd80ac46a82b46c970e80195db86164cca316') + version('2.2.3', sha256='46b8355d90f8cc240555e4e077f223c47b950abeadf3e1af52d6e68d2efc2ff3') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') diff --git a/var/spack/repos/builtin/packages/onednn/package.py b/var/spack/repos/builtin/packages/onednn/package.py index 8318d70d901..b723fabd9c9 100644 --- a/var/spack/repos/builtin/packages/onednn/package.py +++ b/var/spack/repos/builtin/packages/onednn/package.py @@ -12,10 +12,11 @@ class Onednn(CMakePackage): Formerly known as Intel MKL-DNN and DNNL.""" homepage = "https://01.org/dnnl" - url = 
"https://github.com/oneapi-src/oneDNN/archive/v1.4.tar.gz" + url = "https://github.com/oneapi-src/oneDNN/archive/v1.5.tar.gz" maintainers = ['adamjstewart'] + version('1.5', sha256='2aacc00129418185e0bc1269d3ef82f93f08de2c336932989c0c360279129edb') version('1.4', sha256='54737bcb4dc1961d32ee75da3ecc529fa48198f8b2ca863a079e19a9c4adb70f') version('1.3', sha256='b87c23b40a93ef5e479c81028db71c4847225b1a170f82af5e79f1cda826d3bf') version('1.2.2', sha256='251dd17643cff285f38b020fc4ac9245d8d596f3e2140b98982ffc32eae3943c') @@ -30,6 +31,7 @@ class Onednn(CMakePackage): version('1.0.2', sha256='9281715436adb7b9eef63fad419a581f397218824bc1271e557c134725c03916') version('1.0.1', sha256='8fee2324267811204c1f877a1dea70b23ab3d5f4c3ea0198d81f0921aa70d76e') version('1.0', sha256='7bfe11cac1d1f5dc1b60c1258e79d8cc84944d459e3758d50c1f7feba05bc6d7') + version('0.21.5', sha256='ebb146cadda1c14767251ded54219c8215daee84aa1ac773cf43b5c2ae53160b') version('0.21.4', sha256='00ace1ce08cab3408bc83e6b9d55ccba661761e044c03175d58caccedddf93b3') version('0.21.3', sha256='a0211aeb5e7dad50b97fa5dffc1a2fe2fe732572d4164e1ee8750a2ede43fbec') version('0.21.2', sha256='5897bfd0e321a761de0c57ba1dfe0ebc753cc0d8a18bda2056af48022706a297') diff --git a/var/spack/repos/builtin/packages/openblas/openblas_appleclang11.patch b/var/spack/repos/builtin/packages/openblas/openblas_appleclang11.patch new file mode 100644 index 00000000000..88fc14aeb08 --- /dev/null +++ b/var/spack/repos/builtin/packages/openblas/openblas_appleclang11.patch @@ -0,0 +1,37 @@ +From 90dba9f71668c0de77b77f32462c78fbbd424db1 Mon Sep 17 00:00:00 2001 +From: Martin Kroeker +Date: Tue, 5 May 2020 10:44:50 +0200 +Subject: [PATCH] Duplicate earlier Clang 9.0.0 workaround for corresponding + Apple Clang version + +As discussed on the original PR #2329, the "Apple Clang 11.0.3" that appears to be based the same LLVM release produces the same miscompilation of this file. 
+--- + kernel/x86_64/dsymv_L_microk_skylakex-2.c | 7 ++++++- + 1 file changed, 6 insertions(+), 1 deletion(-) + +diff --git a/kernel/x86_64/dsymv_L_microk_skylakex-2.c b/kernel/x86_64/dsymv_L_microk_skylakex-2.c +index bdcd914fb..f0df5aaa8 100644 +--- a/kernel/x86_64/dsymv_L_microk_skylakex-2.c ++++ b/kernel/x86_64/dsymv_L_microk_skylakex-2.c +@@ -36,7 +36,9 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + #if defined(__clang_patchlevel__) && __clang_major__ == 9 && __clang_minor__ == 0 && __clang_patchlevel__ == 0 + #pragma clang optimize off + #endif +- ++#if defined(__apple_build_version__) && __clang_major__ == 11 && __clang_minor__ == 0 && __clang_patchlevel__ == 3 ++#pragma clang optimize off ++#endif + static void dsymv_kernel_4x4(BLASLONG from, BLASLONG to, FLOAT **a, FLOAT *x, FLOAT *y, FLOAT *temp1, FLOAT *temp2) + { + +@@ -164,6 +166,9 @@ static void dsymv_kernel_4x4(BLASLONG from, BLASLONG to, FLOAT **a, FLOAT *x, FL + #if defined(__clang_patchlevel__) && __clang_major__ == 9 && __clang_minor__ == 0 && __clang_patchlevel__ == 0 + #pragma clang optimize on + #endif ++#if defined(__apple_build_version__) && __clang_major__ == 11 && __clang_minor__ == 0 && __clang_patchlevel__ == 3 ++#pragma clang optimize on ++#endif + + #else + #include "dsymv_L_microk_haswell-2.c" + diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 7d71702de0f..46cca3afe3c 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -18,6 +18,7 @@ class Openblas(MakefilePackage): git = 'https://github.com/xianyi/OpenBLAS.git' version('develop', branch='develop') + version('0.3.10', sha256='0484d275f87e9b8641ff2eecaa9df2830cbe276ac79ad80494822721de6e1693') version('0.3.9', sha256='17d4677264dfbc4433e97076220adc79b050e4f8a083ea3f853a53af253bc380') version('0.3.8', 
sha256='8f86ade36f0dbed9ac90eb62575137388359d97d8f93093b38abe166ad7ef3a8') version('0.3.7', sha256='bde136122cef3dd6efe2de1c6f65c10955bbb0cc01a520c2342f5287c28f9379') @@ -92,6 +93,9 @@ class Openblas(MakefilePackage): # Fix https://github.com/xianyi/OpenBLAS/issues/2431 # Patch derived from https://github.com/xianyi/OpenBLAS/pull/2424 patch('openblas-0.3.8-darwin.patch', when='@0.3.8 platform=darwin') + # Fix ICE in LLVM 9.0.0 https://github.com/xianyi/OpenBLAS/pull/2329 + # Patch as in https://github.com/xianyi/OpenBLAS/pull/2597 + patch('openblas_appleclang11.patch', when='@0.3.8:0.3.9 %clang@11.0.3-apple') # Add conditions to f_check to determine the Fujitsu compiler patch('openblas_fujitsu.patch', when='%fj') diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch new file mode 100755 index 00000000000..c479fb450e4 --- /dev/null +++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_cmake.patch @@ -0,0 +1,84 @@ +commit dd7f88bd68f95e56437035cc95d4af482482fcc2 +Author: Alexander Alekhin +Date: Thu Jun 7 12:32:48 2018 +0300 + + python: support standalone Python bindings build + + - requires OpenCV source directory + - requires OpenCV binary directory with built modules and 'python_bindings_generator' target + +diff --git a/cmake/OpenCVDetectPython.cmake b/cmake/OpenCVDetectPython.cmake +index 6dec76ff66..b6c7a2535c 100644 +--- a/cmake/OpenCVDetectPython.cmake ++++ b/cmake/OpenCVDetectPython.cmake +@@ -27,6 +27,12 @@ function(find_python preferred_version min_version library_env include_dir_env + debug_library include_path include_dir include_dir2 packages_path + numpy_include_dirs numpy_version) + if(NOT ${found}) ++ if(" ${executable}" STREQUAL " PYTHON_EXECUTABLE") ++ set(__update_python_vars 0) ++ else() ++ set(__update_python_vars 1) ++ endif() ++ + ocv_check_environment_variables(${executable}) + if(${executable}) + set(PYTHON_EXECUTABLE "${${executable}}") +@@ -47,7 +53,7 @@ 
if(NOT ${found}) + endforeach() + endif() + +- string(REGEX MATCH "^[0-9]+" _preferred_version_major ${preferred_version}) ++ string(REGEX MATCH "^[0-9]+" _preferred_version_major "${preferred_version}") + + find_host_package(PythonInterp "${preferred_version}") + if(NOT PYTHONINTERP_FOUND) +@@ -56,7 +62,7 @@ if(NOT ${found}) + + if(PYTHONINTERP_FOUND) + # Check if python major version is correct +- if(${_preferred_version_major} EQUAL ${PYTHON_VERSION_MAJOR}) ++ if("${_preferred_version_major}" STREQUAL "" OR "${_preferred_version_major}" STREQUAL "${PYTHON_VERSION_MAJOR}") + # Copy outputs + set(_found ${PYTHONINTERP_FOUND}) + set(_executable ${PYTHON_EXECUTABLE}) +@@ -65,7 +71,9 @@ if(NOT ${found}) + set(_version_minor ${PYTHON_VERSION_MINOR}) + set(_version_patch ${PYTHON_VERSION_PATCH}) + endif() ++ endif() + ++ if(__update_python_vars) + # Clear find_host_package side effects + unset(PYTHONINTERP_FOUND) + unset(PYTHON_EXECUTABLE CACHE) +@@ -109,7 +117,8 @@ if(NOT ${found}) + set(_library_release ${PYTHON_LIBRARY_RELEASE}) + set(_include_dir ${PYTHON_INCLUDE_DIR}) + set(_include_dir2 ${PYTHON_INCLUDE_DIR2}) +- ++ endif() ++ if(__update_python_vars) + # Clear find_package side effects + unset(PYTHONLIBS_FOUND) + unset(PYTHON_LIBRARIES) +@@ -160,7 +169,7 @@ if(NOT ${found}) + unset(_path) + endif() + +- set(_numpy_include_dirs ${${numpy_include_dirs}}) ++ set(_numpy_include_dirs "${${numpy_include_dirs}}") + + if(NOT _numpy_include_dirs) + if(CMAKE_CROSSCOMPILING) +@@ -222,6 +231,10 @@ if(NOT ${found}) + endif() + endfunction(find_python) + ++if(OPENCV_PYTHON_SKIP_DETECTION) ++ return() ++endif() ++ + find_python(2.7 "${MIN_VER_PYTHON2}" PYTHON2_LIBRARY PYTHON2_INCLUDE_DIR + PYTHON2INTERP_FOUND PYTHON2_EXECUTABLE PYTHON2_VERSION_STRING + PYTHON2_VERSION_MAJOR PYTHON2_VERSION_MINOR PYTHON2LIBS_FOUND diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch new file mode 100755 
index 00000000000..d7786308d8c --- /dev/null +++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_ffmpeg.patch @@ -0,0 +1,107 @@ +commit b1d208891b9f6ae3968730b120a5d0dcbba679d0 +Author: Jonathan Viney +Date: Sun Nov 19 07:08:41 2017 +1300 + + Merge pull request #10011 from jviney:master + + Fix build with FFmpeg master. Some deprecated APIs have been removed. (#10011) + + * Fix build with FFmpeg master. + + * ffmpeg: update AVFMT_RAWPICTURE support removal + +diff --git a/modules/videoio/src/cap_ffmpeg_impl.hpp b/modules/videoio/src/cap_ffmpeg_impl.hpp +index 5f51e65134..46461483a7 100644 +--- a/modules/videoio/src/cap_ffmpeg_impl.hpp ++++ b/modules/videoio/src/cap_ffmpeg_impl.hpp +@@ -149,6 +149,10 @@ extern "C" { + #define AV_PIX_FMT_GRAY16BE PIX_FMT_GRAY16BE + #endif + ++#ifndef PKT_FLAG_KEY ++#define PKT_FLAG_KEY AV_PKT_FLAG_KEY ++#endif ++ + #if LIBAVUTIL_BUILD >= (LIBAVUTIL_VERSION_MICRO >= 100 \ + ? CALC_FFMPEG_VERSION(52, 38, 100) : CALC_FFMPEG_VERSION(52, 13, 0)) + #define USE_AV_FRAME_GET_BUFFER 1 +@@ -1570,7 +1574,11 @@ static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc, + // some formats want stream headers to be seperate + if(oc->oformat->flags & AVFMT_GLOBALHEADER) + { ++#if LIBAVCODEC_BUILD > CALC_FFMPEG_VERSION(56, 35, 0) ++ c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; ++#else + c->flags |= CODEC_FLAG_GLOBAL_HEADER; ++#endif + } + #endif + +@@ -1598,23 +1606,24 @@ static int icv_av_write_frame_FFMPEG( AVFormatContext * oc, AVStream * video_st, + #endif + int ret = OPENCV_NO_FRAMES_WRITTEN_CODE; + +- if (oc->oformat->flags & AVFMT_RAWPICTURE) { ++#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(57, 0, 0) ++ if (oc->oformat->flags & AVFMT_RAWPICTURE) ++ { + /* raw video case. 
The API will change slightly in the near + futur for that */ + AVPacket pkt; + av_init_packet(&pkt); + +-#ifndef PKT_FLAG_KEY +-#define PKT_FLAG_KEY AV_PKT_FLAG_KEY +-#endif +- + pkt.flags |= PKT_FLAG_KEY; + pkt.stream_index= video_st->index; + pkt.data= (uint8_t *)picture; + pkt.size= sizeof(AVPicture); + + ret = av_write_frame(oc, &pkt); +- } else { ++ } ++ else ++#endif ++ { + /* encode the image */ + AVPacket pkt; + av_init_packet(&pkt); +@@ -1772,7 +1781,9 @@ void CvVideoWriter_FFMPEG::close() + /* write the trailer, if any */ + if(ok && oc) + { +- if( (oc->oformat->flags & AVFMT_RAWPICTURE) == 0 ) ++#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(57, 0, 0) ++ if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) ++#endif + { + for(;;) + { +@@ -2071,7 +2082,11 @@ bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc, + + outbuf = NULL; + +- if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) { ++ ++#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(57, 0, 0) ++ if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) ++#endif ++ { + /* allocate output buffer */ + /* assume we will never get codec output with more than 4 bytes per pixel... 
*/ + outbuf_size = width*height*4; +@@ -2376,7 +2391,11 @@ AVStream* OutputMediaStream_FFMPEG::addVideoStream(AVFormatContext *oc, CV_CODEC + // some formats want stream headers to be seperate + if (oc->oformat->flags & AVFMT_GLOBALHEADER) + { +- c->flags |= CODEC_FLAG_GLOBAL_HEADER; ++ #if LIBAVCODEC_BUILD > CALC_FFMPEG_VERSION(56, 35, 0) ++ c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; ++ #else ++ c->flags |= CODEC_FLAG_GLOBAL_HEADER; ++ #endif + } + #endif + diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_fj.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_fj.patch new file mode 100644 index 00000000000..dae8af256bb --- /dev/null +++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_fj.patch @@ -0,0 +1,20 @@ +diff -ru opencv-3.2.0.org/modules/core/include/opencv2/core/hal/intrin_neon.hpp opencv-3.2.0/modules/core/include/opencv2/core/hal/intrin_neon.hpp +--- opencv-3.2.0.org/modules/core/include/opencv2/core/hal/intrin_neon.hpp 2016-12-23 21:54:44.000000000 +0900 ++++ opencv-3.2.0/modules/core/include/opencv2/core/hal/intrin_neon.hpp 2020-05-27 14:39:11.288700481 +0900 +@@ -282,10 +282,16 @@ + { return (int16x4_t)a; } + template static inline float16x4_t vreinterpret_f16_s16(T a) + { return (float16x4_t)a; } ++#ifdef vld1_f16 ++#undef vld1_f16 ++#endif + template static inline float16x4_t vld1_f16(const T* ptr) + { return vreinterpret_f16_s16(vld1_s16((const short*)ptr)); } ++#ifdef vst1_f16 ++#undef vst1_f16 ++#endif + template static inline void vst1_f16(T* ptr, float16x4_t a) + { vst1_s16((short*)ptr, vreinterpret_s16_f16(a)); } + + struct v_float16x4 + { diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch new file mode 100755 index 00000000000..d96011330d6 --- /dev/null +++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_python3.7.patch @@ -0,0 +1,22 @@ +commit 0c4328fbf3da0da57a91b2133578c5100370b867 +Author: ilovezfs +Date: Tue Jul 3 
06:31:39 2018 +0000 + + Python 3.7 compatability + + The result of PyUnicode_AsUTF8() is now of type const char * rather of + char *. + +diff --git a/modules/python/src2/cv2.cpp b/modules/python/src2/cv2.cpp +index e16fcbacf2..5a6bf7ef62 100644 +--- a/modules/python/src2/cv2.cpp ++++ b/modules/python/src2/cv2.cpp +@@ -916,7 +916,7 @@ bool pyopencv_to(PyObject* obj, String& value, const char* name) + (void)name; + if(!obj || obj == Py_None) + return true; +- char* str = PyString_AsString(obj); ++ const char* str = PyString_AsString(obj); + if(!str) + return false; + value = String(str); diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch new file mode 100755 index 00000000000..f5341c62fa9 --- /dev/null +++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_regacyvtk.patch @@ -0,0 +1,119 @@ +commit 235889ddbb1aaa4f0dfaee730f42a8476c0e50f0 +Author: jasjuang +Date: Thu Dec 21 01:33:25 2017 -0800 + + handle legacy VTK functions + +diff --git a/modules/viz/src/clouds.cpp b/modules/viz/src/clouds.cpp +index 48d057d2a8..ab8fd43e40 100644 +--- a/modules/viz/src/clouds.cpp ++++ b/modules/viz/src/clouds.cpp +@@ -77,7 +77,9 @@ cv::viz::WCloud::WCloud(cv::InputArray cloud, cv::InputArray colors, cv::InputAr + vtkSmartPointer mapper = vtkSmartPointer::New(); + VtkUtils::SetInputData(mapper, cloud_source->GetOutput()); + mapper->SetScalarModeToUsePointData(); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + mapper->SetScalarRange(0, 255); + mapper->ScalarVisibilityOn(); + +@@ -117,7 +119,9 @@ cv::viz::WPaintedCloud::WPaintedCloud(InputArray cloud) + + vtkSmartPointer mapper = vtkSmartPointer::New(); + VtkUtils::SetInputData(mapper, vtkPolyData::SafeDownCast(elevation->GetOutput())); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + mapper->ScalarVisibilityOn(); + mapper->SetColorModeToMapScalars(); + +@@ -143,7 +147,9 @@ 
cv::viz::WPaintedCloud::WPaintedCloud(InputArray cloud, const Point3d& p1, const + + vtkSmartPointer mapper = vtkSmartPointer::New(); + VtkUtils::SetInputData(mapper, vtkPolyData::SafeDownCast(elevation->GetOutput())); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + mapper->ScalarVisibilityOn(); + mapper->SetColorModeToMapScalars(); + +@@ -182,7 +188,9 @@ cv::viz::WPaintedCloud::WPaintedCloud(InputArray cloud, const Point3d& p1, const + + vtkSmartPointer mapper = vtkSmartPointer::New(); + VtkUtils::SetInputData(mapper, vtkPolyData::SafeDownCast(elevation->GetOutput())); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + mapper->ScalarVisibilityOn(); + mapper->SetColorModeToMapScalars(); + mapper->SetLookupTable(color_transfer); +@@ -211,7 +219,9 @@ cv::viz::WCloudCollection::WCloudCollection() + vtkSmartPointer mapper = vtkSmartPointer::New(); + mapper->SetInputConnection(append_filter->GetOutputPort()); + mapper->SetScalarModeToUsePointData(); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + mapper->SetScalarRange(0, 255); + mapper->ScalarVisibilityOn(); + +@@ -416,7 +426,9 @@ cv::viz::WMesh::WMesh(const Mesh &mesh) + + vtkSmartPointer mapper = vtkSmartPointer::New(); + mapper->SetScalarModeToUsePointData(); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + VtkUtils::SetInputData(mapper, polydata); + + vtkSmartPointer actor = vtkSmartPointer::New(); +@@ -468,7 +480,9 @@ cv::viz::WWidgetMerger::WWidgetMerger() + vtkSmartPointer mapper = vtkSmartPointer::New(); + mapper->SetInputConnection(append_filter->GetOutputPort()); + mapper->SetScalarModeToUsePointData(); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + mapper->SetScalarRange(0, 255); + mapper->ScalarVisibilityOn(); + +diff --git a/modules/viz/src/widget.cpp b/modules/viz/src/widget.cpp +index 0473c274bc..b324a4e26c 100644 +--- a/modules/viz/src/widget.cpp ++++ 
b/modules/viz/src/widget.cpp +@@ -91,7 +91,9 @@ cv::viz::Widget cv::viz::Widget::fromPlyFile(const String &file_name) + + vtkSmartPointer mapper = vtkSmartPointer::New(); + mapper->SetInputConnection( reader->GetOutputPort() ); ++#if VTK_MAJOR_VERSION < 8 + mapper->ImmediateModeRenderingOff(); ++#endif + + vtkSmartPointer actor = vtkSmartPointer::New(); + actor->GetProperty()->SetInterpolationToFlat(); +@@ -113,7 +115,11 @@ void cv::viz::Widget::setRenderingProperty(int property, double value) + case POINT_SIZE: actor->GetProperty()->SetPointSize(float(value)); break; + case OPACITY: actor->GetProperty()->SetOpacity(value); break; + case LINE_WIDTH: actor->GetProperty()->SetLineWidth(float(value)); break; ++#if VTK_MAJOR_VERSION < 8 + case IMMEDIATE_RENDERING: actor->GetMapper()->SetImmediateModeRendering(int(value)); break; ++#else ++ case IMMEDIATE_RENDERING: std::cerr << "this property has no effect" << std::endl; break; ++#endif + case AMBIENT: actor->GetProperty()->SetAmbient(float(value)); break; + case LIGHTING: + { +@@ -191,8 +197,11 @@ double cv::viz::Widget::getRenderingProperty(int property) const + case POINT_SIZE: value = actor->GetProperty()->GetPointSize(); break; + case OPACITY: value = actor->GetProperty()->GetOpacity(); break; + case LINE_WIDTH: value = actor->GetProperty()->GetLineWidth(); break; ++#if VTK_MAJOR_VERSION < 8 + case IMMEDIATE_RENDERING: value = actor->GetMapper()->GetImmediateModeRendering(); break; +- ++#else ++ case IMMEDIATE_RENDERING: std::cerr << "this property has no effect" << std::endl; break; ++#endif + case FONT_SIZE: + { + vtkTextActor* text_actor = vtkTextActor::SafeDownCast(actor); diff --git a/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch b/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch new file mode 100755 index 00000000000..e5eecd70928 --- /dev/null +++ b/var/spack/repos/builtin/packages/opencv/opencv3.2_vtk.patch @@ -0,0 +1,20 @@ +commit d810c73396ec7931addf3e7f75b17daf4e184bf4 +Author: 
Adam Rankin +Date: Sun Mar 11 22:28:18 2018 -0400 + + Update precomp.hpp + + Enabling build of visualization module when using VTK 9 (current HEAD of d5bbb9e99bbc6d11d2196c48bfd8f33508554551) + +diff --git a/modules/viz/src/precomp.hpp b/modules/viz/src/precomp.hpp +index 369db191d3..dc5a5db1ea 100644 +--- a/modules/viz/src/precomp.hpp ++++ b/modules/viz/src/precomp.hpp +@@ -104,6 +104,7 @@ + #include + #include + #include ++#include + #include + #include + #include diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index 0e5bca79075..52f59263b84 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -121,6 +121,13 @@ class Opencv(CMakePackage, CudaPackage): # the current development branch of OpenCV. See #8461 for more information. patch('dnn_cuda.patch', when='@3.3.0:3.4.1+cuda+dnn') + patch('opencv3.2_cmake.patch', when='@3.2') + patch('opencv3.2_vtk.patch', when='@3.2+vtk') + patch('opencv3.2_regacyvtk.patch', when='@3.2+vtk') + patch('opencv3.2_ffmpeg.patch', when='@3.2+videoio') + patch('opencv3.2_python3.7.patch', when='@3.2+python') + patch('opencv3.2_fj.patch', when='@3.2 %fj') + depends_on('eigen', when='+eigen') depends_on('zlib', when='+zlib') depends_on('libpng', when='+png') @@ -147,6 +154,9 @@ class Opencv(CMakePackage, CudaPackage): conflicts('cuda@10:', when='+cudacodec') conflicts('cuda', when='~contrib', msg='cuda support requires +contrib') + # IPP is provided x86_64 only + conflicts('+ipp', when="arch=aarch64:") + extends('python', when='+python') def cmake_args(self): diff --git a/var/spack/repos/builtin/packages/openfoam/common/spack-Allwmake b/var/spack/repos/builtin/packages/openfoam/common/spack-Allwmake index 83c9e273b5c..2b4d8f84123 100755 --- a/var/spack/repos/builtin/packages/openfoam/common/spack-Allwmake +++ b/var/spack/repos/builtin/packages/openfoam/common/spack-Allwmake @@ -7,8 +7,8 @@ export 
FOAM_INST_DIR=$(cd .. && pwd -L) # Prevent influence of user/site config when building export FOAM_CONFIG_MODE="o" -. "$PWD/etc/bashrc" '' # No arguments -mkdir -p "$FOAM_APPBIN" "$FOAM_LIBBIN" 2>/dev/null # Allow build interrupt +. "$PWD"/etc/bashrc '' # No arguments +mkdir -p "$FOAM_APPBIN" "$FOAM_LIBBIN" # Allow build interrupt echo "Build openfoam with SPACK ($@)" echo "WM_PROJECT_DIR = $WM_PROJECT_DIR" diff --git a/var/spack/repos/builtin/packages/openfoam/common/spack-derived-Allwmake b/var/spack/repos/builtin/packages/openfoam/common/spack-derived-Allwmake index 02741f1d133..b09cf99083c 100755 --- a/var/spack/repos/builtin/packages/openfoam/common/spack-derived-Allwmake +++ b/var/spack/repos/builtin/packages/openfoam/common/spack-derived-Allwmake @@ -10,7 +10,7 @@ } export FOAM_INST_DIR=$(cd $FOAM_PROJECT_DIR/.. && pwd -L) # Needed by foam-extend -. $FOAM_PROJECT_DIR/etc/bashrc '' # No arguments +. "$FOAM_PROJECT_DIR"/etc/bashrc '' # No arguments # Package-specific adjustments [ -f spack-config.sh ] && . ./spack-config.sh '' # No arguments diff --git a/var/spack/repos/builtin/packages/openfoam/common/spack-dummy-Allwmake b/var/spack/repos/builtin/packages/openfoam/common/spack-dummy-Allwmake new file mode 100755 index 00000000000..ac44a016dbe --- /dev/null +++ b/var/spack/repos/builtin/packages/openfoam/common/spack-dummy-Allwmake @@ -0,0 +1,18 @@ +#!/bin/bash +# Build wrapper script for dummy build + +# Prevent influence of user/site config when building +export FOAM_CONFIG_MODE="o" +. 
"$PWD"/etc/bashrc '' # No arguments + +echo "Dummy build openfoam with SPACK ($@)" +echo "WM_PROJECT_DIR = $WM_PROJECT_DIR" + +if [ -f applications/test/00-dummy/Allwmake ] +then + applications/test/00-dummy/Allwmake $@ +else + echo "Nothing to make" +fi + +# ----------------------------------------------------------------------------- diff --git a/var/spack/repos/builtin/packages/openfoam/package.py b/var/spack/repos/builtin/packages/openfoam/package.py index fd915abae24..1f818c54047 100644 --- a/var/spack/repos/builtin/packages/openfoam/package.py +++ b/var/spack/repos/builtin/packages/openfoam/package.py @@ -265,6 +265,7 @@ class Openfoam(Package): version('develop', branch='develop', submodules='True') version('master', branch='master', submodules='True') + version('1912_200506', sha256='831a39ff56e268e88374d0a3922479fd80260683e141e51980242cc281484121') version('1912_200403', sha256='1de8f4ddd39722b75f6b01ace9f1ba727b53dd999d1cd2b344a8c677ac2db4c0') version('1912', sha256='437feadf075419290aa8bf461673b723a60dc39525b23322850fb58cb48548f2') version('1906_200312', sha256='f75645151ed5d8c5da592d307480979fe580a25627cc0c9718ef370211577594') @@ -348,10 +349,10 @@ class Openfoam(Package): patch('https://develop.openfoam.com/Development/openfoam/commit/8831dfc58b0295d0d301a78341dd6f4599073d45.patch', when='@1806', sha256='21f1ab68c82dfa41ed1a4439427c94c43ddda02c84175c30da623d905d3e5d61' - ) + ) # Some user config settings - # default: 'compile-option': 'RpathOpt', + # default: 'compile-option': '-spack', # default: 'mplib': 'USERMPI', # Use user mpi for spack config = { # Add links into bin/, lib/ (eg, for other applications) @@ -421,15 +422,15 @@ def setup_run_environment(self, env): blacklist=[ # Blacklist these # Inadvertent changes # ------------------- - 'PS1', # Leave unaffected - 'MANPATH', # Leave unaffected + 'PS1', # Leave untouched + 'MANPATH', # Leave untouched # Unneeded bits # ------------- # 'FOAM_SETTINGS', # Do not use with modules # 'FOAM_INST_DIR', # 
Old # 'FOAM_(APP|ETC|SRC|SOLVERS|UTILITIES)', - # 'FOAM_TUTORIALS', # can be useful + # 'FOAM_TUTORIALS', # May be useful # 'WM_OSTYPE', # Purely optional value # Third-party cruft - only used for orig compilation @@ -443,7 +444,7 @@ def setup_run_environment(self, env): '(FOAM|WM)_.*USER_.*', ], whitelist=[ # Whitelist these - 'MPI_ARCH_PATH', # Can be needed for compilation + 'MPI_ARCH_PATH', # Can be required for compilation ]) env.extend(mods) @@ -540,6 +541,43 @@ def patch(self): rcfile, backup=False) + @when('@1906: %fj') + @run_before('configure') + def make_fujitsu_rules(self): + """Create Fujitsu rules (clang variant) unless supplied upstream. + Implemented for 1906 and later (older rules are too messy to edit). + Already included after 1912. + """ + general_rules = 'wmake/rules/General' + arch_rules = 'wmake/rules/linuxARM64' # self.arch + src = arch_rules + 'Clang' + dst = arch_rules + 'Fujitsu' # self.compiler + + if os.path.exists(dst): + return + + # Handle rules/ or rules// + if not os.path.exists(src): + src = join_path(arch_rules, 'Clang') + dst = join_path(arch_rules, 'Fujitsu') # self.compiler + if os.path.exists(dst): + return + + tty.info('Add Fujitsu wmake rules') + copy_tree(src, dst) + + for cfg in ['c', 'c++', 'general']: + rule = join_path(dst, cfg) + filter_file('Clang', 'Fujitsu', rule, backup=False) + + src = join_path(general_rules, 'Clang') + dst = join_path(general_rules, 'Fujitsu') # self.compiler + copy_tree(src, dst) + filter_file('clang', spack_cc, join_path(dst, 'c'), + backup=False, string=True) + filter_file('clang++', spack_cxx, join_path(dst, 'c++'), + backup=False, string=True) + def configure(self, spec, prefix): """Make adjustments to the OpenFOAM configuration files in their various locations: etc/bashrc, etc/config.sh/FEATURE and customizations that @@ -593,11 +631,11 @@ def configure(self, spec, prefix): 'metis': {}, 'ensight': {}, # Disable settings 'paraview': [], - 'gperftools': [], # Currently unused + 
'gperftools': [], # Disable settings 'vtk': [], } - # With adios2 after 1912 or develop (after 2019-10-01) + # With adios2 after 1912 if spec.satisfies('@1912:'): self.etc_config['adios2'] = [ ('ADIOS2_ARCH_PATH', spec['adios2'].prefix), @@ -794,13 +832,13 @@ class OpenfoamArch(object): Keywords label-size=[True] supports int32/int64 - compile-option[=RpathOpt] + compile-option[=-spack] mplib[=USERMPI] """ #: Map spack compiler names to OpenFOAM compiler names # By default, simply capitalize the first letter - compiler_mapping = {'intel': 'icc'} + compiler_mapping = {'intel': 'Icc', 'fj': 'Fujitsu'} def __init__(self, spec, **kwargs): # Some user settings, to be adjusted manually or via variants @@ -808,10 +846,9 @@ def __init__(self, spec, **kwargs): self.arch_option = '' # Eg, -march=knl self.label_size = None # <- +int64 self.precision_option = 'DP' # <- +float32 - self.compile_option = kwargs.get('compile-option', 'RpathOpt') + self.compile_option = kwargs.get('compile-option', '-spack') self.arch = None self.options = None - self.rule = None self.mplib = kwargs.get('mplib', 'USERMPI') # Normally support WM_LABEL_OPTION, but not yet for foam-extend @@ -823,6 +860,9 @@ def __init__(self, spec, **kwargs): if '+float32' in spec: self.precision_option = 'SP' + # TDB: mixed precision? + # self.precision_option = 'SPDP' + # Processor/architecture-specific optimizations if '+knl' in spec: self.arch_option = '-march=knl' @@ -855,10 +895,9 @@ def __init__(self, spec, **kwargs): self.arch = platform - # Capitalized version of the compiler name, which usually corresponds - # to how OpenFOAM will camel-case things. - # Use compiler_mapping to handing special cases. - # Also handle special compiler options (eg, KNL) + # Capitalize first letter of compiler name, which corresponds + # to how OpenFOAM handles things (eg, gcc -> Gcc). + # Use compiler_mapping for special cases. 
comp = spec.compiler.name if comp in self.compiler_mapping: @@ -866,7 +905,6 @@ def __init__(self, spec, **kwargs): comp = comp.capitalize() self.compiler = comp - self.rule = self.arch + self.compiler # Build WM_OPTIONS # ---- @@ -876,7 +914,8 @@ def __init__(self, spec, **kwargs): # WM_OPTIONS=$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_COMPILE_OPTION # ---- self.options = ''.join([ - self.rule, + self.arch, + self.compiler, self.precision_option, ('Int' + self.label_size if self.label_size else ''), self.compile_option]) @@ -897,46 +936,45 @@ def foam_dict(self): ('WM_MPLIB', self.mplib), ]) - def _rule_directory(self, projdir=None, general=False): - """The wmake/rules/ compiler directory""" + def _rule_directory(self, projdir, general=False): + """Return the wmake/rules/ General or compiler rules directory. + Supports wmake/rules/ and wmake/rules//. + """ + rules_dir = os.path.join(projdir, 'wmake', 'rules') if general: - relative = os.path.join('wmake', 'rules', 'General') + return os.path.join(rules_dir, 'General') + + arch_dir = os.path.join(rules_dir, self.arch) + comp_rules = arch_dir + self.compiler + if os.path.isdir(comp_rules): + return comp_rules else: - relative = os.path.join('wmake', 'rules', self.rule) - if projdir: - return os.path.join(projdir, relative) - else: - return relative + return os.path.join(arch_dir, self.compiler) def has_rule(self, projdir): - """Verify that a wmake/rules/ compiler rule exists in the project - directory. + """Verify that a wmake/rules/ compiler rule exists in the project. 
""" # Insist on a wmake rule for this architecture/compiler combination rule_dir = self._rule_directory(projdir) if not os.path.isdir(rule_dir): raise InstallError( - 'No wmake rule for {0}'.format(self.rule)) - if not re.match(r'.+Opt$', self.compile_option): - raise InstallError( - "WM_COMPILE_OPTION={0} is not type '*Opt'" - .format(self.compile_option)) + 'No wmake rule for {0} {1}'.format(self.arch, self.compiler)) return True def create_rules(self, projdir, foam_pkg): - """ Create cRpathOpt,c++RpathOpt and mplibUSER,mplibUSERMPI + """ Create {c,c++}-spack and mplib{USER,USERMPI} rules in the specified project directory. - The compiler rules are based on the respective cOpt,c++Opt rules + The compiler rules are based on the respective {c,c++}Opt rules but with additional rpath information for the OpenFOAM libraries. - The rpath rules allow wmake to use spack information with minimal - modification to OpenFOAM. + The '-spack' rules channel spack information into OpenFOAM wmake + rules with minimal modification to OpenFOAM. The rpath is used for the installed libpath (continue to use LD_LIBRARY_PATH for values during the build). """ # Note: the 'c' rules normally don't need rpath, since they are just - # used for statically linked wmake utilities, but left in anyhow. + # used for some statically linked wmake tools, but left in anyhow. 
# rpath for installed OpenFOAM libraries rpath = '{0}{1}'.format( diff --git a/var/spack/repos/builtin/packages/opengl/package.py b/var/spack/repos/builtin/packages/opengl/package.py index 603dc9cb88d..413b8e0f453 100644 --- a/var/spack/repos/builtin/packages/opengl/package.py +++ b/var/spack/repos/builtin/packages/opengl/package.py @@ -20,6 +20,17 @@ class Opengl(Package): provides('gl@:4.2', when='@4.2:') provides('gl@:4.1', when='@4.1:') provides('gl@:3.3', when='@3.3:') + provides('gl@:3.2', when='@3.2:') + provides('gl@:3.1', when='@3.1:') + provides('gl@:3.0', when='@3.0:') + provides('gl@:2.1', when='@2.1:') + provides('gl@:2.0', when='@2.0:') + provides('gl@:1.5', when='@1.5:') + provides('gl@:1.4', when='@1.4:') + provides('gl@:1.3', when='@1.3:') + provides('gl@:1.2', when='@1.2:') + provides('gl@:1.1', when='@1.1:') + provides('gl@:1.0', when='@1.0:') if sys.platform != 'darwin': provides('glx@1.4') diff --git a/var/spack/repos/builtin/packages/openmpi/nag_ltmain_4.patch b/var/spack/repos/builtin/packages/openmpi/nag_pthread/1.10.4_1.10.999.patch similarity index 100% rename from var/spack/repos/builtin/packages/openmpi/nag_ltmain_4.patch rename to var/spack/repos/builtin/packages/openmpi/nag_pthread/1.10.4_1.10.999.patch diff --git a/var/spack/repos/builtin/packages/openmpi/nag_ltmain_3.patch b/var/spack/repos/builtin/packages/openmpi/nag_pthread/2.0.0_2.1.1.patch similarity index 100% rename from var/spack/repos/builtin/packages/openmpi/nag_ltmain_3.patch rename to var/spack/repos/builtin/packages/openmpi/nag_pthread/2.0.0_2.1.1.patch diff --git a/var/spack/repos/builtin/packages/openmpi/nag_ltmain_2.patch b/var/spack/repos/builtin/packages/openmpi/nag_pthread/2.1.2_2.1.3_3.0.0.patch similarity index 100% rename from var/spack/repos/builtin/packages/openmpi/nag_ltmain_2.patch rename to var/spack/repos/builtin/packages/openmpi/nag_pthread/2.1.2_2.1.3_3.0.0.patch diff --git a/var/spack/repos/builtin/packages/openmpi/nag_ltmain_1.patch 
b/var/spack/repos/builtin/packages/openmpi/nag_pthread/2.1.4_2.1.999_3.0.1_4.patch similarity index 100% rename from var/spack/repos/builtin/packages/openmpi/nag_ltmain_1.patch rename to var/spack/repos/builtin/packages/openmpi/nag_pthread/2.1.4_2.1.999_3.0.1_4.patch diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py index 00cae9023bb..4184e64f021 100644 --- a/var/spack/repos/builtin/packages/openmpi/package.py +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -4,59 +4,12 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import itertools import os import sys import llnl.util.tty as tty -def _verbs_dir(): - """Try to find the directory where the OpenFabrics verbs package is - installed. Return None if not found. - """ - try: - # Try to locate Verbs by looking for a utility in the path - ibv_devices = which("ibv_devices") - # Run it (silently) to ensure it works - ibv_devices(output=str, error=str) - # Get path to executable - path = ibv_devices.exe[0] - # Remove executable name and "bin" directory - path = os.path.dirname(path) - path = os.path.dirname(path) - # There's usually no "/include" on Unix; use "/usr/include" instead - if path == "/": - path = "/usr" - return path - except TypeError: - return None - except ProcessError: - return None - - -def _mxm_dir(): - """Look for default directory where the Mellanox package is - installed. Return None if not found. - """ - # Only using default directory; make this more flexible in the future - path = "/opt/mellanox/mxm" - if os.path.isdir(path): - return path - else: - return None - - -def _tm_dir(): - """Look for default directory where the PBS/TM package is - installed. Return None if not found. 
- """ - # /opt/pbs from PBS 18+; make this more flexible in the future - paths_list = ("/opt/pbs", ) - for path in paths_list: - if os.path.isdir(path) and os.path.isfile(path + "/include/tm.h"): - return path - return None - - class Openmpi(AutotoolsPackage): """An open source Message Passing Interface implementation. @@ -80,13 +33,15 @@ class Openmpi(AutotoolsPackage): version('master', branch='master') # Current - version('4.0.3', sha256='1402feced8c3847b3ab8252165b90f7d1fa28c23b6b2ca4632b6e4971267fd03') # libmpi.so.40.20.3 + version('4.0.4', sha256='47e24eb2223fe5d24438658958a313b6b7a55bb281563542e1afc9dec4a31ac4') # libmpi.so.40.20.4 # Still supported + version('4.0.3', sha256='1402feced8c3847b3ab8252165b90f7d1fa28c23b6b2ca4632b6e4971267fd03') # libmpi.so.40.20.3 version('4.0.2', sha256='900bf751be72eccf06de9d186f7b1c4b5c2fa9fa66458e53b77778dffdfe4057') # libmpi.so.40.20.2 version('4.0.1', sha256='cce7b6d20522849301727f81282201d609553103ac0b09162cf28d102efb9709') # libmpi.so.40.20.1 version('4.0.0', sha256='2f0b8a36cfeb7354b45dda3c5425ef8393c9b04115570b615213faaa3f97366b') # libmpi.so.40.20.0 - version('3.1.5', preferred=True, sha256='fbf0075b4579685eec8d56d34d4d9c963e6667825548554f5bf308610af72133') # libmpi.so.40.10.4 + version('3.1.6', preferred=True, sha256='50131d982ec2a516564d74d5616383178361c2f08fdd7d1202b80bdf66a0d279') # libmpi.so.40.10.4 + version('3.1.5', sha256='fbf0075b4579685eec8d56d34d4d9c963e6667825548554f5bf308610af72133') # libmpi.so.40.10.4 version('3.1.4', sha256='17a69e0054db530c7dc119f75bd07d079efa147cf94bf27e590905864fe379d6') # libmpi.so.40.10.4 version('3.1.3', sha256='8be04307c00f51401d3fb9d837321781ea7c79f2a5a4a2e5d4eaedc874087ab6') # libmpi.so.40.10.3 version('3.1.2', sha256='c654ed847f34a278c52a15c98add40402b4a90f0c540779f1ae6c489af8a76c5') # libmpi.so.40.10.2 @@ -204,41 +159,66 @@ class Openmpi(AutotoolsPackage): patch('btl_vader.patch', when='@3.0.1:3.0.2') patch('btl_vader.patch', when='@3.1.0:3.1.2') - # Reported upstream: 
https://github.com/open-mpi/ompi/pull/6378 + # Make NAG compiler pass the -pthread option to the linker: + # https://github.com/open-mpi/ompi/pull/6378 # We support only versions based on Libtool 2.4.6. - patch('nag_ltmain_1.patch', when='@2.1.4:2.1.999,3.0.1:4%nag') - patch('nag_ltmain_2.patch', when='@2.1.2:2.1.3,3.0.0%nag') - patch('nag_ltmain_3.patch', when='@2.0.0:2.1.1%nag') - patch('nag_ltmain_4.patch', when='@1.10.4:1.10.999%nag') + patch('nag_pthread/2.1.4_2.1.999_3.0.1_4.patch', when='@2.1.4:2.1.999,3.0.1:4%nag') + patch('nag_pthread/2.1.2_2.1.3_3.0.0.patch', when='@2.1.2:2.1.3,3.0.0%nag') + patch('nag_pthread/2.0.0_2.1.1.patch', when='@2.0.0:2.1.1%nag') + patch('nag_pthread/1.10.4_1.10.999.patch', when='@1.10.4:1.10.999%nag') + + # Fix MPI_Sizeof() in the "mpi" Fortran module for compilers that do not + # support "IGNORE TKR" functionality (e.g. NAG). + # The issue has been resolved upstream in two steps: + # 1) https://github.com/open-mpi/ompi/pull/2294 + # 2) https://github.com/open-mpi/ompi/pull/5099 + # The first one was applied starting version v3.0.0 and backported to + # v1.10. A subset with relevant modifications is applicable starting + # version 1.8.4. + patch('use_mpi_tkr_sizeof/step_1.patch', when='@1.8.4:1.10.6,2:2.999') + # The second patch was applied starting version v4.0.0 and backported to + # v2.x, v3.0.x, and v3.1.x. 
+ patch('use_mpi_tkr_sizeof/step_2.patch', when='@1.8.4:2.1.3,3:3.0.1') variant( 'fabrics', values=disjoint_sets( - ('auto',), ('psm', 'psm2', 'verbs', 'mxm', 'ucx', 'libfabric') + ('auto',), + ('psm', 'psm2', 'verbs', + 'mxm', 'ucx', 'ofi', + 'fca', 'hcoll', + 'xpmem', 'cma', 'knem') # shared memory transports ).with_non_feature_values('auto', 'none'), description="List of fabrics that are enabled; " - "'auto' lets openmpi determine", + "'auto' lets openmpi determine", ) variant( 'schedulers', values=disjoint_sets( - ('auto',), ('alps', 'lsf', 'tm', 'slurm', 'sge', 'loadleveler') + ('auto',), + ('alps', 'lsf', 'tm', + 'slurm', 'sge', 'loadleveler') ).with_non_feature_values('auto', 'none'), description="List of schedulers for which support is enabled; " - "'auto' lets openmpi determine", + "'auto' lets openmpi determine", ) # Additional support options + variant('atomics', default=False, description='Enable built-in atomics') variant('java', default=False, description='Build Java support') + variant('static', default=True, description='Build static libraries') variant('sqlite3', default=False, description='Build SQLite3 support') variant('vt', default=True, description='Build VampirTrace support') variant('thread_multiple', default=False, description='Enable MPI_THREAD_MULTIPLE support') variant('cuda', default=False, description='Enable CUDA support') variant('pmi', default=False, description='Enable PMI support') + variant('wrapper-rpath', default=True, + description='Enable rpath support in the wrappers') variant('cxx', default=False, description='Enable C++ MPI bindings') variant('cxx_exceptions', default=False, description='Enable C++ Exception support') + variant('gpfs', default=True, description='Enable GPFS support (if present)') # Adding support to build a debug version of OpenMPI that activates # Memchecker, as described here: # @@ -271,6 +251,8 @@ class Openmpi(AutotoolsPackage): depends_on('m4', type='build', when='@develop') depends_on('perl', 
type='build', when='@develop') + depends_on('pkgconfig', type='build') + depends_on('hwloc') # ompi@:3.0.0 doesn't support newer hwloc releases: # "configure: error: OMPI does not currently support hwloc v2 API" @@ -283,31 +265,60 @@ class Openmpi(AutotoolsPackage): depends_on('sqlite', when='+sqlite3@:1.11') depends_on('zlib', when='@3.0.0:') depends_on('valgrind~mpi', when='+memchecker') + + depends_on('opa-psm2', when='fabrics=psm2') + depends_on('rdma-core', when='fabrics=verbs') + depends_on('mxm', when='fabrics=mxm') + depends_on('binutils+libiberty', when='fabrics=mxm') depends_on('ucx', when='fabrics=ucx') depends_on('ucx +thread_multiple', when='fabrics=ucx +thread_multiple') depends_on('ucx +thread_multiple', when='@3.0.0: fabrics=ucx') - depends_on('libfabric', when='fabrics=libfabric') - depends_on('slurm', when='schedulers=slurm') - depends_on('lsf', when='schedulers=lsf') - depends_on('binutils+libiberty', when='fabrics=mxm') + depends_on('libfabric', when='fabrics=ofi') + depends_on('fca', when='fabrics=fca') + depends_on('hcoll', when='fabrics=hcoll') + depends_on('xpmem', when='fabrics=xpmem') + depends_on('knem', when='fabrics=knem') + + depends_on('lsf', when='schedulers=lsf') + depends_on('openpbs', when='schedulers=tm') + depends_on('slurm', when='schedulers=slurm') + + # CUDA support was added in 1.7 + conflicts('+cuda', when='@:1.6') + # PMI support was added in 1.5.5 + conflicts('+pmi', when='@:1.5.4') + # RPATH support in the wrappers was added in 1.7.4 + conflicts('+wrapper-rpath', when='@:1.7.3') - conflicts('+cuda', when='@:1.6') # CUDA support was added in 1.7 - conflicts('fabrics=psm2', when='@:1.8') # PSM2 support was added in 1.10.0 - conflicts('fabrics=mxm', when='@:1.5.3') # MXM support was added in 1.5.4 - conflicts('+pmi', when='@:1.5.4') # PMI support was added in 1.5.5 - conflicts('schedulers=slurm ~pmi', when='@1.5.4:', - msg='+pmi is required for openmpi(>=1.5.5) to work with SLURM.') - conflicts('schedulers=loadleveler', 
when='@3.0.0:', - msg='The loadleveler scheduler is not supported with ' - 'openmpi(>=3.0.0).') conflicts('+cxx', when='@5:', msg='C++ MPI bindings are removed in 5.0.X release') conflicts('+cxx_exceptions', when='@5:', msg='C++ exceptions are removed in 5.0.X release') + # PSM2 support was added in 1.10.0 + conflicts('fabrics=psm2', when='@:1.8') + # MXM support was added in 1.5.4 + conflicts('fabrics=mxm', when='@:1.5.3') + # libfabric (OFI) support was added in 1.10.0 + conflicts('fabrics=ofi', when='@:1.8') + # fca support was added in 1.5.0 and removed in 5.0.0 + conflicts('fabrics=fca', when='@:1.4,5:') + # hcoll support was added in 1.7.3: + conflicts('fabrics=hcoll', when='@:1.7.2') + # xpmem support was added in 1.7 + conflicts('fabrics=xpmem', when='@:1.6') + # cma support was added in 1.7 + conflicts('fabrics=cma', when='@:1.6') + # knem support was added in 1.5 + conflicts('fabrics=knem', when='@:1.4') + + conflicts('schedulers=slurm ~pmi', when='@1.5.4:', + msg='+pmi is required for openmpi(>=1.5.5) to work with SLURM.') + conflicts('schedulers=loadleveler', when='@3.0.0:', + msg='The loadleveler scheduler is not supported with ' + 'openmpi(>=3.0.0).') + filter_compiler_wrappers('openmpi/*-wrapper-data*', relative_root='share') - conflicts('fabrics=libfabric', when='@:1.8') # libfabric support was added in 1.10.0 - # It may be worth considering making libfabric an exclusive fabrics choice def url_for_version(self, version): url = "http://www.open-mpi.org/software/ompi/v{0}/downloads/openmpi-{1}.tar.bz2" @@ -332,12 +343,18 @@ def libs(self): libraries, root=self.prefix, shared=True, recursive=True ) - def setup_dependent_build_environment(self, env, dependent_spec): + def setup_run_environment(self, env): + # Because MPI is both a runtime and a compiler, we have to setup the + # compiler components as part of the run environment. 
env.set('MPICC', join_path(self.prefix.bin, 'mpicc')) env.set('MPICXX', join_path(self.prefix.bin, 'mpic++')) env.set('MPIF77', join_path(self.prefix.bin, 'mpif77')) env.set('MPIF90', join_path(self.prefix.bin, 'mpif90')) + def setup_dependent_build_environment(self, env, dependent_spec): + self.setup_run_environment(env) + + # Use the spack compiler wrappers under MPI env.set('OMPI_CC', spack_cc) env.set('OMPI_CXX', spack_cxx) env.set('OMPI_FC', spack_fc) @@ -353,44 +370,72 @@ def setup_dependent_package(self, module, dependent_spec): join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix)) ] + # Most of the following with_or_without methods might seem redundant + # because Spack compiler wrapper adds the required -I and -L flags, which + # is enough for the configure script to find them. However, we also need + # the flags in Libtool (lib/*.la) and pkg-config (lib/pkgconfig/*.pc). + # Therefore, we pass the prefixes explicitly. + + def with_or_without_psm2(self, activated): + if not activated: + return '--without-psm2' + return '--with-psm2={0}'.format(self.spec['opa-psm2'].prefix) + def with_or_without_verbs(self, activated): - # Up through version 1.6, this option was previously named - # --with-openib - opt = 'openib' - # In version 1.7, it was renamed to be --with-verbs - if self.spec.satisfies('@1.7:'): - opt = 'verbs' - # If the option has not been activated return - # --without-openib or --without-verbs + # Up through version 1.6, this option was named --with-openib. + # In version 1.7, it was renamed to be --with-verbs. 
+ opt = 'verbs' if self.spec.satisfies('@1.7:') else 'openib' if not activated: return '--without-{0}'.format(opt) - line = '--with-{0}'.format(opt) - path = _verbs_dir() - if (path is not None) and (path not in ('/usr', '/usr/local')): - line += '={0}'.format(path) - return line + return '--with-{0}={1}'.format(opt, self.spec['rdma-core'].prefix) def with_or_without_mxm(self, activated): - opt = 'mxm' - # If the option has not been activated return --without-mxm + if not activated: + return '--without-mxm' + return '--with-mxm={0}'.format(self.spec['mxm'].prefix) + + def with_or_without_ucx(self, activated): + if not activated: + return '--without-ucx' + return '--with-ucx={0}'.format(self.spec['ucx'].prefix) + + def with_or_without_ofi(self, activated): + # Up through version 3.0.3 this option was named --with-libfabric. + # In version 3.0.4, the old name was deprecated in favor of --with-ofi. + opt = 'ofi' if self.spec.satisfies('@3.0.4:') else 'libfabric' if not activated: return '--without-{0}'.format(opt) - line = '--with-{0}'.format(opt) - path = _mxm_dir() - if path is not None: - line += '={0}'.format(path) - return line + return '--with-{0}={1}'.format(opt, self.spec['libfabric'].prefix) + + def with_or_without_fca(self, activated): + if not activated: + return '--without-fca' + return '--with-fca={0}'.format(self.spec['fca'].prefix) + + def with_or_without_hcoll(self, activated): + if not activated: + return '--without-hcoll' + return '--with-hcoll={0}'.format(self.spec['hcoll'].prefix) + + def with_or_without_xpmem(self, activated): + if not activated: + return '--without-xpmem' + return '--with-xpmem={0}'.format(self.spec['xpmem'].prefix) + + def with_or_without_knem(self, activated): + if not activated: + return '--without-knem' + return '--with-knem={0}'.format(self.spec['knem'].prefix) + + def with_or_without_lsf(self, activated): + if not activated: + return '--without-lsf' + return '--with-lsf={0}'.format(self.spec['lsf'].prefix) def 
with_or_without_tm(self, activated): - opt = 'tm' - # If the option has not been activated return --without-tm if not activated: - return '--without-{0}'.format(opt) - line = '--with-{0}'.format(opt) - path = _tm_dir() - if path is not None: - line += '={0}'.format(path) - return line + return '--without-tm' + return '--with-tm={0}'.format(self.spec['openpbs'].prefix) @run_before('autoreconf') def die_without_fortran(self): @@ -407,6 +452,11 @@ def autoreconf(self, spec, prefix): perl = which('perl') perl('autogen.pl') + def setup_build_environment(self, env): + if '~gpfs' in self.spec: + env.set('ac_cv_header_gpfs_h', 'no') + env.set('ac_cv_header_gpfs_fcntl_h', 'no') + def configure_args(self): spec = self.spec config_args = [ @@ -414,12 +464,15 @@ def configure_args(self): '--disable-silent-rules' ] - # Add extra_rpaths dirs from compilers.yaml into link wrapper - rpaths = [self.compiler.cc_rpath_arg + path - for path in self.compiler.extra_rpaths] - config_args.extend([ - '--with-wrapper-ldflags={0}'.format(' '.join(rpaths)) - ]) + # All rpath flags should be appended with self.compiler.cc_rpath_arg. + # Later, we might need to update share/openmpi/mpic++-wrapper-data.txt + # and mpifort-wrapper-data.txt (see filter_rpaths()). 
+ wrapper_ldflags = [] + + if '+atomics' in spec: + config_args.append('--enable-builtin-atomics') + else: + config_args.append('--disable-builtin-atomics') # According to this comment on github: # @@ -431,9 +484,14 @@ def configure_args(self): if spec.satisfies('schedulers=slurm'): config_args.append('--with-pmi={0}'.format(spec['slurm'].prefix)) if spec.satisfies('@3.1.3:') or spec.satisfies('@3.0.3'): - config_args.append('--enable-static') + if '+static' in spec: + config_args.append('--enable-static') else: - config_args.append('--enable-static') + if '+static' in spec: + config_args.append('--enable-static') + else: + config_args.append('--disable-static') + config_args.extend(self.with_or_without('pmi')) if spec.satisfies('@3.0.0:', strict=True): @@ -452,12 +510,12 @@ def configure_args(self): # Fabrics if 'fabrics=auto' not in spec: config_args.extend(self.with_or_without('fabrics')) - # The wrappers fail to automatically link libfabric. This will cause - # undefined references unless we add the appropriate flags. 
- if 'fabrics=libfabric' in spec: - config_args.append('--with-wrapper-ldflags=-L{0} -Wl,-rpath={0}' - .format(spec['libfabric'].prefix.lib)) - config_args.append('--with-wrapper-libs=-lfabric') + + if spec.satisfies('@2.0.0'): + if 'fabrics=xpmem' in spec and 'platform=cray' in spec: + config_args.append('--with-cray-xpmem') + else: + config_args.append('--without-cray-xpmem') # Schedulers if 'schedulers=auto' not in spec: @@ -533,6 +591,25 @@ def configure_args(self): else: config_args.append('--without-cuda') + if '+wrapper-rpath' in spec: + config_args.append('--enable-wrapper-rpath') + + # Disable new dynamic tags in the wrapper (--disable-new-dtags) + # In the newer versions this can be done with a configure option + # (for older versions, we rely on filter_compiler_wrappers() and + # filter_pc_files()): + if spec.satisfies('@3.0.5:'): + config_args.append('--disable-wrapper-runpath') + + # Add extra_rpaths and implicit_rpaths into the wrappers. + wrapper_ldflags.extend([ + self.compiler.cc_rpath_arg + path + for path in itertools.chain( + self.compiler.extra_rpaths, + self.compiler.implicit_rpaths())]) + else: + config_args.append('--disable-wrapper-rpath') + if spec.satisfies('@:4'): if '+cxx' in spec: config_args.append('--enable-mpi-cxx') @@ -544,8 +621,58 @@ def configure_args(self): else: config_args.append('--disable-cxx-exceptions') + if wrapper_ldflags: + config_args.append( + '--with-wrapper-ldflags={0}'.format(' '.join(wrapper_ldflags))) + return config_args + @when('+wrapper-rpath') + @run_after('install') + def filter_rpaths(self): + + def filter_lang_rpaths(lang_tokens, rpath_arg): + if self.compiler.cc_rpath_arg == rpath_arg: + return + + files = find(self.spec.prefix.share.openmpi, + ['*{0}-wrapper-data*'.format(t) for t in lang_tokens]) + files.extend(find(self.spec.prefix.lib.pkgconfig, + ['ompi-{0}.pc'.format(t) for t in lang_tokens])) + + x = FileFilter(*[f for f in files if not os.path.islink(f)]) + + # Replace self.compiler.cc_rpath_arg, 
which has been added as + '--with-wrapper-ldflags', with rpath_arg in the respective + # language-specific wrappers and pkg-config files. + x.filter(self.compiler.cc_rpath_arg, rpath_arg, + string=True, backup=False) + + if self.spec.satisfies('@:1.10.3,2:2.1.1'): + # Replace Libtool-style RPATH prefixes '-Wl,-rpath -Wl,' with + # rpath_arg for old versions of OpenMPI, which assumed that CXX + # and FC had the same prefixes as CC. + x.filter('-Wl,-rpath -Wl,', rpath_arg, + string=True, backup=False) + + filter_lang_rpaths(['c++', 'CC', 'cxx'], self.compiler.cxx_rpath_arg) + filter_lang_rpaths(['fort', 'f77', 'f90'], self.compiler.fc_rpath_arg) + + @when('@:3.0.4+wrapper-rpath') + @run_after('install') + def filter_pc_files(self): + files = find(self.spec.prefix.lib.pkgconfig, '*.pc') + x = FileFilter(*[f for f in files if not os.path.islink(f)]) + + # Remove this linking flag if present (it turns RPATH into RUNPATH) + x.filter('{0}--enable-new-dtags'.format(self.compiler.linker_arg), '', + string=True, backup=False) + + # NAG compiler is usually mixed with GCC, which has a different + # prefix for linker arguments. 
+ if self.compiler.name == 'nag': + x.filter('-Wl,--enable-new-dtags', '', string=True, backup=False) + @run_after('install') def delete_mpirun_mpiexec(self): # The preferred way to run an application when Slurm is the diff --git a/var/spack/repos/builtin/packages/openmpi/use_mpi_tkr_sizeof/step_1.patch b/var/spack/repos/builtin/packages/openmpi/use_mpi_tkr_sizeof/step_1.patch new file mode 100644 index 00000000000..53d9e0018db --- /dev/null +++ b/var/spack/repos/builtin/packages/openmpi/use_mpi_tkr_sizeof/step_1.patch @@ -0,0 +1,584 @@ +--- a/ompi/mpi/fortran/use-mpi-tkr/mpi-f90-interfaces.h ++++ b/ompi/mpi/fortran/use-mpi-tkr/mpi-f90-interfaces.h +@@ -1650,570 +1650,6 @@ end subroutine MPI_Request_get_status + end interface + + +-interface MPI_Sizeof +- +-subroutine MPI_Sizeof0DCH(x, size, ierror) +- character, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DCH +- +- +-subroutine MPI_Sizeof0DL(x, size, ierror) +- logical, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DL +- +- +-subroutine MPI_Sizeof0DI1(x, size, ierror) +- integer*1, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DI1 +- +- +-subroutine MPI_Sizeof0DI2(x, size, ierror) +- integer*2, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DI2 +- +- +-subroutine MPI_Sizeof0DI4(x, size, ierror) +- integer*4, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DI4 +- +- +-subroutine MPI_Sizeof0DI8(x, size, ierror) +- integer*8, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DI8 +- +- +-subroutine MPI_Sizeof0DR4(x, size, ierror) +- real*4, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine 
MPI_Sizeof0DR4 +- +- +-subroutine MPI_Sizeof0DR8(x, size, ierror) +- real*8, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DR8 +- +- +-subroutine MPI_Sizeof0DC8(x, size, ierror) +- complex*8, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DC8 +- +- +-subroutine MPI_Sizeof0DC16(x, size, ierror) +- complex*16, intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof0DC16 +- +- +-subroutine MPI_Sizeof1DCH(x, size, ierror) +- character, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DCH +- +- +-subroutine MPI_Sizeof1DL(x, size, ierror) +- logical, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DL +- +- +-subroutine MPI_Sizeof1DI1(x, size, ierror) +- integer*1, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DI1 +- +- +-subroutine MPI_Sizeof1DI2(x, size, ierror) +- integer*2, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DI2 +- +- +-subroutine MPI_Sizeof1DI4(x, size, ierror) +- integer*4, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DI4 +- +- +-subroutine MPI_Sizeof1DI8(x, size, ierror) +- integer*8, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DI8 +- +- +-subroutine MPI_Sizeof1DR4(x, size, ierror) +- real*4, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DR4 +- +- +-subroutine MPI_Sizeof1DR8(x, size, ierror) +- real*8, dimension(*), intent(in) :: x +- integer, 
intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DR8 +- +- +-subroutine MPI_Sizeof1DC8(x, size, ierror) +- complex*8, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DC8 +- +- +-subroutine MPI_Sizeof1DC16(x, size, ierror) +- complex*16, dimension(*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof1DC16 +- +- +-subroutine MPI_Sizeof2DCH(x, size, ierror) +- character, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DCH +- +- +-subroutine MPI_Sizeof2DL(x, size, ierror) +- logical, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DL +- +- +-subroutine MPI_Sizeof2DI1(x, size, ierror) +- integer*1, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DI1 +- +- +-subroutine MPI_Sizeof2DI2(x, size, ierror) +- integer*2, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DI2 +- +- +-subroutine MPI_Sizeof2DI4(x, size, ierror) +- integer*4, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DI4 +- +- +-subroutine MPI_Sizeof2DI8(x, size, ierror) +- integer*8, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DI8 +- +- +-subroutine MPI_Sizeof2DR4(x, size, ierror) +- real*4, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DR4 +- +- +-subroutine MPI_Sizeof2DR8(x, size, ierror) +- real*8, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end 
subroutine MPI_Sizeof2DR8 +- +- +-subroutine MPI_Sizeof2DC8(x, size, ierror) +- complex*8, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DC8 +- +- +-subroutine MPI_Sizeof2DC16(x, size, ierror) +- complex*16, dimension(1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof2DC16 +- +- +-subroutine MPI_Sizeof3DCH(x, size, ierror) +- character, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DCH +- +- +-subroutine MPI_Sizeof3DL(x, size, ierror) +- logical, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DL +- +- +-subroutine MPI_Sizeof3DI1(x, size, ierror) +- integer*1, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DI1 +- +- +-subroutine MPI_Sizeof3DI2(x, size, ierror) +- integer*2, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DI2 +- +- +-subroutine MPI_Sizeof3DI4(x, size, ierror) +- integer*4, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DI4 +- +- +-subroutine MPI_Sizeof3DI8(x, size, ierror) +- integer*8, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DI8 +- +- +-subroutine MPI_Sizeof3DR4(x, size, ierror) +- real*4, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DR4 +- +- +-subroutine MPI_Sizeof3DR8(x, size, ierror) +- real*8, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DR8 +- +- 
+-subroutine MPI_Sizeof3DC8(x, size, ierror) +- complex*8, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DC8 +- +- +-subroutine MPI_Sizeof3DC16(x, size, ierror) +- complex*16, dimension(1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof3DC16 +- +- +-subroutine MPI_Sizeof4DCH(x, size, ierror) +- character, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DCH +- +- +-subroutine MPI_Sizeof4DL(x, size, ierror) +- logical, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DL +- +- +-subroutine MPI_Sizeof4DI1(x, size, ierror) +- integer*1, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DI1 +- +- +-subroutine MPI_Sizeof4DI2(x, size, ierror) +- integer*2, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DI2 +- +- +-subroutine MPI_Sizeof4DI4(x, size, ierror) +- integer*4, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DI4 +- +- +-subroutine MPI_Sizeof4DI8(x, size, ierror) +- integer*8, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DI8 +- +- +-subroutine MPI_Sizeof4DR4(x, size, ierror) +- real*4, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DR4 +- +- +-subroutine MPI_Sizeof4DR8(x, size, ierror) +- real*8, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DR8 +- +- +-subroutine 
MPI_Sizeof4DC8(x, size, ierror) +- complex*8, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DC8 +- +- +-subroutine MPI_Sizeof4DC16(x, size, ierror) +- complex*16, dimension(1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof4DC16 +- +- +-subroutine MPI_Sizeof5DCH(x, size, ierror) +- character, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DCH +- +- +-subroutine MPI_Sizeof5DL(x, size, ierror) +- logical, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DL +- +- +-subroutine MPI_Sizeof5DI1(x, size, ierror) +- integer*1, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DI1 +- +- +-subroutine MPI_Sizeof5DI2(x, size, ierror) +- integer*2, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DI2 +- +- +-subroutine MPI_Sizeof5DI4(x, size, ierror) +- integer*4, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DI4 +- +- +-subroutine MPI_Sizeof5DI8(x, size, ierror) +- integer*8, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DI8 +- +- +-subroutine MPI_Sizeof5DR4(x, size, ierror) +- real*4, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DR4 +- +- +-subroutine MPI_Sizeof5DR8(x, size, ierror) +- real*8, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DR8 +- +- +-subroutine 
MPI_Sizeof5DC8(x, size, ierror) +- complex*8, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DC8 +- +- +-subroutine MPI_Sizeof5DC16(x, size, ierror) +- complex*16, dimension(1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof5DC16 +- +- +-subroutine MPI_Sizeof6DCH(x, size, ierror) +- character, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DCH +- +- +-subroutine MPI_Sizeof6DL(x, size, ierror) +- logical, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DL +- +- +-subroutine MPI_Sizeof6DI1(x, size, ierror) +- integer*1, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DI1 +- +- +-subroutine MPI_Sizeof6DI2(x, size, ierror) +- integer*2, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DI2 +- +- +-subroutine MPI_Sizeof6DI4(x, size, ierror) +- integer*4, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DI4 +- +- +-subroutine MPI_Sizeof6DI8(x, size, ierror) +- integer*8, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DI8 +- +- +-subroutine MPI_Sizeof6DR4(x, size, ierror) +- real*4, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DR4 +- +- +-subroutine MPI_Sizeof6DR8(x, size, ierror) +- real*8, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DR8 
+- +- +-subroutine MPI_Sizeof6DC8(x, size, ierror) +- complex*8, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DC8 +- +- +-subroutine MPI_Sizeof6DC16(x, size, ierror) +- complex*16, dimension(1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof6DC16 +- +- +-subroutine MPI_Sizeof7DCH(x, size, ierror) +- character, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DCH +- +- +-subroutine MPI_Sizeof7DL(x, size, ierror) +- logical, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DL +- +- +-subroutine MPI_Sizeof7DI1(x, size, ierror) +- integer*1, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DI1 +- +- +-subroutine MPI_Sizeof7DI2(x, size, ierror) +- integer*2, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DI2 +- +- +-subroutine MPI_Sizeof7DI4(x, size, ierror) +- integer*4, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DI4 +- +- +-subroutine MPI_Sizeof7DI8(x, size, ierror) +- integer*8, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DI8 +- +- +-subroutine MPI_Sizeof7DR4(x, size, ierror) +- real*4, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DR4 +- +- +-subroutine MPI_Sizeof7DR8(x, size, ierror) +- real*8, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: 
ierror +-end subroutine MPI_Sizeof7DR8 +- +- +-subroutine MPI_Sizeof7DC8(x, size, ierror) +- complex*8, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DC8 +- +- +-subroutine MPI_Sizeof7DC16(x, size, ierror) +- complex*16, dimension(1,1,1,1,1,1,*), intent(in) :: x +- integer, intent(out) :: size +- integer, intent(out) :: ierror +-end subroutine MPI_Sizeof7DC16 +- +-end interface +- +- + interface MPI_Start + + subroutine MPI_Start(request, ierror) +--- a/ompi/mpi/fortran/use-mpi-tkr/mpi.F90 ++++ b/ompi/mpi/fortran/use-mpi-tkr/mpi.F90 +@@ -50,4 +50,8 @@ module mpi + + include "mpi-f90-interfaces.h" + ++#if OMPI_FORTRAN_BUILD_SIZEOF ++ include "mpi-tkr-sizeof.h" ++#endif ++ + end module mpi diff --git a/var/spack/repos/builtin/packages/openmpi/use_mpi_tkr_sizeof/step_2.patch b/var/spack/repos/builtin/packages/openmpi/use_mpi_tkr_sizeof/step_2.patch new file mode 100644 index 00000000000..6ca5051a596 --- /dev/null +++ b/var/spack/repos/builtin/packages/openmpi/use_mpi_tkr_sizeof/step_2.patch @@ -0,0 +1,22 @@ +--- a/ompi/mpi/fortran/configure-fortran-output.h.in ++++ b/ompi/mpi/fortran/configure-fortran-output.h.in +@@ -47,6 +47,8 @@ + ! Line 2 of the ignore TKR syntax + #define OMPI_FORTRAN_IGNORE_TKR_TYPE @OMPI_FORTRAN_IGNORE_TKR_TYPE@ + ++ ++#define OMPI_FORTRAN_BUILD_SIZEOF @OMPI_FORTRAN_BUILD_SIZEOF@ + ! 
Integers + + #define OMPI_HAVE_FORTRAN_INTEGER1 @OMPI_HAVE_FORTRAN_INTEGER1@ +--- a/ompi/mpi/fortran/use-mpi-tkr/Makefile.in ++++ b/ompi/mpi/fortran/use-mpi-tkr/Makefile.in +@@ -2023,6 +2023,8 @@ uninstall-am: uninstall-libLTLIBRARIES uninstall-local + @OMPI_BUILD_FORTRAN_USEMPI_TKR_BINDINGS_TRUE@mpi.lo: $(top_builddir)/ompi/mpi/fortran/configure-fortran-output.h + @OMPI_BUILD_FORTRAN_USEMPI_TKR_BINDINGS_TRUE@mpi.lo: mpi-f90-cptr-interfaces.F90 + ++@BUILD_FORTRAN_SIZEOF_TRUE@@OMPI_BUILD_FORTRAN_USEMPI_TKR_BINDINGS_TRUE@mpi.lo: mpi-tkr-sizeof.h ++ + @OMPI_BUILD_FORTRAN_USEMPI_TKR_BINDINGS_TRUE@mpi-tkr-sizeof.h: $(top_builddir)/config.status + @OMPI_BUILD_FORTRAN_USEMPI_TKR_BINDINGS_TRUE@mpi-tkr-sizeof.h: $(sizeof_pl) + @OMPI_BUILD_FORTRAN_USEMPI_TKR_BINDINGS_TRUE@mpi-tkr-sizeof.h: diff --git a/var/spack/repos/builtin/packages/openpbs/install.patch b/var/spack/repos/builtin/packages/openpbs/install.patch new file mode 100644 index 00000000000..8e97fd497c4 --- /dev/null +++ b/var/spack/repos/builtin/packages/openpbs/install.patch @@ -0,0 +1,52 @@ +--- a/m4/pbs_systemd_unitdir.m4 ++++ b/m4/pbs_systemd_unitdir.m4 +@@ -39,14 +39,19 @@ + AC_DEFUN([PBS_AC_SYSTEMD_UNITDIR], + [ + AC_MSG_CHECKING([system/machine type for systemd unit dir]) +- systemd_dir="/usr/lib/systemd/system" +- AS_IF([test -r "/etc/os-release"], +- [system_type=$( cat /etc/os-release | awk -F'=' '/^ID=/' | cut -d "=" -f 2 ) +- AS_IF([test "x$system_type" = "xubuntu" -o "x$system_type" = "xdebian"], +- [systemd_dir="/lib/systemd/system"]) +- ] +- ) +- _unitdir=$systemd_dir ++dnl systemd_dir="/usr/lib/systemd/system" ++dnl AS_IF([test -r "/etc/os-release"], ++dnl [system_type=$( cat /etc/os-release | awk -F'=' '/^ID=/' | cut -d "=" -f 2 ) ++dnl AS_IF([test "x$system_type" = "xubuntu" -o "x$system_type" = "xdebian"], ++dnl [systemd_dir="/lib/systemd/system"]) ++dnl ] ++dnl ) ++dnl _unitdir=$systemd_dir ++ _save_prefix=$prefix; _save_exec_prefix=$exec_prefix ++ test "x$prefix" = xNONE && 
prefix=$ac_default_prefix ++ test "x$exec_prefix" = xNONE && exec_prefix=$prefix ++ eval "_unitdir=$libdir/systemd/system" ++ prefix=$_save_prefix; exec_prefix=$_save_exec_prefix + AC_MSG_RESULT([$_unitdir]) + AC_SUBST([_unitdir]) + ]) +--- a/src/cmds/scripts/Makefile.am ++++ b/src/cmds/scripts/Makefile.am +@@ -67,7 +67,7 @@ dist_pythonlib_PYTHON = \ + pbs_bootcheck.py \ + pbs_topologyinfo.py + +-sysprofiledir = /etc/profile.d ++sysprofiledir = $(sysconfdir) + + dist_sysprofile_DATA = \ + pbs.csh \ +--- a/test/fw/Makefile.am ++++ b/test/fw/Makefile.am +@@ -59,7 +59,7 @@ ptlpkg_pylib_pluginsdir = $(ptlpkg_pylib_utilsdir)/plugins + + dist_ptlpkg_pylib_plugins_PYTHON = $(wildcard $(srcdir)/ptl/utils/plugins/*.py) + +-sysprofiledir = /etc/profile.d ++sysprofiledir = $(sysconfdir) + + dist_sysprofile_DATA = \ + ptl.csh \ diff --git a/var/spack/repos/builtin/packages/openpbs/no_crypt.patch b/var/spack/repos/builtin/packages/openpbs/no_crypt.patch new file mode 100644 index 00000000000..b91683478d2 --- /dev/null +++ b/var/spack/repos/builtin/packages/openpbs/no_crypt.patch @@ -0,0 +1,21 @@ +--- a/configure.ac ++++ b/configure.ac +@@ -83,10 +83,6 @@ AC_CHECK_LIB([c], [ruserok], + [], + AC_CHECK_LIB(socket, ruserok) + ) +-AC_CHECK_LIB([c], [crypt], +- [], +- AC_CHECK_LIB(crypt, crypt) +-) + AC_CHECK_LIB([c], [posix_openpt], + AC_DEFINE([HAVE_POSIX_OPENPT], [], [Defined whe posix_openpt is available]) + ) +@@ -125,7 +121,6 @@ AC_CHECK_HEADERS([ \ + arpa/inet.h \ + asm/types.h \ + assert.h \ +- crypt.h \ + ctype.h \ + dirent.h \ + dlfcn.h \ diff --git a/var/spack/repos/builtin/packages/openpbs/package.py b/var/spack/repos/builtin/packages/openpbs/package.py new file mode 100644 index 00000000000..695e5ad595b --- /dev/null +++ b/var/spack/repos/builtin/packages/openpbs/package.py @@ -0,0 +1,94 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import llnl.util.tty as tty +from spack import * + + +class Openpbs(AutotoolsPackage): + """PBS Professional software optimizes job scheduling and workload + management in high-performance computing (HPC) environments - clusters, + clouds, and supercomputers - improving system efficiency and people's + productivity.""" + + # TODO: update the description and the homepage url once the renaming is + # finished: http://community.pbspro.org/t/openpbs-and-version-20-0/2075 + + homepage = "https://www.pbspro.org" + url = "https://github.com/openpbs/openpbs/archive/v19.1.3.tar.gz" + + version('19.1.3', sha256='6e9d2614f839ff3d91d0ace3de04752b7c075da60c72fe6def76437aa05c9857') + + depends_on('autoconf', type='build') + depends_on('automake', type='build') + depends_on('libtool', type='build') + depends_on('m4', type='build') + depends_on('flex', type='build') + depends_on('bison', type='build') + depends_on('perl', type='build') + + depends_on('ssmtp', type=('build', 'run')) + depends_on('xauth', type=('build', 'run')) + + depends_on('python@2.6:2.7', type=('build', 'link', 'run')) + + depends_on('libx11') + depends_on('libice') + depends_on('libsm') + depends_on('openssl') + depends_on('postgresql') + depends_on('expat') + depends_on('libedit') + depends_on('ncurses') + depends_on('hwloc@:1') + depends_on('libical') + depends_on('swig') + depends_on('tcl') + depends_on('tk') + depends_on('zlib') + + # The configure script cannot properly handle dependencies in non-system + # directories. + patch('with_lib.patch') + + # The package does not really depend on libcrypt but links to it. We + # eliminate this redundant dependency to avoid linking to a system library. + patch('no_crypt.patch') + + # Fix installation directories. + patch('install.patch') + + # Link to the dynamic library of Python instead of the static one. 
+ patch('python.patch') + + def autoreconf(self, spec, prefix): + Executable('./autogen.sh')() + + def configure_args(self): + return [ + '--x-includes=%s' % self.spec['libx11'].prefix.include, + '--x-libraries=%s' % self.spec['libx11'].prefix.lib, + '--with-pbs-server-home=%s' % self.spec.prefix.var.spool.pbs, + '--with-database-dir=%s' % self.spec['postgresql'].prefix, + '--with-pbs-conf-file=%s' % self.spec.prefix.etc.join('pbs.conf'), + '--with-expat=%s' % self.spec['expat'].prefix, + '--with-editline=%s' % self.spec['libedit'].prefix, + '--with-hwloc=%s' % self.spec['hwloc'].prefix, + '--with-libical=%s' % self.spec['libical'].prefix, + '--with-sendmail=%s' % self.spec['ssmtp'].prefix.sbin.sendmail, + '--with-swig=%s' % self.spec['swig'].prefix, + '--with-tcl=%s' % self.spec['tcl'].prefix, + # The argument --with-tk is introduced with with_lib.patch + '--with-tk=%s' % self.spec['tk'].prefix, + '--with-xauth=xauth', + '--with-libz=%s' % self.spec['zlib'].prefix] + + @run_after('install') + def post_install(self): + # Calling the postinstall script requires root privileges + # Executable(self.prefix.libexec.pbs_postinstall)() + tty.warn(self.spec.format( + 'To finalize the installation of {name}, you need to run ' + '"{prefix}/libexec/pbs_postinstall" with root privileges')) diff --git a/var/spack/repos/builtin/packages/openpbs/python.patch b/var/spack/repos/builtin/packages/openpbs/python.patch new file mode 100644 index 00000000000..aacf73140eb --- /dev/null +++ b/var/spack/repos/builtin/packages/openpbs/python.patch @@ -0,0 +1,11 @@ +--- a/buildutils/python-autoconf.py ++++ b/buildutils/python-autoconf.py +@@ -70,7 +70,7 @@ if py_stdlibdir: + else: + py_stdlibdir_real = "%s/lib" % (sysconfig.PREFIX,) + +-py_lib_configdir = get_py_config_var('LIBPL') ++py_lib_configdir = '' # get_py_config_var('LIBPL') + if py_lib_configdir: + py_lib_configdir=py_lib_configdir.replace(py_stdlibdir,py_stdlibdir_real) + diff --git 
a/var/spack/repos/builtin/packages/openpbs/with_lib.patch b/var/spack/repos/builtin/packages/openpbs/with_lib.patch new file mode 100644 index 00000000000..6a607a1611e --- /dev/null +++ b/var/spack/repos/builtin/packages/openpbs/with_lib.patch @@ -0,0 +1,231 @@ +--- a/m4/with_database_dir.m4 ++++ b/m4/with_database_dir.m4 +@@ -56,20 +56,11 @@ AC_DEFUN([PBS_AC_WITH_DATABASE_DIR], + AS_IF([test -r "$database_dir/include/postgresql/libpq-fe.h"], + [database_inc="-I$database_dir/include/postgresql"], + AC_MSG_ERROR([Database headers not found.])))) +- AS_IF([test "$database_dir" = "/usr"], +- # Using system installed PostgreSQL +- AS_IF([test -r "/usr/lib64/libpq.so" -o -r "/usr/lib/libpq.so" -o -r "/usr/lib/x86_64-linux-gnu/libpq.so"], +- [database_lib="-lpq"], +- AC_MSG_ERROR([PBS database shared object library not found.])), +- # Using developer installed PostgreSQL +- AS_IF([test -r "$database_dir/lib64/libpq.a"], +- [database_lib="$database_dir/lib64/libpq.a"], +- AS_IF([test -r "$database_dir/lib/libpq.a"], +- [database_lib="$database_dir/lib/libpq.a"], +- AC_MSG_ERROR([PBS database library not found.]) +- ) +- ) +- ) ++ AS_IF([test -r "$database_dir/lib64/libpq.so"], ++ [database_lib="-L$database_dir/lib64 -lpq"], ++ [test -r "$database_dir/lib/libpq.so"], ++ [database_lib="-L$database_dir/lib -lpq"], ++ AC_MSG_ERROR([PBS database shared object library not found.])) + AC_MSG_RESULT([$database_dir]) + AC_SUBST([database_dir]) + AC_SUBST([database_inc]) +--- a/m4/with_editline.m4 ++++ b/m4/with_editline.m4 +@@ -52,31 +52,12 @@ AC_DEFUN([PBS_AC_WITH_EDITLINE], + AS_IF([test "$editline_dir" != "/usr"], + [editline_inc="-I$editline_dir/include"]), + AC_MSG_ERROR([editline headers not found.])) +- AS_IF([test "$editline_dir" = "/usr"], +- # Using system installed editline +- AS_IF([test -r /usr/lib64/libedit.so], +- [editline_lib="-ledit"], +- AS_IF([test -r /usr/lib/libedit.so], +- [editline_lib="-ledit"], +- AS_IF([test -r /usr/lib/x86_64-linux-gnu/libedit.so], +- 
[editline_lib="-ledit"], +- AC_MSG_ERROR([editline shared object library not found.])))), +- # Using developer installed editline +- AS_IF([test -r "${editline_dir}/lib64/libedit.a"], +- [editline_lib="${editline_dir}/lib64/libedit.a"], +- AS_IF([test -r "${editline_dir}/lib/libedit.a"], +- [editline_lib="${editline_dir}/lib/libedit.a"], +- AC_MSG_ERROR([editline library not found.]) +- ) +- ) +- ) ++ AS_IF([test -r "$editline_dir/lib64/libedit.so"], ++ [editline_lib="-L$editline_dir/lib64 -ledit"], ++ [test -r "$editline_dir/lib/libedit.so"], ++ [editline_lib="-L$editline_dir/lib -ledit"], ++ AC_MSG_ERROR([editline shared object library not found.])) + AC_MSG_RESULT([$editline_dir]) +- AC_CHECK_LIB([ncurses], [tgetent], +- [curses_lib="-lncurses"], +- AC_CHECK_LIB([curses], [tgetent], +- [curses_lib="-lcurses"], +- AC_MSG_ERROR([curses library not found.]))) +- [editline_lib="$editline_lib $curses_lib"] + AC_SUBST(editline_inc) + AC_SUBST(editline_lib) + AC_DEFINE([QMGR_HAVE_HIST], [], [Defined when editline is available]) +--- a/m4/with_expat.m4 ++++ b/m4/with_expat.m4 +@@ -52,20 +52,11 @@ AC_DEFUN([PBS_AC_WITH_EXPAT], + AS_IF([test "$expat_dir" != "/usr"], + [expat_inc="-I$expat_dir/include"]), + AC_MSG_ERROR([expat headers not found.])) +- AS_IF([test "$expat_dir" = "/usr"], +- # Using system installed expat +- AS_IF([test -r "/usr/lib64/libexpat.so" -o -r "/usr/lib/libexpat.so" -o -r "/usr/lib/x86_64-linux-gnu/libexpat.so"], +- [expat_lib="-lexpat"], +- AC_MSG_ERROR([expat shared object library not found.])), +- # Using developer installed expat +- AS_IF([test -r "${expat_dir}/lib64/libexpat.a"], +- [expat_lib="${expat_dir}/lib64/libexpat.a"], +- AS_IF([test -r "${expat_dir}/lib/libexpat.a"], +- [expat_lib="${expat_dir}/lib/libexpat.a"], +- AC_MSG_ERROR([expat library not found.]) +- ) +- ) +- ) ++ AS_IF([test -r "$expat_dir/lib64/libexpat.so"], ++ [expat_lib="-L$expat_dir/lib64 -lexpat"], ++ [test -r "$expat_dir/lib/libexpat.so"], ++ 
[expat_lib="-L$expat_dir/lib -lexpat"], ++ AC_MSG_ERROR([expat shared object library not found.])) + AC_MSG_RESULT([$expat_dir]) + AC_SUBST(expat_inc) + AC_SUBST(expat_lib) +--- a/m4/with_hwloc.m4 ++++ b/m4/with_hwloc.m4 +@@ -56,28 +56,12 @@ AC_DEFUN([PBS_AC_WITH_HWLOC], + [hwloc_inc="-I$hwloc_dir/include"]), + AC_MSG_ERROR([hwloc headers not found.]) + ) +- AS_IF([test "$hwloc_dir" = "/usr"], +- # Using system installed hwloc +- AS_IF([test -r "/usr/lib64/libhwloc.so" -o -r "/usr/lib/libhwloc.so" -o -r "/usr/lib/x86_64-linux-gnu/libhwloc.so"], +- [hwloc_lib="-lhwloc"], +- AC_MSG_ERROR([hwloc shared object library not found.]) +- ), +- # Using developer installed hwloc +- AS_IF([test -r "${hwloc_dir}/lib64/libhwloc_embedded.a"], +- [hwloc_lib="${hwloc_dir}/lib64/libhwloc_embedded.a"], +- AS_IF([test -r "${hwloc_dir}/lib/libhwloc_embedded.a"], +- [hwloc_lib="${hwloc_dir}/lib/libhwloc_embedded.a"], +- AC_MSG_ERROR([hwloc library not found.]) +- ) +- ) +- ) ++ AS_IF([test -r "$hwloc_dir/lib64/libhwloc.so"], ++ [hwloc_lib="-L$hwloc_dir/lib64 -lhwloc"], ++ [test -r "$hwloc_dir/lib/libhwloc.so"], ++ [hwloc_lib="-L$hwloc_dir/lib -lhwloc"], ++ AC_MSG_ERROR([hwloc shared object library not found.])) + AC_MSG_RESULT([$hwloc_dir]) +- AS_CASE([x$target_os], +- [xlinux*], +- AC_CHECK_LIB([numa], [mbind], [hwloc_lib="$hwloc_lib -lnuma"]) +- AC_CHECK_LIB([udev], [udev_new], [hwloc_lib="$hwloc_lib -ludev"]) +- AC_CHECK_LIB([pciaccess], [pci_system_init], [hwloc_lib="$hwloc_lib -lpciaccess"]) +- ) + AC_SUBST(hwloc_flags) + AC_SUBST(hwloc_inc) + AC_SUBST(hwloc_lib) +--- a/m4/with_libical.m4 ++++ b/m4/with_libical.m4 +@@ -59,23 +59,13 @@ AC_DEFUN([PBS_AC_WITH_LIBICAL], + AS_IF([test $libical_version -gt 1], + AC_DEFINE([LIBICAL_API2], [], [Defined when libical version >= 2]) + ) +- AS_IF([test "$libical_dir" = "/usr"], +- dnl Using system installed libical +- libical_inc="" +- AS_IF([test -r "/usr/lib64/libical.so" -o -r "/usr/lib/libical.so" -o -r 
"/usr/lib/x86_64-linux-gnu/libical.so"], +- [libical_lib="-lical"], +- AC_MSG_ERROR([libical shared object library not found.]) +- ), +- dnl Using developer installed libical +- libical_inc="-I$libical_include" +- AS_IF([test -r "${libical_dir}/lib64/libical.a"], +- [libical_lib="${libical_dir}/lib64/libical.a"], +- AS_IF([test -r "${libical_dir}/lib/libical.a"], +- [libical_lib="${libical_dir}/lib/libical.a"], +- AC_MSG_ERROR([ical library not found.]) +- ) +- ) +- ) ++ AS_IF([test "$libical_dir" != "/usr"], ++ [libical_inc="-I$libical_include"]) ++ AS_IF([test -r "$libical_dir/lib64/libical.so"], ++ [libical_lib="-L$libical_dir/lib64 -lical"], ++ [test -r "$libical_dir/lib/libical.so"], ++ [libical_lib="-L$libical_dir/lib -lical"], ++ AC_MSG_ERROR([libical shared object library not found.])) + AC_MSG_RESULT([$libical_dir]) + AC_SUBST(libical_inc) + AC_SUBST(libical_lib) +--- a/m4/with_tcl.m4 ++++ b/m4/with_tcl.m4 +@@ -52,9 +52,7 @@ AC_DEFUN([PBS_AC_WITH_TCL], + [. "$tcl_dir/lib64/tclConfig.sh"], + AS_IF([test -r "$tcl_dir/lib/tclConfig.sh"], + [. "$tcl_dir/lib/tclConfig.sh"], +- AS_IF([test -r "$tcl_dir/lib/x86_64-linux-gnu/tclConfig.sh"], +- [. "$tcl_dir/lib/x86_64-linux-gnu/tclConfig.sh"], +- AC_MSG_ERROR([tclConfig.sh not found])))) ++ AC_MSG_ERROR([tclConfig.sh not found]))) + AC_MSG_RESULT([$tcl_dir]) + AC_MSG_CHECKING([for Tcl version]) + AS_IF([test "x$TCL_VERSION" = "x"], +@@ -62,32 +60,34 @@ AC_DEFUN([PBS_AC_WITH_TCL], + AC_MSG_RESULT([$TCL_VERSION]) + [tcl_version="$TCL_VERSION"] + AC_SUBST(tcl_version) ++ ++ AC_ARG_WITH([tk], ++ AS_HELP_STRING([--with-tk=DIR], ++ [Specify the directory where Tk is installed.] ++ ) ++ ) ++ AS_IF([test "x$with_tk" != "x"], ++ tk_dir=["$with_tk"], ++ tk_dir=["/usr"] ++ ) + AC_MSG_CHECKING([for Tk]) +- AS_IF([test -r "$tcl_dir/lib64/tkConfig.sh"], +- [. "$tcl_dir/lib64/tkConfig.sh"], +- AS_IF([test -r "$tcl_dir/lib/tkConfig.sh"], +- [. 
"$tcl_dir/lib/tkConfig.sh"], +- AS_IF([test -r "$tcl_dir/lib/x86_64-linux-gnu/tkConfig.sh"], +- [. "$tcl_dir/lib/x86_64-linux-gnu/tkConfig.sh"], +- AC_MSG_ERROR([tkConfig.sh not found])))) +- AC_MSG_RESULT([$tcl_dir]) ++ AS_IF([test -r "$tk_dir/lib64/tkConfig.sh"], ++ [. "$tk_dir/lib64/tkConfig.sh"], ++ AS_IF([test -r "$tk_dir/lib/tkConfig.sh"], ++ [. "$tk_dir/lib/tkConfig.sh"], ++ AC_MSG_ERROR([tkConfig.sh not found]))) ++ AC_MSG_RESULT([$tk_dir]) + AC_MSG_CHECKING([for Tk version]) + AS_IF([test "x$TK_VERSION" = "x"], + AC_MSG_ERROR([Could not determine Tk version])) + AC_MSG_RESULT([$TK_VERSION]) + [tk_version="$TK_VERSION"] + AC_SUBST(tk_version) +- AS_IF([test x$TCL_INCLUDE_SPEC = x], +- # Using developer installed tcl +- [tcl_inc="-I$tcl_dir/include"] +- [tcl_lib="$tcl_dir/lib/libtcl$TCL_VERSION.a $TCL_LIBS"] +- [tk_inc="-I$tcl_dir/include"] +- [tk_lib="$tcl_dir/lib/libtcl$TCL_VERSION.a $tcl_dir/lib/libtk$TK_VERSION.a $TK_LIBS"], +- # Using system installed tcl +- [tcl_inc="$TCL_INCLUDE_SPEC"] +- [tcl_lib="$TCL_LIB_SPEC $TCL_LIBS"] +- [tk_inc="$TK_INCLUDE_SPEC"] +- [tk_lib=`echo "$TCL_LIB_SPEC $TK_LIB_SPEC $TK_LIBS" | ${SED} -e 's/-lXss //'`]) ++ ++ tcl_inc="$TCL_INCLUDE_SPEC" ++ tcl_lib="$TCL_LIB_SPEC $TCL_LIBS" ++ tk_inc="$TK_INCLUDE_SPEC" ++ tk_lib=`echo "$TCL_LIB_SPEC $TK_LIB_SPEC $TK_LIBS" | ${SED} -e 's/-lXss //'` + AC_SUBST(tcl_inc) + AC_SUBST(tcl_lib) + AC_SUBST(tk_inc) diff --git a/var/spack/repos/builtin/packages/openscenegraph/glibc-jasper.patch b/var/spack/repos/builtin/packages/openscenegraph/glibc-jasper.patch new file mode 100644 index 00000000000..d826b8885ad --- /dev/null +++ b/var/spack/repos/builtin/packages/openscenegraph/glibc-jasper.patch @@ -0,0 +1,15 @@ +diff --git a/src/osgPlugins/jp2/ReaderWriterJP2.cpp b/src/osgPlugins/jp2/ReaderWriterJP2.cpp +index 7b3c6cc..d949c2c 100644 +--- a/src/osgPlugins/jp2/ReaderWriterJP2.cpp ++++ b/src/osgPlugins/jp2/ReaderWriterJP2.cpp +@@ -15,6 +15,10 @@ + #include + #include + ++#ifndef SIZE_MAX 
++#define SIZE_MAX ((size_t)(-1)) ++#endif ++ + extern "C" + { + #include diff --git a/var/spack/repos/builtin/packages/openscenegraph/package.py b/var/spack/repos/builtin/packages/openscenegraph/package.py index c0cb024c144..2a6da5dbff2 100644 --- a/var/spack/repos/builtin/packages/openscenegraph/package.py +++ b/var/spack/repos/builtin/packages/openscenegraph/package.py @@ -3,7 +3,6 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - from spack import * @@ -12,34 +11,64 @@ class Openscenegraph(CMakePackage): that's used in a variety of visual simulation applications.""" homepage = "http://www.openscenegraph.org" + git = "https://github.com/openscenegraph/OpenSceneGraph.git" url = "https://github.com/openscenegraph/OpenSceneGraph/archive/OpenSceneGraph-3.6.4.tar.gz" + version('3.6.5', sha256='aea196550f02974d6d09291c5d83b51ca6a03b3767e234a8c0e21322927d1e12') version('3.6.4', sha256='81394d1b484c631028b85d21c5535280c21bbd911cb058e8746c87e93e7b9d33') + version('3.4.1', sha256='930eb46f05781a76883ec16c5f49cfb29a059421db131005d75bec4d78401fd5') + version('3.4.0', sha256='0d5efe12b923130d14a6fce5866675d7625fcfb1c004c9f9b10034b9feb61ac2') version('3.2.3', sha256='a1ecc6524197024834e1277916922b32f30246cb583e27ed19bf3bf889534362') version('3.1.5', sha256='dddecf2b33302076712100af59b880e7647bc595a9a7cc99186e98d6e0eaeb5c') variant('shared', default=True, description='Builds a shared version of the library') + variant('ffmpeg', default=False, description='Builds ffmpeg plugin for audio encoding/decoding') depends_on('cmake@2.8.7:', type='build') - depends_on('qt@4:') + depends_on('qt+opengl') + depends_on('qt@4:', when='@3.2:') + depends_on('qt@:4', when='@:3.1') + depends_on('libxinerama') + depends_on('libxrandr') + depends_on('libpng') + depends_on('jasper') + depends_on('libtiff') + depends_on('glib') depends_on('zlib') + depends_on('ffmpeg+avresample', when='+ffmpeg') + # https://github.com/openscenegraph/OpenSceneGraph/issues/167 + depends_on('ffmpeg@:2', 
when='@:3.4.0+ffmpeg') + + patch('glibc-jasper.patch', when='@3.4%gcc') + def cmake_args(self): spec = self.spec shared_status = 'ON' if '+shared' in spec else 'OFF' + opengl_profile = 'GL{0}'.format(spec['gl'].version.up_to(1)) args = [ + # Variant Options # '-DDYNAMIC_OPENSCENEGRAPH={0}'.format(shared_status), '-DDYNAMIC_OPENTHREADS={0}'.format(shared_status), - '-DZLIB_INCLUDE_DIR={0}'.format(spec['zlib'].prefix.include), - '-DZLIB_LIBRARY={0}/libz.{1}'.format(spec['zlib'].prefix.lib, - dso_suffix), + '-DOPENGL_PROFILE={0}'.format(opengl_profile), + + # General Options # '-DBUILD_OSG_APPLICATIONS=OFF', '-DOSG_NOTIFY_DISABLED=ON', '-DLIB_POSTFIX=', + '-DCMAKE_RELWITHDEBINFO_POSTFIX=', + '-DCMAKE_MINSIZEREL_POSTFIX=' ] + if spec.satisfies('~ffmpeg'): + for ffmpeg_lib in ['libavcodec', 'libavformat', 'libavutil']: + args.extend([ + '-DFFMPEG_{0}_INCLUDE_DIRS='.format(ffmpeg_lib.upper()), + '-DFFMPEG_{0}_LIBRARIES='.format(ffmpeg_lib.upper()), + ]) + # NOTE: This is necessary in order to allow OpenSceneGraph to compile # despite containing a number of implicit bool to int conversions. 
if spec.satisfies('%gcc'): diff --git a/var/spack/repos/builtin/packages/openssh/package.py b/var/spack/repos/builtin/packages/openssh/package.py index 71f15f731e2..37f12663b01 100644 --- a/var/spack/repos/builtin/packages/openssh/package.py +++ b/var/spack/repos/builtin/packages/openssh/package.py @@ -18,6 +18,8 @@ class Openssh(AutotoolsPackage): homepage = "https://www.openssh.com/" url = "https://mirrors.sonic.net/pub/OpenBSD/OpenSSH/portable/openssh-7.6p1.tar.gz" + version('8.3p1', sha256='f2befbe0472fe7eb75d23340eb17531cb6b3aac24075e2066b41f814e12387b2') + version('8.1p1', sha256='02f5dbef3835d0753556f973cd57b4c19b6b1f6cd24c03445e23ac77ca1b93ff') version('7.9p1', sha256='6b4b3ba2253d84ed3771c8050728d597c91cfce898713beb7b64a305b6f11aad') version('7.6p1', sha256='a323caeeddfe145baaa0db16e98d784b1fbc7dd436a6bf1f479dfd5cd1d21723') version('7.5p1', sha256='9846e3c5fab9f0547400b4d2c017992f914222b3fd1f8eee6c7dc6bc5e59f9f0') diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py index fe650f71337..ae47568ce82 100644 --- a/var/spack/repos/builtin/packages/openssl/package.py +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -25,6 +25,7 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package # The latest stable version is the 1.1.1 series. This is also our Long Term # Support (LTS) version, supported until 11th September 2023. 
+ version('1.1.1g', sha256='ddb04774f1e32f0c49751e21b67216ac87852ceb056b75209af2443400636d46') version('1.1.1f', sha256='186c6bfe6ecfba7a5b48c47f8a1673d0f3b0e5ba2e25602dd23b629975da3f35') version('1.1.1e', sha256='694f61ac11cb51c9bf73f54e771ff6022b0327a43bbdfa1b2f19de1662a6dcbe') version('1.1.1d', sha256='1e3a91bc1f9dfce01af26026f856e064eab4c8ee0a8f457b5ae30b40b8b711f2') @@ -33,8 +34,7 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package version('1.1.1a', sha256='fc20130f8b7cbd2fb918b2f14e2f429e109c31ddd0fb38fc5d71d9ffed3f9f41') version('1.1.1', sha256='2836875a0f89c03d0fdf483941512613a50cfb421d6fd94b9f41d7279d586a3d') - # The 1.1.0 series is currently only receiving security fixes and will go - # out of support on 11th September 2019. + # The 1.1.0 series is out of support and should not be used. version('1.1.0l', sha256='74a2f756c64fd7386a29184dc0344f4831192d61dc2481a93a4c5dd727f41148') version('1.1.0k', sha256='efa4965f4f773574d6cbda1cf874dbbe455ab1c0d4f906115f867d30444470b1') version('1.1.0j', sha256='31bec6c203ce1a8e93d5994f4ed304c63ccf07676118b6634edded12ad1b3246') @@ -44,9 +44,7 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package version('1.1.0d', sha256='7d5ebb9e89756545c156ff9c13cf2aa6214193b010a468a3bc789c3c28fe60df') version('1.1.0c', sha256='fc436441a2e05752d31b4e46115eb89709a28aef96d4fe786abe92409b2fd6f5') - # Our previous LTS version (1.0.2 series) will continue to be supported - # until 31st December 2019 (security fixes only during the last year of - # support). + # The 1.0.2 series is out of support and should not be used. 
version('1.0.2u', sha256='ecd0c6ffb493dd06707d38b14bb4d8c2288bb7033735606569d8f90f89669d16') version('1.0.2t', sha256='14cb464efe7ac6b54799b34456bd69558a749a4931ecfd9cf9f71d7881cac7bc') version('1.0.2s', sha256='cabd5c9492825ce5bd23f3c3aeed6a97f8142f606d893df216411f07d1abab96') @@ -64,7 +62,7 @@ class Openssl(Package): # Uses Fake Autotools, should subclass Package version('1.0.2e', sha256='e23ccafdb75cfcde782da0151731aa2185195ac745eea3846133f2e05c0e0bff') version('1.0.2d', sha256='671c36487785628a703374c652ad2cebea45fa920ae5681515df25d9f2c9a8c8') - # The 1.0.1 version is now out of support and should not be used. + # The 1.0.1 version is out of support and should not be used. version('1.0.1u', sha256='4312b4ca1215b6f2c97007503d80db80d5157f76f8f7d3febbe6b4c56ff26739') version('1.0.1t', sha256='4a6ee491a2fdb22e519c76fdc2a628bb3cec12762cd456861d207996c8a07088') version('1.0.1r', sha256='784bd8d355ed01ce98b812f873f8b2313da61df7c7b5677fcf2e57b0863a3346') @@ -156,3 +154,6 @@ def link_system_certs(self): if os.path.isdir(sys_certs) and not os.path.islink(pkg_certs): os.rmdir(pkg_certs) os.symlink(sys_certs, pkg_certs) + + def setup_build_environment(self, env): + env.set('PERL', self.spec['perl'].prefix.bin.perl) diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py index 52874160fc4..7d7ef787cec 100644 --- a/var/spack/repos/builtin/packages/pango/package.py +++ b/var/spack/repos/builtin/packages/pango/package.py @@ -23,7 +23,7 @@ class Pango(AutotoolsPackage): variant('X', default=False, description="Enable an X toolkit") - depends_on("pkgconfig", type="build") + depends_on("pkgconfig@0.9.0:", type="build") depends_on("harfbuzz") depends_on("cairo+ft+fc") depends_on("cairo~X", when='~X') @@ -31,6 +31,14 @@ class Pango(AutotoolsPackage): depends_on("libxft", when='+X') depends_on("glib") depends_on('gobject-introspection') + depends_on('fontconfig') + depends_on('freetype@2:') + depends_on('libffi') + + 
depends_on('harfbuzz@1.2.3:', when='@1.41.0') + depends_on('libxft@2.0.0:', when='@1.41.0 +X') + depends_on('glib@2.33.12:', when='@1.41.0') + depends_on('fontconfig@2.11.91:', when='@1.41.0') def url_for_version(self, version): url = "http://ftp.gnome.org/pub/GNOME/sources/pango/{0}/pango-{1}.tar.xz" diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py index 35dd4f9ba2c..76a67f8e775 100644 --- a/var/spack/repos/builtin/packages/papi/package.py +++ b/var/spack/repos/builtin/packages/papi/package.py @@ -2,6 +2,7 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) + import glob import os import sys @@ -38,10 +39,20 @@ class Papi(AutotoolsPackage): variant('lmsensors', default=False, description='Enable lm_sensors support') variant('sde', default=False, description='Enable software defined events') + variant('shared', default=True, description='Build shared libraries') + # PAPI requires building static libraries, so there is no "static" variant + variant('static_tools', default=False, description='Statically link the PAPI tools') + # The PAPI configure option "--with-shlib-tools" is deprecated + # and therefore not implemented here + depends_on('lm-sensors', when='+lmsensors') conflicts('%gcc@8:', when='@5.3.0', msg='Requires GCC version less than 8.0') + # This is the only way to match exactly version 6.0.0 without also + # including version 6.0.0.1 due to spack version matching logic + conflicts('@5.9.99999:6.0.0.a', when='+static_tools', msg='Static tools cannot build on version 6.0.0') + # Does not build with newer versions of gcc, see # https://bitbucket.org/icl/papi/issues/46/cannot-compile-on-arch-linux patch('https://bitbucket.org/icl/papi/commits/53de184a162b8a7edff48fed01a15980664e15b1/raw', sha256='64c57b3ad4026255238cc495df6abfacc41de391a0af497c27d0ac819444a1f8', when='@5.4.0:5.6.99%gcc@8:') @@ -55,15 +66,24 @@ def 
setup_build_environment(self, env): setup_run_environment = setup_build_environment def configure_args(self): + spec = self.spec # PAPI uses MPI if MPI is present; since we don't require # an MPI package, we ensure that all attempts to use MPI # fail, so that PAPI does not get confused options = ['MPICC=:'] - # Build a list of activated variants (optional PAPI components) - variants = filter(lambda x: self.spec.variants[x].value is True, - self.spec.variants) - if variants: - options.append('--with-components={0}'.format(' '.join(variants))) + # Build a list of PAPI components + components = filter( + lambda x: spec.variants[x].value, + ['example', 'infiniband', 'powercap', 'rapl', 'lmsensors', 'sde']) + if components: + options.append('--with-components=' + ' '.join(components)) + + build_shared = 'yes' if '+shared' in spec else 'no' + options.append('--with-shared-lib=' + build_shared) + + if '+static_tools' in spec: + options.append('--with-static-tools') + return options @run_before('configure') diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py index dc2f1b4ac37..e34b766ec81 100644 --- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py +++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py @@ -50,6 +50,8 @@ def url_for_version(self, version): depends_on('automake', when='@master', type='build') depends_on('libtool', when='@master', type='build') + depends_on('perl', type='build') + conflicts('+shared', when='@:1.9%nag+fortran') conflicts('+shared', when='@:1.8') @@ -95,13 +97,27 @@ def configure_args(self): args += self.enable_or_disable('cxx') args += self.enable_or_disable('fortran') + flags = { + 'CFLAGS': [], + 'CXXFLAGS': [], + 'FFLAGS': [], + 'FCFLAGS': [], + } + if '+pic' in self.spec: - args.extend([ - 'CFLAGS=' + self.compiler.cc_pic_flag, - 'CXXFLAGS=' + self.compiler.cxx_pic_flag, - 'FFLAGS=' + self.compiler.f77_pic_flag, - 'FCFLAGS=' + 
self.compiler.fc_pic_flag, - ]) + flags['CFLAGS'].append(self.compiler.cc_pic_flag) + flags['CXXFLAGS'].append(self.compiler.cxx_pic_flag) + flags['FFLAGS'].append(self.compiler.f77_pic_flag) + flags['FCFLAGS'].append(self.compiler.fc_pic_flag) + + # https://github.com/Parallel-NetCDF/PnetCDF/issues/61 + if self.spec.satisfies('%gcc@10:'): + flags['FFLAGS'].append('-fallow-argument-mismatch') + flags['FCFLAGS'].append('-fallow-argument-mismatch') + + for key, value in sorted(flags.items()): + if value: + args.append('{0}={1}'.format(key, ' '.join(value))) if self.version >= Version('1.8'): args.append('--enable-relax-coord-bound') diff --git a/var/spack/repos/builtin/packages/paraver/package.py b/var/spack/repos/builtin/packages/paraver/package.py index fe9e31b2f20..fe305fc07ef 100644 --- a/var/spack/repos/builtin/packages/paraver/package.py +++ b/var/spack/repos/builtin/packages/paraver/package.py @@ -51,6 +51,6 @@ def install(self, spec, prefix): "--with-paraver=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization", - "--with-wxdir=%s" % spec['wx'].prefix.bin) + "--with-wxdir=%s" % spec['wxwidgets'].prefix.bin) make() make("install") diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index 48dff877e0a..bb8b140b748 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -47,11 +47,16 @@ class Paraview(CMakePackage, CudaPackage): variant('hdf5', default=False, description="Use external HDF5") variant('shared', default=True, description='Builds a shared version of the library') + variant('kits', default=True, + description='Use module kits') conflicts('+python', when='+python3') conflicts('+python', when='@5.6:') conflicts('+python3', when='@:5.5') conflicts('+shared', when='+cuda') + # Legacy rendering dropped in 5.5 + # See commit: 
https://gitlab.kitware.com/paraview/paraview/-/commit/798d328c + conflicts('~opengl2', when='@5.5:') # Workaround for # adding the following to your packages.yaml @@ -271,6 +276,17 @@ def nvariant_bool(feature): '-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON', ]) + if '+kits' in spec: + if spec.satisfies('@5.0:5.6'): + cmake_args.append( + '-DVTK_ENABLE_KITS:BOOL=ON') + elif spec.satisfies('@5.7'): + cmake_args.append( + '-DPARAVIEW_ENABLE_KITS:BOOL=ON') + else: + cmake_args.append( + '-DPARAVIEW_BUILD_WITH_KITS:BOOL=ON') + # Hide git from Paraview so it will not use `git describe` # to find its own version number if spec.satisfies('@5.4.0:5.4.1'): diff --git a/var/spack/repos/builtin/packages/pathfinder/package.py b/var/spack/repos/builtin/packages/pathfinder/package.py index 3208448d813..8ce7ecca1e5 100644 --- a/var/spack/repos/builtin/packages/pathfinder/package.py +++ b/var/spack/repos/builtin/packages/pathfinder/package.py @@ -11,7 +11,7 @@ class Pathfinder(MakefilePackage): """Proxy Application. 
Signature search.""" homepage = "https://mantevo.org/packages/" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/PathFinder_1.0.0.tgz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/PathFinder/PathFinder_1.0.0.tgz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py index 85ba9c52b89..a4d17941480 100644 --- a/var/spack/repos/builtin/packages/pcre/package.py +++ b/var/spack/repos/builtin/packages/pcre/package.py @@ -14,6 +14,7 @@ class Pcre(AutotoolsPackage): homepage = "http://www.pcre.org" url = "https://ftp.pcre.org/pub/pcre/pcre-8.42.tar.bz2" + version('8.44', sha256='19108658b23b3ec5058edc9f66ac545ea19f9537234be1ec62b714c84399366d') version('8.43', sha256='91e762520003013834ac1adb4a938d53b22a216341c061b0cf05603b290faf6b') version('8.42', sha256='2cd04b7c887808be030254e8d77de11d3fe9d4505c39d4b15d2664ffe8bf9301') version('8.41', sha256='e62c7eac5ae7c0e7286db61ff82912e1c0b7a0c13706616e94a7dd729321b530') diff --git a/var/spack/repos/builtin/packages/pcre2/package.py b/var/spack/repos/builtin/packages/pcre2/package.py index eaa5928ae27..2b19f9cb7b9 100644 --- a/var/spack/repos/builtin/packages/pcre2/package.py +++ b/var/spack/repos/builtin/packages/pcre2/package.py @@ -14,11 +14,14 @@ class Pcre2(AutotoolsPackage): homepage = "http://www.pcre.org""" url = "https://ftp.pcre.org/pub/pcre/pcre2-10.31.tar.bz2" + version('10.35', sha256='9ccba8e02b0ce78046cdfb52e5c177f0f445e421059e43becca4359c669d4613') version('10.31', sha256='e07d538704aa65e477b6a392b32ff9fc5edf75ab9a40ddfc876186c4ff4d68ac') version('10.20', sha256='332e287101c9e9567d1ed55391b338b32f1f72c5b5ee7cc81ef2274a53ad487a') variant('multibyte', default=True, description='Enable support for 16 and 32 bit characters.') + variant('jit', default=False, + description='enable Just-In-Time compiling support') def configure_args(self): args = [] @@ -27,6 +30,9 @@ def configure_args(self): 
args.append('--enable-pcre2-16') args.append('--enable-pcre2-32') + if '+jit' in self.spec: + args.append('--enable-jit') + return args @property diff --git a/var/spack/repos/builtin/packages/perl-bio-searchio-hmmer/package.py b/var/spack/repos/builtin/packages/perl-bio-searchio-hmmer/package.py new file mode 100644 index 00000000000..ae1c8c0c785 --- /dev/null +++ b/var/spack/repos/builtin/packages/perl-bio-searchio-hmmer/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PerlBioSearchioHmmer(PerlPackage): + """BioPerl parser to HMMER output.""" + + homepage = "https://github.com/bioperl/bio-searchio-hmmer" + url = "https://cpan.metacpan.org/authors/id/C/CJ/CJFIELDS/Bio-SearchIO-hmmer-1.7.3.tar.gz" + + version('1.7.3', sha256='686152f8ce7c611d27ee35ac002ecc309f6270e289a482993796a23bb5388246') + + depends_on('perl-bioperl', type=('build', 'run')) + depends_on('perl-io-string', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/perl-bioperl/package.py b/var/spack/repos/builtin/packages/perl-bioperl/package.py index de0f3671415..815eb8fcefb 100644 --- a/var/spack/repos/builtin/packages/perl-bioperl/package.py +++ b/var/spack/repos/builtin/packages/perl-bioperl/package.py @@ -38,7 +38,8 @@ class PerlBioperl(PerlPackage): version('1.7.6', sha256='df2a3efc991b9b5d7cc9d038a1452c6dac910c9ad2a0e47e408dd692c111688d', preferred=True) - version('1.007002', sha256='17aa3aaab2f381bbcaffdc370002eaf28f2c341b538068d6586b2276a76464a1') + version('1.007002', sha256='17aa3aaab2f381bbcaffdc370002eaf28f2c341b538068d6586b2276a76464a1', + url='https://cpan.metacpan.org/authors/id/C/CJ/CJFIELDS/BioPerl-1.007002.tar.gz') # According to cpandeps.grinnz.com Module-Build is both a build and run # time dependency for BioPerl @@ -67,7 +68,7 @@ class 
PerlBioperl(PerlPackage): depends_on('perl-libwww-perl', when='@1.7.6:', type=('build', 'run')) depends_on('perl-libxml-perl', when='@1.7.6:', type=('build', 'run')) - @when('@1.7.2') + @when('@1.007002') def configure(self, spec, prefix): # Overriding default configure method in order to cater to interactive # Build.pl @@ -102,10 +103,10 @@ def configure(self, spec, prefix): # Build script is run through perl and not use the shebang, as it might be # too long. This is needed because this does not pick up the # `@run_after(configure)` step defined in `PerlPackage`. - @when('@1.7.2') + @when('@1.007002') def build(self, spec, prefix): inspect.getmodule(self).perl('Build') - @when('@1.7.2') + @when('@1.007002') def install(self, spec, prefix): inspect.getmodule(self).perl('Build', 'install') diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py index 2841ab27138..05cb13cff9a 100644 --- a/var/spack/repos/builtin/packages/perl/package.py +++ b/var/spack/repos/builtin/packages/perl/package.py @@ -35,7 +35,8 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package version('5.31.4', sha256='418a7e6fe6485cc713a86d1227ef112f0bb3f80322e3b715ffe42851d97804a5') # Maintenance releases (even numbers, recommended) - version('5.30.1', sha256='bf3d25571ff1ee94186177c2cdef87867fd6a14aa5a84f0b1fb7bf798f42f964', preferred=True) + version('5.30.2', sha256='66db7df8a91979eb576fac91743644da878244cf8ee152f02cd6f5cd7a731689', preferred=True) + version('5.30.1', sha256='bf3d25571ff1ee94186177c2cdef87867fd6a14aa5a84f0b1fb7bf798f42f964') version('5.30.0', sha256='851213c754d98ccff042caa40ba7a796b2cee88c5325f121be5cbb61bbf975f2') # End of life releases diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index cccced63a06..f328171072e 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py 
@@ -16,13 +16,14 @@ class Petsc(Package): """ homepage = "http://www.mcs.anl.gov/petsc/index.html" - url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-lite-3.13.0.tar.gz" + url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-lite-3.13.1.tar.gz" git = "https://gitlab.com/petsc/petsc.git" maintainers = ['balay', 'barrysmith', 'jedbrown'] version('develop', branch='master') version('xsdk-0.2.0', tag='xsdk-0.2.0') + version('3.13.1', sha256='74a895e44e2ff1146838aaccb7613e7626d99e0eed64ca032c87c72d084efac3') version('3.13.0', sha256='f0ea543a54145c5d1387e25b121c3fd1b1ca834032c5a33f6f1d929e95bdf0e5') version('3.12.5', sha256='d676eb67e79314d6cca6422d7c477d2b192c830b89d5edc6b46934f7453bcfc0') version('3.12.4', sha256='56a941130da93bbacb3cfa74dcacea1e3cd8e36a0341f9ced09977b1457084c3') @@ -176,8 +177,8 @@ class Petsc(Package): depends_on('superlu-dist@5.4:5.4.99+int64', when='@3.10:3.10.2+superlu-dist+mpi+int64') depends_on('superlu-dist@6.1:6.1.99~int64', when='@3.10.3:3.12.99+superlu-dist+mpi~int64') depends_on('superlu-dist@6.1:6.1.99+int64', when='@3.10.3:3.12.99+superlu-dist+mpi+int64') - depends_on('superlu-dist@6.1:6.3.99~int64', when='@3.13.0:3.13.99+superlu-dist+mpi~int64') - depends_on('superlu-dist@6.1:6.3.99+int64', when='@3.13.0:3.13.99+superlu-dist+mpi+int64') + depends_on('superlu-dist@6.1:6.3.0~int64', when='@3.13.0:3.13.99+superlu-dist+mpi~int64') + depends_on('superlu-dist@6.1:6.3.0+int64', when='@3.13.0:3.13.99+superlu-dist+mpi+int64') depends_on('superlu-dist@xsdk-0.2.0~int64', when='@xsdk-0.2.0+superlu-dist+mpi~int64') depends_on('superlu-dist@xsdk-0.2.0+int64', when='@xsdk-0.2.0+superlu-dist+mpi+int64') depends_on('superlu-dist@develop~int64', when='@develop+superlu-dist+mpi~int64') @@ -215,7 +216,7 @@ def mpi_dependent_options(self): # enabled. This generates a list of any such errors. 
errors = [ error_message_fmt.format(library=x) - for x in ('hdf5', 'hypre', 'parmetis', 'mumps', 'superlu-dist') + for x in ('hdf5', 'hypre', 'mumps', 'superlu-dist') if ('+' + x) in self.spec] if errors: errors = ['incompatible variants given'] + errors @@ -295,18 +296,23 @@ def install(self, spec, prefix): '--with-scalapack=0' ]) - # Activates library support if needed - for library in ('cuda', 'metis', 'hdf5', 'hypre', 'parmetis', + # Activates library support if needed (i.e. direct dependency) + for library in ('cuda', 'metis', 'hypre', 'parmetis', 'mumps', 'trilinos', 'fftw', 'valgrind'): + # Cannot check `library in spec` because of transitive deps + # Cannot check variants because parmetis keys on +metis + library_requested = library in spec.dependencies_dict() options.append( '--with-{library}={value}'.format( - library=library, value=('1' if library in spec else '0')) + library=library, + value=('1' if library_requested else '0')) ) - if library in spec: + if library_requested: options.append( '--with-{library}-dir={path}'.format( library=library, path=spec[library].prefix) ) + # PETSc does not pick up SuperluDist from the dir as they look for # superlu_dist_4.1.a if 'superlu-dist' in spec: @@ -337,6 +343,16 @@ def install(self, spec, prefix): else: options.append('--with-suitesparse=0') + # hdf5: configure detection is convoluted for pflotran + if '+hdf5' in spec: + options.extend([ + '--with-hdf5-include=%s' % spec['hdf5'].prefix.include, + '--with-hdf5-lib=%s' % spec['hdf5:hl,fortran'].libs.joined(), + '--with-hdf5=1' + ]) + else: + options.append('--with-hdf5=0') + # zlib: configuring using '--with-zlib-dir=...' has some issues with # SuiteSparse so specify directly the include path and the libraries. 
if 'zlib' in spec: diff --git a/var/spack/repos/builtin/packages/pgi/package.py b/var/spack/repos/builtin/packages/pgi/package.py index aa6e60ddcc4..bc5f8091094 100644 --- a/var/spack/repos/builtin/packages/pgi/package.py +++ b/var/spack/repos/builtin/packages/pgi/package.py @@ -21,6 +21,7 @@ class Pgi(Package): homepage = "http://www.pgroup.com/" version('19.10', sha256='ac9db73ba80a66fe3bc875f63aaa9e16f54674a4e88b25416432430ba8cf203d') + version('19.7', sha256='439692aeb51eff464b968c3bfed4536ed7bd3ba6f8174bc0ebe2219a78fe62ae') version('19.4', sha256='23eee0d4da751dd6f247d624b68b03538ebd172e63a053c41bb67013f07cf68e') version('19.1', sha256='3e05a6db2bf80b5d15f6ff83188f20cb89dc23e233417921e5c0822e7e57d34f') version('18.10', sha256='4b3ff83d2a13de6001bed599246eff8e63ef711b8952d4a9ee12efd666b3e326') @@ -98,3 +99,13 @@ def setup_run_environment(self, env): env.set('CXX', join_path(prefix.bin, 'pgc++')) env.set('F77', join_path(prefix.bin, 'pgfortran')) env.set('FC', join_path(prefix.bin, 'pgfortran')) + + if '+mpi' in self.spec: + ompi_dir = os.listdir(prefix.mpi)[0] + env.prepend_path('PATH', join_path(prefix.mpi, ompi_dir, 'bin')) + env.prepend_path('LD_LIBRARY_PATH', join_path(prefix.mpi, ompi_dir, + 'lib')) + env.prepend_path('C_INCLUDE_PATH', join_path(prefix.mpi, ompi_dir, + 'include')) + env.prepend_path('MANPATH', join_path(prefix.mpi, ompi_dir, + 'share/man')) diff --git a/var/spack/repos/builtin/packages/phist/package.py b/var/spack/repos/builtin/packages/phist/package.py index 42ad816dfa8..cdbd84ed7ff 100644 --- a/var/spack/repos/builtin/packages/phist/package.py +++ b/var/spack/repos/builtin/packages/phist/package.py @@ -26,6 +26,7 @@ class Phist(CMakePackage): version('develop', branch='devel') version('master', branch='master') + version('1.9.0', sha256='990d3308fc0083ed0f9f565d00c649ee70c3df74d44cbe5f19dfe05263d06559') version('1.8.0', sha256='ee42946bce187e126452053b5f5c200b57b6e40ee3f5bcf0751f3ced585adeb0') version('1.7.5', 
sha256='f11fe27f2aa13d69eb285cc0f32c33c1603fa1286b84e54c81856c6f2bdef500') version('1.7.4', sha256='ef0c97fda9984f53011020aff3e61523833320f5f5719af2f2ed84463cccb98b') @@ -44,9 +45,12 @@ class Phist(CMakePackage): 'eigen', 'ghost']) + variant(name='int64', default=True, + description='Use 64-bit global indices.') + variant(name='outlev', default='2', values=['0', '1', '2', '3', '4', '5'], description='verbosity. 0: errors 1: +warnings 2: +info ' - '3: +verbose 4: +extreme 5; +debug') + '3: +verbose 4: +extreme 5: +debug') variant('host', default=True, description='allow PHIST to use compiler flags that lead to host-' @@ -86,6 +90,13 @@ class Phist(CMakePackage): # in older versions, it is not possible to turn off the use of host- # specific compiler flags in Release mode. conflicts('~host', when='@:1.7.3') + # builtin always uses 64-bit indices + conflicts('~int64', when='kernel_lib=builtin') + conflicts('+int64', when='kernel_lib=eigen') + + # ###################### Patches ########################## + + patch('update_tpetra_gotypes.patch', when='@:1.8.99') # ###################### Dependencies ########################## @@ -97,16 +108,18 @@ class Phist(CMakePackage): # the feature (e.g. 
use the '~fortran' variant) depends_on('python@3:', when='@1.7: +fortran', type='build') depends_on('mpi', when='+mpi') - depends_on('trilinos+anasazi+belos+teuchos', when='+trilinos') - depends_on('trilinos@12:+tpetra', when='kernel_lib=tpetra') + depends_on('trilinos@12:+tpetra gotype=long_long', when='kernel_lib=tpetra +int64') + depends_on('trilinos@12:+tpetra gotype=int', when='kernel_lib=tpetra ~int64') # Epetra backend also works with older Trilinos versions depends_on('trilinos+epetra', when='kernel_lib=epetra') - depends_on('petsc', when='kernel_lib=petsc') + depends_on('petsc +int64', when='kernel_lib=petsc +int64') + depends_on('petsc ~int64', when='kernel_lib=petsc ~int64') depends_on('eigen', when='kernel_lib=eigen') depends_on('ghost', when='kernel_lib=ghost') - depends_on('trilinos', when='+trilinos') - depends_on('parmetis ^metis+int64', when='+parmetis') + depends_on('trilinos+anasazi+belos+teuchos', when='+trilinos') + depends_on('parmetis ^metis+int64', when='+parmetis +int64') + depends_on('parmetis ^metis~int64', when='+parmetis ~int64') # Fortran 2003 bindings were included in version 1.7, previously they # required a separate package @@ -139,13 +152,15 @@ def cmake_args(self): '-DPHIST_ENABLE_SCAMAC:BOOL=%s' % ('ON' if '+scamac' in spec else 'OFF'), '-DPHIST_USE_TRILINOS_TPLS:BOOL=%s' - % ('ON' if '+trilinos' in spec else 'OFF'), + % ('ON' if '^trilinos' in spec else 'OFF'), '-DPHIST_USE_SOLVER_TPLS:BOOL=%s' - % ('ON' if '+trilinos' in spec else 'OFF'), + % ('ON' if '^trilinos+belos+anasazi' in spec else 'OFF'), '-DPHIST_USE_PRECON_TPLS:BOOL=%s' - % ('ON' if '+trilinos' in spec else 'OFF'), + % ('ON' if '^trilinos' in spec else 'OFF'), '-DXSDK_ENABLE_Fortran:BOOL=%s' % ('ON' if '+fortran' in spec else 'OFF'), + '-DXSDK_INDEX_SIZE=%s' + % ('64' if '+int64' in spec else '32'), '-DPHIST_HOST_OPTIMIZE:BOOL=%s' % ('ON' if '+host' in spec else 'OFF'), ] diff --git a/var/spack/repos/builtin/packages/phist/update_tpetra_gotypes.patch 
b/var/spack/repos/builtin/packages/phist/update_tpetra_gotypes.patch new file mode 100644 index 00000000000..2db579fc340 --- /dev/null +++ b/var/spack/repos/builtin/packages/phist/update_tpetra_gotypes.patch @@ -0,0 +1,35 @@ +commit 8df8ad0e56e3bbd3d0c133fcdb7d2af6ab4dd229 +Author: Jonas Thies +Date: Tue Apr 21 18:24:53 2020 +0200 + + tpetra: use 'int' and 'long long' as gidx type for 32 and 64-bit compilations, respectively + because Trilinos allows to instantiate those variants (before I had 'int'/'ptrdiff_t') + +diff --git a/src/kernels/tpetra/phist_typedefs.h b/src/kernels/tpetra/phist_typedefs.h +index 1f6b6c6c..3a351c27 100644 +--- a/src/kernels/tpetra/phist_typedefs.h ++++ b/src/kernels/tpetra/phist_typedefs.h +@@ -34,19 +34,18 @@ + using phist_s_complex = std::complex; + //! double precision complex type + using phist_d_complex = std::complex; +-//! type of global indices +-using phist_gidx = std::ptrdiff_t; + #else + typedef float complex phist_s_complex; + typedef double complex phist_d_complex; ++#endif ++ + //! type of global indices + #ifdef PHIST_FORCE_32BIT_GIDX + typedef int phist_gidx; + #define PRgidx "d" + #else +-typedef ptrdiff_t phist_gidx; +-#define PRgidx "ld" +-#endif ++typedef long long phist_gidx; ++#define PRgidx "lld" + #endif + + // we want ptrdiff_t (aka long long int on 64 bit systems) as local index, diff --git a/var/spack/repos/builtin/packages/photos/package.py b/var/spack/repos/builtin/packages/photos/package.py new file mode 100644 index 00000000000..6304e194aad --- /dev/null +++ b/var/spack/repos/builtin/packages/photos/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Photos(AutotoolsPackage): + """ Photos is a Monte Carlo program for bremsstrahlung in the decay + of particles and resonances.""" + + homepage = "http://photospp.web.cern.ch/photospp/" + url = "http://photospp.web.cern.ch/photospp/resources/PHOTOS.3.61/PHOTOS.3.61-LHC.tar.gz" + + version('3.61', sha256='acd3bcb769ba2a3e263de399e9b89fd6296405c9cbc5045b83baba3e60db4b26') + + maintainers = ['vvolkl'] + + depends_on('hepmc@:2.99.99') + + def configure_args(self): + args = [] + + args.append('--with-hepmc=%s' % self.spec["hepmc"].prefix) + args.append('--without-hepmc3') + return args diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py index 31c15b42686..1087dabbbff 100644 --- a/var/spack/repos/builtin/packages/pixman/package.py +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -15,6 +15,7 @@ class Pixman(AutotoolsPackage): homepage = "http://www.pixman.org" url = "http://cairographics.org/releases/pixman-0.32.6.tar.gz" + version('0.40.0', sha256='6d200dec3740d9ec4ec8d1180e25779c00bc749f94278c8b9021f5534db223fc') version('0.38.4', sha256='da66d6fd6e40aee70f7bd02e4f8f76fc3f006ec879d346bae6a723025cfbdde7') version('0.38.0', sha256='a7592bef0156d7c27545487a52245669b00cf7e70054505381cff2136d890ca8') version('0.34.0', sha256='21b6b249b51c6800dc9553b65106e1e37d0e25df942c90531d4c3997aa20a88e') diff --git a/var/spack/repos/builtin/packages/pkgconf/package.py b/var/spack/repos/builtin/packages/pkgconf/package.py index 06fcefcc6c0..3bb59b4dd87 100644 --- a/var/spack/repos/builtin/packages/pkgconf/package.py +++ b/var/spack/repos/builtin/packages/pkgconf/package.py @@ -16,6 +16,7 @@ class Pkgconf(AutotoolsPackage): # URL must remain http:// so Spack can bootstrap curl url = "http://distfiles.dereferenced.org/pkgconf/pkgconf-1.6.3.tar.xz" + version('1.7.3', 
sha256='b846aea51cf696c3392a0ae58bef93e2e72f8e7073ca6ad1ed8b01c85871f9c0') version('1.6.3', sha256='61f0b31b0d5ea0e862b454a80c170f57bad47879c0c42bd8de89200ff62ea210') version('1.6.1', sha256='22b9ee38438901f9d60f180e5182821180854fa738fd071f593ea26a81da208c') version('1.6.0', sha256='6135a3abb576672ba54a899860442ba185063f0f90dae5892f64f7bae8e1ece5') diff --git a/var/spack/repos/builtin/packages/plink-ng/package.py b/var/spack/repos/builtin/packages/plink-ng/package.py new file mode 100644 index 00000000000..27b4a4ca95f --- /dev/null +++ b/var/spack/repos/builtin/packages/plink-ng/package.py @@ -0,0 +1,38 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PlinkNg(Package): + """A comprehensive update to the PLINK association analysis toolset.""" + + homepage = "https://www.cog-genomics.org/plink/2.0/" + url = "https://www.cog-genomics.org/static/bin/plink2_src_200511.zip" + + version('200511', sha256='00cff19bece88acb7a21ba098501cb677b78d22c9f3ca5bcdc869139a40db816') + + depends_on('zlib') + depends_on('zstd@1.4.4:') + depends_on('cblas') + depends_on('blas') + depends_on('lapack') + + conflicts('%gcc@:4.99') + + def url_for_version(self, ver): + template = 'https://www.cog-genomics.org/static/bin/plink2_src_{0}.zip' + return template.format(ver) + + def setup_build_environment(self, env): + zlib = join_path(self.spec['zlib'].prefix.lib, 'libz.a') + env.set('ZLIB', zlib) + + def install(self, spec, prefix): + ld_flags = [spec['lapack'].libs.ld_flags, spec['blas'].libs.ld_flags] + filter_file('-llapack -lcblas -lblas', ' '.join(ld_flags), + 'build.sh', string=True) + which('sh')('build.sh') + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/pmix/package.py b/var/spack/repos/builtin/packages/pmix/package.py index 2cb5126b535..b752aca0bf7 100644 --- 
a/var/spack/repos/builtin/packages/pmix/package.py +++ b/var/spack/repos/builtin/packages/pmix/package.py @@ -6,6 +6,7 @@ from spack import * import spack.architecture +import os class Pmix(AutotoolsPackage): @@ -32,8 +33,10 @@ class Pmix(AutotoolsPackage): homepage = "https://pmix.org" url = "https://github.com/pmix/pmix/releases/download/v3.1.3/pmix-3.1.3.tar.bz2" + git = "https://github.com/openpmix/openpmix.git" maintainers = ['rhc54'] + version('master', branch='master') version('3.1.3', sha256='118acb9c4e10c4e481406dcffdfa762f314af50db75336bf8460e53b56dc439d') version('3.1.2', sha256='28aed0392d4ca2cdfbdd721e6210c94dadc9830677fea37a0abe9d592c00f9c3') version('3.0.2', sha256='df68f35a3ed9517eeade80b13855cebad8fde2772b36a3f6be87559b6d430670') @@ -47,13 +50,36 @@ class Pmix(AutotoolsPackage): version('2.0.1', sha256='ba6e0f32936b1859741adb221e18b2c1ee7dc53a6b374b9f7831adf1692b15fd') version('1.2.5', sha256='a2b02d489ee730c06ee40e7f9ffcebb6c35bcb4f95153fab7c4276a3add6ae31') - depends_on('libevent@2.0.20:2.0.22,2.1.8') - depends_on('hwloc@1.11.0:1.11.99,2.0.1:', when='@3.0.0:') - variant('pmi_backwards_compatibility', default=True, description="Toggle pmi backwards compatibility") + variant('restful', + default=False, + description="allow a PMIx server to request services from " + "a system-level REST server") + + depends_on('libevent@2.0.20:2.0.22,2.1.8') + depends_on('hwloc@1.11.0:1.11.99,2.0.1:', when='@3.0.0:') + depends_on("m4", type=("build"), when="@master") + depends_on("autoconf", type=("build"), when="@master") + depends_on("automake", type=("build"), when="@master") + depends_on("libtool", type=("build"), when="@master") + depends_on("perl", type=("build"), when="@master") + depends_on('curl', when="+restful") + depends_on('jansson@2.11:', when="+restful") + + conflicts('@:3.9.9', when='+restful') + + def autoreconf(self, spec, prefix): + """Only needed when building from git checkout""" + # If configure exists nothing needs to be done + if 
os.path.exists(self.configure_abs_path): + return + # Else bootstrap with autotools + perl = which('perl') + perl('./autogen.pl') + def configure_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/pocl/package.py b/var/spack/repos/builtin/packages/pocl/package.py index 42a862d97b6..121b240e288 100644 --- a/var/spack/repos/builtin/packages/pocl/package.py +++ b/var/spack/repos/builtin/packages/pocl/package.py @@ -20,6 +20,7 @@ class Pocl(CMakePackage): git = "https://github.com/pocl/pocl.git" version("master", branch="master") + version('1.5', sha256='4fcf4618171727d165fc044d465a66e3119217bb4577a97374f94fcd8aed330e') version('1.4', sha256='ec237faa83bb1c803fbdf7c6e83d8a2ad68b6f0ed1879c3aa16c0e1dcc478742') version('1.3', sha256='6527e3f47fab7c21e96bc757c4ae3303901f35e23f64642d6da5cc4c4fcc915a') version('1.2', sha256='0c43e68f336892f3a64cba19beb99d9212f529bedb77f7879c0331450b982d46') @@ -50,7 +51,8 @@ class Pocl(CMakePackage): # enabled by default, and also because they fail to build for us # (see #1616) # These are the supported LLVM versions - depends_on("llvm +clang @6.0:7.0", when="@master") + depends_on("llvm +clang @6.0:10.0", when="@master") + depends_on("llvm +clang @6.0:10.0", when="@1.5") depends_on("llvm +clang @6.0:9.0", when="@1.4") depends_on("llvm +clang @5.0:8.0", when="@1.3") depends_on("llvm +clang @5.0:7.0", when="@1.2") @@ -69,6 +71,8 @@ class Pocl(CMakePackage): variant("icd", default=False, description="Support a system-wide ICD loader") + depends_on('ocl-icd', when='+icd') + def url_for_version(self, version): if version >= Version('1.0'): url = "https://github.com/pocl/pocl/archive/v{0}.tar.gz" diff --git a/var/spack/repos/builtin/packages/podio/cpack.patch b/var/spack/repos/builtin/packages/podio/cpack.patch new file mode 100644 index 00000000000..e685f9bc13d --- /dev/null +++ b/var/spack/repos/builtin/packages/podio/cpack.patch @@ -0,0 +1,11 @@ +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -86,7 +86,7 @@ 
include(cmake/podioMacros.cmake) + include(CTest) + + #--- enable CPack -------------------------------------------------------------- +-include(cmake/podioCPack.cmake) ++#include(cmake/podioCPack.cmake) + + #--- target for Doxygen documentation ------------------------------------------ + if(CREATE_DOC) diff --git a/var/spack/repos/builtin/packages/podio/package.py b/var/spack/repos/builtin/packages/podio/package.py index bc7a6dab90e..488eaf54f15 100644 --- a/var/spack/repos/builtin/packages/podio/package.py +++ b/var/spack/repos/builtin/packages/podio/package.py @@ -17,26 +17,21 @@ class Podio(CMakePackage): maintainers = ['vvolkl', 'drbenmorgan'] version('master', branch='master') - version('00-10', sha256='b5b42770ec8b96bcd2748abc05669dd3e4d4cc84f81ed57d57d2eda1ade90ef2') - version('00-09-02', sha256='8234d1b9636029124235ef81199a1220968dcc7fdaeab81cdc96a47af332d240') - version('00-09', sha256='3cde67556b6b76fd2d004adfaa3b3b6173a110c0c209792bfdb5f9353e21076f') - version('00-08', sha256='9d035a7f5ebfae5279a17405003206853271af692f762e2bac8e73825f2af327') + version('0.10.0', sha256='b5b42770ec8b96bcd2748abc05669dd3e4d4cc84f81ed57d57d2eda1ade90ef2') + version('0.9.2', sha256='8234d1b9636029124235ef81199a1220968dcc7fdaeab81cdc96a47af332d240') + version('0.9.0', sha256='3cde67556b6b76fd2d004adfaa3b3b6173a110c0c209792bfdb5f9353e21076f') + version('0.8.0', sha256='9d035a7f5ebfae5279a17405003206853271af692f762e2bac8e73825f2af327') variant('build_type', default='Release', description='The build type to build', values=('Debug', 'Release')) - variant('cxxstd', - default='17', - values=('14', '17'), - multi=False, - description='Use the specified C++ standard when building.') + # cpack config throws an error on some systems + patch('cpack.patch', when="@:0.10.0") - _cxxstd_values = ('14', '17') - for s in _cxxstd_values: - depends_on('root@6.08.06: cxxstd=' + s, when='cxxstd=' + s) + depends_on('root@6.08.06:') - depends_on('cmake', type='build') + depends_on('cmake@3.8:', 
type='build') depends_on('python', type=('build', 'run')) depends_on('py-pyyaml', type=('build', 'run')) @@ -44,8 +39,8 @@ def cmake_args(self): args = [] # C++ Standard args.append('-DCMAKE_CXX_STANDARD=%s' - % self.spec.variants['cxxstd'].value) - args.append('-DBUILD_TESTING=OFF') + % self.spec['root'].variants['cxxstd'].value) + args.append('-DBUILD_TESTING=%s' % self.run_tests) return args def setup_build_environment(self, spack_env): @@ -56,3 +51,16 @@ def setup_dependent_build_environment(self, env, dependent_spec): def setup_dependent_run_environment(self, env, dependent_spec): env.set('PODIO', self.prefix) + + def url_for_version(self, version): + # podio releases are dashes and padded with a leading zero + # the patch version is omitted when 0 + # so for example v01-12-01, v01-12 ... + major = (str(version[0]).zfill(2)) + minor = (str(version[1]).zfill(2)) + patch = (str(version[2]).zfill(2)) + if version[2] == 0: + url = "https://github.com/AIDASoft/podio/archive/v%s-%s.tar.gz" % (major, minor) + else: + url = "https://github.com/AIDASoft/podio/archive/v%s-%s-%s.tar.gz" % (major, minor, patch) + return url diff --git a/var/spack/repos/builtin/packages/poppler/package.py b/var/spack/repos/builtin/packages/poppler/package.py index affb9e5359a..2848c81eb16 100644 --- a/var/spack/repos/builtin/packages/poppler/package.py +++ b/var/spack/repos/builtin/packages/poppler/package.py @@ -15,6 +15,7 @@ class Poppler(CMakePackage): git = "https://gitlab.freedesktop.org/poppler/poppler.git" version('master', branch='master') + version('0.87.0', sha256='6f602b9c24c2d05780be93e7306201012e41459f289b8279a27a79431ad4150e') version('0.79.0', sha256='f985a4608fe592d2546d9d37d4182e502ff6b4c42f8db4be0a021a1c369528c8') version('0.77.0', sha256='7267eb4cbccd64a58244b8211603c1c1b6bf32c7f6a4ced2642865346102f36b') version('0.72.0', sha256='c1747eb8f26e9e753c4001ed951db2896edc1021b6d0f547a0bd2a27c30ada51') @@ -48,7 +49,7 @@ class Poppler(CMakePackage): depends_on('openjpeg', 
when='+openjpeg') depends_on('qt@4.0:', when='+qt') depends_on('zlib', when='+zlib') - depends_on('cairo@1.10.0:', when='+glib') + depends_on('cairo+ft@1.10.0:', when='+glib') depends_on('iconv', when='+iconv') depends_on('jpeg', when='+jpeg') depends_on('libpng', when='+png') diff --git a/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py b/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py new file mode 100755 index 00000000000..72db2617007 --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-amr-fdm/package.py @@ -0,0 +1,62 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import os + + +class PpopenApplAmrFdm(MakefilePackage): + """ + ppOpen-APPL/AMR-FDM is an adaptive mesh refinement (AMR) framework + for development of 3D parallel finite-difference method (FDM) + applications. 
+ """ + + homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/" + url = "file://{0}/ppohAMRFDM_0.3.0.tar.gz".format(os.getcwd()) + + version('0.3.0', sha256='e82217e4c949dd079a56024d3d1c1761dc8efd5ad0d26a3af83564c3db7327bb') + + depends_on('mpi') + + parallel = False + build_targets = ['default', 'advAMR3D'] + + def edit(self, spec, prefix): + fflags = [ + '-O3', + '-I.', + '-I{0}/include'.format(os.getcwd()) + ] + makefile_in = FileFilter('Makefile.in') + makefile_in.filter('^PREFIX +=.*', 'PREFIX = {0}'.format(prefix)) + makefile_in.filter( + '^INCDIR +=.*', + 'INCDIR = {0}/include'.format(self.build_directory) + ) + makefile_in.filter( + '^LIBDIR +=.*', + 'LIBDIR = {0}/lib'.format(self.build_directory) + ) + makefile_in.filter('^F90 +=.*', 'F90 = {0}'.format(spack_fc)) + makefile_in.filter( + '^MPIF90 +=.*', + 'MPIF90 = {0}'.format(spec['mpi'].mpifc) + ) + makefile_in.filter( + '^sFFLAGS +=.*', + 'sFFLAGS = {0}'.format(' '.join(fflags)) + ) + fflags.append(self.compiler.openmp_flag) + makefile_in.filter( + '^pFFLAGS +=.*', + 'pFFLAGS = {0}'.format(' '.join(fflags)) + ) + + def install(self, spec, prefix): + install_tree('include', prefix.include) + install_tree('lib', prefix.lib) + install_tree('bin', prefix.bin) + install_tree('doc', prefix.doc) diff --git a/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py b/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py new file mode 100755 index 00000000000..8fa6544db36 --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-dem-util/package.py @@ -0,0 +1,48 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import os + + +class PpopenApplDemUtil(MakefilePackage): + """ + ppOpen-APPL/DEM provides fundamental components of the particle + simulations based on the discrete element method (DEM). 
+ ppOpen-APPL/DEM (ver.1.0.0) includes the libraries for the DEM, + sample codes, and data sets. ppOpen-APPL/DEM-Util provides the + preconditioning utilities. This utility prepares data sets of distributed + data files from the mesh data sets. + + """ + + homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/" + url = "file://{0}/ppohDEM_util_1.0.0.tar.gz".format(os.getcwd()) + + version('1.0.0', sha256='e0aa9a61be3b9858a2885c9feff9b0fcd1d7039408f6bd82a73a79dfe86b0488') + + depends_on('mpi') + + def edit(self, spec, prefix): + makefile_in = FileFilter('Makefile.in') + makefile_in.filter('PREFIX += .*', 'PREFIX = {0}'.format(prefix)) + makefile_in.filter('F90 += .*', 'F90 = {0}'.format(spack_fc)) + makefile_in.filter('F77 += .*', 'F77 = {0}'.format(spack_fc)) + makefile_in.filter( + 'MPIF90 += .*', + 'MPIF90 = {0}'.format(spec['mpi'].mpifc) + ) + makefile_in.filter( + 'MPIF77 += .*', + 'MPIF77 = {0}'.format(spec['mpi'].mpifc) + ) + makefile_in.filter( + 'F90MPFLAGS += .*', + 'F90MPFLAGS = -O3 {0}'.format(self.compiler.openmp_flag) + ) + + def install(self, spec, prefix): + make('install') + install_tree('doc', prefix.doc) diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py b/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py new file mode 100755 index 00000000000..a8ab4fb9e8d --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm-at/package.py @@ -0,0 +1,50 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import os + + +class PpopenApplFdmAt(MakefilePackage): + """ppOpen-APPL/FDM with Auto-Tuning""" + + homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/" + url = "file://{0}/ppohFDM_AT_1.0.0.tar.gz".format(os.getcwd()) + + version('1.0.0', sha256='f6052b73250a41b2b319b27efc4d753c6ec1f67cd109b53099c2b240f7acd65a') + + depends_on('mpi') + # depends_on('ppopen-appl-fdm', type='build') + + build_directory = "3.hybrid_AT" + parallel = False + + def edit(self, spec, prefix): + with working_dir(self.build_directory): + fflags = ['-O3', self.compiler.openmp_flag] + if spec.satisfies('%gcc'): + fflags.append('-ffree-line-length-none') + if spec.satisfies('arch=x86_64:'): + fflags.append('-mcmodel=medium') + makefile_opt = FileFilter('Makefile.option') + makefile_opt.filter( + 'FC = .*$', + 'FC = {0}'.format(spec['mpi'].mpifc) + ) + makefile_opt.filter( + 'FFLAGS = .*$', + 'FFLAGS = -O3 {0}'.format(' '.join(fflags)) + ) + + def install(self, spec, prefix): + mkdir(prefix.bin) + copy(join_path(self.build_directory, 'seism3d3n'), prefix.bin) + install_src_dir = join_path(prefix.src, self.build_directory) + mkdirp(install_src_dir) + install_tree(self.build_directory, install_src_dir) + with working_dir(install_src_dir): + make('clean') + mkdir(prefix.doc) + copy('readme.txt', prefix.doc) diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch b/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch new file mode 100755 index 00000000000..115a0f06888 --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/gfortran_iargc.patch @@ -0,0 +1,13 @@ +diff --git a/tools/seismic_3D-tools/m_stdio.f90 b/tools/seismic_3D-tools/m_stdio.f90 +index bddb715..bc00f68 100644 +--- a/tools/seismic_3D-tools/m_stdio.f90 ++++ b/tools/seismic_3D-tools/m_stdio.f90 +@@ -65,8 +65,6 @@ module stdio + real(PN), parameter :: DEG2RAD = PI/180._PN + real(PN), parameter :: 
RAD2DEG = 180.0_PN / PI + +- integer, external :: iargc +- + + interface readPrm + ! diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/iargc_definition.patch b/var/spack/repos/builtin/packages/ppopen-appl-fdm/iargc_definition.patch new file mode 100644 index 00000000000..4f149873aef --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/iargc_definition.patch @@ -0,0 +1,13 @@ +--- spack-src/tools/seismic_2D-tools/m_getopt.f90.org 2020-05-13 10:14:41.822006522 +0900 ++++ spack-src/tools/seismic_2D-tools/m_getopt.f90 2020-05-13 10:17:01.102690012 +0900 +@@ -80,7 +80,9 @@ + character(256), allocatable :: argv(:) + integer :: i + character(256) :: optkey +- ++ ++ integer, external :: iargc ++ + narg = iargc() + allocate( argv(1:narg) ) + diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py b/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py new file mode 100755 index 00000000000..1b3acfd6921 --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/package.py @@ -0,0 +1,140 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +from spack import * + + +class PpopenApplFdm(MakefilePackage): + """ + ppOpen-APPL/FDM is an application software for the FDM simulation of + seismic wave propagation in elastic media in 2D and 3D. + The 2D application is prepared for a single-CPU (sequential) calculation + and the 3D application uses MPI and OpenMP for parallel computing.
+ """ + + homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/" + url = "file://{0}/ppohFDM_0.3.1.tar.gz".format(os.getcwd()) + + version('0.3.1', sha256='5db7c28ef2df43c0ffa28e542d92320fe3c8cd7551aabe1de64647191ddf7d0b') + + # remove unused variable definition + patch('unused.patch') + # remove iargc external definition + # iargc is intrinsic in gfortran + patch('gfortran_iargc.patch', when='%gcc') + + # Fixed a problem that 'iargc' was not declared in advance + patch('iargc_definition.patch', when='%fj') + + depends_on('ppopen-math-vis', type='link') + depends_on('mpi') + + parallel = False + + def edit(self, spec, prefix): + makefile_in = FileFilter('Makefile.in') + makefile_in.filter('PREFIX += .*$', 'PREFIX = {0}'.format(prefix)) + makefile_in.filter( + 'LIBDIR = .*$', + 'LIBDIR = {0}'.format(prefix) + ) + makefile_in.filter( + 'CC += .*$', + 'CC = {0}'.format(spec['mpi'].mpicc) + ) + makefile_in.filter('COPTFLAGS += .*$', 'COPTFLAGS = -O3') + makefile_in.filter( + 'CXX += .*$', + 'CXX = {0}'.format(spec['mpi'].mpicxx) + ) + makefile_in.filter('CXXOPTFLAGS = .*$', 'CXXOPTFLAGS = -O3') + makefile_in.filter( + 'FC += .*$', + 'FC = {0}'.format(spec['mpi'].mpifc) + ) + makefile_in.filter('FOPTFLAGS += .*$', 'FOPTFLAGS = -O3') + makefile_in.filter( + 'F90 += .*$', + 'F90 = {0}'.format(spec['mpi'].mpifc) + ) + makefile_in.filter('F90OPTFLAGS += .*$', 'F90OPTFLAGS = -O3') + + makefile_opt = FileFilter(join_path( + 'src', 'seismic_2D', 'makefile.option') + ) + makefile_opt.filter('FC = .*$', 'FC = {0}'.format(spack_fc)) + makefile_opt.filter('FFLAGS = .*$', 'FFLAGS = -O3') + + makefile = FileFilter(join_path( + 'src', 'seismic_3D', '1.ppohFDM-ppohVIS', 'Makefile') + ) + makefile.filter('LIBS += .*$', 'LIBS = ') + makefile.filter( + 'FLDFLAGS += .*$', + 'FLDFLAGS = ' + spec['ppopen-math-vis'].libs.ld_flags + ) + + makefile_opt = FileFilter(join_path( + 'src', 'seismic_3D', '3.parallel', 'Makefile.option') + ) + makefile_opt.filter('FC = .*$', 'FC = 
{0}'.format(spec['mpi'].mpifc)) + makefile_opt.filter( + 'FFLAGS = .*$', + 'FFLAGS = -O3 {0}'.format(self.compiler.openmp_flag) + ) + + copy( + join_path('examples', 'seismic_3D-example', 'm_param.f90'), + join_path('src', 'seismic_3D', '1.ppohFDM-ppohVIS') + ) + copy( + join_path('examples', 'seismic_3D-example', 'm_param.f90'), + join_path('src', 'seismic_3D', '3.parallel') + ) + + for makefile in find('tools', 'makefile', recursive=True): + fflags = ['-O3', '-I.'] + m = FileFilter(makefile) + m.filter('^FC =.*$', 'FC = {0}'.format(spack_fc)) + m.filter( + '^FFLAGS =.*$', + 'FFLAGS = {0}'.format(' '.join(fflags)) + ) + + def build(self, spec, prefix): + make('seism2d', 'seism3d-ppohVIS', 'seism3d-parallel') + for d in ['seismic_2D-tools', 'seismic_3D-tools']: + with working_dir(join_path('tools', d)): + make('all') + + def install(self, spec, prefix): + commands = [ + join_path('src', 'seismic_2D', 'seism2d_psv'), + join_path('src', 'seismic_3D', '3.parallel', 'seism3d3n'), + join_path('src', 'seismic_3D', '1.ppohFDM-ppohVIS', 'seism3d3n'), + join_path('tools', 'seismic_2D-tools', 'pmxy2d'), + join_path('tools', 'seismic_2D-tools', 'rwav2d'), + join_path('tools', 'seismic_3D-tools', 'catsnap'), + join_path('tools', 'seismic_3D-tools', 'catwav'), + join_path('tools', 'seismic_3D-tools', 'ppmxy3d3'), + join_path('tools', 'seismic_3D-tools', 'rwav3d'), + ] + mkdir(prefix.bin) + for command in commands: + copy(command, prefix.bin) + install_tree('examples', prefix.examples) + install_tree('doc', prefix.doc) + install_tree('src', prefix.src) + copy('Makefile.in', prefix) + clean_dir = [ + join_path(prefix.src, 'seismic_2D'), + join_path(prefix.src, 'seismic_3D', '1.ppohFDM-ppohVIS'), + join_path(prefix.src, 'seismic_3D', '3.parallel'), + ] + for d in clean_dir: + with working_dir(d): + make('clean') + force_remove(join_path(prefix, 'Makefile.in')) diff --git a/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch 
b/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch new file mode 100755 index 00000000000..4c6e67c65d9 --- /dev/null +++ b/var/spack/repos/builtin/packages/ppopen-appl-fdm/unused.patch @@ -0,0 +1,13 @@ +diff --git a/tools/seismic_2D-tools/rwav2d.f90 b/tools/seismic_2D-tools/rwav2d.f90 +index ec08ea8..0f9543c 100644 +--- a/tools/seismic_2D-tools/rwav2d.f90 ++++ b/tools/seismic_2D-tools/rwav2d.f90 +@@ -41,7 +41,7 @@ program sort_seism2d_result + + integer :: station_num + integer :: ns, isx, isz +- integer :: i, k, ii, kk, j, IT ++ integer :: i, k, ii, kk, j + character(len=80) filename, filename2, tmp1 + integer :: istx(5000,100), istz(5000,100), stnum(5000,100) + real(PN) :: ntime(5000,100), vxall(5000,100), vzall(5000,100) diff --git a/var/spack/repos/builtin/packages/ppopen-math-vis/package.py b/var/spack/repos/builtin/packages/ppopen-math-vis/package.py index ad2ff3a7c8f..0082783ed5a 100644 --- a/var/spack/repos/builtin/packages/ppopen-math-vis/package.py +++ b/var/spack/repos/builtin/packages/ppopen-math-vis/package.py @@ -43,3 +43,9 @@ def install(self, spec, prefix): copy_tree('examples', join_path(prefix, 'examples')) mkdir(join_path(prefix, 'doc')) copy_tree('doc', join_path(prefix, 'doc')) + + @property + def libs(self): + return find_libraries( + ['libfppohvisfdm3d', 'libppohvisfdm3d'], + root=self.prefix, shared=False, recursive=True) diff --git a/var/spack/repos/builtin/packages/precice/package.py b/var/spack/repos/builtin/packages/precice/package.py index 986bfa7f146..11250f60bda 100644 --- a/var/spack/repos/builtin/packages/precice/package.py +++ b/var/spack/repos/builtin/packages/precice/package.py @@ -42,6 +42,7 @@ class Precice(CMakePackage): depends_on('cmake@3.10.2:', type='build', when='@1.4:') depends_on('boost@1.60.0:') depends_on('boost@1.65.1:', when='@1.4:') + depends_on('boost@:1.72.99', when='@:2.0.2') depends_on('eigen@3.2:') depends_on('eigen@:3.3.7', type='build', when='@:1.5') # bug in prettyprint depends_on('libxml2') 
diff --git a/var/spack/repos/builtin/packages/prism/package.py b/var/spack/repos/builtin/packages/prism/package.py new file mode 100644 index 00000000000..a76f33ac7a3 --- /dev/null +++ b/var/spack/repos/builtin/packages/prism/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Prism(MakefilePackage): + """PRISM is a probabilistic model checker, a tool for formal modelling and + analysis of systems that exhibit random or probabilistic behaviour.""" + + homepage = "https://www.prismmodelchecker.org/" + url = "https://github.com/prismmodelchecker/prism/archive/v4.5.tar.gz" + + version('4.5', sha256='1cb7a77538b5c997d98a8c209030c46f9e8f021f7a8332e5eb2fd3b4a23936fd') + + build_directory = 'prism' + + depends_on('java', type=('build', 'run')) + + def setup_run_environment(self, env): + env.set('PRISM_DIR', self.prefix) + + def install(self, spec, prefix): + with working_dir(self.build_directory): + # after building, remove PRISM_DIR lines from startup scripts, + # as they point to the stage and not the prefix + for f in ['prism', 'xprism']: + filter_file('^PRISM_DIR.*', '', 'bin/{0}'.format(f)) + + dirs = [ + 'bin', + 'classes', + 'dtds', + 'etc', + 'include', + 'images', + 'lib', + ] + + for d in dirs: + install_tree(d, join_path(prefix, d)) diff --git a/var/spack/repos/builtin/packages/prmon/package.py b/var/spack/repos/builtin/packages/prmon/package.py new file mode 100644 index 00000000000..4ae90c2531c --- /dev/null +++ b/var/spack/repos/builtin/packages/prmon/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Prmon(CMakePackage): + """Standalone monitor for process resource consumption.""" + + homepage = "https://github.com/HSF/prmon/" + url = "https://github.com/HSF/prmon/archive/v1.1.1.tar.gz" + git = "https://github.com/HSF/prmon.git" + + maintainers = ['vvolkl'] + + version("master", branch="master") + version('1.1.1', sha256='5f074b05af2a12e2726c33f6a6e9e8e59ee0c4fb5fe056deb38abacd1bb6bf03') + + variant('plot', default=True, + description='Make use of plotting scripts') + + depends_on('nlohmann-json') + depends_on('cmake@3.3:', type="build") + depends_on('py-matplotlib', type="run", when="+plot") diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py index 19de7a79095..46a91ab8a0c 100644 --- a/var/spack/repos/builtin/packages/proj/package.py +++ b/var/spack/repos/builtin/packages/proj/package.py @@ -20,6 +20,7 @@ class Proj(AutotoolsPackage): # Version 6 removes projects.h, while version 7 removes proj_api.h. # Many packages that depend on proj do not yet support the newer API. # See https://github.com/OSGeo/PROJ/wiki/proj.h-adoption-status + version('6.3.1', sha256='6de0112778438dcae30fcc6942dee472ce31399b9e5a2b67e8642529868c86f8') version('6.2.0', sha256='b300c0f872f632ad7f8eb60725edbf14f0f8f52db740a3ab23e7b94f1cd22a50') version('6.1.0', sha256='676165c54319d2f03da4349cbd7344eb430b225fe867a90191d848dc64788008') version('6.0.0', sha256='4510a2c1c8f9056374708a867c51b1192e8d6f9a5198dd320bf6a168e44a3657') diff --git a/var/spack/repos/builtin/packages/prokka/package.py b/var/spack/repos/builtin/packages/prokka/package.py new file mode 100644 index 00000000000..eedb5fde081 --- /dev/null +++ b/var/spack/repos/builtin/packages/prokka/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Prokka(Package): + """Prokka is a software tool to annotate bacterial, archaeal and viral + genomes quickly and produce standards-compliant output files.""" + + homepage = "https://github.com/tseemann/prokka" + url = "https://github.com/tseemann/prokka/archive/v1.14.5.tar.gz" + + version('1.14.6', sha256='f730b5400ea9e507bfe6c5f3d22ce61960a897195c11571c2e1308ce2533faf8') + + depends_on('perl', type='run') + depends_on('perl-bioperl', type='run') + depends_on('perl-xml-simple', type='run') + depends_on('perl-bio-searchio-hmmer', type='run') + depends_on('hmmer', type='run') + depends_on('blast-plus', type='run') + depends_on('prodigal', type='run') + depends_on('tbl2asn', type='run') + + def install(self, spec, prefix): + install_tree('bin', prefix.bin) + install_tree('binaries', prefix.binaries) + install_tree('db', prefix.db) + install_tree('doc', prefix.doc) diff --git a/var/spack/repos/builtin/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py index 05702d9fc96..82d2d05d75b 100644 --- a/var/spack/repos/builtin/packages/protobuf/package.py +++ b/var/spack/repos/builtin/packages/protobuf/package.py @@ -8,12 +8,11 @@ import spack.util.web -class Protobuf(CMakePackage): +class Protobuf(Package): """Google's data interchange format.""" homepage = "https://developers.google.com/protocol-buffers" url = "https://github.com/protocolbuffers/protobuf/archive/v3.10.1.tar.gz" - root_cmakelists_dir = "cmake" version('3.11.2', sha256='e8c7601439dbd4489fe5069c33d374804990a56c2f710e00227ee5d8fd650e67') version('3.11.1', sha256='4f8e805825c53bbc3c9f6b6abc009b5b5679e4702bccfca1121c42ff5ec801c7') @@ -36,6 +35,7 @@ class Protobuf(CMakePackage): version('3.2.0', sha256='a839d3f1519ff9d68ab908de5a0f269650ef1fc501c10f6eefd4cae51d29b86f') version('3.1.0', sha256='fb2a314f4be897491bb2446697be693d489af645cb0e165a85e7e64e07eb134d') version('3.0.2', 
sha256='a0a265bcc9d4e98c87416e59c33afc37cede9fb277292523739417e449b18c1e') + version('2.5.0', sha256='c2665a7aa2ac1a206e61b28e014486e3de59009ea2be2bde9182e0847f38b62f') variant('shared', default=True, description='Enables the build of shared libraries') @@ -43,19 +43,26 @@ class Protobuf(CMakePackage): description='The build type to build', values=('Debug', 'Release')) + depends_on('cmake', when='@3.0.2:', type='build') depends_on('zlib') + depends_on('autoconf', type='build', when='@2.5.0') + depends_on('automake', type='build', when='@2.5.0') + depends_on('libtool', type='build', when='@2.5.0') + depends_on('m4', type='build', when='@2.5.0') conflicts('%gcc@:4.6', when='@3.6.0:') # Requires c++11 conflicts('%gcc@:4.6', when='@3.2.0:3.3.0') # Breaks # first fixed in 3.4.0: https://github.com/google/protobuf/pull/3406 - patch('pkgconfig.patch', when='@:3.3.2') + patch('pkgconfig.patch', when='@3.0.2:3.3.2') patch('intel-v1.patch', when='@3.2:@3.6 %intel') # See https://github.com/protocolbuffers/protobuf/pull/7197 patch('intel-v2.patch', when='@3.7:@3.11.4 %intel') + patch('protoc2.5.0_aarch64.patch', sha256='7b44fcdb794f421174d619f83584e00a36012a16da09079e2fad9c12f7337451', when='@2.5.0 target=aarch64:') + def fetch_remote_versions(self): """Ignore additional source artifacts uploaded with releases, only keep known versions @@ -75,3 +82,29 @@ def cmake_args(self): if sys.platform == 'darwin': args.extend(['-DCMAKE_MACOSX_RPATH=ON']) return args + + @when('@3.0.2:') + def install(self, spec, prefix): + args = self.cmake_args() + args.extend(std_cmake_args) + + source_directory = join_path(self.stage.source_path, 'cmake') + build_directory = join_path(source_directory, 'build') + + with working_dir(build_directory, create=True): + cmake(source_directory, *args) + make() + make('install') + + def configure_args(self): + args = [] + args.append('--prefix=%s' % self.prefix) + return args + + @when('@2.5.0') + def install(self, spec, prefix): + args = 
self.configure_args() + autoreconf('-ifv') + configure(*args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/protobuf/protoc2.5.0_aarch64.patch b/var/spack/repos/builtin/packages/protobuf/protoc2.5.0_aarch64.patch new file mode 100644 index 00000000000..aa1ebc6a9ab --- /dev/null +++ b/var/spack/repos/builtin/packages/protobuf/protoc2.5.0_aarch64.patch @@ -0,0 +1,113 @@ +diff -uprN /src/google/protobuf/stubs/atomicops_internals_arm_gcc.h /src/google/protobuf/stubs/atomicops_internals_arm_gcc.h +--- /src/google/protobuf/subs/atomicops_internals_arm_gcc.h 2018-08-03 08:50:58.579413324 +0000 ++++ /src/google/protobuf/stubs/atomicops_internals_arm_gcc.h 2018-08-03 08:50:58.711413322 +0000 +@@ -68,6 +68,30 @@ inline Atomic32 NoBarrier_CompareAndSwap + } while (prev_value == old_value); + return prev_value; + } ++inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr, ++ Atomic64 old_value, ++ Atomic64 new_value) { ++ Atomic64 prev; ++ int32_t temp; ++ ++ __asm__ __volatile__ ( // NOLINT ++ "0: \n\t" ++ "ldxr %[prev], %[ptr] \n\t" ++ "cmp %[prev], %[old_value] \n\t" ++ "bne 1f \n\t" ++ "stxr %w[temp], %[new_value], %[ptr] \n\t" ++ "cbnz %w[temp], 0b \n\t" ++ "1: \n\t" ++ : [prev]"=&r" (prev), ++ [temp]"=&r" (temp), ++ [ptr]"+Q" (*ptr) ++ : [old_value]"IJr" (old_value), ++ [new_value]"r" (new_value) ++ : "cc", "memory" ++ ); // NOLINT ++ ++ return prev; ++} + + inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, + Atomic32 new_value) { +@@ -105,6 +129,15 @@ inline Atomic32 Acquire_CompareAndSwap(v + return NoBarrier_CompareAndSwap(ptr, old_value, new_value); + } + ++inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr, ++ Atomic64 old_value, ++ Atomic64 new_value) { ++ Atomic64 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value); ++ MemoryBarrier(); ++ ++ return prev; ++} ++ + inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr, + Atomic32 old_value, + Atomic32 new_value) { +@@ -115,8 +148,11 @@ 
inline void NoBarrier_Store(volatile Ato + *ptr = value; + } + +-inline void MemoryBarrier() { ++/*inline void MemoryBarrier() { + pLinuxKernelMemoryBarrier(); ++}*/ ++inline void MemoryBarrier() { ++ __asm__ __volatile__ ("dmb ish" ::: "memory"); // NOLINT + } + + inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) { +@@ -129,6 +165,15 @@ inline void Release_Store(volatile Atomi + *ptr = value; + } + ++inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) { ++ __asm__ __volatile__ ( // NOLINT ++ "stlr %x[value], %[ptr] \n\t" ++ : [ptr]"=Q" (*ptr) ++ : [value]"r" (value) ++ : "memory" ++ ); // NOLINT ++} ++ + inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) { + return *ptr; + } +@@ -139,6 +184,19 @@ inline Atomic32 Acquire_Load(volatile co + return value; + } + ++inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) { ++ Atomic64 value; ++ ++ __asm__ __volatile__ ( // NOLINT ++ "ldar %x[value], %[ptr] \n\t" ++ : [value]"=r" (value) ++ : [ptr]"Q" (*ptr) ++ : "memory" ++ ); // NOLINT ++ ++ return value; ++} ++ + inline Atomic32 Release_Load(volatile const Atomic32* ptr) { + MemoryBarrier(); + return *ptr; +diff -uprN /src/google/protobuf/stubs/platform_macros.h /src/google/protobuf/stubs/platform_macros.h +--- /src/google/protobuf/stubs/platform_macros.h 2018-08-03 08:50:58.543413325 +0000 ++++ /src/google/protobuf/stubs/platform_macros.h 2018-08-03 08:50:58.595413324 +0000 +@@ -57,6 +57,9 @@ + #elif defined(__ppc__) + #define GOOGLE_PROTOBUF_ARCH_PPC 1 + #define GOOGLE_PROTOBUF_ARCH_32_BIT 1 ++#elif defined(__aarch64__) ++#define GOOGLE_PROTOBUF_ARCH_ARM 1 ++#define GOOGLE_PROTOBUF_ARCH_64_BIT 1 + #else + #error Host architecture was not detected as supported by protobuf + #endif + diff --git a/var/spack/repos/builtin/packages/prrte/package.py b/var/spack/repos/builtin/packages/prrte/package.py index c8fa0ede0d7..58a3d10819f 100644 --- a/var/spack/repos/builtin/packages/prrte/package.py +++ 
b/var/spack/repos/builtin/packages/prrte/package.py @@ -18,6 +18,7 @@ class Prrte(AutotoolsPackage): homepage = "https://pmix.org" url = "https://github.com/pmix/prrte/releases/download/v1.0.0/prrte-1.0.0.tar.bz2" git = "https://github.com/pmix/prrte.git" + maintainers = ['rhc54'] version('develop', branch='master') version('1.0.0', sha256='a9b3715e059c10ed091bd6e3a0d8896f7752e43ee731abcc95fb962e67132a2d') @@ -50,7 +51,7 @@ def configure_args(self): # libevent config_args.append( - '--with-libev={0}'.format(spec['libevent'].prefix)) + '--with-libevent={0}'.format(spec['libevent'].prefix)) # hwloc config_args.append('--with-hwloc={0}'.format(spec['hwloc'].prefix)) # pmix diff --git a/var/spack/repos/builtin/packages/py-addict/package.py b/var/spack/repos/builtin/packages/py-addict/package.py new file mode 100644 index 00000000000..b4b27f9975b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-addict/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyAddict(PythonPackage): + """addict is a Python module that gives you dictionaries + whose values are both gettable and settable using + attributes, in addition to standard item-syntax.""" + + homepage = "https://github.com/mewwts/addict" + url = "https://github.com/mewwts/addict/archive/v2.2.1.tar.gz" + + version('2.2.1', sha256='398bba9e7fa25e2ce144c5c4b8ec6208e89b9445869403dfa88ab66ec110fa12') + + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-asgiref/package.py b/var/spack/repos/builtin/packages/py-asgiref/package.py new file mode 100644 index 00000000000..6819d27aa92 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-asgiref/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyAsgiref(PythonPackage): + """ASGI specification and utilities.""" + + homepage = "https://asgi.readthedocs.io/en/latest/" + url = "https://github.com/django/asgiref/archive/3.2.7.tar.gz" + + version('3.2.7', sha256='8a0b556b9e936418475f6670d59e14592c41d15d00b5ea4ad26f2b46f9f4fb9a') + version('3.2.6', sha256='29788163bdad8d494475a0137eba39b111fd86fbe825534a9376f9f2ab44251a') + version('3.2.5', sha256='eeb01ba02e86859746ee2a7bc8a75c484a006dc9575723563f24642a12b2bba8') + version('3.2.4', sha256='89e47532340338b7eafd717ab28658e8b48f4565d8384628c88d2d41565c8da0') + version('3.2.3', sha256='d38e16141c7189e23bfe03342d9cd3dbfd6baab99217892bfa7bc5646315b6bd') + version('3.2.2', sha256='47edf327aa70f317c9bc810d469ce681f1b35a7f499f68cf2b5da3ba6a651e69') + version('3.2.1', sha256='06a21df1f4456d29079f3c475c09ac31167bcc5f024c637dedf4e00d2dd9020b') + version('3.2.0', sha256='5db8c7a6c1ff54ea04a52f994d8af959427f1cab8e427aa802492a89fb0b635a') + version('3.1.4', sha256='bf01c52111ef7af2adc1e6d90282d2a32c5ebe09e84ae448389ceff7cef53fa9') + version('3.1.3', sha256='5b8bb7b3719b8c12a6c2363784a4d8c0eb5e980d8b4fdb6f38eccb52071dfab5') + + depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-asteval/package.py b/var/spack/repos/builtin/packages/py-asteval/package.py new file mode 100644 index 00000000000..4f80a227896 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-asteval/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyAsteval(PythonPackage): + """Safe, minimalistic evaluator of python expression using ast module""" + + homepage = "http://github.com/newville/asteval" + url = "https://pypi.io/packages/source/a/asteval/asteval-0.9.18.tar.gz" + + version('0.9.18', sha256='5d64e18b8a72c2c7ae8f9b70d1f80b68bbcaa98c1c0d7047c35489d03209bc86') + + depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-pytest', type='test') diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py index 34cd769a0b9..3eb5aece5bb 100644 --- a/var/spack/repos/builtin/packages/py-astroid/package.py +++ b/var/spack/repos/builtin/packages/py-astroid/package.py @@ -13,6 +13,7 @@ class PyAstroid(PythonPackage): homepage = "https://github.com/PyCQA/astroid" url = "https://github.com/PyCQA/astroid/archive/astroid-1.4.5.tar.gz" + version('2.3.3', sha256='3a82983cf34dcbfe42ebcffeb98739e8a7bb868f03c1d9e298c530179b5075e7') version('2.2.0', sha256='7e289d0aa4a537b4aa798bd609fdf745de0f3c37e6b67642ed328e1482421a6d') version('1.6.6', sha256='3fbcc144457ba598fb48e0ddce5eacee62610ab11e6fe374b6eef5f7df2a3fbb') # version('1.5.3', sha256='6f65e4ea8290ec032320460905afb828') # has broken unit tests @@ -22,10 +23,18 @@ class PyAstroid(PythonPackage): version('1.4.2', sha256='f9007d651f4b3514ea5812127677a4bb681ff194164290cea358987920f24ee6') version('1.4.1', sha256='f1ab3ee6f17f9d30981399a52b56a7a7d2747ba24f0aa504e411ee6205a01fc0') - depends_on('py-lazy-object-proxy') - depends_on('py-six') - depends_on('py-wrapt') - depends_on('py-enum34@1.1.3:', when='^python@:3.3.99') - depends_on('py-singledispatch', when='^python@:3.3.99') - depends_on('py-backports-functools-lru-cache', when='^python@:3.2.99') - depends_on('py-setuptools@17.1:') + # Dependencies taken from astroid/__pkginfo__.py + depends_on('python@2.7:2.8,3.4:', 
when='@:1.999', type=('build', 'run')) + depends_on('python@3.4:', when='@2.0.0:', type=('build', 'run')) + depends_on('python@3.5:', when='@2.3.3:', type=('build', 'run')) + depends_on('py-lazy-object-proxy', type=('build', 'run')) + depends_on('py-lazy-object-proxy@1.4:1.4.999', when='@2.3.3:', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-six@1.12:1.999', when='@2.3.3:', type=('build', 'run')) + depends_on('py-wrapt', type=('build', 'run')) + depends_on('py-wrapt@1.11:1.11.999', when='@2.3.3:', type=('build', 'run')) + depends_on('py-enum34@1.1.3:', when='^python@:3.3.99', type=('build', 'run')) + depends_on('py-singledispatch', when='^python@:3.3.99', type=('build', 'run')) + depends_on('py-backports-functools-lru-cache', when='^python@:3.2.99', type=('build', 'run')) + depends_on('py-typed-ast@1.4.0:1.4.999', when='@2.3.3: ^python@:3.7.999', type=('build', 'run')) + depends_on('py-setuptools@17.1:', type='build') diff --git a/var/spack/repos/builtin/packages/py-astunparse/package.py b/var/spack/repos/builtin/packages/py-astunparse/package.py index 87ca432f56a..996a59183e4 100644 --- a/var/spack/repos/builtin/packages/py-astunparse/package.py +++ b/var/spack/repos/builtin/packages/py-astunparse/package.py @@ -15,6 +15,7 @@ class PyAstunparse(PythonPackage): homepage = "https://pypi.org/project/astunparse/" url = "https://pypi.io/packages/source/a/astunparse/astunparse-1.6.2.tar.gz" + version('1.6.3', sha256='5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872') version('1.6.2', sha256='dab3e426715373fd76cd08bb1abe64b550f5aa494cf1e32384f26fd60961eb67') depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-blis/package.py b/var/spack/repos/builtin/packages/py-blis/package.py new file mode 100644 index 00000000000..ce7f6c4c655 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-blis/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, 
LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyBlis(PythonPackage): + """Cython BLIS: Fast BLAS-like operations from Python and Cython, + without the tears""" + + homepage = "https://github.com/explosion/cython-blis" + url = "https://pypi.io/packages/source/b/blis/blis-0.4.1.tar.gz" + + version('0.4.1', sha256='d69257d317e86f34a7f230a2fd1f021fd2a1b944137f40d8cdbb23bd334cd0c4') + + depends_on('py-setuptools', type='build') + depends_on('py-numpy@1.15:', type=('build', 'run')) + depends_on('py-hypothesis', type='test') diff --git a/var/spack/repos/builtin/packages/py-blosc/package.py b/var/spack/repos/builtin/packages/py-blosc/package.py new file mode 100644 index 00000000000..da185302ff4 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-blosc/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyBlosc(PythonPackage): + """A Python wrapper for the extremely fast Blosc compression library""" + + homepage = "http://python-blosc.blosc.org" + url = "https://github.com/Blosc/python-blosc/archive/v1.9.1.tar.gz" + git = "https://github.com/Blosc/python-blosc.git" + + version('1.9.1', sha256='ffc884439a12409aa4e8945e21dc920d6bc21807357c51d24c7f0a27ae4f79b9') + + depends_on('cmake@3.11.0:', type='build') + depends_on('python@3.6:', type=('build', 'run')) + depends_on('py-scikit-build', type='build') + # depends_on('c-blosc') # shipped internally + depends_on('py-numpy', type='test') + depends_on('py-psutil', type='test') diff --git a/var/spack/repos/builtin/packages/py-cartopy/package.py b/var/spack/repos/builtin/packages/py-cartopy/package.py index feb7d8eba72..1cfa92aec6b 100644 --- a/var/spack/repos/builtin/packages/py-cartopy/package.py +++ b/var/spack/repos/builtin/packages/py-cartopy/package.py @@ -9,12 +9,20 @@ class PyCartopy(PythonPackage): """Cartopy - a cartographic python library with matplotlib support.""" - homepage = "http://scitools.org.uk/cartopy/" - url = "https://github.com/SciTools/cartopy/archive/v0.16.0.tar.gz" + homepage = "https://scitools.org.uk/cartopy/docs/latest/" + url = "https://github.com/SciTools/cartopy/archive/v0.17.0.tar.gz" + + maintainers = ['adamjstewart'] + import_modules = [ + 'cartopy', 'cartopy.sphinxext', 'cartopy.io', 'cartopy.geodesic', + 'cartopy.examples', 'cartopy.mpl', 'cartopy.feature', + ] version('0.17.0', sha256='137642e63952404ec0841fa0333ad14c58fbbf19cca2a5ac6a38498c4b4998fb') version('0.16.0', sha256='cadf62434492c965220b37f0548bc58180466ad6894a1db57dbc51cd43467e5c') + # https://scitools.org.uk/cartopy/docs/latest/installing.html#installing + depends_on('python@2.7:', type=('build', 'run')) depends_on('py-setuptools@0.7.2:', type='build') depends_on('py-cython@0.15.1:', type='build') depends_on('py-numpy@1.10.0:', 
type=('build', 'run')) @@ -25,11 +33,11 @@ class PyCartopy(PythonPackage): depends_on('proj@4.9.0:5', when='@0.16.0') depends_on('proj@4.9:', when='@0.17.0') - variant('epsg', default=True, description='Add support for epsg.io') - variant('ows', default=True, description='Add support for Open Geospatial Consortium (OGC) web service') - variant('plotting', default=True, description='Add plotting functionality') + variant('epsg', default=False, description='Add support for epsg.io') + variant('ows', default=False, description='Add support for Open Geospatial Consortium (OGC) web service') + variant('plotting', default=False, description='Add plotting functionality') - # optional dependecies + # Optional dependecies depends_on('py-matplotlib@1.5.1:', type=('build', 'run'), when='+plotting') depends_on('gdal@1.10.0:+python', type=('build', 'run'), when='+plotting') depends_on('py-pillow@1.7.8:', type=('build', 'run'), when='+ows') @@ -38,19 +46,34 @@ class PyCartopy(PythonPackage): depends_on('py-scipy@0.10:', type=('build', 'run'), when='+plotting') depends_on('py-owslib@0.8.11:', type=('build', 'run'), when='+ows') - # testing dependencies + # Testing dependencies depends_on('py-filelock', type='test') - depends_on('py-mock@1.0.1', type='test') + depends_on('py-mock@1.0.1:', type='test') depends_on('py-pytest@3.0.0:', type='test') + patch('proj6.patch', when='@0.17.0') + phases = ['build_ext', 'install'] def build_ext_args(self, spec, prefix): - args = ['-I{0}'.format(spec['proj'].prefix.include), - '-L{0}'.format(spec['proj'].prefix.lib) - ] + args = [ + spec['geos'].headers.include_flags, + spec['geos'].libs.search_flags, + spec['proj'].headers.include_flags, + spec['proj'].libs.search_flags, + ] - if spec.satisfies('@0.17.0 ^proj@6'): - args.append('-DACCEPT_USE_OF_DEPRECATED_PROJ_API_H') + if '+plotting' in spec: + args.extend([ + spec['gdal'].headers.include_flags, + spec['gdal'].libs.search_flags, + ]) return args + + # Tests need to be re-added since `phases` 
was overridden + run_after('build_ext')( + PythonPackage._run_default_build_time_test_callbacks) + run_after('install')( + PythonPackage._run_default_install_time_test_callbacks) + run_after('install')(PythonPackage.sanity_check_prefix) diff --git a/var/spack/repos/builtin/packages/py-cartopy/proj6.patch b/var/spack/repos/builtin/packages/py-cartopy/proj6.patch new file mode 100644 index 00000000000..5ee2ca5490f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cartopy/proj6.patch @@ -0,0 +1,109 @@ +# Fix PROJ.6 support +# Adapted from https://github.com/SciTools/cartopy/pull/1289 +--- a/setup.py 2020-05-02 17:50:37.000000000 -0500 ++++ b/setup.py 2020-05-02 18:02:04.000000000 -0500 +@@ -16,25 +16,25 @@ + # along with cartopy. If not, see . + from __future__ import print_function + +-""" +-Distribution definition for Cartopy. +- +-""" +- +-import setuptools +-from setuptools import setup, Extension +-from setuptools import Command +-from setuptools import convert_path +-from distutils.spawn import find_executable +-from distutils.sysconfig import get_config_var + import fnmatch + import os + import subprocess + import sys + import warnings ++from collections import defaultdict ++from distutils.spawn import find_executable ++from distutils.sysconfig import get_config_var ++ ++from setuptools import Command, Extension, convert_path, setup + + import versioneer + ++""" ++Distribution definition for Cartopy. 
++""" ++ ++ ++ + + try: + from Cython.Distutils import build_ext +@@ -230,6 +230,18 @@ + return proj_version + + ++def get_proj_libraries(): ++ """ ++ This function gets the PROJ libraries to cythonize with ++ """ ++ proj_libraries = ["proj"] ++ if os.name == "nt" and proj_version >= (6, 0, 0): ++ proj_libraries = [ ++ "proj_{}_{}".format(proj_version[0], proj_version[1]) ++ ] ++ return proj_libraries ++ ++ + conda = os.getenv('CONDA_DEFAULT_ENV') + if conda is not None and conda in sys.prefix: + # Conda does not provide pkg-config compatibility, but the search paths +@@ -245,7 +257,7 @@ + exit(1) + + proj_includes = [] +- proj_libraries = ['proj'] ++ proj_libraries = get_proj_libraries() + proj_library_dirs = [] + + else: +@@ -268,7 +280,7 @@ + exit(1) + + proj_includes = [] +- proj_libraries = ['proj'] ++ proj_libraries = get_proj_libraries() + proj_library_dirs = [] + else: + if proj_version < PROJ_MIN_VERSION: +@@ -284,7 +296,7 @@ + proj_clibs = proj_clibs.decode() + + proj_includes = [proj_include[2:] if proj_include.startswith('-I') else +- proj_include for proj_include in proj_includes.split()] ++ proj_include for proj_include in proj_includes.split()] + + proj_libraries = [] + proj_library_dirs = [] +@@ -316,11 +328,16 @@ + return '.' 
+ include_dir = get_config_var('INCLUDEDIR') + library_dir = get_config_var('LIBDIR') +-if sys.platform.startswith('win'): +- extra_extension_args = {} +-else: +- extra_extension_args = dict( +- runtime_library_dirs=[get_config_var('LIBDIR')]) ++extra_extension_args = defaultdict(list) ++if not sys.platform.startswith('win'): ++ extra_extension_args["runtime_library_dirs"].append( ++ get_config_var('LIBDIR') ++ ) ++ ++if proj_version >= (6, 0, 0): ++ extra_extension_args["define_macros"].append( ++ ('ACCEPT_USE_OF_DEPRECATED_PROJ_API_H', '1') ++ ) + + # Description + # =========== diff --git a/var/spack/repos/builtin/packages/py-catalogue/package.py b/var/spack/repos/builtin/packages/py-catalogue/package.py new file mode 100644 index 00000000000..799f5bb2ff3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-catalogue/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyCatalogue(PythonPackage): + """catalogue: Super lightweight function registries for your library.""" + + homepage = "https://github.com/explosion/catalogue" + url = "https://pypi.io/packages/source/c/catalogue/catalogue-2.0.0.tar.gz" + + version('2.0.0', sha256='34f8416ec5e7ed08e55c10414416e67c3f4d66edf83bc67320c3290775293816') + version('1.0.0', sha256='d74d1d856c6b36a37bf14aa6dbbc27d0582667b7ab979a6108e61a575e8723f5') + + depends_on('python@3.6:', when='@2:', type=('build', 'run')) + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-importlib-metadata@0.20:', when='^python@:3.7', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-cf-units/package.py b/var/spack/repos/builtin/packages/py-cf-units/package.py index 07f062dca95..54d6e909b32 100644 --- a/var/spack/repos/builtin/packages/py-cf-units/package.py +++ b/var/spack/repos/builtin/packages/py-cf-units/package.py @@ -16,6 +16,7 @@ class PyCfUnits(PythonPackage): git = "https://github.com/SciTools/cf-units.git" version('master', branch='master') + version('2.1.4', sha256='25f81ad994af30713ee8f5ef18ffddd83c6ec1ac308e1bd89d45de9d2e0f1c31') version('2.1.1', sha256='fa0ef8efd84546e61088aa23e76ebbaf7043167dc3a7f35f34549c234b543530') depends_on('python@3:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-chainer/package.py b/var/spack/repos/builtin/packages/py-chainer/package.py new file mode 100644 index 00000000000..c75383b4694 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-chainer/package.py @@ -0,0 +1,36 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyChainer(PythonPackage): + """ + Chainer is a Python-based deep learning framework aiming at flexibility. + + It provides automatic differentiation APIs based on the define-by-run + approach (a.k.a. dynamic computational graphs) as well as object-oriented + high-level APIs to build and train neural networks. + It also supports CUDA/cuDNN using CuPy for high performance training + and inference. + """ + + homepage = "https://chainer.org/" + url = "https://github.com/chainer/chainer/archive/v7.2.0.tar.gz" + + maintainers = ['adamjstewart'] + + version('7.2.0', sha256='6e2fba648cc5b8a5421e494385b76fe5ec154f1028a1c5908557f5d16c04f0b3') + version('6.7.0', sha256='87cb3378a35e7c5c695028ec91d58dc062356bc91412384ea939d71374610389') + + depends_on('python@3.5.1:', when='@7:', type=('build', 'run')) + depends_on('py-setuptools', type=('build', 'run')) + depends_on('py-numpy@1.9:', type=('build', 'run')) + depends_on('py-six@1.9.0:', type=('build', 'run')) + depends_on('py-typing-extensions', type=('build', 'run')) + depends_on('py-typing-extensions@:3.6.6', when='@:6', type=('build', 'run')) + depends_on('py-filelock', type=('build', 'run')) + depends_on('py-protobuf@3:', type=('build', 'run')) + depends_on('py-typing@:3.6.6', when='@:6', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-cmake-format/package.py b/var/spack/repos/builtin/packages/py-cmake-format/package.py index a8f6f862298..b300b97379f 100644 --- a/var/spack/repos/builtin/packages/py-cmake-format/package.py +++ b/var/spack/repos/builtin/packages/py-cmake-format/package.py @@ -14,6 +14,7 @@ class PyCmakeFormat(PythonPackage): homepage = "https://pypi.python.org/pypi/cmake-format" url = "https://pypi.io/packages/source/c/cmake_format/cmake_format-0.6.9.tar.gz" + version('0.6.10', sha256='82f0ef16236225cb43f45bfb6983ef7f6f72634727a1a6c26290402527bdd793') version('0.6.9', 
sha256='b2f8bf2e9c6651126f2f2954b7803222b0faf6b8649eabc4d965ea97483a4d20') depends_on('py-setuptools', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-coilmq/package.py b/var/spack/repos/builtin/packages/py-coilmq/package.py new file mode 100644 index 00000000000..fc2f404e51e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-coilmq/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyCoilmq(PythonPackage): + """Lightweight Python STOMP message broker.""" + + homepage = "https://github.com/hozn/coilmq" + url = "https://github.com/hozn/coilmq/archive/1.0.0.tar.gz" + + version('1.0.0', sha256='95d12de9b007fc679d4ad2eba0253aee8f6ecf42b79a94be6a2e0349f91086b0') + version('0.6.1', sha256='402a5f88631a848926c442385248d7ae7bd05607bba8f20605e31fd49c3677f9') + version('0.6.0', sha256='50d22fde72f058720bb48ad96bdd7c87594372d7917dd5d2cca40a9d195fde27') + version('0.5.0', sha256='ffe2648e0a336ff61f729ad76090f8a16e681b3d3d6b14ba7ce3ef840de32cd9') + version('0.4.4', sha256='2a0d494c73412e76075d2a72698948fb1d84c9c5719b134c364c07bcc6a3eacf') + version('0.4.3', sha256='7a051f4fd2b76c8accf0b3f435267566910085c18483726e9eb56416e40703b7') + + depends_on('py-setuptools', type='build') + depends_on('py-python-daemon', type=('build', 'run')) + depends_on('py-pid', type=('build', 'run')) + depends_on('py-wheel', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-click', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-configargparse/package.py b/var/spack/repos/builtin/packages/py-configargparse/package.py new file mode 100644 index 00000000000..acaf0cc4bbe --- /dev/null +++ b/var/spack/repos/builtin/packages/py-configargparse/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2019 Lawrence Livermore 
National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyConfigargparse(PythonPackage): + """Applications with more than a handful of user-settable + options are best configured through a combination of + command line args, config files, hard-coded defaults, and + in some cases, environment variables. + + Python's command line parsing modules such as argparse have + very limited support for config files and environment + variables, so this module extends argparse to add these + features.""" + + homepage = "https://github.com/bw2/ConfigArgParse" + url = "https://github.com/bw2/ConfigArgParse/archive/1.2.3.tar.gz" + + version('1.2.3', sha256='0f1144a204e3b896d6ac900e151c1d13bde3103d6b7d541e3bb57514a94083bf') + + depends_on('python@2.2:2.999,3.5:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-configobj/package.py b/var/spack/repos/builtin/packages/py-configobj/package.py index 6b8521405de..350695780de 100644 --- a/var/spack/repos/builtin/packages/py-configobj/package.py +++ b/var/spack/repos/builtin/packages/py-configobj/package.py @@ -16,15 +16,5 @@ class PyConfigobj(PythonPackage): version('5.0.6', sha256='a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902') version('4.7.2', sha256='515ff923462592e8321df8b48c47e3428f8d406ee22b8de77bef969d1af11171') - # The version on PyPi seems to be outdated (2014) although confusingly - # enough version number is the same as the latest release on github. 
depends_on('py-six', type=('build', 'run')) depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) - - def url_for_version(self, version): - if version <= Version('5.0.0'): - url = "https://pypi.io/packages/source/c/configobj/configobj-{0}.tar.gz" - else: - url = "https://github.com/DiffSK/configobj/archive/v{0}.tar.gz" - - return url.format(version) diff --git a/var/spack/repos/builtin/packages/py-cymem/package.py b/var/spack/repos/builtin/packages/py-cymem/package.py new file mode 100644 index 00000000000..fa076b88361 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cymem/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyCymem(PythonPackage): + """Manage calls to calloc/free through Cython.""" + + homepage = "https://github.com/explosion/cymem" + url = "https://pypi.io/packages/source/c/cymem/cymem-2.0.3.tar.gz" + + version('2.0.3', sha256='5083b2ab5fe13ced094a82e0df465e2dbbd9b1c013288888035e24fd6eb4ed01') + + depends_on('py-setuptools', type='build') + depends_on('py-wheel@0.32.0:0.32.999', type='build') diff --git a/var/spack/repos/builtin/packages/py-dask/package.py b/var/spack/repos/builtin/packages/py-dask/package.py index 4967306f63d..b6f3a07f10a 100644 --- a/var/spack/repos/builtin/packages/py-dask/package.py +++ b/var/spack/repos/builtin/packages/py-dask/package.py @@ -12,6 +12,9 @@ class PyDask(PythonPackage): homepage = "https://github.com/dask/dask/" url = "https://pypi.io/packages/source/d/dask/dask-1.1.0.tar.gz" + maintainers = ['skosukhin'] + + version('2.16.0', sha256='2af5b0dcd48ce679ce0321cf91de623f4fe376262789b951fefa3c334002f350') version('1.2.2', sha256='5e7876bae2a01b355d1969b73aeafa23310febd8c353163910b73e93dc7e492c') version('1.1.2', sha256='93b355b9a9c9a3ddbb39fab99d5759aad5cfd346f4520b87788970e80cf97256') version('1.1.0', 
sha256='e76088e8931b326c05a92d2658e07b94a6852b42c13a7560505a8b2354871454') @@ -21,10 +24,18 @@ class PyDask(PythonPackage): variant('array', default=True, description='Install requirements for dask.array') variant('bag', default=True, description='Install requirements for dask.bag') variant('dataframe', default=True, description='Install requirements for dask.dataframe') - variant('delayed', default=True, description='Install requirements for dask.delayed') variant('distributed', default=True, description='Install requirements for dask.distributed') + variant('diagnostics', default=False, description='Install requirements for dask.diagnostics') + variant('delayed', default=True, description='Install requirements for dask.delayed (dask.imperative)') + variant('yaml', default=True, description='Ensure support for YAML configuration files') - conflicts('+distributed', when='@:1.2.1') # Only present in 1.2.2+ + conflicts('+distributed', when='@:0.4.0,0.7.6:0.8.1') + conflicts('+diagnostics', when='@:0.5.0') + conflicts('+yaml', when='@:0.17.5') + + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) + depends_on('python@3.5:', type=('build', 'run'), when='@2.0.0:') + depends_on('python@3.6:', type=('build', 'run'), when='@2.7.0:') depends_on('py-setuptools', type='build') depends_on('py-pytest@3.1.0:', type='test') @@ -32,35 +43,107 @@ class PyDask(PythonPackage): depends_on('py-pytest-runner', type='test') # Requirements for dask.array - depends_on('py-numpy@1.11.0:', type=('build', 'run'), when='+array') - depends_on('py-numpy@1.13.0:', type=('build', 'run'), when='@1.2.2: +array') - depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='+array') + depends_on('py-numpy@1.10.4:', type=('build', 'run'), when='+array') + depends_on('py-numpy@1.11.0:', type=('build', 'run'), when='@0.17.3: +array') + depends_on('py-numpy@1.13.0:', type=('build', 'run'), when='@1.2.1: +array') + + depends_on('py-toolz', type=('build', 'run'), when='+array') + 
depends_on('py-toolz@0.7.2:', type=('build', 'run'), when='@0.7.0: +array') + depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='@0.14.1: +array') + depends_on('py-toolz@0.8.2:', type=('build', 'run'), when='@2.13.0: +array') # Requirements for dask.bag - depends_on('py-cloudpickle@0.2.1:', type=('build', 'run'), when='+bag') - depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='+bag') - depends_on('py-partd@0.3.8:', type=('build', 'run'), when='+bag') + depends_on('py-dill', type=('build', 'run'), when='@:0.7.5 +bag') + depends_on('py-cloudpickle', type=('build', 'run'), when='@0.7.6: +bag') + depends_on('py-cloudpickle@0.2.1:', type=('build', 'run'), when='@0.8.2: +bag') + depends_on('py-cloudpickle@0.2.2:', type=('build', 'run'), when='@2.13.0: +bag') + + depends_on('py-fsspec@0.3.3:', type=('build', 'run'), when='@2.2.0: +bag') + depends_on('py-fsspec@0.5.1:', type=('build', 'run'), when='@2.5.0: +bag') + depends_on('py-fsspec@0.6.0:', type=('build', 'run'), when='@2.8.0: +bag') + + depends_on('py-toolz', type=('build', 'run'), when='+bag') + depends_on('py-toolz@0.7.2:', type=('build', 'run'), when='@0.7.0: +bag') + depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='@0.14.1: +bag') + depends_on('py-toolz@0.8.2:', type=('build', 'run'), when='@2.13.0: +bag') + + depends_on('py-partd', type=('build', 'run'), when='+bag') + depends_on('py-partd@0.3.2:', type=('build', 'run'), when='@0.6.0: +bag') + depends_on('py-partd@0.3.3:', type=('build', 'run'), when='@0.9.0: +bag') + depends_on('py-partd@0.3.5:', type=('build', 'run'), when='@0.10.2: +bag') + depends_on('py-partd@0.3.6:', type=('build', 'run'), when='@0.12.0: +bag') + depends_on('py-partd@0.3.7:', type=('build', 'run'), when='@0.13.0: +bag') + depends_on('py-partd@0.3.8:', type=('build', 'run'), when='@0.15.0: +bag') + depends_on('py-partd@0.3.10:', type=('build', 'run'), when='@2.0.0: +bag') # Requirements for dask.dataframe - depends_on('py-numpy@1.11.0:', type=('build', 'run'), 
when='+dataframe') - depends_on('py-numpy@1.13.0:', type=('build', 'run'), when='@1.2.2: +dataframe') - depends_on('py-pandas@0.19.0:', type=('build', 'run'), when='+dataframe') - depends_on('py-pandas@0.21.0:', type=('build', 'run'), when='@1.2.2: +dataframe') - depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='+dataframe') - depends_on('py-partd@0.3.8:', type=('build', 'run'), when='+dataframe') - depends_on('py-cloudpickle@0.2.1:', type=('build', 'run'), when='+dataframe') + depends_on('py-numpy@1.10.4:', type=('build', 'run'), when='+dataframe') + depends_on('py-numpy@1.11.0:', type=('build', 'run'), when='@0.17.3: +dataframe') + depends_on('py-numpy@1.13.0:', type=('build', 'run'), when='@1.2.1: +dataframe') - # Requirements for dask.delayed - depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='+delayed') + depends_on('py-pandas@0.16.0:', type=('build', 'run'), when='+dataframe') + depends_on('py-pandas@0.18.0:', type=('build', 'run'), when='@0.9.0: +dataframe') + depends_on('py-pandas@0.19.0:', type=('build', 'run'), when='@0.14.0: +dataframe') + depends_on('py-pandas@0.21.0:', type=('build', 'run'), when='@1.2.1: +dataframe') + depends_on('py-pandas@0.23.0:', type=('build', 'run'), when='@2.11.0: +dataframe') + + depends_on('py-toolz', type=('build', 'run'), when='+dataframe') + depends_on('py-toolz@0.7.2:', type=('build', 'run'), when='@0.7.0: +dataframe') + depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='@0.14.1: +dataframe') + depends_on('py-toolz@0.8.2:', type=('build', 'run'), when='@2.13.0: +dataframe') + + depends_on('py-partd', type=('build', 'run'), when='+dataframe') + depends_on('py-partd@0.3.2:', type=('build', 'run'), when='@0.6.0: +dataframe') + depends_on('py-partd@0.3.3:', type=('build', 'run'), when='@0.9.0: +dataframe') + depends_on('py-partd@0.3.5:', type=('build', 'run'), when='@0.10.2: +dataframe') + depends_on('py-partd@0.3.7:', type=('build', 'run'), when='@0.13.0: +dataframe') + depends_on('py-partd@0.3.8:', 
type=('build', 'run'), when='@0.15.0: +dataframe') + depends_on('py-partd@0.3.10:', type=('build', 'run'), when='@2.0.0: +dataframe') + + depends_on('py-cloudpickle@0.2.1:', type=('build', 'run'), when='@0.8.2:2.6.0 +dataframe') + + depends_on('py-fsspec@0.3.3:', type=('build', 'run'), when='@2.2.0: +dataframe') + depends_on('py-fsspec@0.5.1:', type=('build', 'run'), when='@2.5.0: +dataframe') + depends_on('py-fsspec@0.6.0:', type=('build', 'run'), when='@2.8.0: +dataframe') # Requirements for dask.distributed - depends_on('py-distributed@1.22:', type=('build', 'run'), when='+distributed') + depends_on('py-dill', type=('build', 'run'), when='@:0.7.5 +distributed') + depends_on('py-pyzmq', type=('build', 'run'), when='@:0.7.5 +distributed') + depends_on('py-distributed', type=('build', 'run'), when='@0.8.2: +distributed') + depends_on('py-distributed@1.9:', type=('build', 'run'), when='@0.9.0: +distributed') + depends_on('py-distributed@1.10:', type=('build', 'run'), when='@0.10.0: +distributed') + depends_on('py-distributed@1.14:', type=('build', 'run'), when='@0.12.0: +distributed') + depends_on('py-distributed@1.15:', type=('build', 'run'), when='@0.13.0: +distributed') + depends_on('py-distributed@1.16:', type=('build', 'run'), when='@0.14.1: +distributed') + depends_on('py-distributed@1.20:', type=('build', 'run'), when='@0.16.0: +distributed') + depends_on('py-distributed@1.21:', type=('build', 'run'), when='@0.17.0: +distributed') + depends_on('py-distributed@1.22:', type=('build', 'run'), when='@0.18.0: +distributed') + depends_on('py-distributed@2.0:', type=('build', 'run'), when='@2.0.0: +distributed') + + # Requirements for dask.diagnostics + depends_on('py-bokeh', type=('build', 'run'), when='+diagnostics') + depends_on('py-bokeh@1.0.0:', type=('build', 'run'), when='@2.0.0: +diagnostics') + + # Requirements for dask.delayed + depends_on('py-cloudpickle@0.2.1:', type=('build', 'run'), when='@2.7.0: +delayed') + depends_on('py-cloudpickle@0.2.2:',
type=('build', 'run'), when='@2.13.0: +delayed') + + depends_on('py-toolz@0.7.2:', type=('build', 'run'), when='@0.8.1: +delayed') + depends_on('py-toolz@0.7.3:', type=('build', 'run'), when='@0.14.1: +delayed') + depends_on('py-toolz@0.8.2:', type=('build', 'run'), when='@2.13.0: +delayed') + + # Support for YAML configuration files + depends_on('py-pyyaml', type=('build', 'run'), when='+yaml') @property def import_modules(self): - modules = [ - 'dask', 'dask.bytes', 'dask.diagnostics', 'dask.store' - ] + modules = ['dask'] + + if self.spec.satisfies('@0.9.0:'): + modules.append('dask.bytes') + + if self.spec.satisfies('@:0.20.2'): + modules.append('dask.store') if '+array' in self.spec: modules.append('dask.array') @@ -68,9 +151,17 @@ def import_modules(self): if '+bag' in self.spec: modules.append('dask.bag') + if self.spec.satisfies('@:0.7.5 +distributed'): + modules.append('dask.distributed') + if '+dataframe' in self.spec: - modules.extend([ - 'dask.dataframe', 'dask.dataframe.io', 'dask.dataframe.tseries' - ]) + modules.append('dask.dataframe') + if self.spec.satisfies('@0.8.2:'): + modules.append('dask.dataframe.tseries') + if self.spec.satisfies('@0.12.0:'): + modules.append('dask.dataframe.io') + + if '+diagnostics' in self.spec: + modules.append('dask.diagnostics') return modules diff --git a/var/spack/repos/builtin/packages/py-devlib/package.py b/var/spack/repos/builtin/packages/py-devlib/package.py new file mode 100644 index 00000000000..5a9bb49486c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-devlib/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyDevlib(PythonPackage): + """Library for interaction with and instrumentation of remote devices.""" + + homepage = "https://github.com/ARM-software/devlib" + url = "https://github.com/ARM-software/devlib/archive/v1.2.tar.gz" + + version('1.2', sha256='4cdb6767a9430b49eecffe34e2b9fcbcfc7e65328122d909aa71c3d11a86503d') + version('1.1.2', sha256='c900420cb97239b4642f5e333e43884fb09507b530edb55466e7b82103b4deaa') + version('1.1.1', sha256='eceb7a2721197a6023bbc2bbf346663fc117e4f54e1eb8334a3085dead9c8036') + version('1.1.0', sha256='317e9be2303ebb6aebac9a2ec398c622ea16d6e46079dc9e37253b37d739ca9d') + version('1.0.0', sha256='2f78278bdc9731a4fa13c41c74f08e0b8c5143de5fa1e1bdb2302673aec45862') + version('0.0.4', sha256='0f55e684d43fab759d0e74bd8f0d0260d9546a8b8d853d286acfe5e00c86da05') + version('0.0.3', sha256='29ec5f1de481783ab0b9efc111dfeb67c890187d56fca8592b25ee756ff32902') + version('0.0.2', sha256='972f33be16a06572a19b67d909ee0ed6cb6f21f9a9da3c43fd0ff5851421051d') + + depends_on('py-setuptools', type='build') + depends_on('py-python-dateutil', type=('build', 'run')) + depends_on('py-pexpect@3.3:', type=('build', 'run')) + depends_on('py-pyserial', type=('build', 'run')) + depends_on('py-wrapt', type=('build', 'run')) + depends_on('py-future', type=('build', 'run')) + depends_on('py-pandas', type=('build', 'run')) + depends_on('py-enum34', type=('build', 'run'), when='^python@:3.3') + depends_on('py-contextlib2', type=('build', 'run'), when='^python@:2.999') + depends_on('py-numpy@:1.16.4', type=('build', 'run'), when='^python@:2.999') + depends_on('py-numpy', type=('build', 'run'), when='^python@3.0:') diff --git a/var/spack/repos/builtin/packages/py-dgl/package.py b/var/spack/repos/builtin/packages/py-dgl/package.py index b1b1c37586f..544b527f69b 100644 --- a/var/spack/repos/builtin/packages/py-dgl/package.py +++ b/var/spack/repos/builtin/packages/py-dgl/package.py @@ -19,6 +19,7
@@ class PyDgl(CMakePackage): maintainers = ['adamjstewart'] version('master', branch='master', submodules=True) + version('0.4.3', tag='0.4.3', submodules=True) version('0.4.2', tag='0.4.2', submodules=True) variant('cuda', default=True, description='Build with CUDA') @@ -31,6 +32,7 @@ class PyDgl(CMakePackage): depends_on('llvm-openmp', when='%clang platform=darwin +openmp') # Python dependencies + # See python/setup.py extends('python') depends_on('python@3.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') @@ -38,12 +40,17 @@ class PyDgl(CMakePackage): depends_on('py-numpy@1.14.0:', type=('build', 'run')) depends_on('py-scipy@1.1.0:', type=('build', 'run')) depends_on('py-networkx@2.1:', type=('build', 'run')) + depends_on('py-requests@2.19.0:', when='@0.4.3:', type=('build', 'run')) # Backends - depends_on('py-torch@0.4.1:', when='backend=pytorch', type='run') - depends_on('mxnet@1.5:', when='backend=mxnet', type='run') + # See https://github.com/dmlc/dgl#installation + depends_on('py-torch@1.2.0:', when='@0.4.3: backend=pytorch', type='run') + depends_on('py-torch@0.4.1:', when='backend=pytorch', type='run') + depends_on('mxnet@1.5.1:', when='@0.4.3: backend=mxnet', type='run') + depends_on('mxnet@1.5.0:', when='backend=mxnet', type='run') + depends_on('py-tensorflow@2.1:', when='@0.4.3: backend=tensorflow', type='run') depends_on('py-tensorflow@2.0:', when='backend=tensorflow', type='run') - depends_on('py-tfdlpack', when='backend=tensorflow', type='run') + depends_on('py-tfdlpack', when='backend=tensorflow', type='run') build_directory = 'build' diff --git a/var/spack/repos/builtin/packages/py-django/package.py b/var/spack/repos/builtin/packages/py-django/package.py new file mode 100644 index 00000000000..0dc1fa13885 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-django/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers.
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyDjango(PythonPackage): + """The Web framework for perfectionists with deadlines.""" + + homepage = "https://www.djangoproject.com/" + url = "https://github.com/django/django/archive/3.0.5.tar.gz" + + version('3.0.5', sha256='ef2d4f26414dc9598afce9c56cee4578313b88861cedfc5b3d9a71078e5cc79b') + version('3.0.4', sha256='99699643d83acfab51d3ad73c2c2904173e03a4f59fe24c3d494e4fafc0b679f') + version('3.0.3', sha256='d953c950f0c395db065c8bc39d20e87faded376632a3aacf889ae92d5adaac8b') + version('3.0.2', sha256='ca316b1179a16931ed872ce970aabefcf3d41fe0d4b1a8e1301ec59e1e0ab45b') + version('3.0.1', sha256='85349b9366364847264b2b707ffcff5a27a022afa29aac0e904ca672cbe5ee65') + version('2.2.12', sha256='ec490c67bd2780b4ec4f5355cd99fa2fa6007f81695dd45a9e8f7ccc5ff17772') + version('2.2.11', sha256='f4274181973f0f021cc00419cfa342f1a6f862406e766ae93e7fbba9d84c680c') + version('2.2.10', sha256='3741536cf122d6695e8575b2fcf67c18812751fd3143393ea75c01a277afdacc') + + depends_on('py-setuptools', type='build') + depends_on('python@3.6:', type=('build', 'run')) + depends_on('py-pytz', type=('build', 'run')) + depends_on('py-sqlparse', type=('build', 'run')) + depends_on('py-asgiref', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-fastcluster/package.py b/var/spack/repos/builtin/packages/py-fastcluster/package.py new file mode 100644 index 00000000000..bc09681527e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-fastcluster/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyFastcluster(PythonPackage): + """Fast hierarchical clustering routines for R and Python.""" + + homepage = "http://danifold.net/" + url = "https://pypi.io/packages/source/f/fastcluster/fastcluster-1.1.26.tar.gz" + + version('1.1.26', sha256='a202f44a3b06f5cf9cdba3c67d6c523288922d6e6a1cdf737292f93759aa82f7') + + depends_on('py-setuptools', type='build') + depends_on('py-numpy@1.9:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-fits-tools/package.py b/var/spack/repos/builtin/packages/py-fits-tools/package.py new file mode 100644 index 00000000000..5127d887270 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-fits-tools/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyFitsTools(PythonPackage): + """Tools for manipulating FITS images using primarily scipy + & native python routines""" + + homepage = "https://github.com/keflavich/FITS_tools" + url = "https://github.com/keflavich/FITS_tools/archive/v0.2.tar.gz" + + version('1.1.2', sha256='6c7596533ea66f5ca05e4326ae6db643edb03aca4b6b654dce091834155d03e8') + version('1.1.1', sha256='5b79ef24fadb43458388754381644712c05cd89da4f89c197e3bd80ca158c525') + version('1.1', sha256='995ebf53dc0ffd8bdb5270c4fa0cf52f639aac05cfb68dc6fd5d58ab40148a8a') + version('1.0', sha256='b711981eb780f3d27a5dec413397af68493b496e1621f9a37cf68dd265536937') + version('0.4.1', sha256='3511eb7bbaf73ac68b92b460c71b7a3dbf6c3860fae908638180876eca12f8fd') + version('0.4', sha256='dbdb558d4c3ada627d42b62aaec5eb8f8f6dd3e323cae853fd447d9ec7805637') + version('0.2', sha256='04c4b6eeb09298bca79b228175fcd209d4ca895ce5675f6684120e75928d2d97', preferred=True) + version('0.1',
sha256='d128e49ff4ecc6a9bf9a050f8605bc457e028e10e48bb8d6fda4ca358298ec17') + + depends_on('py-setuptools', type='build') + depends_on('py-astropy') + depends_on('py-astropy-helpers') + depends_on('py-scipy') + depends_on('py-numpy') diff --git a/var/spack/repos/builtin/packages/py-flake8/package.py b/var/spack/repos/builtin/packages/py-flake8/package.py index 462758ddf41..4c585ee7558 100644 --- a/var/spack/repos/builtin/packages/py-flake8/package.py +++ b/var/spack/repos/builtin/packages/py-flake8/package.py @@ -13,6 +13,7 @@ class PyFlake8(PythonPackage): homepage = "https://github.com/PyCQA/flake8" url = "https://github.com/PyCQA/flake8/archive/3.7.8.tar.gz" + version('3.8.2', sha256='ae9b00ddaa2bb7fa69796ac73ba7607fcf06d79a8b777fa12ba5abef1e770491') version('3.7.8', sha256='201720797dc9691dd349819994e4a0bc281b70ee2ff77b0c928bb1d3c5aa9810') version('3.7.7', sha256='b3f76b02351008dc772276e74b09dd3d4b5c567ff8c6ab573352cb8fd7007444') version('3.5.0', sha256='60ffe2fdacce4ebe7cadc30f310cf1edfd8ff654ef79525d90cf0756e69de44e') @@ -31,12 +32,16 @@ class PyFlake8(PythonPackage): # http://flake8.pycqa.org/en/latest/faq.html#why-does-flake8-use-ranges-for-its-dependencies # http://flake8.pycqa.org/en/latest/internal/releases.html#releasing-flake8 + # Flake8 3.8.X + depends_on('py-pycodestyle@2.6.0:2.6.999', when='@3.8.0:3.8.999', type=('build', 'run')) + depends_on('py-pyflakes@2.2.0:2.2.999', when='@3.8.0:3.8.999', type=('build', 'run')) + # Flake8 3.7.X # FIXME @0.3.0:0.3.999 causes concretization to hang - depends_on('py-entrypoints@0.3', when='@3.7.0:3.7.999', type=('build', 'run')) + depends_on('py-entrypoints@0.3', when='@3.7.0:3.8.999', type=('build', 'run')) depends_on('py-pyflakes@2.1.0:2.1.999', when='@3.7.0:3.7.999', type=('build', 'run')) depends_on('py-pycodestyle@2.5.0:2.5.999', when='@3.7.0:3.7.999', type=('build', 'run')) - depends_on('py-mccabe@0.6.0:0.6.999', when='@3.7.0:3.7.999', type=('build', 'run')) + depends_on('py-mccabe@0.6.0:0.6.999', 
when='@3.7.0:3.8.999', type=('build', 'run')) # Flake8 3.5.X depends_on('py-pyflakes@1.5:1.6', when='@3.5.0:3.5.999', type=('build', 'run')) @@ -54,10 +59,11 @@ class PyFlake8(PythonPackage): depends_on('py-mccabe@0.2.1:0.4', when='@2.5.0:2.5.999', type=('build', 'run')) # Python version-specific backports - depends_on('py-enum34', when='@3.0.0: ^python@:3.3', type=('build', 'run')) - depends_on('py-typing', when='@3.7.0: ^python@:3.4', type=('build', 'run')) - depends_on('py-configparser', when='@3.0.0: ^python@:3.1', type=('build', 'run')) - depends_on('py-functools32', when='@3.7.4: ^python@:3.1', type=('build', 'run')) + depends_on('py-importlib-metadata', when='@3.8.0: ^python@:3.7', type=('build', 'run')) + depends_on('py-enum34', when='@3.0.0: ^python@:3.3', type=('build', 'run')) + depends_on('py-typing', when='@3.7.0: ^python@:3.4', type=('build', 'run')) + depends_on('py-configparser', when='@3.0.0: ^python@:3.1', type=('build', 'run')) + depends_on('py-functools32', when='@3.7.4: ^python@:3.1', type=('build', 'run')) def patch(self): """Filter pytest-runner requirement out of setup.py.""" diff --git a/var/spack/repos/builtin/packages/py-flask/package.py b/var/spack/repos/builtin/packages/py-flask/package.py index befe5057439..4be2b1e1e15 100644 --- a/var/spack/repos/builtin/packages/py-flask/package.py +++ b/var/spack/repos/builtin/packages/py-flask/package.py @@ -12,13 +12,15 @@ class PyFlask(PythonPackage): homepage = "https://palletsprojects.com/p/flask/" url = "https://pypi.io/packages/source/F/Flask/Flask-1.1.1.tar.gz" + version('1.1.2', sha256='4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060') version('1.1.1', sha256='13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52') + version('0.12.4', sha256='2ea22336f6d388b4b242bc3abf8a01244a8aa3e236e7407469ef78c16ba355dd') version('0.12.2', sha256='49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1') version('0.12.1', 
sha256='9dce4b6bfbb5b062181d3f7da8f727ff70c1156cbb4024351eafd426deb5fb88') version('0.11.1', sha256='b4713f2bfb9ebc2966b8a49903ae0d3984781d5c878591cf2f7b484d28756b0e') depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('py-setuptools', type=('build', 'run')) depends_on('py-werkzeug@0.15:', type=('build', 'run')) depends_on('py-jinja2@2.10.1:', type=('build', 'run')) depends_on('py-itsdangerous@0.24:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-fsspec/package.py b/var/spack/repos/builtin/packages/py-fsspec/package.py index 6302fff6d7c..bf82fc803a0 100644 --- a/var/spack/repos/builtin/packages/py-fsspec/package.py +++ b/var/spack/repos/builtin/packages/py-fsspec/package.py @@ -12,7 +12,11 @@ class PyFsspec(PythonPackage): homepage = "https://github.com/intake/filesystem_spec" url = "https://pypi.io/packages/source/f/fsspec/fsspec-0.4.4.tar.gz" + import_modules = ['fsspec', 'fsspec.implementations'] + + version('0.7.3', sha256='1b540552c93b47e83c568e87507d6e02993e6d1b30bc7285f2336c81c5014103') version('0.4.4', sha256='97697a46e8bf8be34461c2520d6fc4bfca0ed749b22bb2b7c21939fd450a7d63') depends_on('python@3.5:', type=('build', 'run')) + depends_on('python@3.6:', type=('build', 'run'), when='@0.6.3:') depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-gast/package.py b/var/spack/repos/builtin/packages/py-gast/package.py index c7730e1e28a..c69b9bc4758 100644 --- a/var/spack/repos/builtin/packages/py-gast/package.py +++ b/var/spack/repos/builtin/packages/py-gast/package.py @@ -12,8 +12,10 @@ class PyGast(PythonPackage): homepage = "https://github.com/serge-sans-paille/gast" url = "https://pypi.io/packages/source/g/gast/gast-0.3.2.tar.gz" + version('0.3.3', sha256='b881ef288a49aa81440d2c5eb8aeefd4c2bb8993d5f50edae7413a85bfdb3b57') version('0.3.2', sha256='5c7617f1f6c8b8b426819642b16b9016727ddaecd16af9a07753e537eba8a3a5') version('0.2.2', 
sha256='fe939df4583692f0512161ec1c880e0a10e71e6a232da045ab8edd3756fbadf0') version('0.2.0', sha256='7068908321ecd2774f145193c4b34a11305bd104b4551b09273dfd1d6a374930') depends_on('py-setuptools', type='build') + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-gdbgui/package.py b/var/spack/repos/builtin/packages/py-gdbgui/package.py index 272d375cd33..551e0ef2068 100644 --- a/var/spack/repos/builtin/packages/py-gdbgui/package.py +++ b/var/spack/repos/builtin/packages/py-gdbgui/package.py @@ -12,13 +12,15 @@ class PyGdbgui(PythonPackage): homepage = "https://gdbgui.com" url = "https://pypi.io/packages/source/g/gdbgui/gdbgui-0.11.2.1.tar.gz" + version('0.13.2.0', sha256='80e347a08b8cc630ab9f68482a1ed92c844fbfde46dc21fd39f3e6ef14b72e54') version('0.11.2.1', sha256='280945a37414c31a798f68f70c1bffbedd12dfb0ce77418357e7d42b667491c7') - depends_on('py-setuptools', type=('build', 'run')) - depends_on('py-flask@0.12.2:', type=('build', 'run')) - depends_on('py-flask-compress@1.4.0:', type=('build', 'run')) - depends_on('py-flask-socketio@2.9.3:', type=('build', 'run')) - depends_on('py-gevent@1.2.2:', type=('build', 'run')) - depends_on('py-pygdbmi@0.8.2.0:', type=('build', 'run')) - depends_on('py-pygments@2.2.0:', type=('build', 'run')) - depends_on('gdb', type='run') + depends_on('py-setuptools', type=('build', 'run')) + depends_on('py-flask@0.12.2:0.99.0', type=('build', 'run')) + depends_on('py-flask-compress@1.4.0:1.99.0', type=('build', 'run')) + depends_on('py-flask-socketio@2.9.3:2.99.0', type=('build', 'run')) + depends_on('py-gevent@1.2.2:1.99.0', type=('build', 'run')) + depends_on('py-pygdbmi@0.9.0.0:0.99.0.0', type=('build', 'run'), when='@0.13.1.1:') + depends_on('py-pygdbmi@0.8.2.0:0.8.99.0', type=('build', 'run'), when='@:0.13.0.0') + depends_on('py-pygments@2.2.0:2.99.0', type=('build', 'run')) + depends_on('gdb', type='run') diff --git a/var/spack/repos/builtin/packages/py-gpy/package.py 
b/var/spack/repos/builtin/packages/py-gpy/package.py new file mode 100644 index 00000000000..59bebda5d0c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-gpy/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyGpy(PythonPackage): + """The Gaussian Process Toolbox.""" + + homepage = "http://sheffieldml.github.com/GPy/" + url = "https://pypi.io/packages/source/g/gpy/GPy-1.9.9.tar.gz" + + version('1.9.9', sha256='04faf0c24eacc4dea60727c50a48a07ddf9b5751a3b73c382105e2a31657c7ed') + + depends_on('py-setuptools', type='build') + depends_on('py-numpy@1.7:', type=('build', 'run')) + depends_on('py-scipy@1.3.0:', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-paramz@0.9.0:', type=('build', 'run')) + depends_on('py-cython@0.29:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-graphviz/package.py b/var/spack/repos/builtin/packages/py-graphviz/package.py index 9fcbd1c5560..144321cccff 100644 --- a/var/spack/repos/builtin/packages/py-graphviz/package.py +++ b/var/spack/repos/builtin/packages/py-graphviz/package.py @@ -13,6 +13,9 @@ class PyGraphviz(PythonPackage): homepage = "https://github.com/xflr6/graphviz" url = "https://pypi.io/packages/source/g/graphviz/graphviz-0.10.1.zip" + version('0.13.2', sha256='60acbeee346e8c14555821eab57dbf68a169e6c10bce40e83c1bf44f63a62a01') + version('0.13', sha256='dc08677f37c65a4a480f00df4bd0d19a0a103c06aad95f21a37f0b7fd440de81') + version('0.12', sha256='c60e232a66e4847f9f644fbaa94730ca4f78385a1314a2cc1e7f4cb2d7461298') version('0.11.1', sha256='914b8b124942d82e3e1dcef499c9fe77c10acd3d18a1cfeeb2b9de05f6d24805') version('0.10.1', sha256='d311be4fddfe832a56986ac5e1d6e8715d7fcb0208560da79d1bb0f72abef41f') diff --git 
a/var/spack/repos/builtin/packages/py-grpcio/package.py b/var/spack/repos/builtin/packages/py-grpcio/package.py index 18981f20e45..e40d3c0532c 100644 --- a/var/spack/repos/builtin/packages/py-grpcio/package.py +++ b/var/spack/repos/builtin/packages/py-grpcio/package.py @@ -29,3 +29,7 @@ def setup_build_environment(self, env): env.set('GRPC_PYTHON_BUILD_SYSTEM_OPENSSL', True) env.set('GRPC_PYTHON_BUILD_SYSTEM_ZLIB', True) env.set('GRPC_PYTHON_BUILD_SYSTEM_CARES', True) + + def patch(self): + if self.spec.satisfies('%fj'): + filter_file("-std=gnu99", "", "setup.py") diff --git a/var/spack/repos/builtin/packages/py-gsd/package.py b/var/spack/repos/builtin/packages/py-gsd/package.py new file mode 100644 index 00000000000..f130265d824 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-gsd/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyGsd(PythonPackage): + """The GSD file format is the native file format for HOOMD-blue. GSD files + store trajectories of the HOOMD-blue system state in a binary file with + efficient random access to frames. GSD allows all particle and topology + properties to vary from one frame to the next. Use the GSD Python API to + specify the initial condition for a HOOMD-blue simulation or analyze + trajectory output with a script. 
Read a GSD trajectory with a visualization + tool to explore the behavior of the simulation.""" + + homepage = "https://gsd.readthedocs.io/en/stable/#" + url = "https://pypi.io/packages/source/g/gsd/gsd-1.9.3.tar.gz" + + version('1.9.3', sha256='c6b37344e69020f69fda2b8d97f894cb41fd720840abeda682edd680d1cff838') + + depends_on('py-setuptools', type='build') + depends_on('py-cython', type='build') + depends_on('py-numpy@1.9.3:1.999999', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py index 5f05081de17..e652849fd1e 100644 --- a/var/spack/repos/builtin/packages/py-h5py/package.py +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -12,9 +12,11 @@ class PyH5py(PythonPackage): homepage = "http://www.h5py.org/" url = "https://pypi.io/packages/source/h/h5py/h5py-2.10.0.tar.gz" + git = "https://github.com/h5py/h5py.git" import_modules = ['h5py', 'h5py._hl'] + version('master', branch='master') version('2.10.0', sha256='84412798925dc870ffd7107f045d7659e60f5d46d1c70c700375248bf6bf512d') version('2.9.0', sha256='9d41ca62daf36d6b6515ab8765e4c8c4388ee18e2a665701fef2b41563821002') version('2.8.0', sha256='e626c65a8587921ebc7fb8d31a49addfdd0b9a9aa96315ea484c09803337b955') diff --git a/var/spack/repos/builtin/packages/py-healpy/package.py b/var/spack/repos/builtin/packages/py-healpy/package.py new file mode 100644 index 00000000000..d1781d13a17 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-healpy/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyHealpy(PythonPackage): + """healpy is a Python package to handle pixelated data on the sphere.""" + + homepage = "https://healpy.readthedocs.io/" + url = "https://pypi.python.org/packages/source/h/healpy/healpy-1.13.0.tar.gz" + + version('1.13.0', sha256='d0ae02791c2404002a09c643e9e50bc58e3d258f702c736dc1f39ce1e6526f73') + version('1.7.4', sha256='3cca7ed7786ffcca70e2f39f58844667ffb8521180ac890d4da651b459f51442') + + depends_on('python', type=('build', 'run')) + depends_on('py-setuptools@3.2:', type='build') + depends_on('py-numpy@1.13:', type=('build', 'run')) + depends_on('py-scipy', type=('build', 'run')) + depends_on('py-astropy', type=('build', 'run')) + depends_on('py-matplotlib', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-horovod/package.py b/var/spack/repos/builtin/packages/py-horovod/package.py index c740486853d..7d6a28939f2 100644 --- a/var/spack/repos/builtin/packages/py-horovod/package.py +++ b/var/spack/repos/builtin/packages/py-horovod/package.py @@ -14,6 +14,9 @@ class PyHorovod(PythonPackage): maintainers = ['adamjstewart'] version('master', branch='master', submodules=True) + version('0.19.4', tag='v0.19.4', submodules=True) + version('0.19.3', tag='v0.19.3', submodules=True) + version('0.19.2', tag='v0.19.2', submodules=True) version('0.19.1', tag='v0.19.1', submodules=True) version('0.19.0', tag='v0.19.0', submodules=True) version('0.18.2', tag='v0.18.2', submodules=True) @@ -46,14 +49,15 @@ class PyHorovod(PythonPackage): # Framework dependencies depends_on('py-tensorflow@1.1.0:', type=('build', 'link', 'run'), when='frameworks=tensorflow') - depends_on('py-torch@0.4.0:', type=('build', 'run'), when='frameworks=pytorch') - depends_on('py-torchvision', type=('build', 'run'), when='frameworks=pytorch') + depends_on('py-torch@0.4.0:', type=('build', 'link', 'run'), 
when='frameworks=pytorch') + depends_on('py-torchvision', type=('build', 'run'), when='frameworks=pytorch @:0.19.1') depends_on('py-cffi@1.4.0:', type=('build', 'run'), when='frameworks=pytorch') depends_on('mxnet@1.4.1:+python', type=('build', 'link', 'run'), when='frameworks=mxnet') depends_on('py-keras@2.0.8,2.1.2:', type=('build', 'run'), when='frameworks=keras') depends_on('py-h5py@2.9:', type=('build', 'run'), when='frameworks=spark') depends_on('py-numpy', type=('build', 'run'), when='frameworks=spark') - depends_on('py-petastorm@0.8.2', type=('build', 'run'), when='frameworks=spark') + depends_on('py-petastorm@0.8.2', type=('build', 'run'), when='frameworks=spark @:0.19.1') + depends_on('py-petastorm@0.9.0:', type=('build', 'run'), when='frameworks=spark @0.19.2:') depends_on('py-pyarrow@0.15.0:', type=('build', 'run'), when='frameworks=spark') depends_on('py-pyspark@2.3.2:', type=('build', 'run'), when='frameworks=spark') @@ -63,7 +67,7 @@ class PyHorovod(PythonPackage): depends_on('cmake', type='build', when='controllers=gloo') # Tensor Operations dependencies - depends_on('nccl', when='tensor_ops=nccl') + depends_on('nccl@2:', when='tensor_ops=nccl') depends_on('mpi', when='tensor_ops=mpi') # There does not appear to be a way to use an external Gloo installation depends_on('cmake', type='build', when='tensor_ops=gloo') diff --git a/var/spack/repos/builtin/packages/py-hypothesis/package.py b/var/spack/repos/builtin/packages/py-hypothesis/package.py index 1a77b3cb28a..79a39055d08 100644 --- a/var/spack/repos/builtin/packages/py-hypothesis/package.py +++ b/var/spack/repos/builtin/packages/py-hypothesis/package.py @@ -20,6 +20,7 @@ class PyHypothesis(PythonPackage): # TODO: Add missing dependency required to import hypothesis.extra.django + version('5.3.0', sha256='c9fdb53fe3bf1f8e7dcca1a7dd6e430862502f088aca2903d141511212e79429') version('4.57.1', sha256='3c4369a4b0a1348561048bcda5f1db951a1b8e2a514ea8e8c70d36e656bf6fa0') version('4.41.2', 
sha256='6847df3ffb4aa52798621dd007e6b61dbcf2d76c30ba37dc2699720e2c734b7a') version('4.24.3', sha256='fd90a319f409f34a173156ca704d6c0c6c0bb30a2e43dbf26aced2c75569e5d5') diff --git a/var/spack/repos/builtin/packages/py-ipykernel/package.py b/var/spack/repos/builtin/packages/py-ipykernel/package.py index f6ce9facb10..e2f9d919171 100644 --- a/var/spack/repos/builtin/packages/py-ipykernel/package.py +++ b/var/spack/repos/builtin/packages/py-ipykernel/package.py @@ -12,6 +12,7 @@ class PyIpykernel(PythonPackage): homepage = "https://pypi.python.org/pypi/ipykernel" url = "https://github.com/ipython/ipykernel/archive/4.5.0.tar.gz" + version('5.1.1', sha256='a735d3df42e76e8176849dcc8d7746eda80b7768e8f1b38cd9aa6cabfd28baf5') version('5.1.0', sha256='30f01a2a1470d3fabbad03f5c43606c1bc2142850fc4ccedcf44281664ae9122') version('4.10.0', sha256='df2714fd0084085ed68876f75ab846202d261420b5f4069af6335b8df0475391') version('4.5.0', sha256='c5ec5130f5f7eda71345b9ef638c9213c4c2f41610a9ad338a0f1d0819421adf') diff --git a/var/spack/repos/builtin/packages/py-ipython/package.py b/var/spack/repos/builtin/packages/py-ipython/package.py index 5bfad819e7b..5d1710b00ec 100644 --- a/var/spack/repos/builtin/packages/py-ipython/package.py +++ b/var/spack/repos/builtin/packages/py-ipython/package.py @@ -12,24 +12,31 @@ class PyIpython(PythonPackage): homepage = "https://pypi.python.org/pypi/ipython" url = "https://pypi.io/packages/source/i/ipython/ipython-2.3.1.tar.gz" + version('7.5.0', sha256='e840810029224b56cd0d9e7719dc3b39cf84d577f8ac686547c8ba7a06eeab26') version('7.3.0', sha256='06de667a9e406924f97781bda22d5d76bfb39762b678762d86a466e63f65dc39') version('5.8.0', sha256='4bac649857611baaaf76bc82c173aa542f7486446c335fe1a6c05d0d491c8906') version('5.1.0', sha256='7ef4694e1345913182126b219aaa4a0047e191af414256da6772cf249571b961') version('3.1.0', sha256='532092d3f06f82b1d8d1e5c37097eae19fcf025f8f6a4b670dd49c3c338d5624') version('2.3.1', 
sha256='3e98466aa2fe54540bcba9aa6e01a39f40110d67668c297340c4b9514b7cc49c') - depends_on('python@2.7:2.8,3.3:', type=('build', 'run'), when='@:6') + depends_on('python@3.6:', type=('build', 'run'), when='@7.10:') depends_on('python@3.5:', type=('build', 'run'), when='@7:') + depends_on('python@3.3:', type=('build', 'run'), when='@6:') + depends_on('python@2.7:2.8,3.3:', type=('build', 'run')) depends_on('py-backports-shutil-get-terminal-size', type=('build', 'run'), when="^python@:3.2") depends_on('py-pathlib2', type=('build', 'run'), when="^python@:3.3") depends_on('py-pygments', type=('build', 'run')) depends_on('py-pickleshare', type=('build', 'run')) - depends_on('py-simplegeneric@0.8:', type=('build', 'run')) - depends_on('py-prompt-toolkit@1.0.4:1.999', when='@:7.0.0', type=('build', 'run')) - depends_on('py-prompt-toolkit@2.0.0:2.999', when='@7.0.0:', type=('build', 'run')) + depends_on('py-simplegeneric@0.8:', type=('build', 'run'), when='@:7.0.0') + depends_on('py-prompt-toolkit@1.0.4:1.999', when='@:7.0.0', type=('build', 'run')) + depends_on('py-prompt-toolkit@2.0.0:2.999', when='@7.0.0:', type=('build', 'run')) + depends_on('py-prompt-toolkit@2.0.0:2.0.999', when='@7.5.0:', type=('build', 'run')) depends_on('py-traitlets@4.2:', type=('build', 'run')) depends_on('py-decorator', type=('build', 'run')) depends_on('py-pexpect', type=('build', 'run')) depends_on('py-backcall', type=('build', 'run'), when="^python@3.3:") depends_on('py-appnope', type=('build', 'run'), when='platform=darwin') + depends_on('py-jedi@0.10:', type=('build', 'run'), when='@7.5.0:') + depends_on('py-backcall', type=('build', 'run'), when='@7.5.0:') + depends_on('py-setuptools@18.5:', type='run', when='@4.1:') diff --git a/var/spack/repos/builtin/packages/py-json5/package.py b/var/spack/repos/builtin/packages/py-json5/package.py new file mode 100644 index 00000000000..71b0b50156e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-json5/package.py @@ -0,0 +1,19 @@ +# Copyright 
2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyJson5(PythonPackage): + """The JSON5 Data Interchange Format (JSON5) is a superset of JSON that aims + to alleviate some of the limitations of JSON by expanding its syntax to + include some productions from ECMAScript 5.1.""" + + homepage = "https://github.com/dpranke/pyjson5" + url = "https://pypi.io/packages/source/j/json5/json5-0.9.4.tar.gz" + + version('0.9.4', sha256='2ebfad1cd502dca6aecab5b5c36a21c732c3461ddbc412fb0e9a52b07ddfe586') + + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-jupyter-core/package.py b/var/spack/repos/builtin/packages/py-jupyter-core/package.py index 9fb84e40cb9..3bb21a4b546 100644 --- a/var/spack/repos/builtin/packages/py-jupyter-core/package.py +++ b/var/spack/repos/builtin/packages/py-jupyter-core/package.py @@ -13,6 +13,7 @@ class PyJupyterCore(PythonPackage): url = "https://pypi.io/packages/source/j/jupyter-core/jupyter_core-4.6.0.tar.gz" version('4.6.3', sha256='394fd5dd787e7c8861741880bdf8a00ce39f95de5d18e579c74b882522219e7e') + version('4.6.1', sha256='a183e0ec2e8f6adddf62b0a3fc6a2237e3e0056d381e536d3e7c7ecc3067e244') version('4.6.0', sha256='85103cee6548992780912c1a0a9ec2583a4a18f1ef79a248ec0db4446500bce3') version('4.4.0', sha256='ba70754aa680300306c699790128f6fbd8c306ee5927976cbe48adacf240c0b7') version('4.2.0', sha256='44ec837a53bebf4e937112d3f9ccf31fee4f8db3e406dd0dd4f0378a354bed9c') diff --git a/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py b/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py new file mode 100644 index 00000000000..20c6fc4a0ee --- /dev/null +++ b/var/spack/repos/builtin/packages/py-jupyterlab-server/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack 
Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyJupyterlabServer(PythonPackage): + """A set of server components for JupyterLab and JupyterLab + like applications""" + + homepage = "https://pypi.org/project/jupyterlab-server/" + url = "https://pypi.io/packages/source/j/jupyterlab_server/jupyterlab_server-1.1.0.tar.gz" + + version('1.1.0', sha256='bac27e2ea40f686e592d6429877e7d46947ea76c08c878081b028c2c89f71733') + + depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + + depends_on('py-requests', type=('build', 'run')) + depends_on('py-json5', type=('build', 'run')) + depends_on('py-jsonschema@3.0.1:', type=('build', 'run')) + depends_on('py-notebook@4.2.0:', type=('build', 'run')) + depends_on('py-jinja2@2.10:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-jupyterlab/package.py b/var/spack/repos/builtin/packages/py-jupyterlab/package.py new file mode 100644 index 00000000000..d1ae7844ea8 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-jupyterlab/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyJupyterlab(PythonPackage): + """JupyterLab is the next-generation web-based user interface + for Project Jupyter.""" + + homepage = "https://jupyterlab.readthedocs.io/" + url = "https://pypi.io/packages/source/j/jupyterlab/jupyterlab-2.1.0.tar.gz" + + version('2.1.0', sha256='8c239aababf5baa0b3d36e375fddeb9fd96f3a9a24a8cda098d6a414f5bbdc81') + + depends_on('python@3:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + + depends_on('py-notebook@4.3.1:', type=('build', 'run')) + depends_on('py-tornado@6.0.3:', type=('build', 'run')) + depends_on('py-jupyterlab-server@1.1.0:', type=('build', 'run')) + depends_on('py-jinja2@2.10:', type=('build', 'run')) + + depends_on('py-pytest', type='test') + depends_on('py-pytest-check-links', type='test') + depends_on('py-requests', type='test') + depends_on('py-wheel', type='test') + depends_on('py-virtualenv', type='test') diff --git a/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py b/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py index 606dc00136c..36cac03c588 100644 --- a/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py +++ b/var/spack/repos/builtin/packages/py-lazy-object-proxy/package.py @@ -12,8 +12,9 @@ class PyLazyObjectProxy(PythonPackage): homepage = "https://github.com/ionelmc/python-lazy-object-proxy" url = "https://pypi.io/packages/source/l/lazy-object-proxy/lazy-object-proxy-1.3.1.tar.gz" + version('1.4.3', sha256='f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0') version('1.3.1', sha256='eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a') - conflicts('^python@3.0:3.2.99') + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-libensemble/package.py b/var/spack/repos/builtin/packages/py-libensemble/package.py index 
67fc1bbc185..d69f1dde086 100644 --- a/var/spack/repos/builtin/packages/py-libensemble/package.py +++ b/var/spack/repos/builtin/packages/py-libensemble/package.py @@ -11,10 +11,11 @@ class PyLibensemble(PythonPackage): """Library for managing ensemble-like collections of computations.""" homepage = "https://libensemble.readthedocs.io" - url = "https://pypi.io/packages/source/l/libensemble/libensemble-0.6.0.tar.gz" + url = "https://pypi.io/packages/source/l/libensemble/libensemble-0.7.0.tar.gz" git = "https://github.com/Libensemble/libensemble.git" version('develop', branch='develop') + version('0.7.0', sha256='4c3c16ef3d4750b7a54198fae5d7ae402c5f5411ae85189da41afd20e20027dc') version('0.6.0', sha256='3f6a926d3868da53835ed93fc2e2a047b368dacb648c7608ee3a66debcee4d38') version('0.5.2', sha256='3e36c29a4a2adc0984ecfcc998cb5bb8a2cdfbe7a1ae92f7b35b06e41d21b889') version('0.5.1', sha256='522e0cc086a3ed75a101b704c0fe01eae07f2684bd8d6da7bdfe9371d3187362') diff --git a/var/spack/repos/builtin/packages/py-lmfit/package.py b/var/spack/repos/builtin/packages/py-lmfit/package.py index 4c798622b02..9f70b89aa03 100644 --- a/var/spack/repos/builtin/packages/py-lmfit/package.py +++ b/var/spack/repos/builtin/packages/py-lmfit/package.py @@ -12,8 +12,17 @@ class PyLmfit(PythonPackage): homepage = "http://lmfit.github.io/lmfit-py/" url = "https://pypi.io/packages/source/l/lmfit/lmfit-0.9.5.tar.gz" - version('0.9.5', sha256='eebc3c34ed9f3e51bdd927559a5482548c423ad5a0690c6fdcc414bfb5be6667') + version('1.0.1', sha256='d249eb756899360f4d2a544c9458f47fc8f765ac22c09e099530585fd64e286e') + version('0.9.15', sha256='cd7bdf47c09a3d49f30dff9a1c7f778973d15d1e1b5dc642f14c22f6630eaf2f') + version('0.9.5', sha256='eebc3c34ed9f3e51bdd927559a5482548c423ad5a0690c6fdcc414bfb5be6667') - depends_on('py-numpy@1.5:', type=('build', 'run')) - depends_on('py-scipy@0.14:', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('python@3.5:', type=('build', 'run'), when='@1:') + 
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@0.9.15') + depends_on('py-asteval@0.9.16', type=('build', 'run'), when='@0.9.15:') + depends_on('py-numpy@1.5:', type=('build', 'run'), when='@0.9.5:') + depends_on('py-numpy@1.16:', type=('build', 'run'), when='@0.9.15:') + depends_on('py-scipy@0.14:', type=('build', 'run'), when='@0.9.5') + depends_on('py-scipy@1.2:', type=('build', 'run'), when='@0.9.15:') + depends_on('py-setuptools', type='build') + depends_on('py-six@1.11:', type=('build', 'run'), when='@0.9.15') + depends_on('py-uncertainties@3.0.1:', type=('build', 'run'), when='@0.9.15:') diff --git a/var/spack/repos/builtin/packages/py-louie/package.py b/var/spack/repos/builtin/packages/py-louie/package.py new file mode 100644 index 00000000000..dc23bd7a4d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-louie/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyLouie(PythonPackage): + """Louie provides Python programmers with a straightforward way to + dispatch signals between objects in a wide variety of contexts. 
It + is based on PyDispatcher, which in turn was based on a highly-rated + recipe in the Python Cookbook.""" + + homepage = "https://github.com/11craft/louie/" + url = "https://github.com/11craft/louie/archive/2.0.tar.gz" + + version('2.0', sha256='ac274ef672511357fc15d784df841c238ae13d00964094571eebabb0b14c54b2') + version('1.1', sha256='4bc227171fc546d1a527ee3059fa17df6d35a0acc10db1f942dd3da42ad96408') + + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-macs2/package.py b/var/spack/repos/builtin/packages/py-macs2/package.py index c20463b6e94..b9703842b58 100644 --- a/var/spack/repos/builtin/packages/py-macs2/package.py +++ b/var/spack/repos/builtin/packages/py-macs2/package.py @@ -15,10 +15,12 @@ class PyMacs2(PythonPackage): version('2.2.4', sha256='b131aadc8f5fd94bec35308b821e1f7585def788d2e7c756fc8cac402ffee25b') version('2.1.4', sha256='e4966d001914320829ab859c7bc8e92c6410aa7bdbddfd00b7625e9a0fb15c97') + version('2.1.3.3', sha256='00959e523f45ed92b8429f55944eca6984623ac008d7cdb488c3ffe59c21984a') version('2.1.1.20160309', sha256='2008ba838f83f34f8e0fddefe2a3a0159f4a740707c68058f815b31ddad53d26') depends_on('python@3.5:', when='@2.2:', type=('build', 'run')) depends_on('python@2.7:2.8', when='@:2.1', type=('build', 'run')) + depends_on('py-cython', type='build') # Most Python packages only require py-setuptools as a build dependency. # However, py-macs2 requires py-setuptools during runtime as well. 
diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index 4e5dbf91356..a1b4c94e409 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -13,7 +13,7 @@ class PyMatplotlib(PythonPackage): and interactive visualizations in Python.""" homepage = "https://matplotlib.org/" - url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-3.2.1.tar.gz" + url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-3.2.2.tar.gz" maintainers = ['adamjstewart'] @@ -27,11 +27,13 @@ class PyMatplotlib(PythonPackage): 'matplotlib.testing.jpl_units' ] + version('3.2.2', sha256='3d77a6630d093d74cbbfebaa0571d00790966be1ed204e4a8239f5cbd6835c5d') version('3.2.1', sha256='ffe2f9cdcea1086fc414e82f42271ecf1976700b8edd16ca9d376189c6d93aee') version('3.2.0', sha256='651d76daf9168250370d4befb09f79875daa2224a9096d97dfc3ed764c842be4') version('3.1.3', sha256='db3121f12fb9b99f105d1413aebaeb3d943f269f3d262b45586d12765866f0c6') version('3.1.2', sha256='8e8e2c2fe3d873108735c6ee9884e6f36f467df4a143136209cff303b183bada') version('3.1.1', sha256='1febd22afe1489b13c6749ea059d392c03261b2950d1d45c17e3aed812080c93') + version('3.1.0', sha256='1e0213f87cc0076f7b0c4c251d7e23601e2419cd98691df79edb95517ba06f0c') version('3.0.2', sha256='c94b792af431f6adb6859eb218137acd9a35f4f7442cea57e4a59c54751c36af') version('3.0.0', sha256='b4e2333c98a7c2c1ff6eb930cd2b57d4b818de5437c5048802096b32f66e65f9') version('2.2.5', sha256='a3037a840cd9dfdc2df9fee8af8f76ca82bfab173c0f9468193ca7a89a2b60ea') @@ -126,7 +128,10 @@ class PyMatplotlib(PythonPackage): depends_on('pkgconfig', type='build') # Testing dependencies - depends_on('py-pytest', type='test') + # https://matplotlib.org/devel/testing.html#requirements + depends_on('py-pytest@3.6:', type='test') + depends_on('ghostscript@9.0:', type='test') + # depends_on('inkscape@:0.999', type='test') msg = 'MacOSX 
backend requires the Cocoa headers included with XCode' conflicts('platform=linux', when='backend=macosx', msg=msg) diff --git a/var/spack/repos/builtin/packages/py-mmcv/package.py b/var/spack/repos/builtin/packages/py-mmcv/package.py new file mode 100644 index 00000000000..fbe0fdfe0a4 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mmcv/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyMmcv(PythonPackage): + """MMCV is a foundational python library for computer + vision research and supports many research projects in + MMLAB, such as MMDetection and MMAction.""" + + homepage = "https://mmcv.readthedocs.io/en/latest/" + url = "https://github.com/open-mmlab/mmcv/archive/v0.5.1.tar.gz" + + version('0.5.1', sha256='7c5ad30d9b61e44019e81ef46c406aa85dd08b5d0ba12ddd5cdc9c445835a55e') + + depends_on('python@3.6:', type=('build', 'run')) + depends_on('py-addict', type=('build', 'run')) + depends_on('py-numpy@1.11.1:', type=('build', 'run')) + depends_on('py-pyyaml', type=('build', 'run')) + depends_on('opencv+python', type=('build', 'run')) + depends_on('py-cython', type='build') diff --git a/var/spack/repos/builtin/packages/py-more-itertools/package.py b/var/spack/repos/builtin/packages/py-more-itertools/package.py index 7d1386402a9..dba18e66b3f 100644 --- a/var/spack/repos/builtin/packages/py-more-itertools/package.py +++ b/var/spack/repos/builtin/packages/py-more-itertools/package.py @@ -15,6 +15,7 @@ class PyMoreItertools(PythonPackage): import_modules = ['more_itertools', 'more_itertools.tests'] version('7.2.0', sha256='409cd48d4db7052af495b09dec721011634af3753ae1ef92d2b32f73a745f832') + version('7.0.0', sha256='c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a') version('5.0.0', 
sha256='38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4') version('4.3.0', sha256='c476b5d3a34e12d40130bc2f935028b5f636df8f372dc2c1c01dc19681b2039e') version('4.1.0', sha256='c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44') diff --git a/var/spack/repos/builtin/packages/py-murmurhash/package.py b/var/spack/repos/builtin/packages/py-murmurhash/package.py new file mode 100644 index 00000000000..3cb59de262c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-murmurhash/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyMurmurhash(PythonPackage): + """Cython bindings for MurmurHash.""" + + homepage = "https://github.com/explosion/murmurhash" + url = "https://pypi.io/packages/source/m/murmurhash/murmurhash-1.0.2.tar.gz" + + version('1.0.2', sha256='c7a646f6b07b033642b4f52ae2e45efd8b80780b3b90e8092a0cec935fbf81e2') + + depends_on('py-setuptools', type='build') + depends_on('py-wheel@0.32.0:0.32.999', type='build') diff --git a/var/spack/repos/builtin/packages/py-mypy/package.py b/var/spack/repos/builtin/packages/py-mypy/package.py index b8cde685dc7..f822ea567eb 100644 --- a/var/spack/repos/builtin/packages/py-mypy/package.py +++ b/var/spack/repos/builtin/packages/py-mypy/package.py @@ -15,7 +15,7 @@ class PyMypy(PythonPackage): version('0.740', sha256='48c8bc99380575deb39f5d3400ebb6a8a1cb5cc669bbba4d3bb30f904e0a0e7d') depends_on('python@3.5:', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('py-setuptools', type=('build', 'run')) depends_on('py-typed-ast@1.4.0:1.4.999', type=('build', 'run')) depends_on('py-typing-extensions@3.7.4:', type=('build', 'run')) depends_on('py-mypy-extensions@0.4.0:0.4.999', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-mysqlclient/package.py 
b/var/spack/repos/builtin/packages/py-mysqlclient/package.py index d1e6ef37a8c..a1f1d09e130 100644 --- a/var/spack/repos/builtin/packages/py-mysqlclient/package.py +++ b/var/spack/repos/builtin/packages/py-mysqlclient/package.py @@ -15,9 +15,10 @@ class PyMysqlclient(PythonPackage): homepage = "https://github.com/PyMySQL/mysqlclient-python" url = "https://pypi.io/packages/source/m/mysqlclient/mysqlclient-1.4.4.tar.gz" - version('1.4.4', sha256='9c737cc55a5dc8dd3583a942d5a9b21be58d16f00f5fefca4e575e7d9682e98c') - version('1.3.13', sha256='ff8ee1be84215e6c30a746b728c41eb0701a46ca76e343af445b35ce6250644f') + version('1.4.6', sha256='f3fdaa9a38752a3b214a6fe79d7cae3653731a53e577821f9187e67cbecb2e16') + version('1.4.5', sha256='e80109b0ae8d952b900b31b623181532e5e89376d707dcbeb63f99e69cefe559') + version('1.4.4', sha256='9c737cc55a5dc8dd3583a942d5a9b21be58d16f00f5fefca4e575e7d9682e98c') + version('1.3.13', sha256='ff8ee1be84215e6c30a746b728c41eb0701a46ca76e343af445b35ce6250644f') depends_on('py-setuptools', type='build') - # Below: cxxstd=17 also works - depends_on('mysql cxxstd=14') + depends_on('mysql') diff --git a/var/spack/repos/builtin/packages/py-netcdf4/check_hdf5version.patch b/var/spack/repos/builtin/packages/py-netcdf4/check_hdf5version.patch new file mode 100644 index 00000000000..c2c3bd025cd --- /dev/null +++ b/var/spack/repos/builtin/packages/py-netcdf4/check_hdf5version.patch @@ -0,0 +1,20 @@ +--- a/setup.py ++++ b/setup.py +@@ -275,7 +275,7 @@ HDF5_DIR environment variable not set, checking some standard locations ..\n""") + for direc in dirstosearch: + sys.stdout.write('checking %s ...\n' % direc) + hdf5_version = check_hdf5version(os.path.join(direc, 'include')) +- if hdf5_version is None or hdf5_version[1:6] < '1.8.0': ++ if hdf5_version is None or [int(c) for c in hdf5_version[1:-1].split('.')] < [1, 8, 0]: + continue + else: + HDF5_dir = direc +@@ -290,7 +290,7 @@ HDF5_DIR environment variable not set, checking some standard locations ..\n""") + 
hdf5_version = check_hdf5version(HDF5_incdir) + if hdf5_version is None: + raise ValueError('did not find HDF5 headers in %s' % HDF5_incdir) +- elif hdf5_version[1:6] < '1.8.0': ++ elif [int(c) for c in hdf5_version[1:-1].split('.')] < [1, 8, 0]: + raise ValueError('HDF5 version >= 1.8.0 is required') + + if netCDF4_incdir is None and netCDF4_dir is None: diff --git a/var/spack/repos/builtin/packages/py-netcdf4/disable_pkgconf.patch b/var/spack/repos/builtin/packages/py-netcdf4/disable_pkgconf.patch new file mode 100644 index 00000000000..b2b690c7c3e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-netcdf4/disable_pkgconf.patch @@ -0,0 +1,6 @@ +--- a/setup.py ++++ b/setup.py +@@ -309,2 +309,3 @@ except OSError: + HAS_PKG_CONFIG = False ++HAS_PKG_CONFIG = False + diff --git a/var/spack/repos/builtin/packages/py-netcdf4/package.py b/var/spack/repos/builtin/packages/py-netcdf4/package.py index a9dd01db2c2..d1c992db9cf 100644 --- a/var/spack/repos/builtin/packages/py-netcdf4/package.py +++ b/var/spack/repos/builtin/packages/py-netcdf4/package.py @@ -12,6 +12,9 @@ class PyNetcdf4(PythonPackage): homepage = "https://github.com/Unidata/netcdf4-python" url = "https://pypi.io/packages/source/n/netCDF4/netCDF4-1.2.7.tar.gz" + maintainers = ['skosukhin'] + + version('1.5.3', sha256='2a3ca855848f4bbf07fac366da77a681fcead18c0a8813d91d46302f562dc3be') version('1.4.2', sha256='b934af350459cf9041bcdf5472e2aa56ed7321c018d918e9f325ec9a1f9d1a30') version('1.2.7', sha256='0c449b60183ee06238a8f9a75de7b0eed3acaa7a374952ff9f1ff06beb8f94ba') version('1.2.3.1', sha256='55edd74ef9aabb1f7d1ea3ffbab9c555da2a95632a97f91c0242281dc5eb919f') @@ -25,6 +28,20 @@ class PyNetcdf4(PythonPackage): depends_on('netcdf-c') depends_on('hdf5@1.8.0:+hl') + # The installation script tries to find hdf5 using pkg-config. However, the + # version of hdf5 installed with Spack does not have pkg-config files. + # Therefore, if pkg-config finds hdf5.pc at all (e.g. 
provided by + # Ubuntu/Debian package manager), it is definitely not what we need. The + # following patch disables the usage of pkg-config at all. + patch('disable_pkgconf.patch') + + # Older versions of the package get a false negative result when checking + # the version of HDF5. + patch('check_hdf5version.patch', when='@:1.2.9 ^hdf5@1.10:') + + # We can skip the 'build' phase to avoid recompilation of the library. + phases = ['install'] + def setup_build_environment(self, env): """Ensure installed netcdf and hdf5 libraries are used""" # Explicitly set these variables so setup.py won't erroneously pick up diff --git a/var/spack/repos/builtin/packages/py-nltk/package.py b/var/spack/repos/builtin/packages/py-nltk/package.py new file mode 100644 index 00000000000..9dbbcc26515 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-nltk/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyNltk(PythonPackage): + """The Natural Language Toolkit (NLTK) is a Python package for + natural language processing.""" + + homepage = "https://www.nltk.org/" + url = "https://pypi.io/packages/source/n/nltk/nltk-3.5.zip" + + version('3.5', sha256='845365449cd8c5f9731f7cb9f8bd6fd0767553b9d53af9eb1b3abf7700936b35') + + depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-click', type=('build', 'run')) + depends_on('py-regex', type=('build', 'run')) + depends_on('py-tqdm', type=('build', 'run')) + + # May require additional third-party software: + # https://github.com/nltk/nltk/wiki/Installing-Third-Party-Software diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 4c58f316a39..f437b43063c 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -16,7 +16,7 @@ class PyNumpy(PythonPackage): number capabilities""" homepage = "https://numpy.org/" - url = "https://pypi.io/packages/source/n/numpy/numpy-1.18.3.zip" + url = "https://pypi.io/packages/source/n/numpy/numpy-1.19.0.zip" git = "https://github.com/numpy/numpy.git" maintainers = ['adamjstewart'] @@ -30,6 +30,9 @@ class PyNumpy(PythonPackage): ] version('master', branch='master') + version('1.19.0', sha256='76766cc80d6128750075378d3bb7812cf146415bd29b588616f72c943c00d598') + version('1.18.5', sha256='34e96e9dae65c4839bd80012023aadd6ee2ccb73ce7fdf3074c62f301e63120b') + version('1.18.4', sha256='bbcc85aaf4cd84ba057decaead058f43191cc0e30d6bc5d44fe336dc3d3f4509') version('1.18.3', sha256='e46e2384209c91996d5ec16744234d1c906ab79a701ce1a26155c9ec890b8dc8') version('1.18.2', sha256='e7894793e6e8540dbeac77c87b489e331947813511108ae097f1715c018b8f3d') version('1.18.1', sha256='b6ff59cee96b454516e47e7721098e6ceebef435e3e21ac2d6c3b8b02628eb77') @@ -80,6 +83,7 @@ 
class PyNumpy(PythonPackage): depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@1.16:') depends_on('python@3.5:', type=('build', 'run'), when='@1.17:') + depends_on('python@3.6:', type=('build', 'run'), when='@1.19:') depends_on('py-setuptools', type='build') # Check pyproject.toml for updates to the required cython version depends_on('py-cython@0.29.13:', when='@1.18.0:', type='build') @@ -89,6 +93,7 @@ class PyNumpy(PythonPackage): depends_on('py-nose@1.0.0:', when='@:1.14', type='test') depends_on('py-pytest', when='@1.15:', type='test') + depends_on('py-hypothesis', when='@1.19:', type='test') # Allows you to specify order of BLAS/LAPACK preference # https://github.com/numpy/numpy/pull/13132 diff --git a/var/spack/repos/builtin/packages/py-openpyxl/package.py b/var/spack/repos/builtin/packages/py-openpyxl/package.py index b8460028009..f929fe6a9bc 100644 --- a/var/spack/repos/builtin/packages/py-openpyxl/package.py +++ b/var/spack/repos/builtin/packages/py-openpyxl/package.py @@ -10,11 +10,15 @@ class PyOpenpyxl(PythonPackage): """A Python library to read/write Excel 2010 xlsx/xlsm files""" homepage = "http://openpyxl.readthedocs.org/" - url = "https://pypi.io/packages/source/o/openpyxl/openpyxl-2.4.5.tar.gz" + url = "https://pypi.io/packages/source/o/openpyxl/openpyxl-3.0.3.tar.gz" + version('3.0.3', sha256='547a9fc6aafcf44abe358b89ed4438d077e9d92e4f182c87e2dc294186dc4b64') version('2.4.5', sha256='78c331e819fb0a63a1339d452ba0b575d1a31f09fdcce793a31bec7e9ef4ef21') - depends_on('python@2.6:2.8,3.0:3.1,3.3:') + depends_on('python@3.6:', when='@3.0:', type=('build', 'run')) + depends_on('python@2.7:2.8,3.5:', when='@2.6:', type=('build', 'run')) + depends_on('python@2.7:2.8,3.4:', when='@2.5:', type=('build', 'run')) + depends_on('python@2.6:2.8,3.3:', when='@2.1:', type=('build', 'run')) depends_on('py-setuptools', type='build') diff --git 
a/var/spack/repos/builtin/packages/py-opentuner/package.py b/var/spack/repos/builtin/packages/py-opentuner/package.py index 5efd8039f30..b55a9073c4f 100644 --- a/var/spack/repos/builtin/packages/py-opentuner/package.py +++ b/var/spack/repos/builtin/packages/py-opentuner/package.py @@ -12,14 +12,17 @@ class PyOpentuner(PythonPackage): homepage = "http://opentuner.org/" git = "https://github.com/jansel/opentuner.git" + maintainers = ['matthiasdiener'] + + version('0.8.2', commit='8e720a2') version('0.8.0', commit='4cb9135') - # No support for Python 3 yet - depends_on('python@2.7:2.8', type=('build', 'run')) + depends_on('python@3:', type=('build', 'run'), when='@0.8.1:') + depends_on('python@2.7:2.8', type=('build', 'run'), when='@:0.8.0') depends_on('py-argparse@1.2.1:', type=('build', 'run')) depends_on('py-fn-py@0.2.12:', type=('build', 'run')) + depends_on('py-future', type=('build', 'run')) depends_on('py-numpy@1.8.0:', type=('build', 'run')) - depends_on('py-pysqlite@2.6.3:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-sqlalchemy@0.8.2:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-opppy/package.py b/var/spack/repos/builtin/packages/py-opppy/package.py index 6e512b12c66..81bcf6d5af4 100644 --- a/var/spack/repos/builtin/packages/py-opppy/package.py +++ b/var/spack/repos/builtin/packages/py-opppy/package.py @@ -14,15 +14,17 @@ class PyOpppy(PythonPackage): homepage = "https://github.com/lanl/opppy" url = "https://github.com/lanl/OPPPY/archive/opppy-0_1_2.tar.gz" git = "https://github.com/lanl/opppy.git" + maintainers = ['clevelam'] version('master', branch='master') + version('0_1_3', sha256='c3ca97f2ff8ab319b5c7257baa8cab852387dc00d426b4534c06f0894363c541') version('0_1_2', sha256='ef3795d3164fa0aa7ea7da7e223d6d0a48d2960aefd03a7d90cdb8b8f480cd4c') version('0_1_1', sha256='505c023853e75552abc65de9777a125ecb6a99a1cb4e605a4f702af837e3168b') - depends_on('py-setuptools', type=('build', 'run')) + 
depends_on('py-setuptools', type=('build')) + depends_on('py-sphinx', type=('build')) depends_on('py-numpy@1.6:', type=('build', 'run')) depends_on('python@3:', type=('build', 'run')) depends_on('py-argparse', type=('build', 'run'), when='^python@:2.6') depends_on('py-scipy', type=('build', 'run')) depends_on('py-matplotlib', type=('build', 'run')) - depends_on('py-sphinx', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-palettable/package.py b/var/spack/repos/builtin/packages/py-palettable/package.py index 716c9842e5a..aa17c77c4b7 100644 --- a/var/spack/repos/builtin/packages/py-palettable/package.py +++ b/var/spack/repos/builtin/packages/py-palettable/package.py @@ -12,6 +12,7 @@ class PyPalettable(PythonPackage): homepage = "https://jiffyclub.github.io/palettable/" url = "https://pypi.io/packages/source/p/palettable/palettable-3.0.0.tar.gz" + version('3.3.0', sha256='72feca71cf7d79830cd6d9181b02edf227b867d503bec953cf9fa91bf44896bd') version('3.0.0', sha256='eed9eb0399386ff42f90ca61d4fa38a1819a93d5adfc2d546e3e2869d9972c31') depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 2403197c9e5..3fba468a270 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -7,17 +7,12 @@ class PyPandas(PythonPackage): - """pandas is a Python package providing fast, flexible, and expressive - data structures designed to make working with relational or - labeled data both easy and intuitive. It aims to be the - fundamental high-level building block for doing practical, real - world data analysis in Python. Additionally, it has the broader - goal of becoming the most powerful and flexible open source data - analysis / manipulation tool available in any language. 
+ """pandas is a fast, powerful, flexible and easy to use open source + data analysis and manipulation tool, built on top of the Python + programming language.""" - """ - homepage = "http://pandas.pydata.org/" - url = "https://pypi.io/packages/source/p/pandas/pandas-0.25.1.tar.gz" + homepage = "https://pandas.pydata.org/" + url = "https://pypi.io/packages/source/p/pandas/pandas-1.0.5.tar.gz" maintainers = ['adamjstewart'] import_modules = [ @@ -34,6 +29,14 @@ class PyPandas(PythonPackage): 'pandas.api.extensions' ] + version('1.0.5', sha256='69c5d920a0b2a9838e677f78f4dde506b95ea8e4d30da25859db6469ded84fa8') + version('1.0.4', sha256='b35d625282baa7b51e82e52622c300a1ca9f786711b2af7cbe64f1e6831f4126') + version('1.0.3', sha256='32f42e322fb903d0e189a4c10b75ba70d90958cc4f66a1781ed027f1a1d14586') + version('1.0.2', sha256='76334ba36aa42f93b6b47b79cbc32187d3a178a4ab1c3a478c8f4198bcd93a73') + version('1.0.1', sha256='3c07765308f091d81b6735d4f2242bb43c332cc3461cae60543df6b10967fe27') + version('1.0.0', sha256='3ea6cc86931f57f18b1240572216f09922d91b19ab8a01cf24734394a3db3bec') + version('0.25.3', sha256='52da74df8a9c9a103af0a72c9d5fdc8e0183a90884278db7f386b5692a2220a4') + version('0.25.2', sha256='ca91a19d1f0a280874a24dca44aadce42da7f3a7edb7e9ab7c7baad8febee2be') version('0.25.1', sha256='cb2e197b7b0687becb026b84d3c242482f20cbb29a9981e43604eb67576da9f6') version('0.25.0', sha256='914341ad2d5b1ea522798efa4016430b66107d05781dbfe7cf05eba8f37df995') version('0.24.2', sha256='4f919f409c433577a501e023943e582c57355d50a724c589e78bc1d551a535a2') @@ -47,9 +50,12 @@ class PyPandas(PythonPackage): version('0.16.1', sha256='570d243f8cb068bf780461b9225d2e7bef7c90aa10d43cf908fe541fc92df8b6') version('0.16.0', sha256='4013de6f8796ca9d2871218861823bd9878a8dfacd26e08ccf9afdd01bbad9f1') - # https://pandas.pydata.org/pandas-docs/stable/install.html#dependencies # Required dependencies + # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#dependencies + 
depends_on('python@3.6.1:', type=('build', 'run'), when='@1:') depends_on('python@3.5.3:', type=('build', 'run'), when='@0.25:') + # https://pandas.pydata.org/docs/whatsnew/v1.0.0.html#build-changes + depends_on('py-cython@0.29.13:', type='build', when='@1:') depends_on('py-setuptools@24.2.0:', type='build') depends_on('py-numpy', type=('build', 'run')) depends_on('py-numpy@1.13.3:', type=('build', 'run'), when='@0.25:') @@ -58,16 +64,18 @@ class PyPandas(PythonPackage): depends_on('py-pytz@2017.2:', type=('build', 'run')) # Recommended dependencies + # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#recommended-dependencies depends_on('py-numexpr', type=('build', 'run')) depends_on('py-numexpr@2.6.2:', type=('build', 'run'), when='@0.25:') depends_on('py-bottleneck', type=('build', 'run')) depends_on('py-bottleneck@1.2.1:', type=('build', 'run'), when='@0.25:') # Optional dependencies - # https://pandas.pydata.org/pandas-docs/stable/install.html#optional-dependencies + # https://pandas.pydata.org/pandas-docs/stable/getting_started/install.html#optional-dependencies # Test dependencies # https://pandas.pydata.org/pandas-docs/stable/development/contributing.html#running-the-test-suite depends_on('py-pytest@4.0.2:', type='test') + depends_on('py-pytest-xdist', type='test') depends_on('py-hypothesis@3.58:', type='test') depends_on('py-pyarrow@0.10.0:', type='test') diff --git a/var/spack/repos/builtin/packages/py-paramz/package.py b/var/spack/repos/builtin/packages/py-paramz/package.py new file mode 100644 index 00000000000..77c5996d12c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-paramz/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyParamz(PythonPackage): + """The Parameterization Framework.""" + + homepage = "https://github.com/sods/paramz" + url = "https://pypi.io/packages/source/p/paramz/paramz-0.9.5.tar.gz" + + version('0.9.5', sha256='0917211c0f083f344e7f1bc997e0d713dbc147b6380bc19f606119394f820b9a') + + depends_on('py-setuptools', type='build') + depends_on('py-numpy@1.7:', type=('build', 'run')) + depends_on('py-scipy', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-decorator@4.0.10:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-partd/package.py b/var/spack/repos/builtin/packages/py-partd/package.py index 616f960f231..8e99413822a 100644 --- a/var/spack/repos/builtin/packages/py-partd/package.py +++ b/var/spack/repos/builtin/packages/py-partd/package.py @@ -14,8 +14,11 @@ class PyPartd(PythonPackage): import_modules = ['partd'] + version('1.1.0', sha256='6e258bf0810701407ad1410d63d1a15cfd7b773fd9efe555dac6bb82cc8832b0') + version('0.3.10', sha256='33722a228ebcd1fa6f44b1631bdd4cff056376f89eb826d7d880b35b637bcfba') version('0.3.8', sha256='67291f1c4827cde3e0148b3be5d69af64b6d6169feb9ba88f0a6cfe77089400f') + depends_on('python@3.5:', type=('build', 'run'), when='@1.1.0:') depends_on('py-setuptools', type='build') depends_on('py-locket', type=('build', 'run')) depends_on('py-toolz', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-petsc4py/package.py b/var/spack/repos/builtin/packages/py-petsc4py/package.py index 54653f9b689..c5fe8785e7c 100644 --- a/var/spack/repos/builtin/packages/py-petsc4py/package.py +++ b/var/spack/repos/builtin/packages/py-petsc4py/package.py @@ -28,20 +28,23 @@ class PyPetsc4py(PythonPackage): version('3.8.0', sha256='b9b728e39245213cd8e74cf4724be9bb48bd295f99634135e37dbbdbec275244') version('3.7.0', sha256='fb78b50c596c3ba6a097751dd9a379e7acaf57edd36311a3afa94caa4312ee08') + variant('mpi', 
default=True, description='Activates MPI support') + depends_on('py-cython', type='build', when='@develop') depends_on('python@2.6:2.8,3.3:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-numpy', type=('build', 'run')) - depends_on('py-mpi4py', type=('build', 'run')) + depends_on('py-mpi4py', when='+mpi', type=('build', 'run')) - depends_on('petsc+mpi') - depends_on('petsc@develop+mpi', when='@develop') - depends_on('petsc@3.13:3.13.99+mpi', when='@3.13:3.13.99') - depends_on('petsc@3.12:3.12.99+mpi', when='@3.12:3.12.99') - depends_on('petsc@3.11:3.11.99+mpi', when='@3.11:3.11.99') - depends_on('petsc@3.10.3:3.10.99+mpi', when='@3.10.1:3.10.99') - depends_on('petsc@3.10:3.10.2+mpi', when='@3.10.0') - depends_on('petsc@3.9:3.9.99+mpi', when='@3.9:3.9.99') - depends_on('petsc@3.8:3.8.99+mpi', when='@3.8:3.8.99') - depends_on('petsc@3.7:3.7.99+mpi', when='@3.7:3.7.99') - depends_on('petsc@3.6:3.6.99+mpi', when='@3.6:3.6.99') + depends_on('petsc+mpi', when='+mpi') + depends_on('petsc~mpi', when='~mpi') + depends_on('petsc@develop', when='@develop') + depends_on('petsc@3.13:3.13.99', when='@3.13:3.13.99') + depends_on('petsc@3.12:3.12.99', when='@3.12:3.12.99') + depends_on('petsc@3.11:3.11.99', when='@3.11:3.11.99') + depends_on('petsc@3.10.3:3.10.99', when='@3.10.1:3.10.99') + depends_on('petsc@3.10:3.10.2', when='@3.10.0') + depends_on('petsc@3.9:3.9.99', when='@3.9:3.9.99') + depends_on('petsc@3.8:3.8.99', when='@3.8:3.8.99') + depends_on('petsc@3.7:3.7.99', when='@3.7:3.7.99') + depends_on('petsc@3.6:3.6.99', when='@3.6:3.6.99') diff --git a/var/spack/repos/builtin/packages/py-pid/package.py b/var/spack/repos/builtin/packages/py-pid/package.py new file mode 100644 index 00000000000..b9b20428a9a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pid/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPid(PythonPackage): + """Pidfile featuring stale detection and file-locking, can also be + used as context-manager or decorator.""" + + homepage = "https://pypi.org/project/pid/" + url = "https://github.com/trbs/pid/archive/3.0.3.tar.gz" + + version('3.0.3', sha256='082281e2f6b99b4aaa02a24ae4796c604ac17f19cdd0327b8c1ba9c2e73aadc8') + version('3.0.2', sha256='0be7dc260e35788163b3171a5f0e1a8b9888bc2b77232c053c042a65496b8396') + version('3.0.1', sha256='2f51b61210f8e1f009b09a2034717003ca22dcd86995537ecb857863bddca89a') + version('3.0.0', sha256='3d251eadedc6fbd1fe4b43d521e76b83afd244b8b1951a2cd96864406bc96381') + version('2.2.5', sha256='d4c68554bf4b2fc7d0b50749f535f5c1fceb74ff025ce1a3f06745d15c595d40') + version('2.2.4', sha256='de3cc35e18c5409d8424813ab422b637af4d25bfdcf2c15ee6c5af447778de22') + version('2.2.3', sha256='14555fc214e0dfee7d94598b759523349832597e163415d1a7b0d87d9902cc47') + version('2.2.2', sha256='716bb5803fed50facdb62be0e48d08dd95e7392fcfb03f5540915623f9c4ee44') + version('2.2.1', sha256='2c5b398d348b8b1901ccb29b5c914c583187692acfbc3c28fc4ee483b9909357') + version('2.2.0', sha256='f2c3beb5742159794379b73088eb3f592a4b7b93bfef95f8bbc27ab98e5394ed') + + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-pint/package.py b/var/spack/repos/builtin/packages/py-pint/package.py index 6287cc76bd2..fd2736f7c6b 100644 --- a/var/spack/repos/builtin/packages/py-pint/package.py +++ b/var/spack/repos/builtin/packages/py-pint/package.py @@ -12,9 +12,15 @@ class PyPint(PythonPackage): It allows arithmetic operations between them and conversions from and to different units.""" - homepage = "https://pypi.python.org/pypi/pint" - url = "https://pypi.io/packages/source/p/pint/Pint-0.8.1.tar.gz" + homepage = "https://pypi.org/project/Pint" + url = "https://pypi.io/packages/source/p/pint/Pint-0.11.tar.gz" + version('0.11', 
sha256='308f1070500e102f83b6adfca6db53debfce2ffc5d3cbe3f6c367da359b5cf4d') + version('0.10.1', sha256='d739c364b8326fe3d70773d5720fa8b005ea6158695cad042677a588480c86e6') + version('0.10', sha256='38a4d6e242b8bab693cd83a5f5ade3d816463b498658e7ab14ce64c4d458c88b') + version('0.9', sha256='32d8a9a9d63f4f81194c0014b3b742679dce81a26d45127d9810a68a561fe4e2') version('0.8.1', sha256='afcf31443a478c32bbac4b00337ee9026a13d0e2ac83d30c79151462513bb0d4') - depends_on('py-setuptools', type='build') + depends_on('python@3.6:', type=('build', 'run'), when='@0.10:') + depends_on('py-setuptools', type=('build', )) + depends_on('py-setuptools-scm', type=('build', )) diff --git a/var/spack/repos/builtin/packages/py-plac/package.py b/var/spack/repos/builtin/packages/py-plac/package.py new file mode 100644 index 00000000000..18d18cf5ddf --- /dev/null +++ b/var/spack/repos/builtin/packages/py-plac/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyPlac(PythonPackage): + """The smartest command line arguments parser in the world.""" + + homepage = "https://github.com/micheles/plac" + url = "https://pypi.io/packages/source/p/plac/plac-1.1.3.tar.gz" + + version('1.1.3', sha256='398cb947c60c4c25e275e1f1dadf027e7096858fb260b8ece3b33bcff90d985f') + + depends_on('py-setuptools', type='build') + depends_on('py-argparse', when='^python@:2.6', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-plotly/package.py b/var/spack/repos/builtin/packages/py-plotly/package.py index 72a0727be51..7a62596eeae 100644 --- a/var/spack/repos/builtin/packages/py-plotly/package.py +++ b/var/spack/repos/builtin/packages/py-plotly/package.py @@ -15,6 +15,8 @@ class PyPlotly(PythonPackage): version('2.2.0', sha256='ca668911ffb4d11fed6d7fbb12236f8ecc6a7209db192326bcb64bdb41451a58') depends_on('py-setuptools', type='build') - depends_on('py-requests@2.3.0', type=('build', 'run')) - depends_on('py-six@1.8.0', type=('build', 'run')) - depends_on('py-pytz@2014.9', type=('build', 'run')) + depends_on('py-decorator@4.0.6:', type=('build', 'run')) + depends_on('py-nbformat@4.2.0:', type=('build', 'run')) + depends_on('py-requests', type=('build', 'run')) + depends_on('py-six', type=('build', 'run')) + depends_on('py-pytz', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-preshed/package.py b/var/spack/repos/builtin/packages/py-preshed/package.py new file mode 100644 index 00000000000..cb8cc2ed10b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-preshed/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyPreshed(PythonPackage): + """preshed: Cython Hash Table for Pre-Hashed Keys.""" + + homepage = "https://github.com/explosion/preshed" + url = "https://pypi.io/packages/source/p/preshed/preshed-3.0.2.tar.gz" + + version('3.0.2', sha256='61d73468c97c1d6d5a048de0b01d5a6fd052123358aca4823cdb277e436436cb') + + depends_on('py-setuptools', type='build') + depends_on('py-cymem@2.0.2:2.0.999', type=('build', 'run')) + depends_on('py-murmurhash@0.28:1.0', type=('build', 'run')) + depends_on('py-pytest', type='test') diff --git a/var/spack/repos/builtin/packages/py-profilehooks/package.py b/var/spack/repos/builtin/packages/py-profilehooks/package.py new file mode 100644 index 00000000000..47b52a455c7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-profilehooks/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyProfilehooks(PythonPackage): + """Python decorators for profiling/tracing/timing a single function""" + + homepage = "https://mg.pov.lt/profilehooks/" + url = "https://pypi.io/packages/source/p/profilehooks/profilehooks-1.11.2.tar.gz" + + git = "https://github.com/mgedmin/profilehooks.git" + + version('1.11.2', sha256='41a74c1abdc5eeaf7dec024e9e89627f70e158374d263a3098bef31a06d38ab2') + + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-pycodestyle/package.py b/var/spack/repos/builtin/packages/py-pycodestyle/package.py index 6f9da670f86..48e833ec2f2 100644 --- a/var/spack/repos/builtin/packages/py-pycodestyle/package.py +++ b/var/spack/repos/builtin/packages/py-pycodestyle/package.py @@ -13,6 +13,7 @@ class PyPycodestyle(PythonPackage): homepage = "https://github.com/PyCQA/pycodestyle" url = "https://github.com/PyCQA/pycodestyle/archive/2.0.0.tar.gz" + version('2.6.0', sha256='08347fbc48cc92afd33117c1e8af9b99b292a4e5889f6b776f402e062fc39c97') version('2.5.0', sha256='a603453c07e8d8e15a43cf062aa7174741b74b4a27b110f9ad03d74d519173b5') version('2.3.1', sha256='e9fc1ca3fd85648f45c0d2e33591b608a17d8b9b78e22c5f898e831351bacb03') version('2.3.0', sha256='ac2a849987316521a56814b5618668d36cd5f3b04843803832a15b93b8383a50') diff --git a/var/spack/repos/builtin/packages/py-pyeda/package.py b/var/spack/repos/builtin/packages/py-pyeda/package.py new file mode 100644 index 00000000000..d6f3b4a3f9c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyeda/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPyeda(PythonPackage): + """PyEDA is a Python library for electronic design automation.""" + + homepage = "https://github.com/cjdrake/pyeda" + url = "https://pypi.io/packages/source/p/pyeda/pyeda-0.28.0.tar.gz" + + version('0.28.0', sha256='07185f458d5d0b2ba5058da8b95dad6ab7684ceaf41237a25bcd3f005490f59d') + + depends_on('py-setuptools', type='build') + depends_on('python@3.3:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-pyflakes/package.py b/var/spack/repos/builtin/packages/py-pyflakes/package.py index 156db2f0da2..402a6c525b8 100644 --- a/var/spack/repos/builtin/packages/py-pyflakes/package.py +++ b/var/spack/repos/builtin/packages/py-pyflakes/package.py @@ -12,6 +12,7 @@ class PyPyflakes(PythonPackage): homepage = "https://github.com/PyCQA/pyflakes" url = "https://github.com/PyCQA/pyflakes/archive/2.1.1.tar.gz" + version('2.2.0', sha256='4a6927b9ca7fc19817176d54b3ee2ee4202f064febdee8624ee8340303cfda7b') version('2.1.1', sha256='2c98f07a9dd57d9f33561f6b54a64a766cdf79a3c869bd8c07b7fe03094fb8c3') version('2.1.0', sha256='6cd8775b6430daad386c0de00dfbc27ce2c24468cdcc4d3da41e4aa39d8ce167') version('1.6.0', sha256='f9c72359e05bf8dc27eaaee8cdcae464497f2ccadae87ac6517605ba6040ec99') diff --git a/var/spack/repos/builtin/packages/py-pygdbmi/package.py b/var/spack/repos/builtin/packages/py-pygdbmi/package.py index 98f45a564df..8bf181f4a7d 100644 --- a/var/spack/repos/builtin/packages/py-pygdbmi/package.py +++ b/var/spack/repos/builtin/packages/py-pygdbmi/package.py @@ -12,6 +12,10 @@ class PyPygdbmi(PythonPackage): homepage = "https://github.com/cs01/pygdbmi" url = "https://pypi.io/packages/source/p/pygdbmi/pygdbmi-0.8.2.0.tar.gz" + version('0.9.0.3', sha256='5bdf2f072e8f2f6471f19f8dcd87d6425c5d8069d47c0a5ffe8d0eff48cb171e') version('0.8.2.0', sha256='47cece65808ca42edf6966ac48e2aedca7ae1c675c4d2f0d001c7f3a7fa245fe') + depends_on('python@3.5:', type=('build', 
'run'), when='@0.9.0.3:') + depends_on('python@2.7:2.8,3.4:3.6', type=('build', 'run'), when='@0.9.0.0:0.9.0.2') + depends_on('python@2.7:2.8,3.3:3.6', type=('build', 'run'), when='@:0.8.4.0') depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-pygelf/package.py b/var/spack/repos/builtin/packages/py-pygelf/package.py new file mode 100644 index 00000000000..c2b20b57037 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pygelf/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPygelf(PythonPackage): + """Python logging handlers with GELF (Graylog Extended Log Format) + support.""" + + homepage = "https://github.com/keeprocking/pygelf" + url = "https://pypi.io/packages/source/p/pygelf/pygelf-0.3.6.tar.gz" + + # notify when the package is updated. 
+ maintainers = ['victorusu', 'vkarak'] + + version('0.3.6', sha256='3e5bc59e3b5a754556a76ff2c69fcf2003218ad7b5ff8417482fa1f6a7eba5f9') + + depends_on('python', type=('build', 'run')) + depends_on('py-setuptools', type='build') diff --git a/var/spack/repos/builtin/packages/py-pygments/package.py b/var/spack/repos/builtin/packages/py-pygments/package.py index c5da704ac0d..87476c64d64 100644 --- a/var/spack/repos/builtin/packages/py-pygments/package.py +++ b/var/spack/repos/builtin/packages/py-pygments/package.py @@ -17,6 +17,7 @@ class PyPygments(PythonPackage): 'pygments.lexers', 'pygments.styles' ] + version('2.6.1', sha256='647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44') version('2.4.2', sha256='881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297') version('2.3.1', sha256='5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a') version('2.2.0', sha256='dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc') @@ -24,8 +25,9 @@ class PyPygments(PythonPackage): version('2.0.1', sha256='5e039e1d40d232981ed58914b6d1ac2e453a7e83ddea22ef9f3eeadd01de45cb') version('2.0.2', sha256='7320919084e6dac8f4540638a46447a3bd730fca172afc17d2c03eed22cf4f51') - depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@:2.5') + depends_on('python@3.5:', type=('build', 'run'), when='@2.6:') + depends_on('py-setuptools', type=('build', 'run')) def test(self): # Unit tests require sphinx, but that creates a circular dependency diff --git a/var/spack/repos/builtin/packages/py-pygpu/package.py b/var/spack/repos/builtin/packages/py-pygpu/package.py index 49dc42eede5..aea59399959 100644 --- a/var/spack/repos/builtin/packages/py-pygpu/package.py +++ b/var/spack/repos/builtin/packages/py-pygpu/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import os class PyPygpu(PythonPackage): @@ -12,6 
+13,7 @@ class PyPygpu(PythonPackage): homepage = "http://deeplearning.net/software/libgpuarray/" url = "https://github.com/Theano/libgpuarray/archive/v0.6.1.tar.gz" + version('0.7.6', sha256='ad1c00dd47c3d36ee1708e5167377edbfcdb7226e837ef9c68b841afbb4a4f6a') version('0.7.5', sha256='39c4d2e743848be43c8819c736e089ae51b11aa446cc6ee05af945c2dfd63420') version('0.7.2', sha256='ef11ee6f8d62d53831277fd3dcab662aa770a5b5de2d30fe3018c4af959204da') version('0.7.1', sha256='4d0f9dd63b0595a8c04d8cee91b2619847c033b011c71d776caa784322382ed6') @@ -21,7 +23,9 @@ class PyPygpu(PythonPackage): version('0.6.1', sha256='b2466311e0e3bacdf7a586bba0263f6d232bf9f8d785e91ddb447653741e6ea5') version('0.6.0', sha256='a58a0624e894475a4955aaea25e82261c69b4d22c8f15ec07041a4ba176d35af') - depends_on('libgpuarray') + depends_on('libgpuarray@0.7.6', when='@0.7.6') + depends_on('libgpuarray@0.7.5', when='@0.7.5') + depends_on('libgpuarray') # default # not just build-time, requires pkg_resources depends_on('py-setuptools', type=('build', 'run')) depends_on('py-cython@0.25:', type=('build', 'run')) @@ -29,3 +33,13 @@ class PyPygpu(PythonPackage): depends_on('py-numpy', type=('build', 'run')) depends_on('py-mako', type=('build', 'run')) depends_on('check') + + phases = ['build_ext', 'install'] + + def build_ext_args(self, spec, prefix): + + _ = self.spec['libgpuarray'].prefix + include_flags = '-I{0}'.format(os.path.join(_, 'include')) + library_flags = '-L{0}'.format(os.path.join(_, 'lib')) + + return [include_flags, library_flags] diff --git a/var/spack/repos/builtin/packages/py-pyheadtail/package.py b/var/spack/repos/builtin/packages/py-pyheadtail/package.py new file mode 100644 index 00000000000..811bf63b4b0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyheadtail/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPyheadtail(PythonPackage): + """CERN PyHEADTAIL numerical n-body simulation code for simulating + macro-particle beam dynamics with collective effects.""" + + homepage = "https://github.com/PyCOMPLETE/PyHEADTAIL" + url = "https://pypi.io/packages/source/P/PyHEADTAIL/PyHEADTAIL-1.14.1.tar.gz" + + version('1.14.1', sha256='bf90ac7e8764176c55e82c363cad7ab43543863b6ef482760ced23b78e917bb4') + version('1.13.1', sha256='29c742573a918126b5a9c21806ee0ec6a34ec642a0e6ad200f6d4551bf1bb310') + + depends_on('python', type=('build', 'run')) + depends_on('python@3:', when='@1.13.5:', type=('build', 'run')) + depends_on('py-cython', type='build') + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-scipy', type=('build', 'run')) + depends_on('py-h5py', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py index 02d8b8223fd..23cbfcf93a5 100644 --- a/var/spack/repos/builtin/packages/py-pylint/package.py +++ b/var/spack/repos/builtin/packages/py-pylint/package.py @@ -7,11 +7,12 @@ class PyPylint(PythonPackage): - """array processing for numbers, strings, records, and objects.""" + """python code static checker""" homepage = "https://pypi.python.org/pypi/pylint" url = "https://pypi.io/packages/source/p/pylint/pylint-1.6.5.tar.gz" + version('2.3.1', sha256='723e3db49555abaf9bf79dc474c6b9e2935ad82230b10c1138a71ea41ac0fff1') version('2.3.0', sha256='ee80c7af4f127b2a480d83010c9f0e97beb8eaa652b78c2837d3ed30b12e1182') version('1.9.4', sha256='ee1e85575587c5b58ddafa25e1c1b01691ef172e139fc25585e5d3f02451da93') # version('1.7.2', sha256='ea6afb93a9ed810cf52ff3838eb3a15e2bf6a81b80de0eaede1ce442caa5ca69') # see dependencies @@ -20,13 +21,19 @@ class PyPylint(PythonPackage): version('1.4.1', sha256='3e383060edd432cbbd0e8bd686f5facfe918047ffe1bb401ab5897cb6ee0f030') extends('python', ignore=r'bin/pytest') + 
depends_on('python@2.7:2.8,3.4:3.6', when='@:1', type=('build', 'run')) + depends_on('python@3.4:', when='@2:', type=('build', 'run')) depends_on('py-astroid', type=('build', 'run')) # note there is no working version of astroid for this depends_on('py-astroid@1.5.1:', type=('build', 'run'), when='@1.7:') - depends_on('py-astroid@1.6:1.9', type=('build', 'run'), when='@1.9.4:') - depends_on('py-six', type=('build', 'run')) + depends_on('py-astroid@1.6:1.9', type=('build', 'run'), when='@1.9.4') + depends_on('py-astroid@2.0:', type=('build', 'run'), when='@2.2.0:') + depends_on('py-astroid@2.2.0:2.999.999', type=('build', 'run'), when='@2.3.0:') + depends_on('py-six', type=('build', 'run'), when='@1:') depends_on('py-isort@4.2.5:', type=('build', 'run')) + depends_on('py-isort@4.2.5:4.999', when='@2.3.1:', type=('build', 'run')) depends_on('py-mccabe', type=('build', 'run')) + depends_on('py-mccabe@0.6.0:0.6.999', when='@2.3.1:', type=('build', 'run')) depends_on('py-editdistance', type=('build', 'run'), when='@:1.7') depends_on('py-setuptools@17.1:', type='build') # depends_on('py-setuptools-scm@1.15.0:', type='build') diff --git a/var/spack/repos/builtin/packages/py-pyproj/package.py b/var/spack/repos/builtin/packages/py-pyproj/package.py index 3341359a9ad..dfc0edf9246 100644 --- a/var/spack/repos/builtin/packages/py-pyproj/package.py +++ b/var/spack/repos/builtin/packages/py-pyproj/package.py @@ -16,6 +16,7 @@ class PyPyproj(PythonPackage): maintainers = ['citibeth', 'adamjstewart'] import_modules = ['pyproj'] + version('2.6.0', sha256='977542d2f8cf2981cf3ad72cedfebcd6ac56977c7aa830d9b49fa7888b56e83d') version('2.2.0', sha256='0a4f793cc93539c2292638c498e24422a2ec4b25cb47545addea07724b2a56e5') version('2.1.3', sha256='99c52788b01a7bb9a88024bf4d40965c0a66a93d654600b5deacf644775f424d') version('1.9.6', sha256='e0c02b1554b20c710d16d673817b2a89ff94738b0b537aead8ecb2edc4c4487b') @@ -23,8 +24,10 @@ class PyPyproj(PythonPackage): depends_on('python@:2', when='@:1.9.5.1') 
depends_on('python@3:', when='@2.3:') + depends_on('python@3.5:', when='@2.6.0:') depends_on('py-setuptools', type='build') depends_on('py-cython', type='build') + depends_on('py-cython@0.28:', when='@2.6.0:') depends_on('py-aenum', type=('build', 'run'), when='@2.2:^python@:3.5') depends_on('proj') depends_on('proj@:5', when='@:1') diff --git a/var/spack/repos/builtin/packages/py-pyquaternion/package.py b/var/spack/repos/builtin/packages/py-pyquaternion/package.py new file mode 100644 index 00000000000..82dc26d796b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyquaternion/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class PyPyquaternion(PythonPackage): + """A fully featured, pythonic library for representing and using quaternions.""" + + homepage = "https://kieranwynn.github.io/pyquaternion/" + url = "https://pypi.org/packages/source/p/pyquaternion/pyquaternion-0.9.5.tar.gz" + + version('0.9.5', sha256='2d89d19259d62a8fbd25219eee7dacc1f6bb570becb70e1e883f622597c7d81d') + + depends_on('py-setuptools', type=('build', 'run')) + + depends_on('py-numpy', type='run') diff --git a/var/spack/repos/builtin/packages/py-pyside2/package.py b/var/spack/repos/builtin/packages/py-pyside2/package.py index 69828029bf4..fa70f45b0b1 100644 --- a/var/spack/repos/builtin/packages/py-pyside2/package.py +++ b/var/spack/repos/builtin/packages/py-pyside2/package.py @@ -17,6 +17,7 @@ class PyPyside2(PythonPackage): # http://wiki.qt.io/Qt_for_Python_Development_Getting_Started version('develop', tag='dev') + version('5.14.2.1', tag='v5.14.2.1', submodules=True) version('5.13.2', tag='v5.13.2', submodules=True) version('5.13.1', tag='v5.13.1', submodules=True) version('5.13.0', tag='v5.13.0', submodules=True) diff --git a/var/spack/repos/builtin/packages/py-pytest-check-links/package.py 
b/var/spack/repos/builtin/packages/py-pytest-check-links/package.py new file mode 100644 index 00000000000..49269ce1f27 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pytest-check-links/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class PyPytestCheckLinks(PythonPackage): + """pytest plugin that checks URLs for HTML-containing files.""" + + homepage = "https://github.com/jupyterlab/pytest-check-links" + url = "https://pypi.io/packages/source/p/pytest-check-links/pytest_check_links-0.3.4.tar.gz" + + version('0.3.4', sha256='4b3216548431bf9796557e8ee8fd8e5e77a69a4690b3b2f9bcf6fb5af16a502b') + + depends_on('py-setuptools@17.1:', type='build') + depends_on('py-pbr@1.9:', type='build') diff --git a/var/spack/repos/builtin/packages/py-pytest/package.py b/var/spack/repos/builtin/packages/py-pytest/package.py index 85d01c240c0..b6d2c914c54 100644 --- a/var/spack/repos/builtin/packages/py-pytest/package.py +++ b/var/spack/repos/builtin/packages/py-pytest/package.py @@ -14,6 +14,7 @@ class PyPytest(PythonPackage): import_modules = ['pytest'] + version('5.3.4', sha256='1d122e8be54d1a709e56f82e2d85dcba3018313d64647f38a91aec88c239b600') version('5.2.1', sha256='ca563435f4941d0cb34767301c27bc65c510cb82e90b9ecf9cb52dc2c63caaa0') version('5.1.1', sha256='c3d5020755f70c82eceda3feaf556af9a341334414a8eca521a18f463bcead88') version('4.6.9', sha256='19e8f75eac01dd3f211edd465b39efbcbdc8fc5f7866d7dd49fedb30d8adf339') @@ -48,7 +49,7 @@ class PyPytest(PythonPackage): depends_on('py-attrs@17.4.0:', when='@3.5:', type=('build', 'run')) depends_on('py-more-itertools@4.0.0:', when='@3.5.1:', type=('build', 'run')) depends_on('py-more-itertools@4.0.0:6.0.0', when='@4.2.1:4.6.9 ^python@:2', type=('build', 'run')) - depends_on('py-atomicwrites@1.0:', when='@3.6:', type=('build', 
'run')) + depends_on('py-atomicwrites@1.0:', when='@3.6:5.2.999', type=('build', 'run')) depends_on('py-pluggy@0.12:0.999', when='@4.6:', type=('build', 'run')) depends_on('py-pluggy@0.9.0:0.9.999,0.11:0.999', when='@4.5.0:4.5.999', type=('build', 'run')) depends_on('py-pluggy@0.11:', when='@4.4.2:4.4.999', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-pyyaml/package.py b/var/spack/repos/builtin/packages/py-pyyaml/package.py index d7a08ca1b8f..00b3d848b7d 100644 --- a/var/spack/repos/builtin/packages/py-pyyaml/package.py +++ b/var/spack/repos/builtin/packages/py-pyyaml/package.py @@ -9,14 +9,60 @@ class PyPyyaml(PythonPackage): """PyYAML is a YAML parser and emitter for Python.""" - homepage = "http://pyyaml.org/wiki/PyYAML" - url = "https://pypi.io/packages/source/P/PyYAML/PyYAML-5.1.2.tar.gz" + homepage = "https://pyyaml.org/wiki/PyYAML" + url = "https://pypi.io/packages/source/P/PyYAML/PyYAML-5.3.1.tar.gz" + git = "https://github.com/yaml/pyyaml.git" + maintainers = ['adamjstewart'] + + version('5.3.1', sha256='b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d') version('5.1.2', sha256='01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4') version('5.1', sha256='436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95') version('3.13', sha256='3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf') version('3.12', sha256='592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab') version('3.11', sha256='c36c938a872e5ff494938b33b14aaa156cb439ec67548fcab3535bb78b0846e8') - depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) - depends_on('libyaml') + variant('libyaml', default=True, description='Use libYAML bindings') + + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) + depends_on('libyaml', when='+libyaml') + + phases = ['build_ext', 'install'] + + @property + def import_modules(self): + modules = ['yaml'] + + if '+libyaml' in self.spec: + modules.append('yaml.cyaml') + 
+ return modules + + def setup_py(self, *args, **kwargs): + # Cast from tuple to list + args = list(args) + + if '+libyaml' in self.spec: + args.insert(0, '--with-libyaml') + else: + args.insert(0, '--without-libyaml') + + super(PyPyyaml, self).setup_py(*args, **kwargs) + + def build_ext_args(self, spec, prefix): + args = [] + + if '+libyaml' in spec: + args.extend([ + spec['libyaml'].libs.search_flags, + spec['libyaml'].headers.include_flags, + ]) + + return args + + # Tests need to be re-added since `phases` was overridden + run_after('build_ext')( + PythonPackage._run_default_build_time_test_callbacks) + run_after('install')( + PythonPackage._run_default_install_time_test_callbacks) + run_after('install')(PythonPackage.sanity_check_prefix) diff --git a/var/spack/repos/builtin/packages/py-rpy2/package.py b/var/spack/repos/builtin/packages/py-rpy2/package.py index d3578c2cc23..ef9b847b00e 100644 --- a/var/spack/repos/builtin/packages/py-rpy2/package.py +++ b/var/spack/repos/builtin/packages/py-rpy2/package.py @@ -16,6 +16,7 @@ class PyRpy2(PythonPackage): homepage = "https://pypi.python.org/pypi/rpy2" url = "https://pypi.io/packages/source/r/rpy2/rpy2-2.5.4.tar.gz" + version('3.0.4', sha256='2af5158a5d56af7f7bf5e54d8d7e87b6f115ff40f056d82f93cad0cbf6acc0cb') version('3.0.0', sha256='34efc2935d9015527837d6b1de29641863d184b19d39ad415d5384be8a015bce') version('2.9.4', sha256='be57f741d0c284b5d8785ab03dff0e829303e5ac30e548d5ceb46e05b168812e') version('2.8.6', sha256='004d13734a7b9a85cbc1e7a93ec87df741e28db1273ab5b0d9efaac04a9c5f98') @@ -30,12 +31,18 @@ class PyRpy2(PythonPackage): depends_on('py-setuptools', type='build') depends_on('r', type=('build', 'run')) + # @3.0.0: + depends_on('py-cffi@1.0.0:', when='@3.0.0:', type=('build', 'run')) + depends_on('py-simplegeneric', when='@3.0.0:', type=('build', 'run')) + depends_on('py-pytest', when='@3:', type=('build', 'run')) + # @2.9.0: - depends_on('r@3.3:', when='@2.9.0:', type=('build', 'run')) - depends_on('python@3:', 
when='@2.9.0:', type=('build', 'run')) - depends_on('py-jinja2', when='@2.9.0:', type=('build', 'run')) - depends_on('py-six', when='@2.9.0:', type=('build', 'run')) + depends_on('r@3.3:', when='@2.9.0:', type=('build', 'run')) + depends_on('python@3.5:', when='@2.9.0:', type=('build', 'run')) + depends_on('py-jinja2', when='@2.9.0:', type=('build', 'run')) + depends_on('py-six', when='@2.9.0:2.9.999', type=('build', 'run')) # @:2.8.6 - depends_on('r@2.8:', when='@:2.8.6', type=('build', 'run')) + depends_on('r@2.8:', when='@:2.8.6', type=('build', 'run')) depends_on('py-singledispatch', when='^python@:2', type=('build', 'run')) + depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 4c717e20eef..52c9beae434 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -3,14 +3,12 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * - class PyScikitLearn(PythonPackage): """A set of python modules for machine learning and data mining.""" homepage = "https://pypi.python.org/pypi/scikit-learn" - url = "https://pypi.io/packages/source/s/scikit-learn/scikit-learn-0.22.tar.gz" + url = "https://pypi.io/packages/source/s/scikit-learn/scikit-learn-0.23.1.tar.gz" git = "https://github.com/scikit-learn/scikit-learn.git" maintainers = ['adamjstewart'] @@ -33,6 +31,8 @@ class PyScikitLearn(PythonPackage): ] version('master', branch='master') + version('0.23.1', sha256='e3fec1c8831f8f93ad85581ca29ca1bb88e2da377fb097cf8322aa89c21bc9b8') + version('0.23.0', sha256='639a53df6273acc6a7510fb0c658b94e0c70bb13dafff9d14932c981ff9baff4') version('0.22.1', sha256='51ee25330fc244107588545c70e2f3570cfc4017cff09eed69d6e1d82a212b7d') version('0.22', sha256='314abf60c073c48a1e95feaae9f3ca47a2139bd77cebb5b877c23a45c9e03012') version('0.21.3', 
sha256='eb9b8ebf59eddd8b96366428238ab27d05a19e89c5516ce294abc35cea75d003') @@ -53,13 +53,17 @@ class PyScikitLearn(PythonPackage): depends_on('python@2.6:2.8,3.3:', when='@:0.19', type=('build', 'run')) depends_on('python@2.7:2.8,3.4:', when='@0.20.0:0.20.999', type=('build', 'run')) depends_on('python@3.5:', when='@0.21:', type=('build', 'run')) + depends_on('python@3.6:', when='@0.23:', type=('build', 'run')) depends_on('py-numpy@1.6.1:', when='@:0.19', type=('build', 'run')) depends_on('py-numpy@1.8.2:', when='@0.20.0:0.20.999', type=('build', 'run')) depends_on('py-numpy@1.11.0:', when='@0.21:', type=('build', 'run')) + depends_on('py-numpy@1.13.3:', when='@0.23:', type=('build', 'run')) depends_on('py-scipy@0.9:', when='@:0.19', type=('build', 'run')) depends_on('py-scipy@0.13.3:', when='@0.20.0:0.20.999', type=('build', 'run')) depends_on('py-scipy@0.17.0:', when='@0.21:', type=('build', 'run')) + depends_on('py-scipy@0.19.1:', when='@0.23:', type=('build', 'run')) depends_on('py-joblib@0.11:', type=('build', 'run')) + depends_on('py-threadpoolctl@2.0.0:', when='@0.23:', type=('build', 'run')) depends_on('py-cython@0.23:', type='build') depends_on('py-cython@0.28.5:', when='@0.21:', type='build') depends_on('py-pytest@3.3.0:', type='test') diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 5cbd2438f78..6775d533fa8 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -12,7 +12,7 @@ class PyScipy(PythonPackage): as routines for numerical integration and optimization.""" homepage = "https://www.scipy.org/" - url = "https://pypi.io/packages/source/s/scipy/scipy-1.4.1.tar.gz" + url = "https://pypi.io/packages/source/s/scipy/scipy-1.5.0.tar.gz" maintainers = ['adamjstewart'] install_time_test_callbacks = ['install_test', 'import_module_test'] @@ -30,6 +30,7 @@ class PyScipy(PythonPackage): 
'scipy.sparse.linalg.eigen.lobpcg', 'scipy.special._precompute' ] + version('1.5.0', sha256='4ff72877d19b295ee7f7727615ea8238f2d59159df0bdd98f91754be4a2767f0') version('1.4.1', sha256='dee1bbf3a6c8f73b6b218cb28eed8dd13347ea2f87d572ce19b289d6fd3fbc59') version('1.4.0', sha256='31f7cfa93b01507c935c12b535e24812594002a02a56803d7cd063e9920d25e8') version('1.3.3', sha256='64bf4e8ae0db2d42b58477817f648d81e77f0b381d0ea4427385bba3f959380a') @@ -50,14 +51,17 @@ class PyScipy(PythonPackage): depends_on('python@2.6:2.8,3.2:', type=('build', 'run')) depends_on('python@2.7:2.8,3.4:', when='@0.18:', type=('build', 'run')) depends_on('python@3.5:', when='@1.3:', type=('build', 'run')) + depends_on('python@3.6:', when='@1.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') - depends_on('py-pybind11@2.4.0:', when='@1.4.1:', type='build') depends_on('py-pybind11@2.2.4:', when='@1.4.0:', type='build') + depends_on('py-pybind11@2.4.0:', when='@1.4.1:', type='build') + depends_on('py-pybind11@2.4.3:', when='@1.5.0:', type='build') depends_on('py-numpy@1.5.1:+blas+lapack', type=('build', 'run')) depends_on('py-numpy@1.6.2:+blas+lapack', when='@0.16:', type=('build', 'run')) depends_on('py-numpy@1.7.1:+blas+lapack', when='@0.18:', type=('build', 'run')) depends_on('py-numpy@1.8.2:+blas+lapack', when='@0.19:', type=('build', 'run')) depends_on('py-numpy@1.13.3:+blas+lapack', when='@1.3:', type=('build', 'run')) + depends_on('py-numpy@1.14.5:+blas+lapack', when='@1.5:', type=('build', 'run')) depends_on('py-pytest', type='test') # NOTE: scipy picks up Blas/Lapack from numpy, see @@ -65,6 +69,11 @@ class PyScipy(PythonPackage): depends_on('blas') depends_on('lapack') + def setup_build_environment(self, env): + # https://github.com/scipy/scipy/issues/11611 + if self.spec.satisfies('@:1.4 %gcc@10:'): + env.set('FFLAGS', '-fallow-argument-mismatch') + def build_args(self, spec, prefix): args = [] diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py 
b/var/spack/repos/builtin/packages/py-setuptools/package.py index affd23eebdf..877cf7ece97 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -21,6 +21,8 @@ class PySetuptools(PythonPackage): 'easy_install' ] + version('46.1.3', sha256='795e0475ba6cd7fa082b1ee6e90d552209995627a2a227a47c6ea93282f4bfb1') + version('44.1.0', sha256='794a96b0c1dc6f182c36b72ab70d7e90f1d59f7a132e6919bb37b4fd4d424aca') version('41.4.0', sha256='7eae782ccf36b790c21bde7d86a4f303a441cd77036b25c559a602cf5186ce4d') version('41.0.1', sha256='a222d126f5471598053c9a77f4b5d4f26eaa1f150ad6e01dcf1a42e185d05613') version('41.0.0', sha256='79d30254b6fe7a8e672e43cd85f13a9f3f2a50080bc81d851143e2219ef0dcb1') @@ -40,7 +42,9 @@ class PySetuptools(PythonPackage): version('16.0', sha256='aa86255dee2c4a0056509750008007667c29306b7a6c13801468515b2c672845') version('11.3.1', sha256='bd25f17de4ecf00116a9f7368b614a54ca1612d7945d2eafe5d97bc08c138bc5') - depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) + depends_on('python@3.5:', type=('build', 'run'), when='@45.0.0:') + depends_on('python@2.7:2.8,3.5:', type=('build', 'run'), when='@44.0.0:44.99.99') + depends_on('python@2.7:2.8,3.4:', type=('build', 'run'), when='@:43.99.99') # Previously, setuptools vendored all of its dependencies to allow # easy bootstrapping. 
As of version 34.0.0, this is no longer done diff --git a/var/spack/repos/builtin/packages/py-shapely/package.py b/var/spack/repos/builtin/packages/py-shapely/package.py index 9edb86674fd..a25e4d214fe 100644 --- a/var/spack/repos/builtin/packages/py-shapely/package.py +++ b/var/spack/repos/builtin/packages/py-shapely/package.py @@ -3,7 +3,8 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +import os +import sys class PyShapely(PythonPackage): @@ -11,7 +12,8 @@ class PyShapely(PythonPackage): """ homepage = "https://github.com/Toblerity/Shapely" - url = "https://pypi.io/packages/source/S/Shapely/Shapely-1.6.4.post2.tar.gz" + url = "https://pypi.io/packages/source/S/Shapely/Shapely-1.7.0.tar.gz" + git = "https://github.com/Toblerity/Shapely.git" maintainers = ['adamjstewart'] import_modules = [ @@ -19,9 +21,13 @@ class PyShapely(PythonPackage): 'shapely.examples', 'shapely.speedups', 'shapely.vectorized', ] + version('master', branch='master') + version('1.7.0', sha256='e21a9fe1a416463ff11ae037766fe410526c95700b9e545372475d2361cc951e') version('1.6.4.post2', sha256='c4b87bb61fc3de59fc1f85e71a79b0c709dc68364d9584473697aad4aa13240f') version('1.6.4', sha256='b10bc4199cfefcf1c0e5d932eac89369550320ca4bdf40559328d85f1ca4f655') + depends_on('python@3.5:', when='@1.8:', type=('build', 'run')) + depends_on('python@2.7:2.8,3.4:', when='@1.7:', type=('build', 'run')) depends_on('python@2.6:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-cython', type='build') @@ -29,7 +35,34 @@ class PyShapely(PythonPackage): depends_on('geos') depends_on('geos@3.3:', when='@1.3:') depends_on('py-pytest', type='test') + depends_on('py-pytest-cov', type='test') + + # https://github.com/Toblerity/Shapely/pull/891 + patch('https://github.com/Toblerity/Shapely/commit/98f6b36710bbe05b4ab59231cb0e08b06fe8b69c.patch', + sha256='4984cd0590beb5091f213948a953f70cea08ea11c5db1de07ba98c19e3d13f06', + when='@:1.7') + + @when('^python@3.7:') + def 
patch(self): + # Python 3.7 changed the thread storage API, precompiled *.c files + # need to be re-cythonized + if os.path.exists('shapely/speedups/_speedups.c'): + os.remove('shapely/speedups/_speedups.c') + if os.path.exists('shapely/vectorized/_vectorized.c'): + os.remove('shapely/vectorized/_vectorized.c') def setup_build_environment(self, env): env.set('GEOS_CONFIG', join_path(self.spec['geos'].prefix.bin, 'geos-config')) + + # Shapely uses ctypes.util.find_library, which searches LD_LIBRARY_PATH + # Our RPATH logic works fine, but the unit tests fail without this + # https://github.com/Toblerity/Shapely/issues/909 + libs = ':'.join(self.spec['geos'].libs.directories) + if sys.platform == 'darwin': + env.prepend_path('DYLD_FALLBACK_LIBRARY_PATH', libs) + else: + env.prepend_path('LD_LIBRARY_PATH', libs) + + def test(self): + python('-m', 'pytest') diff --git a/var/spack/repos/builtin/packages/py-shroud/package.py b/var/spack/repos/builtin/packages/py-shroud/package.py new file mode 100644 index 00000000000..504c992f156 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-shroud/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyShroud(PythonPackage): + """Create Fortran wrappers for a C++ library.""" + + homepage = "https://github.com/LLNL/shroud" + git = "https://github.com/LLNL/shroud.git" + + version('develop', branch='develop') + version('master', branch='master') + version('0.11.0', tag='v0.11.0') + version('0.10.1', tag='v0.10.1') + version('0.9.0', tag='v0.9.0') + version('0.8.0', tag='v0.8.0') + + depends_on("py-setuptools", type='build') + depends_on("py-pyyaml@4.2b1:", type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-spacy-models-en-core-web-sm/package.py b/var/spack/repos/builtin/packages/py-spacy-models-en-core-web-sm/package.py new file mode 100644 index 00000000000..89119586638 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-spacy-models-en-core-web-sm/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PySpacyModelsEnCoreWebSm(PythonPackage): + """English multi-task CNN trained on OntoNotes. 
Assigns context-specific + token vectors, POS tags, dependency parse and named entities.""" + + homepage = "https://spacy.io/models/en#en_core_web_sm" + url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-2.2.5/en_core_web_sm-2.2.5.tar.gz" + + version('2.2.5', sha256='60b69065c97fd2e4972c33300205e1dead3501d2e0bfd6a182c3a033e337caee') + + depends_on('py-setuptools', type='build') + depends_on('py-spacy@2.2.2:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-spacy/package.py b/var/spack/repos/builtin/packages/py-spacy/package.py new file mode 100644 index 00000000000..9e343e4e0da --- /dev/null +++ b/var/spack/repos/builtin/packages/py-spacy/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PySpacy(PythonPackage): + """spaCy is a library for advanced Natural Language Processing in + Python and Cython.""" + + homepage = "https://spacy.io/" + url = "https://pypi.io/packages/source/s/spacy/spacy-2.2.4.tar.gz" + + version('2.2.4', sha256='f0f3a67c5841e6e35d62c98f40ebb3d132587d3aba4f4dccac5056c4e90ff5b9') + + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) + depends_on('py-wheel', type='build') + depends_on('py-cython@0.25:', type='build') + depends_on('py-murmurhash@0.28:1.0', type=('build', 'run')) + depends_on('py-cymem@2.0.2:2.0.999', type=('build', 'run')) + depends_on('py-preshed@3.0.2:3.0.999', type=('build', 'run')) + depends_on('py-thinc@7.4.0', type=('build', 'run')) + depends_on('py-blis@0.4.0:0.4.999', type=('build', 'run')) + depends_on('py-srsly@1.0.2:1.0.999', type=('build', 'run')) + depends_on('py-catalogue@0.0.7:1.0', type=('build', 'run')) + depends_on('py-tqdm@4.38:4.999', type=('build', 'run')) + depends_on('py-setuptools', type=('build', 'run')) + depends_on('py-numpy@1.15:', type=('build', 'run')) + 
depends_on('py-plac@0.9.6:1.1', type=('build', 'run')) + depends_on('py-requests@2.13:2.999', type=('build', 'run')) + depends_on('py-pathlib@1.0.1', when='^python@:3.3', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-sqlparse/package.py b/var/spack/repos/builtin/packages/py-sqlparse/package.py new file mode 100644 index 00000000000..73916d40e54 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sqlparse/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PySqlparse(PythonPackage): + """A non-validating SQL parser module for Python.""" + + homepage = "https://github.com/andialbrecht/sqlparse" + url = "https://github.com/andialbrecht/sqlparse/archive/0.3.1.tar.gz" + + version('0.3.1', sha256='344b539482b75c244ac69fbb160d0f4d63a288a392475c8418ca692c594561f9') + version('0.3.0', sha256='a75fddae009fba1d66786203c9dd3a842aa4415475c466d15484139117108474') + version('0.2.4', sha256='7087a2bd385c06ac1a5cf343e2e5ea7ce2bb6386849e59ef214e02af68f73fb4') + version('0.2.3', sha256='12470ab41df1a7003a2957a79c6da9cd4ded180c8a193aa112fe0899b935ef30') + + depends_on('py-setuptools', type='build') + depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-srsly/package.py b/var/spack/repos/builtin/packages/py-srsly/package.py new file mode 100644 index 00000000000..46b1b50af05 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-srsly/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PySrsly(PythonPackage): + """srsly: Modern high-performance serialization utilities for Python.""" + + homepage = "https://github.com/explosion/srsly" + url = "https://pypi.io/packages/source/s/srsly/srsly-2.0.1.tar.gz" + + version('2.0.1', sha256='fa3c7375be8fe75f23c27feafbfb5f738d55ffdbf02964c6896fb7684f519a52') + version('2.0.0', sha256='785b00e00406120dbef4ca82925051e6b60fe870c5f84f0d22b3632d574eb870') + version('1.0.2', sha256='59258b81d567df207f8a0a33c4b5fa232afccf1d927c8ce3ba5395bfd64c0ed8') + + depends_on('python@3.6:', when='@2:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-wheel', when='@2:', type='build') + depends_on('py-cython@0.25:', when='@2:', type='build') + depends_on('py-pathlib@1.0.1', when='^python@:3.3', type=('build', 'run')) + depends_on('py-pytest', type='test') + depends_on('py-mock', type='test') + depends_on('py-numpy', type='test') + depends_on('py-six', when='@:1', type='test') + depends_on('py-pytz', when='@:1', type='test') + + # https://github.com/explosion/srsly/pull/24 + patch('subprocess.patch', when='@2.0.0:2.0.1') diff --git a/var/spack/repos/builtin/packages/py-srsly/subprocess.patch b/var/spack/repos/builtin/packages/py-srsly/subprocess.patch new file mode 100644 index 00000000000..50a053d7548 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-srsly/subprocess.patch @@ -0,0 +1,12 @@ +diff -Naur a/srsly/tests/cloudpickle/testutils.py b/srsly/tests/cloudpickle/testutils.py +--- a/srsly/tests/cloudpickle/testutils.py 2020-04-24 16:10:43.000000000 -0500 ++++ b/srsly/tests/cloudpickle/testutils.py 2020-04-24 16:11:02.000000000 -0500 +@@ -5,7 +5,7 @@ + from subprocess import Popen, check_output, PIPE, STDOUT, CalledProcessError + from srsly.cloudpickle.cloudpickle import dumps + from pickle import loads +-from suprocess import TimeoutExpired ++from subprocess import TimeoutExpired + + + TEST_GLOBALS = "a test value" diff --git 
a/var/spack/repos/builtin/packages/py-storm/package.py b/var/spack/repos/builtin/packages/py-storm/package.py index dae4a9941d7..64a631b2009 100644 --- a/var/spack/repos/builtin/packages/py-storm/package.py +++ b/var/spack/repos/builtin/packages/py-storm/package.py @@ -9,8 +9,12 @@ class PyStorm(PythonPackage): """Storm is an object-relational mapper (ORM) for Python""" homepage = "https://storm.canonical.com/" - url = "https://launchpad.net/storm/trunk/0.20/+download/storm-0.20.tar.gz" + url = "https://launchpad.net/storm/trunk/0.20/+download/storm-0.20.tar.bz2" - version('0.20', sha256='0fa70043bb1a1c178c2f760db35f5956244cecf50dab7fb22d78be7507726603') + version('0.23', sha256='01c59f1c898fb9891333abd65519ba2dd5f68623ac8e67b54932e99ce52593d3') + version('0.20', sha256='1fe016c9ec40520eafc3cf359f1ec2b7fa86be91e45c9279bfb0ea3b06390a82') depends_on('py-setuptools', type='build') + depends_on('py-six', when='@0.23:') + depends_on('python@2.7:2.8', when='@:0.20') + depends_on('python@2.7:2.8,3.5:', when='@0.21:') diff --git a/var/spack/repos/builtin/packages/py-tap-py/package.py b/var/spack/repos/builtin/packages/py-tap-py/package.py index e3f453763d7..a0c9c84ee6b 100644 --- a/var/spack/repos/builtin/packages/py-tap-py/package.py +++ b/var/spack/repos/builtin/packages/py-tap-py/package.py @@ -9,14 +9,18 @@ class PyTapPy(PythonPackage): """Python TAP interface module for unit tests""" - homepage = "https://github.com/mblayman/tappy" - url = "https://pypi.io/packages/source/t/tap.py/tap.py-1.6.tar.gz" + homepage = "https://github.com/python-tap/tappy" + url = "https://pypi.io/packages/source/t/tap.py/tap.py-3.0.tar.gz" - version('1.6', sha256='3ee315567cd1cf444501c405b7f7146ffdb2e630bac58d0840d378a3b9a0dbe4') + version('3.0', sha256='f5eeeeebfd64e53d32661752bb4c288589a3babbb96db3f391a4ec29f1359c70') + version('2.6.2', sha256='5f219d92dbad5e378f8f7549cdfe655b0d5fd2a778f9c83bee51b61c6ca40efb') + version('1.6', 
sha256='3ee315567cd1cf444501c405b7f7146ffdb2e630bac58d0840d378a3b9a0dbe4') extends('python', ignore='bin/nosetests|bin/pygmentize') - depends_on('python@2.6:2.8,3.2:3.4') - depends_on('py-nose', type=('build', 'run')) - depends_on('py-pygments', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('python@3.5:3.7', when='@3.0:') + depends_on('python@2.7:2.8,3.5:3.7', when='@2.6') + depends_on('python@2.6:2.8,3.2:3.4', when='@:1.8') + depends_on('py-nose', type=('build', 'run'), when='@:1.99') + depends_on('py-pygments', type=('build', 'run'), when='@:1.99') + depends_on('py-setuptools', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py b/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py index b738f2ebf71..055d80724b8 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow-estimator/package.py @@ -11,16 +11,18 @@ class PyTensorflowEstimator(Package): simplifies machine learning programming.""" homepage = "https://github.com/tensorflow/estimator" - url = "https://github.com/tensorflow/estimator/archive/v1.13.0.tar.gz" + url = "https://github.com/tensorflow/estimator/archive/v2.2.0.tar.gz" + version('2.2.0', sha256='2d68cb6e6442e7dcbfa2e092aa25bdcb0eda420536a829b85d732854a4c85d46') version('2.1', sha256='1d74c8181b981748976fa33ad97d3434c3cf2b7e29a0b00861365fe8329dbc4e') version('2.0.0', sha256='6f4bdf1ab219e1f1cba25d2af097dc820f56479f12a839853d97422fe4d8b465') version('1.13.0', sha256='a787b150ff436636df723e507019c72a5d6486cfe506886279d380166953f12f') extends('python') - depends_on('py-tensorflow@2.1.0:', when='@2.1') - depends_on('py-tensorflow@2.0.0', when='@2.0.0') + depends_on('py-tensorflow@2.2.0:', when='@2.2.0') + depends_on('py-tensorflow@2.1.0:2.1.999', when='@2.1') + depends_on('py-tensorflow@2.0.0:2.0.999', when='@2.0.0') depends_on('py-tensorflow@1.13.1', when='@1.13.0') 
depends_on('bazel@0.19.0:', type='build') diff --git a/var/spack/repos/builtin/packages/py-tensorflow/1-1_fcc_tf_patch.patch b/var/spack/repos/builtin/packages/py-tensorflow/1-1_fcc_tf_patch.patch new file mode 100644 index 00000000000..281cd63b143 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow/1-1_fcc_tf_patch.patch @@ -0,0 +1,12 @@ +diff --git a/tensorflow/tensorflow.bzl b/tensorflow/tensorflow.bzl +index 740f24ec4a..7b8300f678 100644 +--- a/tensorflow/tensorflow.bzl ++++ b/tensorflow/tensorflow.bzl +@@ -1604,6 +1604,7 @@ def _py_wrap_cc_impl(ctx): + outputs = outputs, + mnemonic = "PythonSwig", + progress_message = "SWIGing " + src.path, ++ use_default_shell_env = True, + ) + return struct(files = depset(outputs)) + diff --git a/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.1.patch b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.1.patch new file mode 100644 index 00000000000..8813edf3659 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.1.patch @@ -0,0 +1,14 @@ +--- a/tensorflow/contrib/__init__.py 2020-05-05 17:52:57.716350118 -0500 ++++ b/tensorflow/contrib/__init__.py 2020-05-05 17:56:55.665192882 -0500 +@@ -20,7 +20,10 @@ + + # Add projects here, they will show up under tf.contrib. 
+ from tensorflow.contrib import bayesflow +-from tensorflow.contrib import cloud ++try: ++ from tensorflow.contrib import cloud ++except ImportError: ++ pass + from tensorflow.contrib import compiler + from tensorflow.contrib import copy_graph + from tensorflow.contrib import crf diff --git a/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.10.patch b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.10.patch new file mode 100644 index 00000000000..e76defff499 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.10.patch @@ -0,0 +1,14 @@ +--- a/tensorflow/contrib/__init__.py 2020-05-05 18:14:30.701463150 -0500 ++++ b/tensorflow/contrib/__init__.py 2020-05-05 18:15:12.392435370 -0500 +@@ -26,7 +26,10 @@ + from tensorflow.contrib import bayesflow + from tensorflow.contrib import checkpoint + if os.name != "nt": +- from tensorflow.contrib import cloud ++ try: ++ from tensorflow.contrib import cloud ++ except ImportError: ++ pass + from tensorflow.contrib import cluster_resolver + from tensorflow.contrib import coder + from tensorflow.contrib import compiler diff --git a/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.4.patch b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.4.patch new file mode 100644 index 00000000000..a65417233ed --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.4.patch @@ -0,0 +1,14 @@ +--- a/tensorflow/contrib/__init__.py 2020-05-05 18:08:09.361724827 -0500 ++++ b/tensorflow/contrib/__init__.py 2020-05-05 18:08:46.345699058 -0500 +@@ -20,7 +20,10 @@ + + # Add projects here, they will show up under tf.contrib. 
+ from tensorflow.contrib import bayesflow +-from tensorflow.contrib import cloud ++try: ++ from tensorflow.contrib import cloud ++except ImportError: ++ pass + from tensorflow.contrib import cluster_resolver + from tensorflow.contrib import compiler + from tensorflow.contrib import copy_graph diff --git a/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.9.patch b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.9.patch new file mode 100644 index 00000000000..2a0ac1f7137 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tensorflow/contrib_cloud_1.9.patch @@ -0,0 +1,14 @@ +--- a/tensorflow/contrib/__init__.py 2020-05-05 18:11:33.660582455 -0500 ++++ b/tensorflow/contrib/__init__.py 2020-05-05 18:12:32.570541708 -0500 +@@ -25,7 +25,10 @@ + from tensorflow.contrib import batching + from tensorflow.contrib import bayesflow + from tensorflow.contrib import checkpoint +-from tensorflow.contrib import cloud ++try: ++ from tensorflow.contrib import cloud ++except ImportError: ++ pass + from tensorflow.contrib import cluster_resolver + from tensorflow.contrib import coder + from tensorflow.contrib import compiler diff --git a/var/spack/repos/builtin/packages/py-tensorflow/package.py b/var/spack/repos/builtin/packages/py-tensorflow/package.py index 64b97b05b26..5c8a10a4a92 100644 --- a/var/spack/repos/builtin/packages/py-tensorflow/package.py +++ b/var/spack/repos/builtin/packages/py-tensorflow/package.py @@ -11,14 +11,18 @@ class PyTensorflow(Package, CudaPackage): """ homepage = "https://www.tensorflow.org" - url = "https://github.com/tensorflow/tensorflow/archive/v2.1.0.tar.gz" + url = "https://github.com/tensorflow/tensorflow/archive/v2.2.0.tar.gz" maintainers = ['adamjstewart'] import_modules = ['tensorflow'] + version('2.2.0', sha256='69cd836f87b8c53506c4f706f655d423270f5a563b76dc1cfa60fbc3184185a3') + version('2.1.1', sha256='a200bc16e4b630db3ac7225bcb6f239a76841967b0aec1d7d7bbe44dc5661318') version('2.1.0', 
sha256='638e541a4981f52c69da4a311815f1e7989bf1d67a41d204511966e1daed14f7') + version('2.0.2', sha256='a548742bbafd302eec51e2794d7687674a64f6b10ce1414073858cb83c0cefc2') version('2.0.1', sha256='29197d30923b9670992ee4b9c6161f50c7452e9a4158c720746e846080ac245a') version('2.0.0', sha256='49b5f0495cd681cbcb5296a4476853d4aea19a43bdd9f179c928a977308a0617') + version('1.15.3', sha256='9ab1d92e58eb813922b040acc7622b32d73c2d8d971fe6491a06f9df4c778151') version('1.15.2', sha256='d95d75d26a298211b5e802842e87fda5b8b14f6ad83719377b391e5fb71b8746') version('1.15.1', sha256='19b6e72bc8675937f618cede364d7228a71c2eeaffc42801bcefd98dda7ca056') version('1.15.0', sha256='a5d49c00a175a61da7431a9b289747d62339be9cf37600330ad63b611f7f5dc9') @@ -82,6 +86,7 @@ class PyTensorflow(Package, CudaPackage): variant('dynamic_kernels', default=False, description='Build kernels into separate shared objects') extends('python') + depends_on('python@3:', type=('build', 'run'), when='@2.1:') # TODO: Older versions of TensorFlow don't list the viable version range, # just the minimum version of bazel that will work. The latest version of @@ -89,7 +94,8 @@ class PyTensorflow(Package, CudaPackage): # Need to investigate further. 
# See _TF_MIN_BAZEL_VERSION and _TF_MAX_BAZEL_VERSION in configure.py - depends_on('bazel@0.27.1:0.29.1', type='build', when='@2.1:') + depends_on('bazel@2.0.0', type='build', when='@2.2:') + depends_on('bazel@0.27.1:0.29.1', type='build', when='@2.1.0:2.1.999') depends_on('bazel@0.24.1:0.26.1', type='build', when='@1.15:2.0') # See call to check_bazel_version in configure.py depends_on('bazel@0.24.1:0.25.2', type='build', when='@1.14.0') @@ -116,18 +122,22 @@ class PyTensorflow(Package, CudaPackage): # Listed under REQUIRED_PACKAGES in tensorflow/tools/pip_package/setup.py depends_on('py-absl-py@0.7.0:', type=('build', 'run'), when='@1.12.1,1.14:') depends_on('py-absl-py@0.1.6:', type=('build', 'run'), when='@1.5:') - depends_on('py-astor@0.6.0:', type=('build', 'run'), when='@1.6:') + depends_on('py-astunparse@1.6.3', type=('build', 'run'), when='@2.2:') + depends_on('py-astor@0.6.0:', type=('build', 'run'), when='@1.6:2.1') depends_on('py-backports-weakref@1.0:', type=('build', 'run'), when='@1.3: ^python@:3.3') depends_on('py-backports-weakref@1.0rc1', type=('build', 'run'), when='@1.2.0:1.2.1') depends_on('py-enum34@1.1.6:', type=('build', 'run'), when='@1.5: ^python@:3.3') depends_on('py-enum34@1.1.6:', type=('build', 'run'), when='@1.4.0:1.4.1') - depends_on('py-gast@0.2.2', type=('build', 'run'), when='@1.15:') - depends_on('py-gast@0.2.0:', type=('build', 'run'), when='@1.6:') + depends_on('py-gast@0.3.3', type=('build', 'run'), when='@2.2:') + depends_on('py-gast@0.2.2', type=('build', 'run'), when='@1.15:2.1') + depends_on('py-gast@0.2.0:', type=('build', 'run'), when='@1.6:1.14') + depends_on('py-google-pasta@0.1.8:', type=('build', 'run'), when='@2.1:') depends_on('py-google-pasta@0.1.6:', type=('build', 'run'), when='@1.14:') depends_on('py-google-pasta@0.1.2:', type=('build', 'run'), when='@1.12.1') - depends_on('py-keras-applications@1.0.8:', type=('build', 'run'), when='@1.15:') - depends_on('py-keras-applications@1.0.6:', type=('build', 'run'), 
when='@1.12:') - depends_on('py-keras-applications@1.0.5:', type=('build', 'run'), when='@1.11:') + depends_on('py-h5py@2.10.0:2.10.999', type=('build', 'run'), when='@2.2:') + depends_on('py-keras-applications@1.0.8:', type=('build', 'run'), when='@1.15:2.1') + depends_on('py-keras-applications@1.0.6:', type=('build', 'run'), when='@1.12:1.14') + depends_on('py-keras-applications@1.0.5:', type=('build', 'run'), when='@1.11.0:1.11.999') depends_on('py-keras-preprocessing@1.1.0:', type=('build', 'run'), when='@2.1:') depends_on('py-keras-preprocessing@1.0.5:', type=('build', 'run'), when='@1.12:') depends_on('py-keras-preprocessing@1.0.3:', type=('build', 'run'), when='@1.11:') @@ -152,6 +162,7 @@ class PyTensorflow(Package, CudaPackage): depends_on('py-protobuf@3.0.0', type=('build', 'run'), when='@0.11.0') depends_on('py-protobuf@3.0.0b2', type=('build', 'run'), when='@0.7.1:0.10') depends_on('py-protobuf@3.0.0a3', type=('build', 'run'), when='@0.6:0.7.0') + depends_on('protobuf') # tensorboard # tensorflow-estimator depends_on('py-termcolor@1.1.0:', type=('build', 'run'), when='@1.6:') @@ -245,6 +256,18 @@ class PyTensorflow(Package, CudaPackage): patch('0001-Remove-contrib-cloud-bigtable-and-storage-ops-kernel.patch', when='@2.0.0:2.0.1') + # for fcc + patch('1-1_fcc_tf_patch.patch', when='@2.1.0:2.1.99%fj') + + # do not import contrib.cloud if not available + patch('https://github.com/tensorflow/tensorflow/commit/ed62ac8203999513dfae03498e871ea35eb60cc4.patch', + sha256='c37d14622a86b164e2411ea45a04f756ac61b2044d251f19ab17733c508e5305', when='@1.14.0') + # import_contrib_cloud patch for older versions + patch('contrib_cloud_1.10.patch', when='@1.10:1.13') + patch('contrib_cloud_1.9.patch', when='@1.9') + patch('contrib_cloud_1.4.patch', when='@1.4:1.8') + patch('contrib_cloud_1.1.patch', when='@1.1:1.3') + phases = ['configure', 'build', 'install'] # https://www.tensorflow.org/install/source @@ -498,6 +521,11 @@ def setup_build_environment(self, env): 
mkdirp(tmp_path) env.set('TEST_TMPDIR', tmp_path) + env.set('TF_SYSTEM_LIBS', 'com_google_protobuf') + # NOTE: INCLUDEDIR is not just relevant to protobuf + # see third_party/systemlibs/jsoncpp.BUILD + env.set('INCLUDEDIR', spec['protobuf'].prefix.include) + def configure(self, spec, prefix): # NOTE: configure script is interactive. If you set the appropriate # environment variables, this interactivity is skipped. If you don't, @@ -509,6 +537,13 @@ def configure(self, spec, prefix): @run_after('configure') def post_configure_fixes(self): spec = self.spec + + # make sure xla is actually turned off + if spec.satisfies('~xla'): + filter_file(r'--define with_xla_support=true', + r'--define with_xla_support=false', + '.tf_configure.bazelrc') + if spec.satisfies('@1.5.0: ~android'): # env variable is somehow ignored -> brute force # TODO: find a better solution @@ -604,6 +639,11 @@ def post_configure_fixes(self): 'build --action_env LD_LIBRARY_PATH="' + slibs + '"', '.tf_configure.bazelrc') + filter_file('build:opt --copt=-march=native', '', + '.tf_configure.bazelrc') + filter_file('build:opt --host_copt=-march=native', '', + '.tf_configure.bazelrc') + def build(self, spec, prefix): tmp_path = env['TEST_TMPDIR'] @@ -626,6 +666,7 @@ def build(self, spec, prefix): # Ask bazel to explain what it's up to # Needs a filename as argument '--explain=explainlogfile.txt', + '--incompatible_no_support_tools_in_action_inputs=false', # Increase verbosity of explanation, '--verbose_explanations', ] @@ -679,9 +720,6 @@ def build(self, spec, prefix): if spec.satisfies('@2:'): args.append('--config=v2') - if spec.satisfies('%gcc@5:'): - args.append('--cxxopt=-D_GLIBCXX_USE_CXX11_ABI=0') - args.append('//tensorflow/tools/pip_package:build_pip_package') bazel(*args) diff --git a/var/spack/repos/builtin/packages/py-thinc/package.py b/var/spack/repos/builtin/packages/py-thinc/package.py new file mode 100644 index 00000000000..b081c9db395 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/py-thinc/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyThinc(PythonPackage): + """Thinc: Practical Machine Learning for NLP in Python.""" + + homepage = "https://github.com/explosion/thinc" + url = "https://pypi.io/packages/source/t/thinc/thinc-7.4.0.tar.gz" + + version('7.4.0', sha256='523e9be1bfaa3ed1d03d406ce451b6b4793a9719d5b83d2ea6b3398b96bc58b8') + + depends_on('py-setuptools', type='build') + depends_on('py-murmurhash@0.28:1.0', type=('build', 'run')) + depends_on('py-cymem@2.0.2:2.0.999', type=('build', 'run')) + depends_on('py-preshed@1.0.1:3.0', type=('build', 'run')) + depends_on('py-blis@0.4.0:0.4.999', type=('build', 'run')) + depends_on('py-wasabi@0.0.9:1.0', type=('build', 'run')) + depends_on('py-srsly@0.0.6:1.0', type=('build', 'run')) + depends_on('py-catalogue@0.0.7:1.0', type=('build', 'run')) + depends_on('py-numpy@1.7:', type=('build', 'run')) + depends_on('py-plac@0.9.6:1.1', type=('build', 'run')) + depends_on('py-tqdm@4.10:4.999', type=('build', 'run')) + depends_on('py-pathlib@1.0.1', when='^python@:3.3', type=('build', 'run')) + depends_on('py-pytest', type='test') + depends_on('py-mock', type='test') + depends_on('py-hypothesis', type='test') diff --git a/var/spack/repos/builtin/packages/py-threadpoolctl/package.py b/var/spack/repos/builtin/packages/py-threadpoolctl/package.py new file mode 100644 index 00000000000..f28c9a37b52 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-threadpoolctl/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyThreadpoolctl(PythonPackage): + """Python helpers to limit the number of threads used in the + threadpool-backed of common native libraries used for scientific + computing and data science (e.g. BLAS and OpenMP).""" + + homepage = "https://github.com/joblib/threadpoolctl" + url = "https://pypi.io/packages/source/t/threadpoolctl/threadpoolctl-2.0.0.tar.gz" + + import_modules = ['threadpoolctl'] + + version('2.0.0', sha256='48b3e3e9ee079d6b5295c65cbe255b36a3026afc6dde3fb49c085cd0c004bbcf') + + depends_on('python@3.5:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-torch/detect_omp_of_fujitsu_compiler.patch b/var/spack/repos/builtin/packages/py-torch/detect_omp_of_fujitsu_compiler.patch new file mode 100644 index 00000000000..519d66869d5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-torch/detect_omp_of_fujitsu_compiler.patch @@ -0,0 +1,20 @@ +--- pytorch/cmake/Modules/FindOpenMP.cmake.org 2020-05-26 17:43:53.000000000 +0900 ++++ pytorch/cmake/Modules/FindOpenMP.cmake 2020-05-26 17:46:37.000000000 +0900 +@@ -84,7 +84,7 @@ + unset(OpenMP_FLAG_CANDIDATES) + + set(OMP_FLAG_GNU "-fopenmp") +- set(OMP_FLAG_Clang "-fopenmp=libomp" "-fopenmp=libiomp5" "-fopenmp") ++ set(OMP_FLAG_Clang "-fopenmp" "-fopenmp=libomp" "-fopenmp=libiomp5") + + # AppleClang may need a header file, search for omp.h with hints to brew + # default include dir +@@ -245,7 +245,7 @@ + set(OpenMP_libomp_LIBRARY "${MKL_OPENMP_LIBRARY}" CACHE STRING "libomp location for OpenMP") + else() + find_library(OpenMP_libomp_LIBRARY +- NAMES omp gomp iomp5 ++ NAMES fjomp omp gomp iomp5 + HINTS ${CMAKE_${LANG}_IMPLICIT_LINK_DIRECTORIES} + DOC "libomp location for OpenMP" + ) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 31bfb716578..c240144f11d 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ 
b/var/spack/repos/builtin/packages/py-torch/package.py @@ -6,7 +6,6 @@ from spack import * -# TODO: try switching to CMakePackage for more control over build class PyTorch(PythonPackage, CudaPackage): """Tensors and Dynamic neural networks in Python with strong GPU acceleration.""" @@ -51,8 +50,9 @@ class PyTorch(PythonPackage, CudaPackage): ] version('master', branch='master', submodules=True) + version('1.5.1', tag='v1.5.1', submodules=True) + version('1.5.0', tag='v1.5.0', submodules=True) version('1.4.1', tag='v1.4.1', submodules=True) - # see https://github.com/pytorch/pytorch/issues/35149 version('1.4.0', tag='v1.4.0', submodules=True, submodules_delete=['third_party/fbgemm']) version('1.3.1', tag='v1.3.1', submodules=True) @@ -75,6 +75,7 @@ class PyTorch(PythonPackage, CudaPackage): variant('mkldnn', default=True, description='Enables use of MKLDNN') variant('nnpack', default=False, description='Enables NNPACK build') variant('qnnpack', default=False, description='Enables QNNPACK build (quantized 8-bit operators)') + variant('xnnpack', default=False, description='Enables XNNPACK build') variant('distributed', default=False, description='Enables distributed (c10d, gloo, mpi, etc.) 
build') variant('nccl', default=True, description='Use Spack-installed NCCL') variant('caffe2', default=False, description='Enables Caffe2 operators build') @@ -95,6 +96,7 @@ class PyTorch(PythonPackage, CudaPackage): conflicts('+miopen', when='@:0.4') conflicts('+mkldnn', when='@:0.3') conflicts('+qnnpack', when='@:0.4') + conflicts('+xnnpack', when='@:1.4') conflicts('+nccl', when='~cuda') conflicts('+opencv', when='@:0.4') conflicts('+ffmpeg', when='@:0.4') @@ -104,42 +106,26 @@ class PyTorch(PythonPackage, CudaPackage): conflicts('+redis', when='@:1.0') conflicts('+zstd', when='@:1.0') conflicts('+tbb', when='@:1.1') - # see https://github.com/pytorch/pytorch/issues/35149 + # https://github.com/pytorch/pytorch/issues/35149 conflicts('+fbgemm', when='@1.4.0') - cuda_arch_conflict = ('This version of Torch/Caffe2 only supports compute ' - 'capabilities ') - conflicts('cuda_arch=none', when='+cuda', msg='Must specify CUDA compute capabilities of your GPU, see ' 'https://developer.nvidia.com/cuda-gpus') - conflicts('cuda_arch=52', when='@1.3.0:+cuda', - msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=50', when='@1.3.0:+cuda', - msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=35', when='@1.3.0:+cuda', - msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=32', when='@1.3.0:+cuda', - msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=30', when='@1.3.0:+cuda', - msg=cuda_arch_conflict + '>=5.3') - conflicts('cuda_arch=30', when='@1.2.0:+cuda', - msg=cuda_arch_conflict + '>=3.2') - conflicts('cuda_arch=20', when='@1.0.0:+cuda', - msg=cuda_arch_conflict + '>=3.0') # Required dependencies depends_on('cmake@3.5:', type='build') # Use Ninja generator to speed up build times # Automatically used if found depends_on('ninja@1.5:', type='build') + depends_on('python@3.5:', when='@1.5:', type=('build', 'run')) depends_on('python@2.7:2.8,3.5:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-numpy', type=('build', 
'run')) depends_on('py-future', when='@1.1: ^python@:2', type='build') depends_on('py-pyyaml', type=('build', 'run')) depends_on('py-typing', when='@0.4: ^python@:3.4', type=('build', 'run')) - depends_on('py-pybind11', when='@0.4:', type=('build', 'run')) + depends_on('py-pybind11', when='@0.4:', type=('build', 'link', 'run')) depends_on('blas') depends_on('lapack') depends_on('protobuf', when='@0.4:') @@ -163,6 +149,8 @@ class PyTorch(PythonPackage, CudaPackage): # TODO: add dependency: https://github.com/Maratyszcza/NNPACK # depends_on('nnpack', when='+nnpack') depends_on('qnnpack', when='+qnnpack') + # TODO: add dependency: https://github.com/google/XNNPACK + # depends_on('xnnpack', when='+xnnpack') depends_on('mpi', when='+distributed') depends_on('nccl', when='+nccl') depends_on('gloo', when='+gloo') @@ -180,6 +168,21 @@ class PyTorch(PythonPackage, CudaPackage): depends_on('py-six', type='test') depends_on('py-psutil', type='test') + # https://github.com/pytorch/pytorch/pull/35607 + # https://github.com/pytorch/pytorch/pull/37865 + # Fixes CMake configuration error when XNNPACK is disabled + patch('xnnpack.patch', when='@1.5.0:1.5.999') + + # https://github.com/pytorch/pytorch/pull/37086 + # Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3 + patch('https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch', + sha256='17561b16cd2db22f10c0fe1fdcb428aecb0ac3964ba022a41343a6bb8cba7049', + when='@1.1:1.5') + + # Fix for 'FindOpenMP.cmake' + # to detect openmp settings used by Fujitsu compiler. 
+ patch('detect_omp_of_fujitsu_compiler.patch', when='%fj') + # Both build and install run cmake/make/make install # Only run once to speed up build times phases = ['install'] @@ -249,10 +252,11 @@ def enable_or_disable(variant, keyword='USE', var=None, newer=False): enable_or_disable('mkldnn') if '@0.4:0.4.1+mkldnn' in self.spec: - env.set('MKLDNN_HOME', self.spec['intel-mkl-dnn'].prefix) + env.set('MKLDNN_HOME', self.spec['onednn'].prefix) enable_or_disable('nnpack') enable_or_disable('qnnpack') + enable_or_disable('xnnpack') enable_or_disable('distributed') enable_or_disable('nccl') @@ -271,8 +275,9 @@ def enable_or_disable(variant, keyword='USE', var=None, newer=False): enable_or_disable('lmdb', newer=True) enable_or_disable('binary', keyword='BUILD', newer=True) - env.set('PYTORCH_BUILD_VERSION', self.version) - env.set('PYTORCH_BUILD_NUMBER', 0) + if not self.spec.satisfies('@master'): + env.set('PYTORCH_BUILD_VERSION', self.version) + env.set('PYTORCH_BUILD_NUMBER', 0) # BLAS to be used by Caffe2 if '^mkl' in self.spec: diff --git a/var/spack/repos/builtin/packages/py-torch/xnnpack.patch b/var/spack/repos/builtin/packages/py-torch/xnnpack.patch new file mode 100644 index 00000000000..154033081e7 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-torch/xnnpack.patch @@ -0,0 +1,47 @@ +diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt +index 8025a7de3c..0da37079d6 100644 +--- a/caffe2/CMakeLists.txt ++++ b/caffe2/CMakeLists.txt +@@ -46,12 +46,19 @@ if (INTERN_BUILD_ATEN_OPS) + list(APPEND Caffe2_DEPENDENCY_INCLUDE ${ATen_THIRD_PARTY_INCLUDE}) + endif() + ++# {Q/X,etc} NPACK support is enabled by default, if none of these options ++# are selected, turn this flag ON to incidate the support is disabled ++set(NNPACK_AND_FAMILY_DISABLED OFF) ++if(NOT (USE_NNPACK OR USE_QNNPACK OR USE_PYTORCH_QNNPACK OR USE_XNNPACK)) ++ set(NNPACK_AND_FAMILY_DISABLED ON) ++endif() ++ + # ---[ Caffe2 build + # Note: the folders that are being commented out have not been 
properly + # addressed yet. + + # For pthreadpool_new_if_impl. TODO: Remove when threadpools are unitied. +-if (NOT MSVC) ++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED) + IF(NOT TARGET fxdiv) + SET(FXDIV_BUILD_TESTS OFF CACHE BOOL "") + SET(FXDIV_BUILD_BENCHMARKS OFF CACHE BOOL "") +@@ -710,7 +717,7 @@ ELSEIF(USE_CUDA) + ENDIF() + + +-if (NOT MSVC) ++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED) + TARGET_LINK_LIBRARIES(torch_cpu PRIVATE fxdiv) + endif() + +diff --git a/caffe2/utils/CMakeLists.txt b/caffe2/utils/CMakeLists.txt +index 27aabb1315..3c7845c67d 100644 +--- a/caffe2/utils/CMakeLists.txt ++++ b/caffe2/utils/CMakeLists.txt +@@ -36,7 +36,7 @@ list(APPEND Caffe2_CPU_SRCS + # ---[ threadpool/pthreadpool* is a local modification of the NNPACK + # pthreadpool with a very similar interface. Neither NNPACK, nor this + # thread pool supports Windows. +-if (NOT MSVC) ++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED) + add_definitions(-DUSE_INTERNAL_THREADPOOL_IMPL) + set(Caffe2_CPU_SRCS ${Caffe2_CPU_SRCS} + utils/threadpool/pthreadpool.cc diff --git a/var/spack/repos/builtin/packages/py-torchvision/package.py b/var/spack/repos/builtin/packages/py-torchvision/package.py index bfef2202e26..4e40117c8d4 100644 --- a/var/spack/repos/builtin/packages/py-torchvision/package.py +++ b/var/spack/repos/builtin/packages/py-torchvision/package.py @@ -11,7 +11,7 @@ class PyTorchvision(PythonPackage): architectures, and common image transformations for computer vision.""" homepage = "https://github.com/pytorch/vision" - url = "https://github.com/pytorch/vision/archive/v0.5.0.tar.gz" + url = "https://github.com/pytorch/vision/archive/v0.6.1.tar.gz" maintainers = ['adamjstewart'] import_modules = [ @@ -21,17 +21,21 @@ class PyTorchvision(PythonPackage): 'torchvision.models.detection' ] + version('0.6.1', sha256='8173680a976c833640ecbd0d7e6f0a11047bf8833433e2147180efc905e48656') + version('0.6.0', sha256='02de11b3abe6882de4032ce86dab9c7794cbc84369b44d04e667486580f0f1f7') 
version('0.5.0', sha256='eb9afc93df3d174d975ee0914057a9522f5272310b4d56c150b955c287a4d74d') version('0.4.2', sha256='1184a27eab85c9e784bacc6f9d6fec99e168ab4eda6047ef9f709e7fdb22d8f9') version('0.4.1', sha256='053689351272b3bd2ac3e6ba51efd284de0e4ca4a301f54674b949f1e62b7176') version('0.4.0', sha256='c270d74e568bad4559fed4544f6dd1e22e2eb1c60b088e04a5bd5787c4150589') version('0.3.0', sha256='c205f0618c268c6ed2f8abb869ef6eb83e5339c1336c243ad321a2f2a85195f0') + depends_on('python@3:', when='@0.6:', type=('build', 'run')) depends_on('py-setuptools', type='build') depends_on('py-numpy', type=('build', 'run')) - depends_on('py-six', type=('build', 'run')) - depends_on('py-torch@1.2.0:', when='@0.4.0:', type=('build', 'run')) - depends_on('py-torch@1.1.0:', type=('build', 'run')) + depends_on('py-six', when='@:0.5', type=('build', 'run')) + depends_on('py-torch@1.4:', when='@0.6:', type=('build', 'link', 'run')) + depends_on('py-torch@1.2:', when='@0.4:', type=('build', 'link', 'run')) + depends_on('py-torch@1.1:', type=('build', 'link', 'run')) # https://github.com/pytorch/vision/issues/1712 depends_on('py-pillow@4.1.1:6', when='@:0.4', type=('build', 'run')) # or py-pillow-simd depends_on('py-pillow@4.1.1:', when='@0.5:', type=('build', 'run')) # or py-pillow-simd @@ -39,3 +43,5 @@ class PyTorchvision(PythonPackage): # Many of the datasets require additional dependencies to use. # These can be installed after the fact. 
depends_on('py-scipy', type='test') + + depends_on('ffmpeg@3.1:', when='@0.4.2:') diff --git a/var/spack/repos/builtin/packages/py-tqdm/package.py b/var/spack/repos/builtin/packages/py-tqdm/package.py index be2ed3cecb1..ff7b9947590 100644 --- a/var/spack/repos/builtin/packages/py-tqdm/package.py +++ b/var/spack/repos/builtin/packages/py-tqdm/package.py @@ -10,8 +10,9 @@ class PyTqdm(PythonPackage): """A Fast, Extensible Progress Meter""" homepage = "https://github.com/tqdm/tqdm" - url = "https://pypi.io/packages/source/t/tqdm/tqdm-4.36.1.tar.gz" + url = "https://pypi.io/packages/source/t/tqdm/tqdm-4.45.0.tar.gz" + version('4.45.0', sha256='00339634a22c10a7a22476ee946bbde2dbe48d042ded784e4d88e0236eca5d81') version('4.36.1', sha256='abc25d0ce2397d070ef07d8c7e706aede7920da163c64997585d42d3537ece3d') version('4.8.4', sha256='bab05f8bb6efd2702ab6c532e5e6a758a66c0d2f443e09784b73e4066e6b3a37') diff --git a/var/spack/repos/builtin/packages/py-typing-extensions/package.py b/var/spack/repos/builtin/packages/py-typing-extensions/package.py index 7a2b3ab5fb1..02376449f03 100644 --- a/var/spack/repos/builtin/packages/py-typing-extensions/package.py +++ b/var/spack/repos/builtin/packages/py-typing-extensions/package.py @@ -17,7 +17,9 @@ class PyTypingExtensions(PythonPackage): version('3.7.4', sha256='2ed632b30bb54fc3941c382decfd0ee4148f5c591651c9272473fea2c6397d95') version('3.7.2', sha256='fb2cd053238d33a8ec939190f30cfd736c00653a85a2919415cecf7dc3d9da71') + version('3.6.6', sha256='51e7b7f3dcabf9ad22eed61490f3b8d23d9922af400fe6656cb08e66656b701f') depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) depends_on('py-setuptools', type='build') - depends_on('py-typing@3.7.4:', when='^python@:3.4', type=('build', 'run')) + depends_on('py-typing@3.7.4:', when='@3.7: ^python@:3.4', type=('build', 'run')) + depends_on('py-typing@3.6.2:', when='^python@:3.4', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/py-uncertainties/package.py 
b/var/spack/repos/builtin/packages/py-uncertainties/package.py new file mode 100644 index 00000000000..67a9ee0cc59 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-uncertainties/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyUncertainties(PythonPackage): + """Transparent calculations with uncertainties on the quantities involved + (aka error propagation); fast calculation of derivatives + """ + + homepage = "https://uncertainties-python-package.readthedocs.io/en/latest/" + url = "https://pypi.io/packages/source/u/uncertainties/uncertainties-3.1.4.tar.gz" + + version('3.1.4', sha256='63548a94899f2a51eeb89b640f6ac311f481a8016b37dce157186e44619bc968') + + variant('optional', default=False, description='Enable extra features involving numpy') + variant('docs', default=False, description='Build with documentation') + + depends_on('python@2.7:', type=('build', 'run')) + depends_on('py-setuptools', type='build') + depends_on('py-future', type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run'), when='+optional') + depends_on('py-sphinx', type='build', when='+docs') diff --git a/var/spack/repos/builtin/packages/py-wasabi/package.py b/var/spack/repos/builtin/packages/py-wasabi/package.py new file mode 100644 index 00000000000..c03d14f3e1b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-wasabi/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class PyWasabi(PythonPackage): + """wasabi: A lightweight console printing and formatting toolkit.""" + + homepage = "https://ines.io/" + url = "https://pypi.io/packages/source/w/wasabi/wasabi-0.6.0.tar.gz" + + version('0.6.0', sha256='b8dd3e963cd693fde1eb6bfbecf51790171aa3534fa299faf35cf269f2fd6063') + + depends_on('py-setuptools', type='build') + depends_on('py-pytest', type='test') diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py index 567eefbc520..476b7b9e07a 100644 --- a/var/spack/repos/builtin/packages/py-wheel/package.py +++ b/var/spack/repos/builtin/packages/py-wheel/package.py @@ -14,6 +14,7 @@ class PyWheel(PythonPackage): version('0.33.4', sha256='62fcfa03d45b5b722539ccbc07b190e4bfff4bb9e3a4d470dd9f6a0981002565') version('0.33.1', sha256='66a8fd76f28977bb664b098372daef2b27f60dc4d1688cfab7b37a09448f0e9d') + version('0.32.3', sha256='029703bf514e16c8271c3821806a1c171220cc5bdd325cbf4e7da1e056a01db6') version('0.29.0', sha256='1ebb8ad7e26b448e9caa4773d2357849bf80ff9e313964bcaf79cbf0201a1648') version('0.26.0', sha256='eaad353805c180a47545a256e6508835b65a8e830ba1093ed8162f19a50a530c') diff --git a/var/spack/repos/builtin/packages/py-workload-automation/package.py b/var/spack/repos/builtin/packages/py-workload-automation/package.py new file mode 100644 index 00000000000..bc65e210ffd --- /dev/null +++ b/var/spack/repos/builtin/packages/py-workload-automation/package.py @@ -0,0 +1,38 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyWorkloadAutomation(PythonPackage): + """Workload Automation (WA) is a framework for executing workloads and + collecting measurements on Android and Linux devices.""" + + homepage = "https://github.com/ARM-software/workload-automation" + url = "https://github.com/ARM-software/workload-automation/archive/v3.2.tar.gz" + + version('3.2', sha256='a3db9df6a9e0394231560ebe6ba491a513f6309e096eaed3db6f4cb924c393ea') + version('3.1.4', sha256='217fc33a3739d011a086315ef86b90cf332c16d1b03c9dcd60d58c9fd1f37f98') + version('3.1.3', sha256='152470808cf8dad8a833fd7b2cb7d77cf8aa5d1af404e37fa0a4ff3b07b925b2') + version('3.1.2', sha256='8226a6abc5cbd96e3f1fd6df02891237a06cdddb8b1cc8916f255fcde20d3069') + version('3.1.1', sha256='32a19be92e43439637c68d9146f21bb7a0ae7b8652c11dfc4b4bd66d59329ad4') + version('3.1.0', sha256='f00aeef7a1412144c4139c23b4c48583880ba2147207646d96359f1d295d6ac3') + version('3.0.0', sha256='8564b0c67541e3a212363403ee090dfff5e4df85770959a133c0979445b51c3c') + version('2.7.0', sha256='e9005b9db18e205bf6c4b3e09b15a118abeede73700897427565340dcd589fbb') + version('2.6.0', sha256='b94341fb067592cebe0db69fcf7c00c82f96b4eb7c7210e34b38473869824cce') + + depends_on('py-setuptools', type='build') + depends_on('py-python-dateutil', type=('build', 'run')) + depends_on('py-pexpect@3.3:', type=('build', 'run')) + depends_on('py-pyserial', type=('build', 'run')) + depends_on('py-colorama', type=('build', 'run')) + depends_on('py-pyyaml@5.1:', type=('build', 'run')) + depends_on('py-requests', type=('build', 'run')) + depends_on('py-wrapt', type=('build', 'run')) + depends_on('py-pandas@0.23.0:', type=('build', 'run'), when='^python@3.5.3:') + depends_on('py-pandas@0.23.0:0.24.2', type=('build', 'run'), when='^python@:3.5.2') + depends_on('py-future', type=('build', 'run')) + depends_on('py-louie', type=('build', 'run')) + depends_on('py-devlib', type=('build', 'run')) diff --git 
a/var/spack/repos/builtin/packages/py-xenv/package.py b/var/spack/repos/builtin/packages/py-xenv/package.py index c52b71098a4..1ccda619598 100644 --- a/var/spack/repos/builtin/packages/py-xenv/package.py +++ b/var/spack/repos/builtin/packages/py-xenv/package.py @@ -10,12 +10,10 @@ class PyXenv(PythonPackage): """Helpers to work with the environment in a platform independent way.""" homepage = "https://gitlab.cern.ch/gaudi/xenv" + url = "https://pypi.io/packages/source/x/xenv/xenv-1.0.0.tar.gz" git = "https://gitlab.cern.ch/gaudi/xenv.git" - # As of 0.0.4, all released versions of xenv corrupt the system environment - # in a manner which breaks Spack's compiler wrappers. Therefore, we must - # package an un-released development version of xenv. version('develop', branch='master') - version('develop_2018-12-20', commit='ddc3bf5e65e1689da499f639af7a27c5c4242841') + version('1.0.0', sha256='cea9547295f0bd07c87e68353bb9eb1c2f2d1c09a840e3196c19cbc807ee4558') - depends_on('py-setuptools', type='build') + depends_on('py-setuptools', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py index 8f2ec41b545..572d65922be 100644 --- a/var/spack/repos/builtin/packages/pythia8/package.py +++ b/var/spack/repos/builtin/packages/pythia8/package.py @@ -23,13 +23,46 @@ class Pythia8(AutotoolsPackage): version('8212', sha256='f8fb4341c7e8a8be3347eb26b00329a388ccf925313cfbdba655a08d7fd5a70e') variant('shared', default=True, description='Build shared library') + variant('hepmc', default=True, description='Build HepMC2 extensions') + variant('evtgen', default=False, description='Build EvtGen extensions') + variant('root', default=False, description='Build ROOT extensions') + variant('fastjet', default=False, description='Build fastjet extensions') depends_on('rsync', type='build') + depends_on('hepmc@:2.99.99', when="+hepmc") + depends_on('root', when="+root") + depends_on('evtgen', when="+evtgen") + 
depends_on("fastjet@3.0.0:", when="+fastjet") + + conflicts("^evtgen+pythia8", when="+evtgen", + msg="Building pythia with evtgen bindings and " + "evtgen with pythia bindings results in a circular dependency " + "that cannot be resolved at the moment! " + "Use pythia8+evtgen^evtgen~pythia8") def configure_args(self): args = [] if '+shared' in self.spec: args.append('--enable-shared') + if '+hepmc' in self.spec: + args.append('--with-hepmc=%s' % self.spec["hepmc"].prefix) + else: + args.append('--without-hepmc') + if '+fastjet' in self.spec: + args.append('--with-fastjet3=%s' % self.spec["fastjet"].prefix) + else: + args.append('--without-fastjet') + if '+evtgen' in self.spec: + args.append('--with-evtgen=%s' % self.spec["evtgen"].prefix) + else: + args.append('--without-evtgen') + if '+root' in self.spec: + args.append('--with-root=%s' % self.spec["root"].prefix) + else: + args.append('--without-evtgen') return args + + def setup_dependent_run_environment(self, env, dependent_spec): + env.set('PYTHIA8DATA', self.prefix.share.Pythia8.xmldoc) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 97acde7b59c..bca6162c64a 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -26,15 +26,17 @@ class Python(AutotoolsPackage): homepage = "https://www.python.org/" url = "https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz" - list_url = "https://www.python.org/downloads/" + list_url = "https://www.python.org/ftp/python/" list_depth = 1 maintainers = ['adamjstewart'] + version('3.8.3', sha256='6af6d4d2e010f9655518d0fc6738c7ff7069f10a4d2fbd55509e467f092a8b90') version('3.8.2', sha256='e634a7a74776c2b89516b2e013dda1728c89c8149b9863b8cea21946daf9d561') version('3.8.1', sha256='c7cfa39a43b994621b245e029769e9126caa2a93571cee2e743b213cceac35fb') version('3.8.0', sha256='f1069ad3cae8e7ec467aa98a6565a62a48ef196cb8f1455a245a08db5e1792df') - 
version('3.7.6', sha256='aeee681c235ad336af116f08ab6563361a0c81c537072c1b309d6e4050aa2114', preferred=True) + version('3.7.7', sha256='8c8be91cd2648a1a0c251f04ea0bb4c2a5570feb9c45eaaa2241c785585b475a', preferred=True) + version('3.7.6', sha256='aeee681c235ad336af116f08ab6563361a0c81c537072c1b309d6e4050aa2114') version('3.7.5', sha256='8ecc681ea0600bbfb366f2b173f727b205bb825d93d2f0b286bc4e58d37693da') version('3.7.4', sha256='d63e63e14e6d29e17490abbe6f7d17afb3db182dbd801229f14e55f4157c4ba3') version('3.7.3', sha256='d62e3015f2f89c970ac52343976b406694931742fbde2fed8d1ce8ebb4e1f8ff') @@ -59,6 +61,8 @@ class Python(AutotoolsPackage): version('3.3.6', sha256='0a58ad1f1def4ecc90b18b0c410a3a0e1a48cf7692c75d1f83d0af080e5d2034') version('3.2.6', sha256='fc1e41296e29d476f696303acae293ae7a2310f0f9d0d637905e722a3f16163e') version('3.1.5', sha256='d12dae6d06f52ef6bf1271db4d5b4d14b5dd39813e324314e72b648ef1bc0103') + version('2.7.18', sha256='da3080e3b488f648a3d7a4560ddee895284c3380b11d6de75edb986526b9a814') + version('2.7.17', sha256='f22059d09cdf9625e0a7284d24a13062044f5bf59d93a7f3382190dfa94cecde') version('2.7.16', sha256='01da813a3600876f03f46db11cc5c408175e99f03af2ba942ef324389a83bad5') version('2.7.15', sha256='18617d1f15a380a919d517630a9cd85ce17ea602f9bbdc58ddc672df4b0239db') version('2.7.14', sha256='304c9b202ea6fbd0a4a8e0ad3733715fbd4749f2204a9173a58ec53c32ea73e8') @@ -81,8 +85,9 @@ class Python(AutotoolsPackage): ) # --enable-shared is known to cause problems for some users on macOS + # This is a problem for Python 2.7 only, not Python3 # See http://bugs.python.org/issue29846 - variant('shared', default=sys.platform != 'darwin', + variant('shared', default=True, description='Enable shared libraries') # From https://docs.python.org/2/c-api/unicode.html: Python's default # builds use a 16-bit type for Py_UNICODE and store Unicode values @@ -129,6 +134,9 @@ class Python(AutotoolsPackage): depends_on('readline', when='+readline') depends_on('ncurses', when='+readline') 
depends_on('openssl', when='+ssl') + # https://raw.githubusercontent.com/python/cpython/84471935ed2f62b8c5758fd544c7d37076fe0fa5/Misc/NEWS + # https://docs.python.org/3.5/whatsnew/changelog.html#python-3-5-4rc1 + depends_on('openssl@:1.0.2z', when='@:2.7.13,3.0.0:3.5.2+ssl') depends_on('openssl@1.0.2:', when='@3.7:+ssl') # https://docs.python.org/3/whatsnew/3.7.html#build-changes depends_on('sqlite@3.0.8:', when='+sqlite3') depends_on('gdbm', when='+dbm') # alternatively ndbm or berkeley-db @@ -149,6 +157,22 @@ class Python(AutotoolsPackage): # a Mac. depends_on('libuuid', when='+uuid') + # Python needs to be patched to build extensions w/ mixed C/C++ code: + # https://github.com/NixOS/nixpkgs/pull/19585/files + # https://bugs.python.org/issue1222585 + # + # NOTE: This patch puts Spack's default Python installation out of + # sync with standard Python installs. If you're using such an + # installation as an external and encountering build issues with mixed + # C/C++ modules, consider installing a Spack-managed Python with + # this patch instead. 
For more information, see: + # https://github.com/spack/spack/pull/16856 + patch('python-2.7.8-distutils-C++.patch', when='@2.7.8:2.7.16') + patch('python-2.7.17+-distutils-C++.patch', when='@2.7.17:2.7.18') + patch('python-3.6.8-distutils-C++.patch', when='@3.6.8,3.7.2') + patch('python-3.7.3-distutils-C++.patch', when='@3.7.3') + patch('python-3.7.4+-distutils-C++.patch', when='@3.7.4:3.8') + patch('tkinter.patch', when='@:2.8,3.3:3.7 platform=darwin') # Ensure that distutils chooses correct compiler option for RPATH on cray: @@ -163,6 +187,12 @@ class Python(AutotoolsPackage): # https://github.com/python/cpython/pull/16717 patch('intel-3.6.7.patch', when='@3.6.7:3.6.8,3.7.1:3.7.5 %intel') + # CPython tries to build an Objective-C file with GCC's C frontend + # https://github.com/spack/spack/pull/16222 + # https://github.com/python/cpython/pull/13306 + conflicts('%gcc platform=darwin', + msg='CPython does not compile with GCC on macOS yet, use clang. ' + 'See: https://github.com/python/cpython/pull/13306') # For more information refer to this bug report: # https://bugs.python.org/issue29712 conflicts( @@ -187,17 +217,46 @@ def url_for_version(self, version): url = "https://www.python.org/ftp/python/{0}/Python-{1}.tgz" return url.format(re.split('[a-z]', str(version))[0], version) - @when('@2.7:2.8,3.4:') + # TODO: Ideally, these patches would be applied as separate '@run_before' + # functions enabled via '@when', but these two decorators don't work + # when used together. See: https://github.com/spack/spack/issues/12736 def patch(self): # NOTE: Python's default installation procedure makes it possible for a # user's local configurations to change the Spack installation. In # order to prevent this behavior for a full installation, we must # modify the installation script so that it ignores user files. 
- ff = FileFilter('Makefile.pre.in') - ff.filter( - r'^(.*)setup\.py(.*)((build)|(install))(.*)$', - r'\1setup.py\2 --no-user-cfg \3\6' - ) + if self.spec.satisfies('@2.7:2.8,3.4:'): + ff = FileFilter('Makefile.pre.in') + ff.filter( + r'^(.*)setup\.py(.*)((build)|(install))(.*)$', + r'\1setup.py\2 --no-user-cfg \3\6' + ) + + # NOTE: Older versions of Python do not support the '--with-openssl' + # configuration option, so the installation's module setup file needs + # to be modified directly in order to point to the correct SSL path. + # See: https://stackoverflow.com/a/5939170 + if self.spec.satisfies('@:3.6.999+ssl'): + ff = FileFilter(join_path('Modules', 'Setup.dist')) + ff.filter(r'^#(((SSL=)|(_ssl))(.*))$', r'\1') + ff.filter(r'^#((.*)(\$\(SSL\))(.*))$', r'\1') + ff.filter( + r'^SSL=(.*)$', + r'SSL={0}'.format(self.spec['openssl'].prefix) + ) + # Because Python uses compiler system paths during install, it's + # possible to pick up a system OpenSSL when building 'python~ssl'. + # To avoid this scenario, we disable the 'ssl' module with patching. + elif self.spec.satisfies('@:3.6.999~ssl'): + ff = FileFilter('setup.py') + ff.filter( + r'^(\s+(ssl_((incs)|(libs)))\s+=\s+)(.*)$', + r'\1 None and \6' + ) + ff.filter( + r'^(\s+(opensslv_h)\s+=\s+)(.*)$', + r'\1 None and \3' + ) def setup_build_environment(self, env): spec = self.spec @@ -210,12 +269,30 @@ def setup_build_environment(self, env): tty.warn(('Python v{0} may not install properly if Python ' 'user configurations are present.').format(self.version)) + # TODO: Python has incomplete support for Python modules with mixed + # C/C++ source, and patches are required to enable building for these + # modules. All Python versions without a viable patch are installed + # with a warning message about this potentially erroneous behavior. 
+ if not spec.satisfies('@2.7.8:2.7.18,3.6.8,3.7.2:3.8.3'): + tty.warn(('Python v{0} does not have the C++ "distutils" patch; ' + 'errors may occur when installing Python modules w/ ' + 'mixed C/C++ source files.').format(self.version)) + # Need this to allow python build to find the Python installation. env.set('MACOSX_DEPLOYMENT_TARGET', platform.mac_ver()[0]) env.unset('PYTHONPATH') env.unset('PYTHONHOME') + def flag_handler(self, name, flags): + # python 3.8 requires -fwrapv when compiled with intel + if self.spec.satisfies('@3.8: %intel'): + if name == 'cflags': + flags.append('-fwrapv') + + # allow flags to be passed through compiler wrapper + return (flags, None, None) + def configure_args(self): spec = self.spec config_args = [] @@ -282,8 +359,8 @@ def configure_args(self): if '+pic' in spec: config_args.append('CFLAGS={0}'.format(self.compiler.cc_pic_flag)) - if spec.satisfies('@3.7:'): - if '+ssl' in spec: + if '+ssl' in spec: + if spec.satisfies('@3.7:'): config_args.append('--with-openssl={0}'.format( spec['openssl'].prefix)) @@ -910,7 +987,7 @@ def add_files_to_view(self, view, merge_map): bin_dir = self.spec.prefix.bin for src, dst in merge_map.items(): if not path_contains_subdirectory(src, bin_dir): - view.link(src, dst) + view.link(src, dst, spec=self.spec) elif not os.path.islink(src): copy(src, dst) if 'script' in get_filetype(src): @@ -936,7 +1013,7 @@ def add_files_to_view(self, view, merge_map): orig_link_target = os.path.join(self.spec.prefix, realpath_rel) new_link_target = os.path.abspath(merge_map[orig_link_target]) - view.link(new_link_target, dst) + view.link(new_link_target, dst, spec=self.spec) def remove_files_from_view(self, view, merge_map): bin_dir = self.spec.prefix.bin diff --git a/var/spack/repos/builtin/packages/python/python-2.7.17+-distutils-C++.patch b/var/spack/repos/builtin/packages/python/python-2.7.17+-distutils-C++.patch new file mode 100644 index 00000000000..1d2249fcfc0 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/python/python-2.7.17+-distutils-C++.patch @@ -0,0 +1,269 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index d2aaae7..8bcdb05 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 258e138..13b7d0c 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -117,8 +117,10 @@ class CygwinCCompiler (UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -132,9 +134,13 @@ class CygwinCCompiler (UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, 
shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -160,8 +166,12 @@ class CygwinCCompiler (UnixCCompiler): + raise CompileError, msg + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError, msg: + raise CompileError, msg + +@@ -327,9 +337,14 @@ class Mingw32CCompiler (CygwinCCompiler): + self.set_executables(compiler='gcc%s -O -Wall' % no_cygwin, + compiler_so='gcc%s -mdll -O -Wall' % no_cygwin, + compiler_cxx='g++%s -O -Wall' % no_cygwin, ++ compiler_so_cxx='g++%s -mdll -O -Wall' % no_cygwin, + linker_exe='gcc%s' % no_cygwin, + linker_so='%s%s %s %s' + % (self.linker_dll, no_cygwin, ++ shared_option, entry_point), ++ linker_exe_cxx='g++%s' % no_cygwin, ++ linker_so_cxx='%s%s %s %s' ++ % (self.linker_dll_cxx, no_cygwin, + shared_option, entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/emxccompiler.py b/Lib/distutils/emxccompiler.py +index a017205..bdc532c 100644 +--- a/Lib/distutils/emxccompiler.py ++++ b/Lib/distutils/emxccompiler.py +@@ -65,8 +65,12 @@ class EMXCCompiler (UnixCCompiler): + # XXX optimization, warnings etc. should be customizable. 
+ self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', + compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', ++ compiler_cxx='g++ -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', ++ compiler_so_cxx='g++ -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', + linker_exe='gcc -Zomf -Zmt -Zcrtdll', +- linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll') ++ linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll', ++ linker_exe_cxx='g++ -Zomf -Zmt -Zcrtdll', ++ linker_so_cxx='g++ -Zomf -Zmt -Zcrtdll -Zdll') + + # want the gcc library statically linked (so that we don't have + # to distribute a version dependent on the compiler we have) +@@ -83,8 +87,12 @@ class EMXCCompiler (UnixCCompiler): + raise CompileError, msg + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError, msg: + raise CompileError, msg + +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 1a4b792..9d724b2 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -181,10 +181,12 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SO', 'AR', +- 'ARFLAGS') ++ (cc, cxx, ccshared, ldshared, ldcxxshared, so_ext, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SO', 'AR', 'ARFLAGS') ++ ++ cflags = '' ++ cxxflags = '' + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -199,19 +201,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 
'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: + cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = cxxflags + ' ' + os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -220,13 +230,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = so_ext +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index 3af540e..f8f7efe 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -55,14 +55,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -114,12 +117,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError, msg: + raise CompileError, msg + +@@ -176,23 +186,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. 
Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i = i + 1 +- +- linker[i] = self.compiler_cxx[i] ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] ++ else: ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) diff --git a/var/spack/repos/builtin/packages/python/python-2.7.8-distutils-C++.patch b/var/spack/repos/builtin/packages/python/python-2.7.8-distutils-C++.patch new file mode 100644 index 00000000000..d6710066d53 --- /dev/null +++ b/var/spack/repos/builtin/packages/python/python-2.7.8-distutils-C++.patch @@ -0,0 +1,260 @@ +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -117,8 +117,10 @@ + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -132,9 +134,13 @@ + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -160,8 +166,12 @@ + raise CompileError, msg + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ 
extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError, msg: + raise CompileError, msg + +@@ -327,9 +337,14 @@ + self.set_executables(compiler='gcc%s -O -Wall' % no_cygwin, + compiler_so='gcc%s -mdll -O -Wall' % no_cygwin, + compiler_cxx='g++%s -O -Wall' % no_cygwin, ++ compiler_so_cxx='g++%s -mdll -O -Wall' % no_cygwin, + linker_exe='gcc%s' % no_cygwin, + linker_so='%s%s %s %s' + % (self.linker_dll, no_cygwin, ++ shared_option, entry_point), ++ linker_exe_cxx='g++%s' % no_cygwin, ++ linker_so_cxx='%s%s %s %s' ++ % (self.linker_dll_cxx, no_cygwin, + shared_option, entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +--- a/Lib/distutils/emxccompiler.py ++++ b/Lib/distutils/emxccompiler.py +@@ -65,8 +65,12 @@ + # XXX optimization, warnings etc. should be customizable. + self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', + compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', ++ compiler_cxx='g++ -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', ++ compiler_so_cxx='g++ -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall', + linker_exe='gcc -Zomf -Zmt -Zcrtdll', +- linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll') ++ linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll', ++ linker_exe_cxx='g++ -Zomf -Zmt -Zcrtdll', ++ linker_so_cxx='g++ -Zomf -Zmt -Zcrtdll -Zdll') + + # want the gcc library statically linked (so that we don't have + # to distribute a version dependent on the compiler we have) +@@ -83,8 +87,12 @@ + raise CompileError, msg + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ 
extra_postargs) + except DistutilsExecError, msg: + raise CompileError, msg + +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -170,10 +170,12 @@ + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SO', 'AR', +- 'ARFLAGS') ++ (cc, cxx, ccshared, ldshared, ldcxxshared, so_ext, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SO', 'AR', 'ARFLAGS') ++ ++ cflags = '' ++ cxxflags = '' + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -188,19 +190,27 @@ + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: +- cflags = opt + ' ' + os.environ['CFLAGS'] ++ cflags = os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -209,13 +219,17 @@ + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' 
+ ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = so_ext +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -55,14 +55,17 @@ + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). +- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -112,12 +115,19 @@ + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError, msg: + raise CompileError, msg + +@@ -174,23 +184,16 @@ + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc 
== CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i = i + 1 +- +- linker[i] = self.compiler_cxx[i] ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] ++ else: ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' diff --git a/var/spack/repos/builtin/packages/python/python-3.6.8-distutils-C++.patch b/var/spack/repos/builtin/packages/python/python-3.6.8-distutils-C++.patch new file mode 100644 index 
00000000000..5728fad6f77 --- /dev/null +++ b/var/spack/repos/builtin/packages/python/python-3.6.8-distutils-C++.patch @@ -0,0 +1,241 @@ +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ 
(self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -170,9 +170,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -187,19 +189,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + 
ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: +- cflags = opt + ' ' + os.environ['CFLAGS'] ++ cflags = os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -208,13 +218,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,22 +183,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. 
Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- linker[i] = self.compiler_cxx[i] ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] ++ else: ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -584,10 +584,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + diff --git a/var/spack/repos/builtin/packages/python/python-3.7.3-distutils-C++.patch b/var/spack/repos/builtin/packages/python/python-3.7.3-distutils-C++.patch new file mode 100644 index 00000000000..e29323bf0b5 --- /dev/null +++ b/var/spack/repos/builtin/packages/python/python-3.7.3-distutils-C++.patch @@ -0,0 +1,256 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index db6674e..ccbe09a 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 6c5d777..640fa2d 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args 
+ [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 83160f8..b735369 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -183,9 +183,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ ++ (cc, cxx, opt, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ + get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -200,19 +202,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: + cflags 
= opt + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = opt + ' ' + os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -221,13 +231,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d10a78d..7e88781 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,30 +183,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. 
Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- +- if os.path.basename(linker[i]) == 'ld_so_aix': +- # AIX platforms prefix the compiler with the ld_so_aix +- # script, so we need to adjust our linker index +- offset = 1 ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] + else: +- offset = 0 +- +- linker[i+offset] = self.compiler_cxx[i] ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 2d2e11f..8456e3f 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -615,10 +615,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + diff --git a/var/spack/repos/builtin/packages/python/python-3.7.4+-distutils-C++.patch b/var/spack/repos/builtin/packages/python/python-3.7.4+-distutils-C++.patch new file mode 100644 index 00000000000..02daf0a11bf --- /dev/null +++ b/var/spack/repos/builtin/packages/python/python-3.7.4+-distutils-C++.patch @@ -0,0 +1,257 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index db6674e..ccbe09a 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal 
build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 6c5d777..640fa2d 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -166,8 +172,12 @@ class 
CygwinCCompiler(UnixCCompiler): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 0a034ee..ecf4759 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -188,9 +188,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -205,19 +207,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ 
ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: + cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = cxxflags + ' ' + os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -226,13 +236,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d10a78d..7e88781 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,30 +183,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. 
Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- +- if os.path.basename(linker[i]) == 'ld_so_aix': +- # AIX platforms prefix the compiler with the ld_so_aix +- # script, so we need to adjust our linker index +- offset = 1 ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] + else: +- offset = 0 +- +- linker[i+offset] = self.compiler_cxx[i] ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 35ca1a8..cfa79df 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -618,10 +618,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + diff --git a/var/spack/repos/builtin/packages/qgis/package.py b/var/spack/repos/builtin/packages/qgis/package.py index 1edf4c29720..86d0756e540 100644 --- a/var/spack/repos/builtin/packages/qgis/package.py +++ b/var/spack/repos/builtin/packages/qgis/package.py @@ -107,7 +107,7 @@ class Qgis(CMakePackage): depends_on('cmake@3.0.0:', type='build') depends_on('flex@2.5.6:', type='build') depends_on('bison@2.4:', type='build') - depends_on('pkg-config', type='build') + depends_on('pkgconfig', type='build') # Take care of conflicts using depends_on 
depends_on('proj@5:', when='@3.8.2:') diff --git a/var/spack/repos/builtin/packages/qmcpack/package.py b/var/spack/repos/builtin/packages/qmcpack/package.py index 9484e8a1693..5307b3d2d5c 100644 --- a/var/spack/repos/builtin/packages/qmcpack/package.py +++ b/var/spack/repos/builtin/packages/qmcpack/package.py @@ -14,7 +14,7 @@ class Qmcpack(CMakePackage, CudaPackage): # Package information homepage = "http://www.qmcpack.org/" git = "https://github.com/QMCPACK/qmcpack.git" - + maintainers = ['naromero77'] tags = ['ecp', 'ecp-apps'] # This download method is untrusted, and is not recommended by the @@ -22,6 +22,7 @@ class Qmcpack(CMakePackage, CudaPackage): # can occasionally change. # NOTE: 12/19/2017 QMCPACK 3.0.0 does not build properly with Spack. version('develop') + version('3.9.2', tag='v3.9.2') version('3.9.1', tag='v3.9.1') version('3.9.0', tag='v3.9.0') version('3.8.0', tag='v3.8.0') @@ -54,8 +55,6 @@ class Qmcpack(CMakePackage, CudaPackage): description='Install with support for basic data analysis tools') variant('gui', default=False, description='Install with Matplotlib (long installation time)') - variant('qe', default=False, - description='Install with patched Quantum Espresso 6.4.1') variant('afqmc', default=False, description='Install with AFQMC support. NOTE that if used in ' 'combination with CUDA, only AFQMC will have CUDA.') @@ -85,12 +84,6 @@ class Qmcpack(CMakePackage, CudaPackage): when='+cuda@:3.4.0', msg='QMCPACK CUDA+SOA variant does not exist prior to v. 3.5.0.') - conflicts( - '+qe', - when='~mpi', - msg='Serial QMCPACK with serial QE converter not supported. 
' - 'Configure in serial QE + serial HDF5 will not run correctly.') - conflicts('^openblas+ilp64', msg='QMCPACK does not support OpenBLAS 64-bit integer variant') @@ -140,8 +133,6 @@ class Qmcpack(CMakePackage, CudaPackage): # HDF5 depends_on('hdf5~mpi', when='~phdf5') depends_on('hdf5+mpi', when='+phdf5') - depends_on('hdf5+hl+fortran~mpi', when='+qe~phdf5') - depends_on('hdf5+hl+fortran+mpi', when='+qe+phdf5') # Math libraries depends_on('blas') @@ -159,18 +150,6 @@ class Qmcpack(CMakePackage, CudaPackage): # py-matplotlib leads to a long complex DAG for dependencies depends_on('py-matplotlib', when='+gui', type='run') - # B-spline basis calculation require a patched version of - # Quantum Espresso 6.4.1 (see QMCPACK manual) - patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.4.1.diff' - patch_checksum = '57cb1b06ee2653a87c3acc0dd4f09032fcf6ce6b8cbb9677ae9ceeb6a78f85e2' - depends_on('quantum-espresso~patch@6.4.1+mpi hdf5=parallel', - patches=patch(patch_url, sha256=patch_checksum), - when='+qe+phdf5', type='run') - - depends_on('quantum-espresso~patch@6.4.1+mpi hdf5=serial', - patches=patch(patch_url, sha256=patch_checksum), - when='+qe~phdf5', type='run') - # Backport several patches from recent versions of QMCPACK # The test_numerics unit test is broken prior to QMCPACK 3.3.0 patch_url = 'https://patch-diff.githubusercontent.com/raw/QMCPACK/qmcpack/pull/621.patch' diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 408ebc9f591..ba28042fa3d 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -28,6 +28,7 @@ class Qt(Package): version('5.14.1', sha256='6f17f488f512b39c2feb57d83a5e0a13dcef32999bea2e2a8f832f54a29badb8') version('5.14.0', sha256='be9a77cd4e1f9d70b58621d0753be19ea498e6b0da0398753e5038426f76a8ba') version('5.13.1', 
sha256='adf00266dc38352a166a9739f1a24a1e36f1be9c04bf72e16e142a256436974e') + version('5.12.7', sha256='873783a0302129d98a8f63de9afe4520fb5f8d5316be8ad7b760c59875cd8a8d') version('5.12.5', sha256='a2299e21db7767caf98242767bffb18a2a88a42fee2d6a393bedd234f8c91298') version('5.12.2', sha256='59b8cb4e728450b21224dcaaa40eb25bafc5196b6988f2225c394c6b7f881ff5') version('5.11.3', sha256='859417642713cee2493ee3646a7fee782c9f1db39e41d7bb1322bba0c5f0ff4d') @@ -48,6 +49,8 @@ class Qt(Package): version('4.8.5', sha256='eb728f8268831dc4373be6403b7dd5d5dde03c169ad6882f9a8cb560df6aa138') version('3.3.8b', sha256='1b7a1ff62ec5a9cb7a388e2ba28fda6f960b27f27999482ebeceeadb72ac9f6e') + variant('debug', default=False, + description="Build debug version.") variant('gtk', default=False, description="Build with gtkplus.") variant('webkit', default=False, @@ -111,6 +114,8 @@ class Qt(Package): # https://github.com/spack/spack/issues/14400 patch('qt5-11-intel-overflow.patch', when='@5.11 %intel') patch('qt5-12-intel-overflow.patch', when='@5.12:5.14.0 %intel') + # https://bugreports.qt.io/browse/QTBUG-78937 + patch('qt5-12-configure.patch', when='@5.12') # Build-only dependencies depends_on("pkgconfig", type='build') @@ -140,12 +145,16 @@ class Qt(Package): depends_on("glib", when='@4:') depends_on("libpng", when='@4:') depends_on("dbus", when='@4:+dbus') - depends_on("gl@3.2:", when='@4:+opengl') + depends_on("gl", when='@4:+opengl') depends_on("harfbuzz", when='@5:') depends_on("double-conversion", when='@5.7:') depends_on("pcre2+multibyte", when='@5.9:') + # gcc@4 is not supported as of Qt@5.14 + # https://doc.qt.io/qt-5.14/supported-platforms.html + conflicts('%gcc@:4.99', when='@5.14:') + # Non-macOS dependencies and special macOS constraints if MACOS_VERSION is None: depends_on("fontconfig", when='freetype=spack') @@ -358,7 +367,7 @@ def common_config_args(self): '-v', '-opensource', '-{0}opengl'.format('' if '+opengl' in self.spec else 'no-'), - '-release', + '-{0}'.format('debug' if 
'+debug' in self.spec else 'release'), '-confirm-license', '-optimized-qmake', '-no-pch', diff --git a/var/spack/repos/builtin/packages/qt/qt5-12-configure.patch b/var/spack/repos/builtin/packages/qt/qt5-12-configure.patch new file mode 100644 index 00000000000..42681a16255 --- /dev/null +++ b/var/spack/repos/builtin/packages/qt/qt5-12-configure.patch @@ -0,0 +1,13 @@ +diff --git a/qtbase/src/corelib/tools/qsimd_p.h b/qtbase/src/corelib/tools/qsimd_p.h +index 2130a1f..3a14715 100644 +--- a/qtbase/src/corelib/tools/qsimd_p.h ++++ b/qtbase/src/corelib/tools/qsimd_p.h +@@ -346,7 +346,7 @@ extern Q_CORE_EXPORT QBasicAtomicInteger qt_cpu_features[2]; + #endif + Q_CORE_EXPORT void qDetectCpuFeatures(); + +-#if defined(Q_PROCESSOR_X86) && QT_COMPILER_SUPPORTS_HERE(RDRND) ++#if defined(Q_PROCESSOR_X86) && QT_COMPILER_SUPPORTS_HERE(RDRND) && !defined(QT_BOOTSTRAPPED) + Q_CORE_EXPORT qsizetype qRandomCpu(void *, qsizetype) Q_DECL_NOTHROW; + #else + static inline qsizetype qRandomCpu(void *, qsizetype) Q_DECL_NOTHROW diff --git a/var/spack/repos/builtin/packages/quantum-espresso/package.py b/var/spack/repos/builtin/packages/quantum-espresso/package.py index 52fb46c7843..3c8b2c9d021 100644 --- a/var/spack/repos/builtin/packages/quantum-espresso/package.py +++ b/var/spack/repos/builtin/packages/quantum-espresso/package.py @@ -51,12 +51,17 @@ class QuantumEspresso(Package): variant('epw', default=False, description='Builds Electron-phonon Wannier executable') - # Apply internal patches by default. May need to be set to to False - # for 3rd party dependency patching - desc = 'Apply internal patches. May need to be set to False for' - desc = desc + ' dependency patching' + # Apply upstream patches by default. Variant useful for 3rd party + # patches which are incompatible with upstream patches + desc = 'Apply recommended upstream patches. 
May need to be set ' + desc = desc + 'to False for third party patches or plugins' variant('patch', default=True, description=desc) + # QMCPACK converter patch + # https://github.com/QMCPACK/qmcpack/tree/develop/external_codes/quantum_espresso + variant('qmcpack', default=False, + description='Build QE-to-QMCPACK wave function converter') + # Dependencies depends_on('blas') depends_on('lapack') @@ -68,14 +73,11 @@ class QuantumEspresso(Package): # Versions of HDF5 prior to 1.8.16 lead to QE runtime errors depends_on('hdf5@1.8.16:+fortran+hl+mpi', when='hdf5=parallel') depends_on('hdf5@1.8.16:+fortran+hl~mpi', when='hdf5=serial') - + depends_on('hdf5', when='+qmcpack') # TODO: enable building EPW when ~mpi depends_on('mpi', when='+epw') - patch('dspev_drv_elpa.patch', when='@6.1.0:+patch+elpa ^elpa@2016.05.004') - patch('dspev_drv_elpa.patch', when='@6.1.0:+patch+elpa ^elpa@2016.05.003') - - # Conflicts + # CONFLICTS SECTION # Omitted for now due to concretizer bug # MKL with 64-bit integers not supported. # conflicts( @@ -126,6 +128,27 @@ class QuantumEspresso(Package): # folder QE expects as a link, we issue a conflict here. conflicts('+elpa', when='@:5.4.0') + # Some QMCPACK converters are incompatible with upstream patches. + # HDF5 is a hard requirement. Need to do two HDF5 cases explicitly + # since Spack lacks support for expressing NOT operation. + conflicts( + '@6.4+patch', + when='+qmcpack', + msg='QE-to-QMCPACK wave function converter requires ' + 'deactivation of upstream patches' + ) + conflicts( + '@6.3:6.4.0 hdf5=serial', + when='+qmcpack', + msg='QE-to-QMCPACK wave function converter only ' + 'supported with parallel HDF5' + ) + conflicts( + 'hdf5=none', + when='+qmcpack', + msg='QE-to-QMCPACK wave function converter requires HDF5' + ) + # The first version of Q-E to feature integrated EPW is 6.0.0, + # as per http://epw.org.uk/Main/DownloadAndInstall . + # Complain if trying to install a version older than this. 
@@ -147,11 +170,33 @@ class QuantumEspresso(Package): conflicts('+epw', when='^openmpi@1.10.7%pgi@17.0:17.12', msg='PGI+OpenMPI version combo incompatible with EPW') - # Spurious problems running in parallel the Makefile - # generated by the configure - parallel = False + # PATCHES SECTION + # THIRD-PARTY PATCHES + # NOTE: *SOME* third-party patches will require deactivation of + # upstream patches using `~patch` variant - # QE upstream patches + # QMCPACK converter patches for QE 6.4.1, 6.4, and 6.3 + conflicts('@:6.2,6.5:', when='+qmcpack', + msg='QMCPACK converter NOT available for this version of QE') + + # 6.4.1 + patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.4.1.diff' + patch_checksum = '57cb1b06ee2653a87c3acc0dd4f09032fcf6ce6b8cbb9677ae9ceeb6a78f85e2' + patch(patch_url, sha256=patch_checksum, when='@6.4.1+qmcpack') + # 6.4 + patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.4.diff' + patch_checksum = 'ef08f5089951be902f0854a4dbddaa7b01f08924cdb27decfade6bef0e2b8994' + patch(patch_url, sha256=patch_checksum, when='@6.4:6.4.0+qmcpack') + # 6.3 + patch_url = 'https://raw.githubusercontent.com/QMCPACK/qmcpack/develop/external_codes/quantum_espresso/add_pw2qmcpack_to_qe-6.3.diff' + patch_checksum = '2ee346e24926479f5e96f8dc47812173a8847a58354bbc32cf2114af7a521c13' + patch(patch_url, sha256=patch_checksum, when='@6.3+qmcpack') + + # ELPA + patch('dspev_drv_elpa.patch', when='@6.1.0:+elpa ^elpa@2016.05.004') + patch('dspev_drv_elpa.patch', when='@6.1.0:+elpa ^elpa@2016.05.003') + + # QE UPSTREAM PATCHES # QE 6.3 requires multiple patches to fix MKL detection # There may still be problems on Mac with MKL detection patch('https://gitlab.com/QEF/q-e/commit/0796e1b7c55c9361ecb6515a0979280e78865e36.diff', @@ -170,6 +215,10 @@ class QuantumEspresso(Package): 
sha256='b1aa3179ee1c069964fb9c21f3b832aebeae54947ce8d3cc1a74e7b154c3c10f', when='+patch@6.4.1:6.5.0') + # Spurious problems running in parallel the Makefile + # generated by the configure + parallel = False + def install(self, spec, prefix): prefix_path = prefix.bin if '@:5.4.0' in spec else prefix @@ -268,7 +317,7 @@ def install(self, spec, prefix): if spec.variants['hdf5'].value != 'none': options.append('--with-hdf5={0}'.format(spec['hdf5'].prefix)) - if '@6.4.1' or '@6.5' in spec: + if spec.satisfies('@6.4.1,6.5'): options.extend([ '--with-hdf5-include={0}'.format( spec['hdf5'].headers.directories[0] @@ -280,6 +329,19 @@ def install(self, spec, prefix): configure(*options) + # Filter file must be applied after configure executes + # QE 6.1.0 to QE 6.4 have `-L` missing in front of zlib library + # This issue is backported through an internal patch in 6.4.1, but + # can't be applied to the '+qmcpack' variant + if spec.variants['hdf5'].value != 'none': + if (spec.satisfies('@6.1.0:6.4.0') or + (spec.satisfies('@6.4.1') and '+qmcpack' in spec)): + make_inc = join_path(self.stage.source_path, 'make.inc') + zlib_libs = spec['zlib'].prefix.lib + ' -lz' + filter_file( + zlib_libs, format(spec['zlib'].libs.ld_flags), make_inc + ) + if '+epw' in spec: make('all', 'epw') else: diff --git a/var/spack/repos/builtin/packages/quicksilver/package.py b/var/spack/repos/builtin/packages/quicksilver/package.py new file mode 100644 index 00000000000..ceef9e1bfba --- /dev/null +++ b/var/spack/repos/builtin/packages/quicksilver/package.py @@ -0,0 +1,63 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Quicksilver(MakefilePackage): + """Quicksilver is a proxy application that represents some elements of the + Mercury workload. 
+ """ + + tags = ['proxy-app'] + + homepage = "https://codesign.llnl.gov/quicksilver.php" + url = "https://github.com/LLNL/Quicksilver" + git = "https://github.com/LLNL/Quicksilver.git" + + maintainers = ['richards12'] + + version('master', branch='master') + + variant('openmp', default=True, description='Build with OpenMP support') + variant('mpi', default=True, description='Build with MPI support') + + depends_on('mpi', when="+mpi") + + build_directory = 'src' + + @property + def build_targets(self): + targets = [] + spec = self.spec + + targets.append('CXXFLAGS={0}'.format(self.compiler.cxx11_flag)) + + if '+mpi' in spec: + targets.append('CXX={0}'.format(spec['mpi'].mpicxx)) + else: + targets.append('CXX={0}'.format(spack_cxx)) + + if '+openmp+mpi' in spec: + targets.append('CPPFLAGS=-DHAVE_MPI -DHAVE_OPENMP {0}'.format( + self.compiler.openmp_flag)) + elif '+openmp' in spec: + targets.append('CPPFLAGS=-DHAVE_OPENMP {0}'.format( + self.compiler.openmp_flag)) + elif '+mpi' in spec: + targets.append('CPPFLAGS=-DHAVE_MPI') + + if '+openmp' in self.spec: + targets.append('LDFLAGS={0}'.format(self.compiler.openmp_flag)) + + return targets + + def install(self, spec, prefix): + mkdir(prefix.bin) + mkdir(prefix.doc) + install("src/qs", prefix.bin) + install('LICENSE.md', prefix.doc) + install('README.md', prefix.doc) + install_tree('Examples', prefix.Examples) diff --git a/var/spack/repos/builtin/packages/r-boruta/package.py b/var/spack/repos/builtin/packages/r-boruta/package.py new file mode 100644 index 00000000000..45b4f22d579 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-boruta/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RBoruta(RPackage): + """An all relevant feature selection wrapper algorithm. 
It finds + relevant features by comparing original attributes' importance + with importance achievable at random, estimated using their + permuted copies (shadows). + """ + + homepage = "https://cloud.r-project.org/package=Boruta" + url = "https://cloud.r-project.org/src/contrib/Boruta_7.0.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/Boruta" + + version('7.0.0', sha256='6ff520d27d68637058c33a34c547a656bb44d5e351b7cc7afed6cd4216275c78') + version('6.0.0', sha256='1c9a7aabe09f040e147f6c614f5fe1d0b951d3b0f0024161fbb4c31da8fae8de') + + depends_on('r-ranger', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-callr/package.py b/var/spack/repos/builtin/packages/r-callr/package.py index 3c964e0400e..22122f8030c 100644 --- a/var/spack/repos/builtin/packages/r-callr/package.py +++ b/var/spack/repos/builtin/packages/r-callr/package.py @@ -15,6 +15,7 @@ class RCallr(RPackage): url = "https://cloud.r-project.org/src/contrib/callr_1.0.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/callr/" + version('3.4.3', sha256='01b7277f20c1d662c6bebbfa2798d179922b36d4148b4298853579aeda0382b5') version('3.3.1', sha256='bf60da47357d3336aa395b0c9643235a621763c80d28bc9bb2257767d0a37967') version('3.2.0', sha256='4bb47b1018e8eb5c683a86c05d0d9b8b25848db1f1b30e92cfebedc0ce14b0e8') version('3.0.0', sha256='e36361086c65660a6ecbbc09b5ecfcddee6b59caf75e983e48b21d3b8defabe7') diff --git a/var/spack/repos/builtin/packages/r-caracas/package.py b/var/spack/repos/builtin/packages/r-caracas/package.py new file mode 100644 index 00000000000..74f9c77e545 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-caracas/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RCaracas(RPackage): + """Computer algebra via the 'SymPy' library (). + This makes it possible to solve equations symbolically, find symbolic + integrals, symbolic sums and other important quantities. + """ + + homepage = "https://cloud.r-project.org/package=caracas" + url = "https://cloud.r-project.org/src/contrib/caracas_1.0.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/caracas" + + version('1.0.0', sha256='0da6f1d94d1dacb1c11a3635bdff8f7cd8f84373deffa7126636d0876d48e42b') + + depends_on('r@3.0:', type=('build', 'run')) + depends_on('r-reticulate@1.14:', type=('build', 'run')) + depends_on('python@3.6:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-cli/package.py b/var/spack/repos/builtin/packages/r-cli/package.py index 7f5cc52a0cc..d8a169c7791 100644 --- a/var/spack/repos/builtin/packages/r-cli/package.py +++ b/var/spack/repos/builtin/packages/r-cli/package.py @@ -15,6 +15,7 @@ class RCli(RPackage): url = "https://cloud.r-project.org/src/contrib/cli_1.0.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/cli" + version('2.0.2', sha256='490834e5b80eb036befa0e150996bcab1c4d5d168c3d45209926e52d0d5413b6') version('1.1.0', sha256='4fc00fcdf4fdbdf9b5792faee8c7cf1ed5c4f45b1221d961332cda82dbe60d0a') version('1.0.1', sha256='ef80fbcde15760fd55abbf9413b306e3971b2a7034ab8c415fb52dc0088c5ee4') version('1.0.0', sha256='8fa3dbfc954ca61b8510f767ede9e8a365dac2ef95fe87c715a0f37d721b5a1d') @@ -22,3 +23,5 @@ class RCli(RPackage): depends_on('r@2.10:', type=('build', 'run')) depends_on('r-assertthat', type=('build', 'run')) depends_on('r-crayon@1.3.4:', type=('build', 'run')) + depends_on('r-glue', when='@2:', type=('build', 'run')) + depends_on('r-fansi', when='@2:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-covr/package.py b/var/spack/repos/builtin/packages/r-covr/package.py index 
b3daeac584a..e2c60194dbd 100644 --- a/var/spack/repos/builtin/packages/r-covr/package.py +++ b/var/spack/repos/builtin/packages/r-covr/package.py @@ -20,6 +20,7 @@ class RCovr(RPackage): url = "https://cloud.r-project.org/src/contrib/covr_3.0.1.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/covr" + version('3.5.0', sha256='cb919912018130164a40803ac573a37dde2186678c058c03c6303d79604979df') version('3.3.0', sha256='c0aa0bd7b2dc05effdc2367c59d45294f46858930d1b14efb393b205021fc65a') version('3.2.1', sha256='ea90daa48011e4ac4431ae47ee02fad98f54b529fc3900281cbeef7a2edef0a0') version('3.2.0', sha256='b26135306b1d6b14dd4deb481359dd919a7ca1e802ca5479fed394dcf35f0ef9') diff --git a/var/spack/repos/builtin/packages/r-decipher/package.py b/var/spack/repos/builtin/packages/r-decipher/package.py new file mode 100644 index 00000000000..d4d2a881b88 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-decipher/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RDecipher(RPackage): + """Tools for curating, analyzing, and manipulating biological sequences.""" + + homepage = "https://bioconductor.org/packages/DECIPHER" + git = "https://git.bioconductor.org/packages/DECIPHER.git" + + version('2.12.0', commit='658ae23870383b25b96a03a18d4ecac228a2650f') + version('2.10.2', commit='db7b017c9050a7ec1d4daa15352994890095e9c3') + version('2.8.1', commit='35aa66f48e06b93a98d1060c90c44d34ce05ccd9') + version('2.6.0', commit='ed9acaa35c8774cb0ea01cd7cc2e46d063d8c70e') + version('2.4.0', commit='1a57b8e4c7d7dec1c233f79c9a88d3705e0ad432') + + depends_on('r@3.3.0:', type=('build', 'run')) + depends_on('r-biostrings@2.35.12:', type=('build', 'run')) + depends_on('r-rsqlite@1.1:', type=('build', 'run')) + depends_on('r-dbi', type=('build', 'run')) + depends_on('r-s4vectors', type=('build', 'run')) + depends_on('r-iranges', type=('build', 'run')) + depends_on('r-xvector', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-digest/package.py b/var/spack/repos/builtin/packages/r-digest/package.py index bb32198c858..6d02efcbdf8 100644 --- a/var/spack/repos/builtin/packages/r-digest/package.py +++ b/var/spack/repos/builtin/packages/r-digest/package.py @@ -28,6 +28,7 @@ class RDigest(RPackage): url = "https://cloud.r-project.org/src/contrib/digest_0.6.12.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/digest" + version('0.6.25', sha256='15ccadb7b8bccaa221b6700bb549011719d0f4b38dbd3a1f29face3e019e2de5') version('0.6.20', sha256='05674b0b5d888461ff770176c67b10a11be062b0fee5dbd9298f25a9a49830c7') version('0.6.19', sha256='28d159bd589ecbd01b8da0826eaed417f5c1bf5a11b79e76bf67ce8d935cccf4') version('0.6.12', sha256='a479463f120037ad8e88bb1387170842e635a1f07ce7e3575316efd6e14d9eab') diff --git a/var/spack/repos/builtin/packages/r-dt/package.py b/var/spack/repos/builtin/packages/r-dt/package.py index a4d810b238e..b1e899093a9 100644 
--- a/var/spack/repos/builtin/packages/r-dt/package.py +++ b/var/spack/repos/builtin/packages/r-dt/package.py @@ -16,6 +16,7 @@ class RDt(RPackage): url = "https://cloud.r-project.org/src/contrib/DT_0.1.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/DT" + version('0.13', sha256='79a073fe96980ce150d790ab76133c9e80bd463270c34d149c03934a622d63b5') version('0.8', sha256='90195054148806cf31c7db5c41f72d5389c75adc0b1183606a9babd2c6ae8e21') version('0.7', sha256='1de3f170deccd9e3aaefc057dd87c498e3b3f7f88eff645cf165ac34ffe3de2c') version('0.6', sha256='2ed68e9d161559171fa74b6105eee87b98acf755eae072b38ada60a83d427916') diff --git a/var/spack/repos/builtin/packages/r-ellipsis/package.py b/var/spack/repos/builtin/packages/r-ellipsis/package.py index 4f5349f3e58..3c583459f8c 100644 --- a/var/spack/repos/builtin/packages/r-ellipsis/package.py +++ b/var/spack/repos/builtin/packages/r-ellipsis/package.py @@ -16,7 +16,9 @@ class REllipsis(RPackage): url = "https://cloud.r-project.org/src/contrib/ellipsis_0.2.0.1.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/ellipsis" + version('0.3.0', sha256='0bf814cb7a1f0ee1f2949bdc98752a0d535f2a9489280dd4d8fcdb10067ee907') version('0.2.0.1', sha256='0e6528c5e8016c3617cc1cfcdb5a4bfeb073e0bd5ea76b43e56b0c3208a0a943') depends_on('r@3.1:', type=('build', 'run')) + depends_on('r@3.2:', when='@0.3:', type=('build', 'run')) depends_on('r-rlang@0.3.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-envstats/package.py b/var/spack/repos/builtin/packages/r-envstats/package.py new file mode 100644 index 00000000000..42e17269591 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-envstats/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class REnvstats(RPackage): + """Graphical and statistical analyses of environmental data, with focus + on analyzing chemical concentrations and physical parameters, usually + in the context of mandated environmental monitoring. + """ + + homepage = "https://cloud.r-project.org/package=EnvStats" + url = "https://cloud.r-project.org/src/contrib/EnvStats_2.3.1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/EnvStats" + + version('2.3.1', sha256='d753d42b42ff28c1cd25c63916fb2aa9e325941672fb16f7dfd97e218416cf2a') + version('2.3.0', sha256='51b7c982b4ffc6506579ec6933c817b780b8dade9f5e7754122e4132cb677a75') + version('2.2.1', sha256='bbad7736272a404302190ccf1095abd8674d4366f3827a1c0a9540bcafe0523e') + + depends_on('r@3.1:', type=('build', 'run')) + depends_on('r-mass', type=('build', 'run')) + depends_on('r-ggplot2', type=('build', 'run')) + depends_on('r-nortest', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-exactextractr/package.py b/var/spack/repos/builtin/packages/r-exactextractr/package.py new file mode 100644 index 00000000000..8374c5dcead --- /dev/null +++ b/var/spack/repos/builtin/packages/r-exactextractr/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RExactextractr(RPackage): + """Provides a replacement for the 'extract' function from the 'raster' + package that is suitable for extracting raster values using 'sf' + polygons. 
+ """ + + homepage = "https://cloud.r-project.org/package=exactextractr" + url = "https://cloud.r-project.org/src/contrib/exactextractr_0.3.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/exactextractr" + + version('0.3.0', sha256='c7fb38b38b9dc8b3ca5b8f1f84f4ba3256efd331f2b4636b496d42689ffc3fb0') + version('0.2.1', sha256='d0b998c77c3fd9265a600a0e08e9bf32a2490a06c19df0d0c0dea4b5c9ab5773') + + depends_on('r@3.4:', type=('build', 'run')) + depends_on('r-raster', type=('build', 'run')) + depends_on('r-sf', type=('build', 'run')) + depends_on('r-rcpp@0.12.12:', type=('build', 'run')) + depends_on('geos@3.5:', type=('build', 'run', 'link')) diff --git a/var/spack/repos/builtin/packages/r-git2r/package.py b/var/spack/repos/builtin/packages/r-git2r/package.py index 4f7613a21a5..dff31b8318a 100644 --- a/var/spack/repos/builtin/packages/r-git2r/package.py +++ b/var/spack/repos/builtin/packages/r-git2r/package.py @@ -15,6 +15,7 @@ class RGit2r(RPackage): url = "https://cloud.r-project.org/src/contrib/git2r_0.18.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/git2r" + version('0.27.1', sha256='099207f180aa45ddcc443cbb22487eafd14e1cd8e5979b3476214253fd773bc0') version('0.26.1', sha256='13d609286a0af4ef75ba76f2c2f856593603b8014e311b88896243a50b417435') version('0.26.0', sha256='56671389c3a50591e1dae3be8c3b0112d06d291f897d7fe14db17aea175616cf') version('0.18.0', sha256='91b32e49afb859c0c4f6f77988343645e9499e5046ef08d945d4d8149b6eff2d') diff --git a/var/spack/repos/builtin/packages/r-glue/package.py b/var/spack/repos/builtin/packages/r-glue/package.py index b6489eedd7e..1472728e93d 100644 --- a/var/spack/repos/builtin/packages/r-glue/package.py +++ b/var/spack/repos/builtin/packages/r-glue/package.py @@ -17,6 +17,7 @@ class RGlue(RPackage): url = "https://cloud.r-project.org/src/contrib/glue_1.2.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/glue" + version('1.4.0', 
sha256='ea6c409f7141754baa090deba96cff270a11b185452cf9e6fb69cb148a9069c1') version('1.3.1', sha256='4fc1f2899d71a634e1f0adb7942772feb5ac73223891abe30ea9bd91d3633ea8') version('1.3.0', sha256='789e5a44c3635c3d3db26666e635e88adcf61cd02b75465125d95d7a12291cee') version('1.2.0', sha256='19275b34ee6a1bcad05360b7eb996cebaa1402f189a5dfb084e695d423f2296e') diff --git a/var/spack/repos/builtin/packages/r-goftest/package.py b/var/spack/repos/builtin/packages/r-goftest/package.py new file mode 100644 index 00000000000..feaa0724b3d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-goftest/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RGoftest(RPackage): + """Cramer-Von Mises and Anderson-Darling tests of goodness-of-fit for + continuous univariate distributions, using efficient algorithms. 
+ """ + + homepage = "https://cloud.r-project.org/package=goftest" + url = "https://cloud.r-project.org/src/contrib/goftest_1.2-2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/goftest" + + version('1.2-2', sha256='e497992666b002b6c6bed73bf05047ad7aa69eb58898da0ad8f1f5b2219e7647') + + depends_on('r@3.3:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-hmisc/package.py b/var/spack/repos/builtin/packages/r-hmisc/package.py index 131c3ea26ca..678816277c0 100644 --- a/var/spack/repos/builtin/packages/r-hmisc/package.py +++ b/var/spack/repos/builtin/packages/r-hmisc/package.py @@ -18,11 +18,13 @@ class RHmisc(RPackage): url = "https://cloud.r-project.org/src/contrib/Hmisc_4.1-1.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/Hmisc" + version('4.4-0', sha256='f16ecf4c5ee2202d51f426282a54f8000ffa8b9747c3e910205f34f878556ec7') version('4.2-0', sha256='9e9614673288dd00295f250fa0bf96fc9e9fed692c69bf97691081c1a01411d9') version('4.1-1', sha256='991db21cdf73ffbf5b0239a4876b2e76fd243ea33528afd88dc968792f281498') depends_on('r-lattice', type=('build', 'run')) depends_on('r-survival@2.40-1:', type=('build', 'run')) + depends_on('r-survival@3.1-6:', when='@4.4:', type=('build', 'run')) depends_on('r-formula', type=('build', 'run')) depends_on('r-ggplot2@2.2:', type=('build', 'run')) depends_on('r-latticeextra', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-jsonlite/package.py b/var/spack/repos/builtin/packages/r-jsonlite/package.py index 68b3811d5dc..590134065af 100644 --- a/var/spack/repos/builtin/packages/r-jsonlite/package.py +++ b/var/spack/repos/builtin/packages/r-jsonlite/package.py @@ -22,6 +22,7 @@ class RJsonlite(RPackage): url = "https://cloud.r-project.org/src/contrib/jsonlite_1.5.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/jsonlite" + version('1.6.1', sha256='74921dd249857a23afabc1ad1485a63a48828e57f240f0619deb04c60f883377') version('1.6', 
sha256='88c5b425229966b7409145a6cabc72db9ed04f8c37ee95901af0146bb285db53') version('1.5', sha256='6490371082a387cb1834048ad8cdecacb8b6b6643751b50298c741490c798e02') version('1.2', sha256='cb6b4660468d2db84ed09c7b8fefd169fcfc13e1e6b4e7ce64dce2713f34264d') diff --git a/var/spack/repos/builtin/packages/r-knitr/package.py b/var/spack/repos/builtin/packages/r-knitr/package.py index bde3fe85bda..c3af15f155f 100644 --- a/var/spack/repos/builtin/packages/r-knitr/package.py +++ b/var/spack/repos/builtin/packages/r-knitr/package.py @@ -15,6 +15,7 @@ class RKnitr(RPackage): url = "https://cloud.r-project.org/src/contrib/knitr_1.14.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/knitr" + version('1.28', sha256='05ee01da31d715bf24793efb3e4ef3bb3101ef1e1ab2d760c645fc5b9d40232a') version('1.24', sha256='e80c2043b445a7e576b62ae8510cce89322660fe388881d799a706d35cd27b89') version('1.23', sha256='063bfb3300fc9f3e7d223c346e19b93beced0e6784470b9bef2524868a206a99') version('1.17', sha256='9484a2b2c7b0c2aae24ab7f4eec6db48affbceb0e42bd3d69e34d953fe92f401') @@ -25,8 +26,8 @@ class RKnitr(RPackage): depends_on('r@3.1.0:', when='@1.15:1.22', type=('build', 'run')) depends_on('r@3.2.3:', when='@1.23:', type=('build', 'run')) depends_on('r-evaluate@0.10:', type=('build', 'run')) - depends_on('r-digest@:1.17', type=('build', 'run')) - depends_on('r-formatr@:1.14', type=('build', 'run')) + depends_on('r-digest@:1.17', when='@:1.24', type=('build', 'run')) + depends_on('r-formatr@:1.14', when='@:1.24', type=('build', 'run')) depends_on('r-highr', type=('build', 'run')) depends_on('r-stringr@0.6:', type=('build', 'run')) depends_on('r-markdown', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-lfe/package.py b/var/spack/repos/builtin/packages/r-lfe/package.py new file mode 100644 index 00000000000..08d061330d2 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-lfe/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, 
LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RLfe(RPackage): + """Transforms away factors with many levels prior to doing an OLS""" + + homepage = "https://cloud.r-project.org/package=lfe" + url = "https://cloud.r-project.org/src/contrib/lfe_2.8-5.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/lfe" + + version('2.8-5', sha256='fd80c573d334594db933ff38f67bd4c9f899aaf648c3bd68f19477a0059723c2') + version('2.8-4', sha256='ee5f6e312214aa73e285ae84a6bdf49ba10e830f1a68ffded2fea2e532f2cd6a') + + depends_on('r@2.15.2:', type=('build', 'run')) + depends_on('r-matrix@1.1-2:', type=('build', 'run')) + depends_on('r-formula', type=('build', 'run')) + depends_on('r-xtable', type=('build', 'run')) + depends_on('r-sandwich', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-matlab/package.py b/var/spack/repos/builtin/packages/r-matlab/package.py new file mode 100644 index 00000000000..221f5f7b314 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-matlab/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RMatlab(RPackage): + """Emulate MATLAB code using R""" + + homepage = "https://cloud.r-project.org/package=matlab" + url = "https://cloud.r-project.org/src/contrib/matlab_1.0.2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/matlab" + + version('1.0.2', sha256='a23dec736c51ae1864c1a53caac556a2f98e8020138a3b121badb0f5b7984154') + + depends_on('r@2.15:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-nimble/package.py b/var/spack/repos/builtin/packages/r-nimble/package.py new file mode 100644 index 00000000000..c0cd1c2b40d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-nimble/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RNimble(RPackage): + """A system for writing hierarchical statistical models largely + compatible with 'BUGS' and 'JAGS', writing nimbleFunctions to + operate models and do basic R-style math, and compiling both + models and nimbleFunctions via custom- generated C++. 
+ """ + + homepage = "https://cloud.r-project.org/package=nimble" + url = "https://cloud.r-project.org/src/contrib/nimble_0.9.1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/nimble" + + version('0.9.1', sha256='ad5e8a171193cb0172e68bf61c4f94432c45c131a150101ad1c5c7318c335757') + version('0.9.0', sha256='ebc28fadf933143eea73900cacaf96ff81cb3c2d607405016062b7e93afa5611') + + depends_on('r@3.1.2:', type=('build', 'run')) + depends_on('r-igraph', type=('build', 'run')) + depends_on('r-coda', type=('build', 'run')) + depends_on('r-r6', type=('build', 'run')) + depends_on('automake') diff --git a/var/spack/repos/builtin/packages/r-pkgbuild/package.py b/var/spack/repos/builtin/packages/r-pkgbuild/package.py index 33ca13d0953..5dd2a8c97e1 100644 --- a/var/spack/repos/builtin/packages/r-pkgbuild/package.py +++ b/var/spack/repos/builtin/packages/r-pkgbuild/package.py @@ -13,6 +13,7 @@ class RPkgbuild(RPackage): url = "https://cloud.r-project.org/src/contrib/pkgbuild_1.0.3.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/pkgbuild/" + version('1.0.8', sha256='b149fcf3e98ef148945ff9f4272512cd03e21408c235ec6c0548167fd41219a1') version('1.0.4', sha256='2934efa5ff9ccfe1636d360aedec36713f3bb3128a493241dbb728d842ea3b5f') version('1.0.3', sha256='c93aceb499886e42bcd61eb7fb59e47a76c9ba5ab5349a426736d46c8ce21f4d') diff --git a/var/spack/repos/builtin/packages/r-polspline/package.py b/var/spack/repos/builtin/packages/r-polspline/package.py new file mode 100644 index 00000000000..358d3e8d1af --- /dev/null +++ b/var/spack/repos/builtin/packages/r-polspline/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RPolspline(RPackage): + """Routines for the polynomial spline fitting routines hazard regression, + hazard estimation with flexible tails, logspline, lspec, polyclass, + and polymars, by C. Kooperberg and co-authors. + """ + + homepage = "https://cloud.r-project.org/package=polspline" + url = "https://cloud.r-project.org/src/contrib/polspline_1.1.18.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/polspline" + + version('1.1.18', sha256='df250ee144bfff154249ba50308f46863107ef3efb2333ad908e599ed0eb0102') + version('1.1.17', sha256='d67b269d01105d4a6ea774737e921e66e065a859d1931ae38a70f88b6fb7ee30') + version('1.1.16', sha256='aa3b5a1560008a1a401a65a25f19a27ba6f0a6ea185b6d093acd40e4e2818934') + version('1.1.15', sha256='8cdbaa5ee672055a4d02f965025199ce764958f84bfa159e853feba7ee24faa7') diff --git a/var/spack/repos/builtin/packages/r-purrr/package.py b/var/spack/repos/builtin/packages/r-purrr/package.py index da5220dfefa..24e3145aac6 100644 --- a/var/spack/repos/builtin/packages/r-purrr/package.py +++ b/var/spack/repos/builtin/packages/r-purrr/package.py @@ -13,11 +13,13 @@ class RPurrr(RPackage): url = "https://cloud.r-project.org/src/contrib/purrr_0.2.4.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/purrr" + version('0.3.4', sha256='23ebc93bc9aed9e7575e8eb9683ff4acc0270ef7d6436cc2ef4236a9734840b2') version('0.3.2', sha256='27c74dd9e4f6f14bf442473df22bcafc068822f7f138f0870326532f143a9a31') version('0.3.1', sha256='c2a3c9901192efd8a04976676f84885a005db88deb1432e4750900c7b3b7883b') version('0.2.4', sha256='ed8d0f69d29b95c2289ae52be08a0e65f8171abb6d2587de7b57328bf3b2eb71') depends_on('r@3.1:', type=('build', 'run')) + depends_on('r@3.2:', when='@0.3.3:', type=('build', 'run')) depends_on('r-magrittr@1.5:', type=('build', 'run')) depends_on('r-rlang@0.3.1:', type=('build', 'run')) depends_on('r-tibble', when='@:0.2.9', type=('build', 'run')) diff 
--git a/var/spack/repos/builtin/packages/r-rcpp/package.py b/var/spack/repos/builtin/packages/r-rcpp/package.py index 4d9b6dae2c0..d479880153e 100644 --- a/var/spack/repos/builtin/packages/r-rcpp/package.py +++ b/var/spack/repos/builtin/packages/r-rcpp/package.py @@ -21,6 +21,7 @@ class RRcpp(RPackage): url = "https://cloud.r-project.org/src/contrib/Rcpp_0.12.13.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/Rcpp" + version('1.0.4.6', sha256='45af675ddbbe155e671453b2e84fe32250bb98d4ccb4342b61c1e25cff10b302') version('1.0.2', sha256='ad9338d6fc89dd116a3e2c5ecef1956e4be63b6c6aa1b21b2e5f249d65a5129c') version('1.0.0', sha256='b7378bf0dda17ef72aa3f2a318a9cb5667bef50b601dc1096431e17426e18bc2') version('0.12.19', sha256='63aeb6d4b58cd2899ded26f38a77d461397d5b0dc5936f187d3ca6cd958ab582') diff --git a/var/spack/repos/builtin/packages/r-rcurl/package.py b/var/spack/repos/builtin/packages/r-rcurl/package.py index 4fe4b6454e5..51586cc6915 100644 --- a/var/spack/repos/builtin/packages/r-rcurl/package.py +++ b/var/spack/repos/builtin/packages/r-rcurl/package.py @@ -22,9 +22,11 @@ class RRcurl(RPackage): url = "https://cloud.r-project.org/src/contrib/RCurl_1.95-4.8.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/RCurl" + version('1.98-1.2', sha256='5d74a0cdc3c5684b0348b959f67039e3c2a5da2bbb6176f6800a94124895a7a8') version('1.95-4.12', sha256='393779efafdf40823dac942a1e028905d65c34f3d41cfd21bcd225e411385ff4') version('1.95-4.8', sha256='e72243251bbbec341bc5864305bb8cc23d311d19c5d0d9310afec7eb35aa2bfb') + depends_on('r@3.4.0:', when='@1.98:', type=('build', 'run')) depends_on('r@3.0.0:', type=('build', 'run')) depends_on('r-bitops', type=('build', 'run')) depends_on('curl') diff --git a/var/spack/repos/builtin/packages/r-rematch2/package.py b/var/spack/repos/builtin/packages/r-rematch2/package.py new file mode 100644 index 00000000000..2d07db3a234 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rematch2/package.py @@ -0,0 +1,23 @@ +# 
Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRematch2(RPackage): + """Wrappers on 'regexpr' and 'gregexpr' to return + the match results in tidy data frames. + """ + + homepage = "https://cloud.r-project.org/package=rematch2" + url = "https://cloud.r-project.org/src/contrib/rematch2_2.1.2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/rematch2" + + version('2.1.2', sha256='fe9cbfe99dd7731a0a2a310900d999f80e7486775b67f3f8f388c30737faf7bb') + version('2.1.1', sha256='d0423a418e8b46ac3a4819af7a7d19c39ca7c8c862c1e9a1c1294aa19152518f') + version('2.1.0', sha256='78677071bd44b40e562df1da6f0c6bdeae44caf973f97ff8286b8c994db59f01') + version('2.0.1', sha256='0612bb904334bd022ba6d1e69925b1e85f8e86b15ec65476777828776e89609a') + + depends_on('r-tibble') diff --git a/var/spack/repos/builtin/packages/r-reticulate/package.py b/var/spack/repos/builtin/packages/r-reticulate/package.py index 378134032ff..ec301f3b0c8 100644 --- a/var/spack/repos/builtin/packages/r-reticulate/package.py +++ b/var/spack/repos/builtin/packages/r-reticulate/package.py @@ -17,10 +17,12 @@ class RReticulate(RPackage): url = "https://cloud.r-project.org/src/contrib/reticulate_1.13.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/reticulate" + version('1.15', sha256='47db3e9c9424263ade15287da8e74f6ba261a936b644b197dba6772853b7b50d') version('1.13', sha256='adbe41d556b667c4419d563680f8608a56b0f792b8bc427b3bf4c584ff819de3') depends_on('r@3.0:', type=('build', 'run')) depends_on('r-jsonlite', type=('build', 'run')) - depends_on('r-rcpp@0.12.7:', type=('build', 'run')) + depends_on('r-rcpp@0.12.7:', type=('build', 'run', 'link')) depends_on('r-matrix', type=('build', 'run')) + depends_on('r-rappdirs', when='@1.15:', type=('build', 'run')) depends_on('python@2.7.0:') diff --git 
a/var/spack/repos/builtin/packages/r-rferns/package.py b/var/spack/repos/builtin/packages/r-rferns/package.py new file mode 100644 index 00000000000..a10b40bdf3b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rferns/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRferns(RPackage): + """Provides the random ferns classifier""" + + homepage = "https://cloud.r-project.org/package=rFerns" + url = "https://cloud.r-project.org/src/contrib/rFerns_3.0.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/rFerns" + + version('3.0.0', sha256='35e7e31a6497e415a0fe578678cf9b2f537b21319e4c015a1e2dade00310227c') diff --git a/var/spack/repos/builtin/packages/r-rlang/package.py b/var/spack/repos/builtin/packages/r-rlang/package.py index ae099f7b9a4..9b258b65656 100644 --- a/var/spack/repos/builtin/packages/r-rlang/package.py +++ b/var/spack/repos/builtin/packages/r-rlang/package.py @@ -14,6 +14,7 @@ class RRlang(RPackage): url = "https://cloud.r-project.org/src/contrib/rlang_0.2.2.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/rlang" + version('0.4.6', sha256='3a81b107765fd6ac0ad716c428d01878775ded9208ba125d43c890c73d2533ca') version('0.4.0', sha256='9748a4a217548bbe5631c18fd88c94811950446f798ff21fb327703aebaa150d') version('0.3.4', sha256='4e467f7b0dcbde91b60c292137d2c69cecaa713a6e4c9b7157ef6fd5453b7ade') version('0.3.1', sha256='30427b2be2288e88acd30c4ea348ee06043a649fd73623a63148b1ad96317151') diff --git a/var/spack/repos/builtin/packages/r-rms/package.py b/var/spack/repos/builtin/packages/r-rms/package.py new file mode 100644 index 00000000000..1090e0e5464 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rms/package.py @@ -0,0 +1,48 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# 
Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRms(RPackage): + """Regression modeling, testing, estimation, validation, graphics, + prediction, and typesetting by storing enhanced model design + attributes in the fit. 'rms' is a collection of functions that + assist with and streamline modeling. It also contains functions + for binary and ordinal logistic regression models, ordinal models + for continuous Y with a variety of distribution families, and the + Buckley-James multiple regression model for right-censored responses, + and implements penalized maximum likelihood estimation for logistic + and ordinary linear models. 'rms' works with almost any regression + model, but it was especially written to work with binary or ordinal + regression models, Cox regression, accelerated failure time models, + ordinary linear models, the Buckley-James model, generalized least + squares for serially or spatially correlated observations, + generalized linear models, and quantile regression. 
+ """ + + homepage = "https://cloud.r-project.org/package=rms" + url = "https://cloud.r-project.org/src/contrib/rms_5.1-4.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/rms" + + version('5.1-4', sha256='38f5844c4944a95b2adebea6bb1d163111270b8662399ea0349c45c0758076a6') + version('5.1-3.1', sha256='0946d9547a4e3ff020a61ab3fce38f88aa9545729683e2bfefeb960edec82b37') + version('5.1-3', sha256='5fc7120d8a93b4aa9727d82eac368c5c47ff70f467ae2b012afac688235089eb') + version('5.1-2', sha256='f1cfeef466ac436105756679353a3468027d97a600e3be755b819aef30ed9207') + version('5.1-1', sha256='c489948df5c434b40bcf5288844f5b4e08d157f36939d09230c1600f88d1bfe3') + + depends_on('r@3.5:', type=('build', 'run')) + depends_on('r-hmisc@4.3:', type=('build', 'run')) + depends_on('r-survival@3.1-6', type=('build', 'run')) + depends_on('r-lattice', type=('build', 'run')) + depends_on('r-polspline', type=('build', 'run')) + depends_on('r-ggplot2@2.2:', type=('build', 'run')) + depends_on('r-sparsem', type=('build', 'run')) + depends_on('r-quantreg', type=('build', 'run')) + depends_on('r-rpart', type=('build', 'run')) + depends_on('r-nlme@3.1-123:', type=('build', 'run')) + depends_on('r-multcomp', type=('build', 'run')) + depends_on('r-htmltable@1.11.0:', type=('build', 'run')) + depends_on('r-htmltools', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-roxygen2/package.py b/var/spack/repos/builtin/packages/r-roxygen2/package.py index bd129a375e8..66c51eb4879 100644 --- a/var/spack/repos/builtin/packages/r-roxygen2/package.py +++ b/var/spack/repos/builtin/packages/r-roxygen2/package.py @@ -14,19 +14,24 @@ class RRoxygen2(RPackage): url = "https://cloud.r-project.org/src/contrib/roxygen2_5.0.1.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/roxygen2" + version('7.1.0', sha256='7e9b36f6e7c01a5c8c4747340b3d0c064ce2e48c93fcfbfe45139854fae74103') version('6.1.1', sha256='ed46b7e062e0dfd8de671c7a5f6d120fb2b720982e918dbeb01e6985694c0273') 
version('5.0.1', sha256='9f755ddd08358be436f08b02df398e50e7508b856131aeeed235099bb3a7eba5') depends_on('r@3.0.2:', when='@:6.0.1', type=('build', 'run')) + depends_on('r@3.2:', when='@7.1.0:', type=('build', 'run')) depends_on('r@3.1:', when='@6.1.0:', type=('build', 'run')) depends_on('r-brew', type=('build', 'run')) depends_on('r-commonmark', type=('build', 'run')) depends_on('r-desc@1.2.0:', type=('build', 'run')) + depends_on('r-knitr', when='@7.1.0:', type=('build', 'run')) depends_on('r-digest', type=('build', 'run')) depends_on('r-pkgload@1.0.2:', type=('build', 'run')) depends_on('r-purrr', type=('build', 'run')) + depends_on('r-purrr@0.3.3:', when='@7.1.0:', type=('build', 'run')) depends_on('r-r6@2.1.2:', type=('build', 'run')) depends_on('r-rcpp@0.11.0:', type=('build', 'run')) + depends_on('r-rlang', when='@7.1.0:', type=('build', 'run')) depends_on('r-stringi', type=('build', 'run')) depends_on('r-stringr@1.0.0:', type=('build', 'run')) depends_on('r-xml2', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-rstudioapi/package.py b/var/spack/repos/builtin/packages/r-rstudioapi/package.py index e9c73695aa0..07f3919c58f 100644 --- a/var/spack/repos/builtin/packages/r-rstudioapi/package.py +++ b/var/spack/repos/builtin/packages/r-rstudioapi/package.py @@ -14,6 +14,7 @@ class RRstudioapi(RPackage): url = "https://cloud.r-project.org/src/contrib/rstudioapi_0.7.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/rstudioapi" + version('0.11', sha256='13e07fb7e2eba8cf1d885db2721901d676d219a1042d7ef5d166125e4905306b') version('0.10', sha256='80c5aa3063bcab649904cb92f0b164edffa2f6b0e6a8f7ea28ae317b80e1ab96') version('0.9.0', sha256='5149a2830ae7134c396ce64764b263cf9f348d4399f53da3804f40d7d5bec13e') version('0.7', sha256='a541bc76ef082d2c27e42fd683f8262cb195b1497af3509178d2642870397a8c') diff --git a/var/spack/repos/builtin/packages/r-rversions/package.py b/var/spack/repos/builtin/packages/r-rversions/package.py new file mode 
100644 index 00000000000..7d0224fd90b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rversions/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRversions(RPackage): + """Query the main 'R' 'SVN' repository to find the versions + 'r-release' and 'r-oldrel' refer to, and also all previous 'R' + versions and their release dates. + """ + + homepage = "https://cloud.r-project.org/package=rversions" + url = "https://cloud.r-project.org/src/contrib/rversions_2.0.1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/rversions" + + version('2.0.1', sha256='51ec1f64e7d628e88d716a020d5d521eba71d472e3c9ae7b694428ef6dd786c5') + version('2.0.0', sha256='b50c321d9e973284ae6b1d0c89bd46a40f5174de51fb28e3c77cd12ef34f6f56') + version('1.1.0', sha256='14a5a2f06b74e332fd9cbd4d715baa6165f5269c9ba2c0b9f1d0c6540dde9c3c') + version('1.0.3', sha256='21d0809f46505de89a2be7be9449e39c39cff5bc77e584dec976ee6c0b884f44') + version('1.0.2', sha256='c8ec8e24524cc42893e445e01e1a65d15889d28959877cd6b3c5e5f08221b176') + version('1.0.1', sha256='9099d37d2f6cc1cab0cd0fdddfb9657c7bd3651226810b496e2808f458c80ae3') + version('1.0.0', sha256='ce1e5368ff1d15665ca2db700521a96cf44f0e78daaab68aabbdaf7ed7393b4d') + + depends_on('r-curl', type=('build', 'run')) + depends_on('r-xml2@1.0.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-snow/package.py b/var/spack/repos/builtin/packages/r-snow/package.py index 393ca504446..9d07e4943b7 100644 --- a/var/spack/repos/builtin/packages/r-snow/package.py +++ b/var/spack/repos/builtin/packages/r-snow/package.py @@ -17,3 +17,8 @@ class RSnow(RPackage): version('0.4-2', sha256='ee070187aea3607c9ca6235399b3db3e181348692405d038e962e06aefccabd7') depends_on('r@2.13.1:', type=('build', 'run')) + + 
@run_after('install') + def install_wrapper(self): + mkdir(self.prefix.bin) + install('inst/RMPISNOW', self.prefix.bin) diff --git a/var/spack/repos/builtin/packages/r-spatialeco/package.py b/var/spack/repos/builtin/packages/r-spatialeco/package.py new file mode 100644 index 00000000000..f028ebd8d5d --- /dev/null +++ b/var/spack/repos/builtin/packages/r-spatialeco/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RSpatialeco(RPackage): + """Utilities to support spatial data manipulation, query, + sampling and modelling. + """ + + homepage = "https://cloud.r-project.org/package=spatialEco" + url = "https://cloud.r-project.org/src/contrib/spatialEco_1.3-1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/spatialEco" + + version('1.3-2', sha256='9dfa427ee8b112446b582f6739a1c40a6e3ad3d050f522082a28ce47c675e57a') + version('1.3-1', sha256='ff12e26cc1bbf7934fbf712c99765d96ce6817e8055faa15a26d9ebade4bbf1c') + version('1.3-0', sha256='cfa09673cb3bbed30b243082fc2d63ac09f48b9f072a18d32b95c2c29979d1d0') + + depends_on('r@3.6:', type=('build', 'run')) + depends_on('r-dplyr', type=('build', 'run')) + depends_on('r-exactextractr', type=('build', 'run')) + depends_on('r-spatstat', type=('build', 'run')) + depends_on('r-rcurl', type=('build', 'run')) + depends_on('r-rms', type=('build', 'run')) + depends_on('r-yaimpute', type=('build', 'run')) + depends_on('r-spatialpack@0.3:', type=('build', 'run')) + depends_on('r-mgcv', type=('build', 'run')) + depends_on('r-envstats', type=('build', 'run')) + depends_on('r-sp', type=('build', 'run')) + depends_on('r-raster', type=('build', 'run')) + depends_on('r-sf', type=('build', 'run')) + depends_on('r-cluster', type=('build', 'run')) + depends_on('r-spdep', type=('build', 'run')) + depends_on('r-readr', 
type=('build', 'run')) + depends_on('r-rgeos', type=('build', 'run')) + depends_on('r-rann', type=('build', 'run')) + depends_on('r-mass', type=('build', 'run')) + depends_on('r-maptools', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-spatialpack/package.py b/var/spack/repos/builtin/packages/r-spatialpack/package.py new file mode 100644 index 00000000000..afa919cd105 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-spatialpack/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RSpatialpack(RPackage): + """Tools to assess the association between two spatial processes.""" + + homepage = "https://cloud.r-project.org/package=SpatialPack" + url = "https://cloud.r-project.org/src/contrib/SpatialPack_0.3-8.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/SpatialPack" + + version('0.3-8', sha256='a0e54b5dee3cd30a634e2d30380fe163942b672073fd909be888803332ed5151') + version('0.3', sha256='4c80fc1c77bc97fc678e6e201ecf7f0f89dcf3417b3b497a28a3639e9b30bd8a') + + depends_on('r@2.10:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-spatstat-data/package.py b/var/spack/repos/builtin/packages/r-spatstat-data/package.py new file mode 100644 index 00000000000..991b87559b0 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-spatstat-data/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RSpatstatData(RPackage): + """Contains all the datasets for the 'spatstat' package.""" + + homepage = "https://cloud.r-project.org/package=spatstat.data" + url = "https://cloud.r-project.org/src/contrib/spatstat.data_1.4-3.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/spatstat.data" + + version('1.4-3', sha256='8955b6ac40cc7d0d89e02334bb46f4c223ff0755e5818f132fee753e77918ea2') + version('1.4-0', sha256='121e5bb92beb7ccac920f921e760f429fd71bcfe11cb9b07a7e7326c7a72ec8c') + + depends_on('r@3.3:', type=('build', 'run')) + depends_on('r-matrix', type=('build', 'run')) + depends_on('r-spatstat-utils', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-spatstat-utils/package.py b/var/spack/repos/builtin/packages/r-spatstat-utils/package.py new file mode 100644 index 00000000000..1cad42c0052 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-spatstat-utils/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RSpatstatUtils(RPackage): + """Contains utility functions for the 'spatstat' + package which may also be useful for other purposes. 
+ """ + + homepage = "https://cloud.r-project.org/package=spatstat.utils" + url = "https://cloud.r-project.org/src/contrib/spatstat.utils_1.17-0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/spatstat.utils" + + version('1.17-0', sha256='39cd683ed7f41d8adc9e28af073d91b244aa1cf5ad966dfbb396ee3ee79f0922') + version('1.15-0', sha256='90e07d730b6939f47f93c939afae10874b2c82bd402960ede4133de67dca2a0c') + + depends_on('r@3.3.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-spatstat/package.py b/var/spack/repos/builtin/packages/r-spatstat/package.py new file mode 100644 index 00000000000..490e78c61ae --- /dev/null +++ b/var/spack/repos/builtin/packages/r-spatstat/package.py @@ -0,0 +1,32 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RSpatstat(RPackage): + """Comprehensive open-source toolbox for + analysing Spatial Point Patterns. 
+ """ + + homepage = "https://cloud.r-project.org/package=spatstat" + url = "https://cloud.r-project.org/src/contrib/spatstat_1.64-1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/spatstat" + + version('1.64-1', sha256='ca3fc7d0d6b7a83fd045a7502bf03c6871fa1ab2cf411647c438fd99b4eb551a') + version('1.63-3', sha256='07b4a1a1b37c91944f31779dd789598f4a5ad047a3de3e9ec2ca99b9e9565528') + + depends_on('r@3.3:', type=('build', 'run')) + depends_on('r-rpart', type=('build', 'run')) + depends_on('r-nlme', type=('build', 'run')) + depends_on('r-spatstat-data@1.4-2:', type=('build', 'run')) + depends_on('r-spatstat-utils@1.17:', type=('build', 'run')) + depends_on('r-mgcv', type=('build', 'run')) + depends_on('r-matrix', type=('build', 'run')) + depends_on('r-deldir@0.0-21:', type=('build', 'run')) + depends_on('r-abind', type=('build', 'run')) + depends_on('r-tensor', type=('build', 'run')) + depends_on('r-polyclip@1.10:', type=('build', 'run')) + depends_on('r-goftest@1.2-2:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-survival/package.py b/var/spack/repos/builtin/packages/r-survival/package.py index c7d81fc02cb..8e2bfb7f37a 100644 --- a/var/spack/repos/builtin/packages/r-survival/package.py +++ b/var/spack/repos/builtin/packages/r-survival/package.py @@ -15,6 +15,7 @@ class RSurvival(RPackage): url = "https://cloud.r-project.org/src/contrib/survival_2.41-3.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/survival" + version('3.1-12', sha256='b62ed66eb646f3df13f7e9bf6571e3bfecae128c66491e174c8833cbef1bf21f') version('2.44-1.1', sha256='55b151e15fcd24ccb3acf60331c9a7ad82bc10f3841ab3be9bc2a37e9ee751b9') version('2.44-1', sha256='82c44afa41fe4504295855f1da4a5940c3289dfd61bc664bf211bb67c051a909') version('2.41-3', sha256='f3797c344de93abd2ba8c89568770a13524a8b2694144ae55adec46921c8961d') @@ -22,4 +23,5 @@ class RSurvival(RPackage): version('2.39-5', 
sha256='607170ebe36080d102e884cf13c3b29df01d6bb3b593258afffa67fee2a0ada7') depends_on('r@2.13.0:', type=('build', 'run')) + depends_on('r@3.4:', when='@3.1-12:', type=('build', 'run')) depends_on('r-matrix', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-tensor/package.py b/var/spack/repos/builtin/packages/r-tensor/package.py new file mode 100644 index 00000000000..6fc8ace4949 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-tensor/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RTensor(RPackage): + """The tensor product of two arrays is notionally an outer product of + the arrays collapsed in specific extents by summing along the + appropriate diagonals. + """ + + homepage = "https://cloud.r-project.org/package=tensor" + url = "https://cloud.r-project.org/src/contrib/tensor_1.5.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/tensor" + + version('1.5', sha256='e1dec23e3913a82e2c79e76313911db9050fb82711a0da227f94fc6df2d3aea6') + version('1.4', sha256='6f1643da018d58a0aaa27260df6fdf687fc36f4cd1964931b3180b7df8c0e642') diff --git a/var/spack/repos/builtin/packages/r-testthat/package.py b/var/spack/repos/builtin/packages/r-testthat/package.py index e0712387bde..413aa0d63b7 100644 --- a/var/spack/repos/builtin/packages/r-testthat/package.py +++ b/var/spack/repos/builtin/packages/r-testthat/package.py @@ -14,17 +14,21 @@ class RTestthat(RPackage): url = "https://cloud.r-project.org/src/contrib/testthat_1.0.2.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/testthat" + version('2.3.2', sha256='1a268d8df07f7cd8d282d03bb96ac2d96a24a95c9aa52f4cca5138a09dd8e06c') version('2.2.1', sha256='67ee0512bb312695c81fd74338bb8ce9e2e58763681ddbcdfdf35f52dfdb0b78') version('2.1.0', 
sha256='cf5fa7108111b32b86e70819352f86b57ab4e835221bb1e83642d52a1fdbcdd4') version('1.0.2', sha256='0ef7df0ace1fddf821d329f9d9a5d42296085350ae0d94af62c45bd203c8415e') depends_on('r@3.1:', type=('build', 'run')) depends_on('r-digest', type=('build', 'run')) + depends_on('r-ellipsis', when='@2.3.2:', type=('build', 'run')) + depends_on('r-pkgload', when='@2.3.2:', type=('build', 'run')) depends_on('r-crayon@1.3.4:', type=('build', 'run')) depends_on('r-praise', type=('build', 'run')) depends_on('r-magrittr', type=('build', 'run')) depends_on('r-r6@2.2.0:', type=('build', 'run')) depends_on('r-cli', when='@2.0.0:', type=('build', 'run')) depends_on('r-rlang@0.3.0:', when='@2.0.0:', type=('build', 'run')) + depends_on('r-rlang@0.4.1:', when='@2.3.2:', type=('build', 'run')) depends_on('r-withr@2.0.0:', when='@2.0.0:', type=('build', 'run')) depends_on('r-evaluate', when='@2.2.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-withr/package.py b/var/spack/repos/builtin/packages/r-withr/package.py index a1fbce55b14..42e2e2427fd 100644 --- a/var/spack/repos/builtin/packages/r-withr/package.py +++ b/var/spack/repos/builtin/packages/r-withr/package.py @@ -16,8 +16,10 @@ class RWithr(RPackage): url = "https://cloud.r-project.org/src/contrib/withr_1.0.2.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/withr" + version('2.2.0', sha256='4c21e51cf48f8c281ddd5f5ec358ac446df3c982104fd00bfe62d9259d73b582') version('2.1.2', sha256='41366f777d8adb83d0bdbac1392a1ab118b36217ca648d3bb9db763aa7ff4686') version('1.0.2', sha256='2391545020adc4256ee7c2e31c30ff6f688f0b6032e355e1ce8f468cab455f10') version('1.0.1', sha256='7e245fdd17d290ff9e7c237159804dd06e1c6a3efe7855ed641eb0765a1e727d') depends_on('r@3.0.2:', type=('build', 'run')) + depends_on('r@3.2:', when='@2.2:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-xml2/package.py b/var/spack/repos/builtin/packages/r-xml2/package.py index 55edf949ea0..7e12f208bf1 100644 --- 
a/var/spack/repos/builtin/packages/r-xml2/package.py +++ b/var/spack/repos/builtin/packages/r-xml2/package.py @@ -14,11 +14,12 @@ class RXml2(RPackage): url = "https://cloud.r-project.org/src/contrib/xml2_1.1.1.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/xml2" + version('1.3.2', sha256='df22f9e7e3189d8c9b8804eaf0105324fdac983cffe743552f6d76613600a4cf') version('1.2.2', sha256='3050f147c4335be2925a576557bbda36bd52a5bba3110d47b740a2dd811a78f4') version('1.2.1', sha256='5615bbc94607efc3bc192551992b349091df802ae34b855cfa817733f2690605') version('1.1.1', sha256='00f3e3b66b76760c19da5f6dddc98e6f30de36a96b211e59e1a3f4ff58763116') depends_on('r@3.1.0:', type=('build', 'run')) - depends_on('r-rcpp@0.12.12:', type=('build', 'run')) + depends_on('r-rcpp@0.12.12:', when='@:1.2', type=('build', 'run')) depends_on('r-bh', when='@:1.1.1', type=('build', 'run')) depends_on('libxml2') diff --git a/var/spack/repos/builtin/packages/r-yaimpute/package.py b/var/spack/repos/builtin/packages/r-yaimpute/package.py new file mode 100644 index 00000000000..6d15be6ff66 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-yaimpute/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RYaimpute(RPackage): + """Performs nearest neighbor-based imputation using one or more + alternative approaches to processing multivariate data + """ + + homepage = "https://cloud.r-project.org/package=yaImpute" + url = "https://cloud.r-project.org/src/contrib/yaImpute_1.0-32.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/yaImpute" + + version('1.0-32', sha256='08eee5d851b80aad9c7c80f9531aadd50d60e4b16b3a80657a50212269cd73ff') + + depends_on('r@3.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py index 7c6807e3e71..e28ae117391 100644 --- a/var/spack/repos/builtin/packages/r/package.py +++ b/var/spack/repos/builtin/packages/r/package.py @@ -21,6 +21,7 @@ class R(AutotoolsPackage): extendable = True + version('4.0.0', sha256='06beb0291b569978484eb0dcb5d2339665ec745737bdfb4e873e7a5a75492940') version('3.6.3', sha256='89302990d8e8add536e12125ec591d6951022cf8475861b3690bc8bf1cefaa8f') version('3.6.2', sha256='bd65a45cddfb88f37370fbcee4ac8dd3f1aebeebe47c2f968fd9770ba2bbc954') version('3.6.1', sha256='5baa9ebd3e71acecdcc3da31d9042fb174d55a42829f8315f2457080978b1389') @@ -81,7 +82,8 @@ class R(AutotoolsPackage): depends_on('libxt', when='+X') depends_on('libxmu', when='+X') depends_on('curl') - depends_on('pcre') + depends_on('pcre2', when='@4:') + depends_on('pcre', when='@:3.6.3') depends_on('java') patch('zlib.patch', when='@:3.3.2') @@ -92,6 +94,12 @@ class R(AutotoolsPackage): # temporary fix to lower the optimization level. 
patch('change_optflags_tmp.patch', when='%fj@4.1.0') + # R custom URL version + def url_for_version(self, version): + """Handle R's customed URL versions""" + url = 'https://cloud.r-project.org/src/base' + return url + '/R-%s/R-%s.tar.gz' % (version.up_to(1), version) + filter_compiler_wrappers( 'Makeconf', relative_root=os.path.join('rlib', 'R', 'etc') ) diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py index ccb1863444f..b7a62180568 100644 --- a/var/spack/repos/builtin/packages/raja/package.py +++ b/var/spack/repos/builtin/packages/raja/package.py @@ -6,15 +6,16 @@ from spack import * -class Raja(CMakePackage): +class Raja(CMakePackage, CudaPackage): """RAJA Parallel Framework.""" homepage = "http://software.llnl.gov/RAJA/" git = "https://github.com/LLNL/RAJA.git" version('develop', branch='develop', submodules='True') - version('master', branch='master', submodules='True') + version('main', branch='main', submodules='True') version('0.11.0', tag='v0.11.0', submodules="True") + version('0.10.1', tag='v0.10.1', submodules="True") version('0.10.0', tag='v0.10.0', submodules="True") version('0.9.0', tag='v0.9.0', submodules="True") version('0.8.0', tag='v0.8.0', submodules="True") @@ -27,11 +28,8 @@ class Raja(CMakePackage): version('0.4.1', tag='v0.4.1', submodules="True") version('0.4.0', tag='v0.4.0', submodules="True") - variant('cuda', default=False, description='Build with CUDA backend') variant('openmp', default=True, description='Build OpenMP backend') - depends_on('cuda', when='+cuda') - depends_on('cmake@3.8:', type='build') depends_on('cmake@3.9:', when='+cuda', type='build') @@ -47,4 +45,16 @@ def cmake_args(self): '-DENABLE_CUDA=On', '-DCUDA_TOOLKIT_ROOT_DIR=%s' % (spec['cuda'].prefix)]) + if not spec.satisfies('cuda_arch=none'): + cuda_arch = spec.variants['cuda_arch'].value + options.append('-DCUDA_ARCH=sm_{0}'.format(cuda_arch[0])) + + # Work around spack adding -march=ppc64le to 
SPACK_TARGET_ARGS which + # is used by the spack compiler wrapper. This can go away when BLT + # removes -Werror from GTest flags + if self.spec.satisfies('%clang target=ppc64le:') or not self.run_tests: + options.append('-DENABLE_TESTS=OFF') + else: + options.append('-DENABLE_TESTS=ON') + return options diff --git a/var/spack/repos/builtin/packages/ramulator/package.py b/var/spack/repos/builtin/packages/ramulator/package.py new file mode 100644 index 00000000000..a103d4b68da --- /dev/null +++ b/var/spack/repos/builtin/packages/ramulator/package.py @@ -0,0 +1,35 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ramulator(MakefilePackage): + """ + Ramulator is a fast and cycle-accurate DRAM simulator that supports + a wide array of commercial, as well as academic, DRAM standards. + """ + + homepage = "https://github.com/CMU-SAFARI/ramulator" + git = "https://github.com/CMU-SAFARI/ramulator" + + maintainers = ['jjwilke'] + + version('sst', commit="7d2e72306c6079768e11a1867eb67b60cee34a1c") + + patch('ramulator_sha_7d2e723_gcc48Patch.patch', when="@sst") + patch('ramulator_sha_7d2e723_libPatch.patch', when="@sst") + + def patch(self): + filter_file('-fpic', self.compiler.cxx_pic_flag, "Makefile") + + def build(self, spec, prefix): + if spec.satisfies("platform=darwin"): + make("libramulator.a") + else: + make("libramulator.so") + + def install(self, spec, prefix): + install_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/ramulator/ramulator_sha_7d2e723_gcc48Patch.patch b/var/spack/repos/builtin/packages/ramulator/ramulator_sha_7d2e723_gcc48Patch.patch new file mode 100644 index 00000000000..6e8b31aeb5c --- /dev/null +++ b/var/spack/repos/builtin/packages/ramulator/ramulator_sha_7d2e723_gcc48Patch.patch @@ -0,0 +1,250 @@ +diff --git a/src/Scheduler.h b/src/Scheduler.h 
+index c8561c1..38f924d 100644 +--- a/src/Scheduler.h ++++ b/src/Scheduler.h +@@ -9,8 +9,10 @@ + #include + #include + #include ++#include + + using namespace std; ++using namespace std::placeholders; + + namespace ramulator + { +@@ -22,18 +24,66 @@ template + class Scheduler + { + public: +- Controller* ctrl; +- +- enum class Type { +- FCFS, FRFCFS, FRFCFS_Cap, FRFCFS_PriorHit, MAX +- } type = Type::FRFCFS_Cap; +- //} type = Type::FCFS; +- +- long cap = 16; ++ Controller* ctrl; ++ ++ enum class Type { ++ FCFS, FRFCFS, FRFCFS_Cap, FRFCFS_PriorHit, MAX ++ } type = Type::FRFCFS_Cap; ++ //} type = Type::FCFS; ++ ++ long cap = 16; ++ ++ Scheduler(Controller* _ctrl) : ctrl(_ctrl) { + +- Scheduler(Controller* ctrl) : ctrl(ctrl) {} +- +- list::iterator get_head(list& q) ++ // FCFS ++ compare[0] = std::bind([] (ReqIter req1, ReqIter req2, Scheduler *obj) -> ReqIter { ++ if (req1->arrive <= req2->arrive) return req1; ++ return req2;}, _1, _2, std::move(this)); ++ ++ // FRFCFS ++ compare[1] = std::bind([] (ReqIter req1, ReqIter req2, Scheduler *obj) -> ReqIter { ++ bool ready1 = obj->ctrl->is_ready(req1); ++ bool ready2 = obj->ctrl->is_ready(req2); ++ ++ if (ready1 ^ ready2) { ++ if (ready1) return req1; ++ return req2; ++ } ++ ++ if (req1->arrive <= req2->arrive) return req1; ++ return req2;}, _1, _2, std::move(this)); ++ ++ // FRFCFS_CAP ++ compare[2] = std::bind([] (ReqIter req1, ReqIter req2, Scheduler *obj) -> ReqIter { ++ bool ready1 = obj->ctrl->is_ready(req1); ++ bool ready2 = obj->ctrl->is_ready(req2); ++ ++ ready1 = ready1 && (obj->ctrl->rowtable->get_hits(req1->addr_vec) <= obj->cap); ++ ready2 = ready2 && (obj->ctrl->rowtable->get_hits(req2->addr_vec) <= obj->cap); ++ ++ if (ready1 ^ ready2) { ++ if (ready1) return req1; ++ return req2; ++ } ++ ++ if (req1->arrive <= req2->arrive) return req1; ++ return req2;}, _1, _2, this); ++ ++ // FRFCFS_PriorHit ++ compare[3] = std::bind([] (ReqIter req1, ReqIter req2, Scheduler *obj) -> ReqIter { ++ bool ready1 = 
obj->ctrl->is_ready(req1) && obj->ctrl->is_row_hit(req1); ++ bool ready2 = obj->ctrl->is_ready(req2) && obj->ctrl->is_row_hit(req2); ++ ++ if (ready1 ^ ready2) { ++ if (ready1) return req1; ++ return req2; ++ } ++ ++ if (req1->arrive <= req2->arrive) return req1; ++ return req2;}, _1, _2, this); ++ } ++ ++ list::iterator get_head(list& q) + { + // TODO make the decision at compile time + if (type != Type::FRFCFS_PriorHit) { +@@ -106,55 +156,8 @@ public: + + private: + typedef list::iterator ReqIter; +- function compare[int(Type::MAX)] = { +- // FCFS +- [this] (ReqIter req1, ReqIter req2) { +- if (req1->arrive <= req2->arrive) return req1; +- return req2;}, +- +- // FRFCFS +- [this] (ReqIter req1, ReqIter req2) { +- bool ready1 = this->ctrl->is_ready(req1); +- bool ready2 = this->ctrl->is_ready(req2); +- +- if (ready1 ^ ready2) { +- if (ready1) return req1; +- return req2; +- } +- +- if (req1->arrive <= req2->arrive) return req1; +- return req2;}, +- +- // FRFCFS_CAP +- [this] (ReqIter req1, ReqIter req2) { +- bool ready1 = this->ctrl->is_ready(req1); +- bool ready2 = this->ctrl->is_ready(req2); +- +- ready1 = ready1 && (this->ctrl->rowtable->get_hits(req1->addr_vec) <= this->cap); +- ready2 = ready2 && (this->ctrl->rowtable->get_hits(req2->addr_vec) <= this->cap); +- +- if (ready1 ^ ready2) { +- if (ready1) return req1; +- return req2; +- } +- +- if (req1->arrive <= req2->arrive) return req1; +- return req2;}, +- // FRFCFS_PriorHit +- [this] (ReqIter req1, ReqIter req2) { +- bool ready1 = this->ctrl->is_ready(req1) && this->ctrl->is_row_hit(req1); +- bool ready2 = this->ctrl->is_ready(req2) && this->ctrl->is_row_hit(req2); +- +- if (ready1 ^ ready2) { +- if (ready1) return req1; +- return req2; +- } +- +- if (req1->arrive <= req2->arrive) return req1; +- return req2;} +- }; +-}; +- ++ function compare[int(Type::MAX)]; ++ }; + + template + class RowPolicy +@@ -168,50 +171,52 @@ public: + + int timeout = 50; + +- RowPolicy(Controller* ctrl) : ctrl(ctrl) {} +- +- 
vector get_victim(typename T::Command cmd) +- { +- return policy[int(type)](cmd); +- } +- +-private: +- function(typename T::Command)> policy[int(Type::MAX)] = { +- // Closed +- [this] (typename T::Command cmd) -> vector { +- for (auto& kv : this->ctrl->rowtable->table) { +- if (!this->ctrl->is_ready(cmd, kv.first)) ++ RowPolicy(Controller* _ctrl) : ctrl(_ctrl) { ++ // Closed ++ policy[0] = std::bind([] (typename T::Command cmd, RowPolicy *obj) -> vector { ++ for (auto& kv : obj->ctrl->rowtable->table) { ++ if (!obj->ctrl->is_ready(cmd, kv.first)) + continue; + return kv.first; + } +- return vector();}, ++ return vector();}, _1, this); + + // ClosedAP +- [this] (typename T::Command cmd) -> vector { +- for (auto& kv : this->ctrl->rowtable->table) { +- if (!this->ctrl->is_ready(cmd, kv.first)) +- continue; +- return kv.first; +- } +- return vector();}, ++ policy[1] = std::bind([] (typename T::Command cmd, RowPolicy *obj) -> vector { ++ for (auto& kv : obj->ctrl->rowtable->table) { ++ if (!obj->ctrl->is_ready(cmd, kv.first)) ++ continue; ++ return kv.first; ++ } ++ return vector();}, _1, this); + + // Opened +- [this] (typename T::Command cmd) { +- return vector();}, +- +- // Timeout +- [this] (typename T::Command cmd) -> vector { +- for (auto& kv : this->ctrl->rowtable->table) { +- auto& entry = kv.second; +- if (this->ctrl->clk - entry.timestamp < timeout) +- continue; +- if (!this->ctrl->is_ready(cmd, kv.first)) +- continue; +- return kv.first; +- } +- return vector();} +- }; ++ policy[2] = std::bind([] (typename T::Command cmd, RowPolicy *obj) -> vector { ++ return vector();}, _1, this); ++ ++ // Timeout ++ policy[3] = std::bind([] (typename T::Command cmd, RowPolicy *obj) -> vector { ++ for (auto& kv : obj->ctrl->rowtable->table) { ++ auto& entry = kv.second; ++ if (obj->ctrl->clk - entry.timestamp < obj->timeout) ++ continue; ++ if (!obj->ctrl->is_ready(cmd, kv.first)) ++ continue; ++ return kv.first; ++ } ++ return vector();}, _1, this); + ++ ++ ++ } ++ ++ 
vector get_victim(typename T::Command cmd) ++ { ++ return policy[int(type)](cmd); ++ } ++ ++private: ++ function(typename T::Command)> policy[int(Type::MAX)]; + }; + + +@@ -304,7 +309,8 @@ public: + + return itr->second.row; + } +-}; ++ }; ++ + + } /*namespace ramulator*/ + diff --git a/var/spack/repos/builtin/packages/ramulator/ramulator_sha_7d2e723_libPatch.patch b/var/spack/repos/builtin/packages/ramulator/ramulator_sha_7d2e723_libPatch.patch new file mode 100644 index 00000000000..20225bb54db --- /dev/null +++ b/var/spack/repos/builtin/packages/ramulator/ramulator_sha_7d2e723_libPatch.patch @@ -0,0 +1,23 @@ +diff --git a/Makefile b/Makefile +index ea340c8..314113d 100644 +--- a/Makefile ++++ b/Makefile +@@ -9,7 +9,7 @@ OBJS := $(patsubst $(SRCDIR)/%.cpp, $(OBJDIR)/%.o, $(SRCS)) + # g++ 4.x due to an internal compiler error when processing lambda functions. + CXX := clang++ + # CXX := g++-5 +-CXXFLAGS := -O3 -std=c++11 -g -Wall ++CXXFLAGS := -O3 -std=c++11 -g -Wall -fpic + + .PHONY: all clean depend + +@@ -38,6 +38,9 @@ ramulator: $(MAIN) $(OBJS) $(SRCDIR)/*.h | depend + libramulator.a: $(OBJS) $(OBJDIR)/Gem5Wrapper.o + libtool -static -o $@ $(OBJS) $(OBJDIR)/Gem5Wrapper.o + ++libramulator.so: $(OBJS) $(OBJDIR)/Gem5Wrapper.o ++ $(CXX) -shared -o $@ $(OBJS) $(OBJDIR)/Gem5Wrapper.o ++ + $(OBJS): | $(OBJDIR) + + $(OBJDIR): diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py index c73a08daaba..0db25205c2c 100644 --- a/var/spack/repos/builtin/packages/readline/package.py +++ b/var/spack/repos/builtin/packages/readline/package.py @@ -14,7 +14,7 @@ class Readline(AutotoolsPackage, GNUMirrorPackage): command lines, to recall and perhaps reedit those lines, and perform csh-like history expansion on previous commands.""" - homepage = "http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html" + homepage = "https://tiswww.case.edu/php/chet/readline/rltop.html" # URL must remain http:// so Spack can 
bootstrap curl gnu_mirror_path = "readline/readline-8.0.tar.gz" @@ -29,7 +29,8 @@ class Readline(AutotoolsPackage, GNUMirrorPackage): def build(self, spec, prefix): options = [ - 'SHLIB_LIBS=-L{0} -lncursesw'.format(spec['ncurses'].prefix.lib) + 'SHLIB_LIBS=-L{0} -lncursesw -ltinfo'.format( + spec['ncurses'].prefix.lib) ] make(*options) diff --git a/var/spack/repos/builtin/packages/reframe/package.py b/var/spack/repos/builtin/packages/reframe/package.py index 14a307e6e44..00e200d3ec2 100644 --- a/var/spack/repos/builtin/packages/reframe/package.py +++ b/var/spack/repos/builtin/packages/reframe/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import shutil class Reframe(Package): @@ -22,13 +23,40 @@ class Reframe(Package): maintainers = ['victorusu', 'vkarak'] version('master', branch='master') + version('3.0', sha256='fb76b4513c03b84f5b3bbbc988f7747e5b58f04c983b3935bab1f2e81adccb82') version('2.21', sha256='f35d4fda2f9672c87d3ef664d9a2d6eb0c01c88218a31772a6645c32c8934c4d') version('2.20', sha256='310c18d705858bbe6bd9a2dc4d382b254c1f093b0671d72363f2111e8c162ba4') version('2.17.3', sha256='dc8dfb2ccb9a966303879b7cdcd188c47063e9b7999cbd5d6255223b066bf357') version('2.17.2', sha256='092241cdc15918040aacb922c806aecb59c5bdc3ff7db034a4f355d39aecc101') version('2.17.1', sha256='0b0d32a892607840a7d668f5dcea6f03f7022a26b23e5042a0faf5b8c41cb146') - depends_on('python@3.5:', type=('run')) + variant("docs", default=False, + description="Build ReFrame's man page documentation") + variant("gelf", default=False, + description="Add graylog handler support") + + depends_on('python@3.5:', when='@2.0:2.999', type='run') + depends_on('python@3.6:', when='@3.0:', type='run') + depends_on('py-jsonschema', type='run') + depends_on('py-setuptools', type='build') + depends_on("py-pygelf", when="+gelf", type="run") + depends_on("py-sphinx", when="+docs", type="build") + depends_on("py-sphinx-rtd-theme", when="+docs", type="build") def 
install(self, spec, prefix): + if spec.version >= Version('3.0'): + if "+docs" in spec: + with working_dir("docs"): + make("man") + make("html") + with working_dir("man"): + mkdir('man1') + shutil.move('reframe.1', 'man1') + mkdir('man8') + shutil.move('reframe.settings.8', 'man8') install_tree(self.stage.source_path, self.prefix) + + def setup_run_environment(self, env): + if self.spec.version >= Version('3.0'): + if "+docs" in self.spec: + env.prepend_path('MANPATH', self.prefix.docs.man) diff --git a/var/spack/repos/builtin/packages/revbayes/package.py b/var/spack/repos/builtin/packages/revbayes/package.py index 5e02dc974da..e3703b2e527 100644 --- a/var/spack/repos/builtin/packages/revbayes/package.py +++ b/var/spack/repos/builtin/packages/revbayes/package.py @@ -11,10 +11,11 @@ class Revbayes(CMakePackage): and an interpreted language.""" homepage = "https://revbayes.github.io" - url = "https://github.com/revbayes/revbayes/archive/v1.0.11.tar.gz" + url = "https://github.com/revbayes/revbayes/archive/1.1.0.tar.gz" git = "https://github.com/revbayes/revbayes.git" version('develop', branch='development') + version('1.1.0', sha256='a9f35178d8289d0dd32c9d936f6384f260e8e81e7b80a5155169064a24666012') version('1.0.13', sha256='e85e2e1fe182fe9f504900150d936a06d252a362c591b9d3d8272dd085aa85d9') version('1.0.12', sha256='80c926bb6b37288d02e36e07b44e4663841cd1fe541e2cc0b0e44c89ca929759') version('1.0.11', sha256='03052194baa220dde7e622a739f09f34393f67ea00a0b163b409d313d7fc7c02') @@ -29,17 +30,27 @@ class Revbayes(CMakePackage): def url_for_version(self, version): if version > Version('1.0.13'): - return 'https://github.com/revbayes/revbayes/archive/v{0}.tar.gz'.format(version) + return 'https://github.com/revbayes/revbayes/archive/{0}.tar.gz'.format(version) else: return 'https://github.com/revbayes/revbayes.archive/archive/v{0}.tar.gz'.format(version) @property def root_cmakelists_dir(self): - if self.spec.version > Version('1.0.13') and '+mpi' in self.spec: - return 
'projects/cmake/build-mpi' + if self.spec.version > Version('1.0.13'): + return 'src' else: return 'projects/cmake/build' + @when('@1.1.0:') + def cmake_args(self): + args = [] + if '+mpi' in self.spec: + args.extend([ + self.define('MPI', 'ON'), + self.define('RB_EXEC_NAME', 'rb-mpi'), + ]) + return args + @run_before('cmake') def regenerate(self): with working_dir(join_path('projects', 'cmake')): @@ -49,10 +60,11 @@ def regenerate(self): generate_version() dest = join_path('..', '..', 'src', 'revlanguage', 'utils') install('GitVersion.cpp', dest) - edit = FileFilter('regenerate.sh') - edit.filter('boost="true"', 'boost="false"') - if '+mpi' in self.spec: - edit.filter('mpi="false"', 'mpi="true"') + else: + edit = FileFilter('regenerate.sh') + edit.filter('boost="true"', 'boost="false"') + if '+mpi' in self.spec: + edit.filter('mpi="false"', 'mpi="true"') regenerate = Executable('./regenerate.sh') regenerate() diff --git a/var/spack/repos/builtin/packages/revocap-coupler/package.py b/var/spack/repos/builtin/packages/revocap-coupler/package.py new file mode 100644 index 00000000000..6bc128152da --- /dev/null +++ b/var/spack/repos/builtin/packages/revocap-coupler/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +from spack import * + + +class RevocapCoupler(AutotoolsPackage): + """Large Scale Assembly, Structural Correspondence, + Multi Dynamics Simulator.In this program, + a part of functions of ADVENTURE_Solid ver.1.1 module""" + + homepage = "http://www.ciss.iis.u-tokyo.ac.jp/dl/index.php" + url = "file://{0}/REVOCAP_Coupler-2.1.tar.gz".format(os.getcwd()) + version('2.1', sha256='9e7612d5c508ccdce23bff9ccbf62aeb635877bc2276cdc05c109de40f609f49') + + depends_on('mpi') + + def configure_args(self): + spec = self.spec + args = ['--with-mpicc=%s' % spec['mpi'].mpicc, + '--with-fortran=%s' % spec['mpi'].mpif77, + '--with-mpif90=%s' % spec['mpi'].mpifc] + return args diff --git a/var/spack/repos/builtin/packages/revocap-refiner/add_space.patch b/var/spack/repos/builtin/packages/revocap-refiner/add_space.patch new file mode 100644 index 00000000000..0db2007eaf4 --- /dev/null +++ b/var/spack/repos/builtin/packages/revocap-refiner/add_space.patch @@ -0,0 +1,37 @@ +--- spack-src/Common/kmbDebug.h.org 2020-05-26 14:12:24.977659640 +0900 ++++ spack-src/Common/kmbDebug.h 2020-05-26 14:15:11.118470571 +0900 +@@ -52,29 +52,29 @@ + #define REVOCAP_Debug_X(format) + #else + /* _DEBUG マクロに関係なく出力 */ +- #define REVOCAP_Debug_X(fmt, ...) fprintf(stderr,"%s, line %d: "fmt,__FILE__,__LINE__, ##__VA_ARGS__) ++ #define REVOCAP_Debug_X(fmt, ...) fprintf(stderr,"%s, line %d: " fmt,__FILE__,__LINE__, ##__VA_ARGS__) + + #if defined _DEBUG || _DEBUG_ + #include +- #define REVOCAP_Debug(fmt, ...) fprintf(stderr,"%s, line %d: "fmt,__FILE__,__LINE__, ##__VA_ARGS__) ++ #define REVOCAP_Debug_X(fmt, ...) fprintf(stderr,"%s, line %d: " fmt,__FILE__,__LINE__, ##__VA_ARGS__) + #else + #define REVOCAP_Debug(format, ...) + #endif + + #if ( _DEBUG >= 1 ) || ( _DEBUG_ >= 1 ) +- #define REVOCAP_Debug_1(fmt, ...) fprintf(stderr,"%s, line %d: "fmt,__FILE__,__LINE__, ##__VA_ARGS__) ++ #define REVOCAP_Debug_X(fmt, ...) 
fprintf(stderr,"%s, line %d: " fmt,__FILE__,__LINE__, ##__VA_ARGS__) + #else + #define REVOCAP_Debug_1(format, ...) + #endif + + #if ( _DEBUG >= 2 ) || ( _DEBUG_ >= 2 ) +- #define REVOCAP_Debug_2(fmt, ...) fprintf(stderr,"%s, line %d: "fmt,__FILE__,__LINE__, ##__VA_ARGS__) ++ #define REVOCAP_Debug_X(fmt, ...) fprintf(stderr,"%s, line %d: " fmt,__FILE__,__LINE__, ##__VA_ARGS__) + #else + #define REVOCAP_Debug_2(format, ...) + #endif + + #if ( _DEBUG >= 3 ) || ( _DEBUG_ >= 3 ) +- #define REVOCAP_Debug_3(fmt, ...) fprintf(stderr,"%s, line %d: "fmt,__FILE__,__LINE__, ##__VA_ARGS__) ++ #define REVOCAP_Debug_X(fmt, ...) fprintf(stderr,"%s, line %d: " fmt,__FILE__,__LINE__, ##__VA_ARGS__) + #else + #define REVOCAP_Debug_3(format, ...) + #endif diff --git a/var/spack/repos/builtin/packages/revocap-refiner/delete_getIndices.patch b/var/spack/repos/builtin/packages/revocap-refiner/delete_getIndices.patch new file mode 100644 index 00000000000..7a70f354bf7 --- /dev/null +++ b/var/spack/repos/builtin/packages/revocap-refiner/delete_getIndices.patch @@ -0,0 +1,28 @@ +--- spack-src/Geometry/kmbBucket.h.org 2020-05-28 11:42:08.438970353 +0900 ++++ spack-src/Geometry/kmbBucket.h 2020-05-28 11:43:55.449496013 +0900 +@@ -348,12 +348,6 @@ + + int getIndex() const{ return it->first; }; + +- void getIndices(int &i,int &j,int &k) const{ +- i = it->first / (ynum*znum); +- j = (it->first - i*ynum*znum) / znum; +- k = it->first - i*ynum*znum - j*znum; +- }; +- + iterator& operator++(void){ ++it; return *this; }; + + iterator operator++(int n){ +@@ -391,12 +385,6 @@ + + int getIndex() const{ return it->first; }; + +- void getIndices(int &i,int &j,int &k) const{ +- i = it->first / (ynum*znum); +- j = (it->first - i*ynum*znum) / znum; +- k = it->first - i*ynum*znum - j*znum; +- }; +- + const_iterator& operator++(void){ ++it; return *this; }; + + const_iterator operator++(int n){ diff --git a/var/spack/repos/builtin/packages/revocap-refiner/package.py 
b/var/spack/repos/builtin/packages/revocap-refiner/package.py new file mode 100644 index 00000000000..4c563a91d83 --- /dev/null +++ b/var/spack/repos/builtin/packages/revocap-refiner/package.py @@ -0,0 +1,62 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RevocapRefiner(MakefilePackage): + """The University of Tokyo, CISS Project: + Geometric processing, mesh processing, mesh generation""" + + homepage = "https://github.com/FrontISTR/REVOCAP_Refiner" + git = "https://github.com/FrontISTR/REVOCAP_Refiner.git" + + version('master', branch='master') + + depends_on('ruby', type='build') + depends_on('mpi') + depends_on('doxygen', type='build') + depends_on('swig', type='build') + + parallel = False + + # add space between literal and identifier. + patch('add_space.patch') + # remove unused function getIndices. 
+ patch('delete_getIndices.patch') + + def edit(self, spec, prefix): + cflags = ['-O'] + cxxflags = ['-O', self.compiler.cxx_pic_flag] + fflags = [''] + ldshare = [''] + libs = ['-lstdc++'] + if spec.satisfies('%gcc'): + ldshare.append('g++ -shared -s') + + m = FileFilter('MakefileConfig.in') + m.filter(r'CC\s=.*$', 'CC={0}'.format(spec['mpi'].mpicc)) + m.filter(r'CFLAGS\s=.*$', 'CFLAGS={0}'.format(' '.join(cflags))) + m.filter(r'CXX\s*=.*$', 'CXX={0}'.format(spec['mpi'].mpicxx)) + m.filter(r'CXXFLAGS\s*=.*$', + 'CXXFLAGS={0}'.format(' '.join(cxxflags))) + m.filter(r'AR\s*=.*$', 'AR=ar') + m.filter(r'ARFLAGS\s*=.*$', 'ARFLAGS=rsv') + m.filter(r'LD\s*=.*$', 'LD={0}'.format(spack_fc)) + m.filter(r'LDFLAGS\s*=.*$', + 'LDFLAGS={0}'.format(' '.join(fflags))) + m.filter(r'LDSHARE\s*=.*$', + 'LDSHARE={0}'.format(' '.join(ldshare))) + m.filter(r'LIBS\s*=.*$', 'LIBS={0}'.format(' '.join(libs))) + m.filter(r'LIBPATH\s*=.*$', 'LIBPATH= ') + m.filter(r'RM\s*=.*$', 'RM=rm -f') + m.filter(r'DOXYGEN\s*=.*$', 'DOXYGEN=doxygen') + m.filter(r'TAR\s*=.*$', 'TAR=tar') + m.filter(r'SWIG\s*=.*$', 'SWIG=swig') + + def install(self, spec, prefix): + install_tree('bin', prefix.bin) + install_tree('lib', prefix.lib) + install_tree('Refiner', prefix.include.refine) diff --git a/var/spack/repos/builtin/packages/rocksdb/package.py b/var/spack/repos/builtin/packages/rocksdb/package.py index 67b9ddea448..7cb105ba637 100644 --- a/var/spack/repos/builtin/packages/rocksdb/package.py +++ b/var/spack/repos/builtin/packages/rocksdb/package.py @@ -13,7 +13,8 @@ class Rocksdb(MakefilePackage): url = 'https://github.com/facebook/rocksdb/archive/v6.5.3.tar.gz' git = 'https://github.com/facebook/rocksdb.git' - version('develop', git=git, branch='master', submodules=True) + version('master', git=git, branch='master', submodules=True) + version('6.7.3', sha256='c4d1397b58e4801b5fd7c3dd9175e6ae84541119cbebb739fe17d998f1829e81') version('6.5.3', 
sha256='6dc023a11d61d00c8391bd44f26ba7db06c44be228c10b552edc84e02d7fbde2') version('5.18.3', sha256='7fb6738263d3f2b360d7468cf2ebe333f3109f3ba1ff80115abd145d75287254') version('5.17.2', sha256='101f05858650a810c90e4872338222a1a3bf3b24de7b7d74466814e6a95c2d28') @@ -22,6 +23,7 @@ class Rocksdb(MakefilePackage): variant('bz2', default=False, description='Enable bz2 compression support') variant('lz4', default=True, description='Enable lz4 compression support') + variant('shared', default=True, description='Build shared library') variant('snappy', default=False, description='Enable snappy compression support') variant('static', default=True, description='Build static library') variant('zlib', default=True, description='Enable zlib compression support') @@ -36,6 +38,8 @@ class Rocksdb(MakefilePackage): depends_on('zstd', when='+zstd') depends_on('tbb', when='+tbb') + conflicts('~shared~static', msg='have to build one type of library') + phases = ['install'] def patch(self): @@ -85,5 +89,27 @@ def install(self, spec, prefix): env['PLATFORM_FLAGS'] = ' '.join(ldflags) env['INSTALL_PATH'] = self.spec.prefix - buildtype = 'install-static' if '+static' in spec else 'install-shared' - make(buildtype) + if '+static' in spec: + make('install-static') + + if '+shared' in spec: + make('install-shared') + + @run_after('install') + def install_pkgconfig(self): + libdir = self.spec['rocksdb'].libs.directories[0] + pkg_path = join_path(libdir, 'pkgconfig') + mkdirp(pkg_path) + + with open(join_path(pkg_path, 'rocksdb.pc'), 'w') as f: + f.write('prefix={0}\n'.format(self.prefix)) + f.write('exec_prefix=${prefix}\n') + f.write('libdir={0}\n'.format(libdir)) + f.write('includedir={0}\n'.format(self.prefix.include)) + f.write('\n') + f.write('Name: rocksdb\n') + f.write('Description: RocksDB: A Persistent Key-Value Store for' + ' Flash and RAM Storage\n') + f.write('Version: {0}\n'.format(self.spec.version)) + f.write('Cflags: -I${includedir}\n') + f.write('Libs: -L${libdir} -lrocksdb 
-ldl\n') diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index 9db1a702540..0e590e30ae5 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ b/var/spack/repos/builtin/packages/root/package.py @@ -26,6 +26,7 @@ class Root(CMakePackage): # Development version (when more recent than production). # Production version + version('6.20.04', sha256='1f8c76ccdb550e64e6ddb092b4a7e9d0a10655ef80044828cba12d5e7c874472') version('6.20.02', sha256='0997586bf097c0afbc6f08edbffcebf5eb6a4237262216114ba3f5c8087dcba6') version('6.20.00', sha256='68421eb0434b38b66346fa8ea6053a0fdc9a6d254e4a72019f4e3633ae118bf0') version('6.18.04', sha256='315a85fc8363f8eb1bffa0decbf126121258f79bd273513ed64795675485cfa4', @@ -213,10 +214,10 @@ class Root(CMakePackage): depends_on('python@2.7:', when='+python', type=('build', 'run')) depends_on('py-numpy', type=('build', 'run'), when='+tmva') # This numpy dependency was not intended and will hopefully - # be fixed in 6.20.04. + # be fixed in 6.20.06. 
# See: https://sft.its.cern.ch/jira/browse/ROOT-10626 depends_on('py-numpy', type=('build', 'run'), - when='@6.20.00:6.20.03 +python') + when='@6.20.00:6.20.05 +python') # Optional dependencies depends_on('davix @0.7.1:', when='+davix') @@ -378,7 +379,6 @@ def cmake_args(self): ['pgsql', 'postgres'], ['pythia6'], ['pythia8'], - ['python'], ['qt', 'qt4'], # See conflicts ['qtgsi', 'qt4'], # See conflicts ['r', 'R'], @@ -409,6 +409,12 @@ def cmake_args(self): options = self._process_opts(control_opts, builtin_opts, feature_opts) + # Some special features + if self.spec.satisfies('@6.20:'): + options.append(self.define_from_variant('pyroot', 'python')) + else: + options.append(self.define_from_variant('python')) + # #################### Compiler options #################### if sys.platform == 'darwin': @@ -486,12 +492,16 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.prepend_path('PATH', self.prefix.bin) env.append_path('CMAKE_MODULE_PATH', '{0}/cmake' .format(self.prefix)) + if "+rpath" not in self.spec: + env.prepend_path('LD_LIBRARY_PATH', self.prefix.lib) def setup_dependent_run_environment(self, env, dependent_spec): env.set('ROOTSYS', self.prefix) env.set('ROOT_VERSION', 'v{0}'.format(self.version.up_to(1))) env.prepend_path('PYTHONPATH', self.prefix.lib) env.prepend_path('PATH', self.prefix.bin) + if "+rpath" not in self.spec: + env.prepend_path('LD_LIBRARY_PATH', self.prefix.lib) def _process_opts(self, *opt_lists): """Process all provided boolean option lists into CMake arguments. 
diff --git a/var/spack/repos/builtin/packages/rose/package.py b/var/spack/repos/builtin/packages/rose/package.py index 326da4854bb..0255230da10 100644 --- a/var/spack/repos/builtin/packages/rose/package.py +++ b/var/spack/repos/builtin/packages/rose/package.py @@ -62,6 +62,8 @@ class Rose(AutotoolsPackage): depends_on("autoconf@2.69:", type="build") depends_on("automake@1.14:", type="build") depends_on("libtool@2.4:", type="build") + depends_on("flex@2.6.4:", type="build") + depends_on("bison@3.4.2:", type="build") # C++11 compatible boost and gcc versions required for +cxx11 variant: depends_on("boost@1.60.0:1.64.0,1.65.1,1.66.0:1.67.0 cxxstd=11", when="+cxx11") diff --git a/var/spack/repos/builtin/packages/rt-tests/package.py b/var/spack/repos/builtin/packages/rt-tests/package.py index 4310ae5718e..ba5975e5189 100644 --- a/var/spack/repos/builtin/packages/rt-tests/package.py +++ b/var/spack/repos/builtin/packages/rt-tests/package.py @@ -6,7 +6,7 @@ from spack import * -class RtTests(Package): +class RtTests(MakefilePackage): """ Suite of real-time tests - cyclictest, hwlatdetect, pip_stress, pi_stress, pmqtest, ptsematest, rt-migrate-test, sendme, signaltest, @@ -19,4 +19,6 @@ class RtTests(Package): version('1.2', sha256='7ccde036059c87681a4b00e7138678d9551b1232113441f6edda31ea45452426') def install(self, spec, prefix): - install_tree('.', prefix) + mkdirp(prefix.bin) + mkdirp(prefix.share.man) + make('install', 'prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py index b2e2c6f6fa3..0a67b9baa7a 100644 --- a/var/spack/repos/builtin/packages/ruby/package.py +++ b/var/spack/repos/builtin/packages/ruby/package.py @@ -15,6 +15,7 @@ class Ruby(AutotoolsPackage): list_url = "http://cache.ruby-lang.org/pub/ruby/" list_depth = 1 + version('2.7.1', sha256='d418483bdd0000576c1370571121a6eb24582116db0b7bb2005e90e250eae418') version('2.6.2', 
sha256='a0405d2bf2c2d2f332033b70dff354d224a864ab0edd462b7a413420453b49ab') version('2.5.3', sha256='9828d03852c37c20fa333a0264f2490f07338576734d910ee3fd538c9520846c') version('2.2.0', sha256='7671e394abfb5d262fbcd3b27a71bf78737c7e9347fa21c39e58b0bb9c4840fc') @@ -34,6 +35,10 @@ class Ruby(AutotoolsPackage): depends_on('openssl', when='+openssl') depends_on('readline', when='+readline') + # Known build issues when Avira antivirus software is running: + # https://github.com/rvm/rvm/issues/4313#issuecomment-374020379 + # TODO: add check for this and warn user + # gcc-7-based build requires patches (cf. https://bugs.ruby-lang.org/issues/13150) patch('ruby_23_gcc7.patch', level=0, when='@2.2.0:2.2.999 %gcc@7:') patch('ruby_23_gcc7.patch', level=0, when='@2.3.0:2.3.4 %gcc@7:') @@ -61,6 +66,8 @@ def configure_args(self): args.append("--with-readline-dir=%s" % self.spec['readline'].prefix) args.append('--with-tk=%s' % self.spec['tk'].prefix) + if self.spec.satisfies("%fj"): + args.append('--disable-dtrace') return args def setup_dependent_build_environment(self, env, dependent_spec): diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index 4434c5efbd8..ff8cba54e63 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -85,6 +85,9 @@ class Rust(Package): # The `x.py` bootstrapping script did not exist prior to Rust 1.17. 
It # would be possible to support both, but for simplicitly, we only support # Rust 1.17 and newer + version('1.44.0', sha256='bf2df62317e533e84167c5bc7d4351a99fdab1f9cd6e6ba09f51996ad8561100') + version('1.43.1', sha256='cde177b4a8c687da96f20de27630a1eb55c9d146a15e4c900d5c31cd3c3ac41d') + version('1.43.0', sha256='75f6ac6c9da9f897f4634d5a07be4084692f7ccc2d2bb89337be86cfc18453a1') version('1.42.0', sha256='d2e8f931d16a0539faaaacd801e0d92c58df190269014b2360c6ab2a90ee3475') version('1.41.1', sha256='38c93d016e6d3e083aa15e8f65511d3b4983072c0218a529f5ee94dd1de84573') version('1.41.0', sha256='5546822c09944c4d847968e9b7b3d0e299f143f307c00fa40e84a99fabf8d74b') @@ -124,6 +127,24 @@ class Rust(Package): # This dictionary contains a version: hash dictionary for each supported # Rust target. rust_releases = { + '1.44.0': { + 'x86_64-unknown-linux-gnu': 'eaa34271b4ac4d2c281831117d4d335eed0b37fe7a34477d9855a6f1d930a624', + 'powerpc64le-unknown-linux-gnu': '97038ea935c7a5b21f5aaaaad409c514e2b2ae8ea55994ba39645f453e98bc9f', + 'aarch64-unknown-linux-gnu': 'bcc916003cb9c7ff44f5f9af348020b422dbc5bd4fe49bdbda2de6ce0a1bb745', + 'x86_64-apple-darwin': 'f20388b80b2b0a8b122d89058f785a2cf3b14e93bcac53471d60fdb4106ffa35' + }, + '1.43.1': { + 'x86_64-unknown-linux-gnu': '25cd71b95bba0daef56bad8c943a87368c4185b90983f4412f46e3e2418c0505', + 'powerpc64le-unknown-linux-gnu': '1670f00b00cc1bed38d523a25dba7420de3c06986c15a0248e06299f80ce6124', + 'aarch64-unknown-linux-gnu': 'fbb612387a64c9da2869725afffc1f66a72d6e7ba6667ba717cd52c33080b7fb', + 'x86_64-apple-darwin': 'e1c3e1426a9e615079159d6b619319235e3ca7b395e7603330375bfffcbb7003' + }, + '1.43.0': { + 'x86_64-unknown-linux-gnu': '069f34fa5cef92551724c83c36360df1ac66fe3942bc1d0e4d341ce79611a029', + 'powerpc64le-unknown-linux-gnu': 'c75c7ae4c94715fd6cc43d1d6fdd0952bc151f7cbe3054f66d99a529d5bb996f', + 'aarch64-unknown-linux-gnu': 'e5fa55f333c10cdae43d147438a80ffb435d6c7b9681cd2e2f0857c024556856', + 'x86_64-apple-darwin': 
'504e8efb2cbb36f5a3db7bb36f339a1e5216082c910ad19039c370505cfbde99' + }, '1.42.0': { 'x86_64-unknown-linux-gnu': '7d1e07ad9c8a33d8d039def7c0a131c5917aa3ea0af3d0cc399c6faf7b789052', 'powerpc64le-unknown-linux-gnu': '805b08fa1e0aad4d706301ca1f13e2d80810d385cece2c15070360b3c4bd6e4a', @@ -356,11 +377,11 @@ class Rust(Package): sha256='0000000000000000000000000000000000000000000000000000000000000000', destination='spack_bootstrap_stage', when='@{version} platform={platform} target={target}'\ - .format( - version=prerelease_version, - platform=rust_arch['platform'], - target=rust_arch['target'] - ) + .format( + version=prerelease_version, + platform=rust_arch['platform'], + target=rust_arch['target'] + ) ) # This loop generates resources for each binary distribution, and maps @@ -387,11 +408,11 @@ class Rust(Package): sha256=rust_sha256, destination='spack_bootstrap_stage', when='@{version} platform={platform} target={target}'\ - .format( - version=rust_version, - platform=rust_arch['platform'], - target=rust_arch['target'] - ) + .format( + version=rust_version, + platform=rust_arch['platform'], + target=rust_arch['target'] + ) ) # This routine returns the target architecture we intend to build for. diff --git a/var/spack/repos/builtin/packages/sbml/package.py b/var/spack/repos/builtin/packages/sbml/package.py new file mode 100644 index 00000000000..22c608d156e --- /dev/null +++ b/var/spack/repos/builtin/packages/sbml/package.py @@ -0,0 +1,125 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Sbml(CMakePackage): + """Library for the Systems Biology Markup Language""" + + homepage = "https://sbml.org" + maintainers = ['rblake-llnl'] + + version('5.18.0', sha256='6c01be2306ec0c9656b59cb082eb7b90176c39506dd0f912b02e08298a553360') + version('5.17.0', sha256='189216e1472777e4464b791c506b79267d07a5454cb23ac991452711f8e0ed3a') + version('5.16.0', sha256='c6855481434dd2a667fef73e1ff2feade509aa2f3a76d4d06e29022975ce1496') + version('5.15.0', sha256='c779c2a8a97c5480fe044028099d928a327261fb68cf08657ec8d4f3b3fc0a21') + version('5.13.0', sha256='e58430edb1b454d7414bcf1be0549bf6860a6d19d73232eb58211559485c2c05') + version('5.12.0', sha256='c637494b19269947fc90ebe479b624d36f80d1cb5569e45cd76ddde81dd28ae4') + version('5.11.4', sha256='6429188b689b331b0b8f2c8b55b3f2339196ccd4c93191648fa767e1d02152a3') + version('5.11.0', sha256='b21931ca7461494915c617b30d4a9f2cafe831d6ce74989b3e5874e6e3c3f72b') + version('5.10.2', sha256='83f32a143cf657672b1050f5f79d3591c418fc59570d180fb1f39b103f4e5286') + version('5.10.0', sha256='2cd8b37018ce8b1df869c8c182803addbce6d451512ae25a7f527b49981f0966') + + def url_for_version(self, version): + url = "https://downloads.sourceforge.net/project/sbml/libsbml/{0}/stable/libSBML-{1}-core-plus-packages-src.tar.gz".format(version, version) + return url + + variant('python', default=False, + description='Build with python support') + depends_on('python', when="+python") + + variant('perl', default=False, + description='Build with perl support') + depends_on('perl', when="+perl") + + variant('ruby', default=False, + description='Build with ruby support') + depends_on('ruby', when="+ruby") + + variant('r', default=False, + description='Build with R support') + depends_on('r', when="+r") + + variant('octave', default=False, + description='Build with octave support') + depends_on('octave', when="+octave") + + variant('matlab', default=False, + description='Build with 
matlab support') + depends_on('matlab', when="+matlab") + + variant('java', default=False, + description='Build with java support') + depends_on('java', when="+java") + + variant('mono', default=False, + description='Build with mono support') + depends_on('mono', when="+mono") + + variant('cpp', default=False, + description="All c++ includes should be under a namespace") + + depends_on('swig@2:', type='build') + depends_on('cmake', type='build') + depends_on('zlib') + depends_on('bzip2') + depends_on('libxml2') + + def cmake_args(self): + spec = self.spec + args = [ + "-DENABLE_COMP:BOOL=ON", + "-DENABLE_FBC:BOOL=ON", + "-DENABLE_GROUPS:BOOL=ON", + "-DENABLE_LAYOUT:BOOL=ON", + "-DENABLE_QUAL:BOOL=ON", + "-DENABLE_RENDER:BOOL=ON", + "-DWITH_BZIP2:BOOL=ON", + "-DWITH_CHECK:BOOL=OFF", + "-DWITH_DOXYGEN:BOOL=OFF", + "-DWITH_EXAMPLES:BOOL=OFF", + "-DWITH_EXPAT:BOOL=OFF", + "-DWITH_LIBXML:BOOL=ON", + "-DWITH_SWIG:BOOL=ON", + "-DWITH_WALL:BOOL=ON", + "-DWITH_XERCES:BOOL=OFF", + "-DWITH_ZLIB:BOOL=ON", + ] + args.append(self.define_from_variant('WITH_CPP_NAMESPACE', 'cpp')) + if '+python' in spec: + args.extend([ + "-DWITH_PYTHON:BOOL=ON", + "-DWITH_PYTHON_INCLUDE:PATH=%s" % spec['python'].prefix, + ]) + else: + args.append('-DWITH_PYTHON:BOOL=ON') + + args.append(self.define_from_variant('WITH_CSHARP', 'mono')) + + if '+java' in spec: + args.extend([ + "-DWITH_JAVA:BOOL=ON", + "-DJDK_PATH:STRING=%s" % spec['java'].prefix, + "-DJAVA_INCLUDE_PATH:STRING=%s" % spec['java'].prefix, + ]) + else: + args.append('-DWITH_JAVA:BOOL=OFF') + + if '+matlab' in spec: + args.extend([ + "-DWITH_MATLAB:BOOL=ON", + "-DMATLAB_ROOT_PATH:PATH=%s" % spec['matlab'].prefix, + "-DWITH_MATLAB_MEX:BOOL=ON", + ]) + else: + args.append('-DWITH_MATLAB:BOOL=OFF') + + args.append(self.define_from_variant('WITH_OCTAVE', 'octave')) + args.append(self.define_from_variant('WITH_PERL', 'perl')) + args.append(self.define_from_variant('WITH_R', 'r')) + args.append(self.define_from_variant('WITH_RUBY', 'ruby')) 
+ + return args diff --git a/var/spack/repos/builtin/packages/scorep/gcc10.patch b/var/spack/repos/builtin/packages/scorep/gcc10.patch new file mode 100644 index 00000000000..76bcb987ca0 --- /dev/null +++ b/var/spack/repos/builtin/packages/scorep/gcc10.patch @@ -0,0 +1,14 @@ +--- a/src/adapters/compiler/gcc-plugin/scorep_plugin_pass_instrument.inc.c ++++ b/src/adapters/compiler/gcc-plugin/scorep_plugin_pass_instrument.inc.c +@@ -83,7 +83,11 @@ is_instrumentable( const char* assemblerName ) + return "in system header"; + } + ++#if SCOREP_GCC_PLUGIN_TARGET_VERSION < 10000 + if ( node->global.inlined_to ) ++#else ++ if ( node->inlined_to ) ++#endif + { + return "was inlined"; + } diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py index b26af4287db..d7b57c0c446 100644 --- a/var/spack/repos/builtin/packages/scorep/package.py +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -25,7 +25,8 @@ class Scorep(AutotoolsPackage): version('1.4.2', sha256='d7f3fcca2efeb2f5d5b5f183b3b2c4775e66cbb3400ea2da841dd0428713ebac') version('1.3', sha256='dcfd42bd05f387748eeefbdf421cb3cd98ed905e009303d70b5f75b217fd1254') - patch('gcc7.patch', when='@:3') + patch('gcc7.patch', when='@1.4:3') + patch('gcc10.patch', when='@3.1:') variant('mpi', default=True, description="Enable MPI support") variant('papi', default=True, description="Enable PAPI") diff --git a/var/spack/repos/builtin/packages/sgpp/avx512_datadriven_compilation.patch b/var/spack/repos/builtin/packages/sgpp/avx512_datadriven_compilation.patch new file mode 100644 index 00000000000..268cba63587 --- /dev/null +++ b/var/spack/repos/builtin/packages/sgpp/avx512_datadriven_compilation.patch @@ -0,0 +1,21 @@ +diff --git a/datadriven/src/sgpp/datadriven/operation/hash/OperationMultiEvalStreaming/OperationMultiEvalStreaming_multImpl.cpp b/datadriven/src/sgpp/datadriven/operation/hash/OperationMultiEvalStreaming/OperationMultiEvalStreaming_multImpl.cpp +index 
7fafd43d4..ea15ba137 100644 +--- a/datadriven/src/sgpp/datadriven/operation/hash/OperationMultiEvalStreaming/OperationMultiEvalStreaming_multImpl.cpp ++++ b/datadriven/src/sgpp/datadriven/operation/hash/OperationMultiEvalStreaming/OperationMultiEvalStreaming_multImpl.cpp +@@ -295,6 +295,7 @@ void OperationMultiEvalStreaming::multImpl( + _mm512_extload_pd(A, _MM_UPCONV_PD_NONE, _MM_BROADCAST_1X8, _MM_HINT_NONE) + #define _mm512_max_pd(A, B) _mm512_gmax_pd(A, B) + #define _mm512_set1_epi64(A) _mm512_set_1to8_epi64(A) ++#define _mm512_set1_pd(A) _mm512_set_1to8_pd(A) + #endif + #if defined(__AVX512F__) + #define _mm512_broadcast_sd(A) _mm512_broadcastsd_pd(_mm_load_sd(A)) +@@ -409,7 +410,7 @@ void OperationMultiEvalStreaming::multImpl( + eval_11 = _mm512_castsi512_pd(_mm512_and_epi64(abs2Mask, _mm512_castpd_si512(eval_11))); + #endif + +- __m512d one = _mm512_set_1to8_pd(1.0); ++ __m512d one = _mm512_set1_pd(1.0); + + eval_0 = _mm512_sub_pd(one, eval_0); + eval_1 = _mm512_sub_pd(one, eval_1); diff --git a/var/spack/repos/builtin/packages/sgpp/directory.patch b/var/spack/repos/builtin/packages/sgpp/directory.patch new file mode 100644 index 00000000000..f1b22864231 --- /dev/null +++ b/var/spack/repos/builtin/packages/sgpp/directory.patch @@ -0,0 +1,22 @@ +diff --git a/SConstruct b/SConstruct +index 98c1a56ac..30baef5e5 100644 +--- a/SConstruct ++++ b/SConstruct +@@ -293,7 +293,7 @@ Parameters are: + for line in vars.GenerateHelpText(env).splitlines()])) + + # add trailing slashes were required and if not present +-BUILD_DIR = Dir(os.path.join("lib", "sgpp")) ++BUILD_DIR = Dir(os.path.join("lib")) + Export("BUILD_DIR") + PYSGPP_PACKAGE_PATH = Dir(os.path.join("lib")) + Export("PYSGPP_PACKAGE_PATH") +@@ -550,7 +550,7 @@ env.SideEffect("sideEffectFinalSteps", exampleTargetList) + ######################################################################### + + installLibSGpp = env.Alias("install-lib-sgpp", +- env.Install(os.path.join(env.get("LIBDIR"), "sgpp"), ++ 
env.Install(os.path.join(env.get("LIBDIR")), + libraryTargetList)) + + headerFinalDestList = [] diff --git a/var/spack/repos/builtin/packages/sgpp/fix-setup-py.patch b/var/spack/repos/builtin/packages/sgpp/fix-setup-py.patch new file mode 100644 index 00000000000..e201a5dff08 --- /dev/null +++ b/var/spack/repos/builtin/packages/sgpp/fix-setup-py.patch @@ -0,0 +1,160 @@ +diff --git a/setup.py b/setup.py +index af1ec083e..d971f87c5 100644 +--- a/setup.py ++++ b/setup.py +@@ -1,5 +1,5 @@ +-#!/usr/bin/env python +-# Copyright (C) 2008-today The SG++ Project ++#!/usr/bin/env python ++# Copyright (C) 2008-today The SG++ project + # This file is part of the SG++ project. For conditions of distribution and + # use, please see the copyright notice provided with SG++ or at + # sgpp.sparsegrids.org +@@ -8,17 +8,26 @@ + # script creates a pysgpp lib in the site-packages of + # python. Furthermore, it collects all the relevant python code, + # located in each module under the path /python and +-# copies it to the site-package folder of pysgpp using the following +-# scheme: /pysgpp-/extensions//. 
++# copies it to the site-package folder of pysgpp under the extensions namespace ++# pysgpp.extensions + + import os + import shutil +-from setuptools import setup ++from setuptools import setup, find_packages ++ ++ ++try: ++ from wheel.bdist_wheel import bdist_wheel as _bdist_wheel ++ class bdist_wheel(_bdist_wheel): ++ def finalize_options(self): ++ _bdist_wheel.finalize_options(self) ++ self.root_is_pure = False ++except ImportError: ++ bdist_wheel = None + + # path to pysgpp lib + libpath = os.path.join("lib", "pysgpp") ++extensionspath = os.path.join(libpath, "extensions") + + # list of all available modules -> all folders in the root directory + moduleFolders = [filename for filename in os.listdir(".") +@@ -29,16 +38,44 @@ pythonModuleFolders = [(moduleFolder, os.path.join(moduleFolder, "python")) + for moduleFolder in moduleFolders + if os.path.exists(os.path.join(moduleFolder, "python"))] + +-# create the data file list such that it can be used by setuptools +-dataFiles = [] ++try: ++ os.mkdir(extensionspath) ++except FileExistsError as e: ++ pass ++ ++# create list of extension scripts ++extFiles = [] + for moduleFolder, srcdir in pythonModuleFolders: +- basepath = os.path.join("pysgpp", "extensions", moduleFolder) ++ basepath = os.path.join(extensionspath, moduleFolder) ++ try: ++ os.mkdir(basepath) ++ except FileExistsError as e: ++ pass + for root, dirs, files in os.walk(srcdir): +- if '.svn' in dirs: +- dirs.remove('.svn') ++ if '.git' in dirs: ++ dirs.remove('.git') ++ ++ extFiles += [os.path.join(root, f) for f in files if ".py" in f] ++ ++## ++# copy extension python files to new layout ++# pysgpp ++# --extensions ++# ----modulename ++# ------*.py ++## + +- dataFiles += [(root.replace(srcdir, basepath), +- [os.path.join(root, f) for f in files])] ++for f in extFiles: ++ dest = os.path.join(extensionspath,f) ++ dest = dest.replace(os.sep + "python", "") ++ try: ++ shutil.copy2(f, dest) ++ except FileNotFoundError as e: ++ 
os.mkdir(os.path.dirname(dest)) ++ shutil.copy2(f, dest) ++ except shutil.SameFileError as e: ++ pass ++ + + # write init file for pysgpp + initFile = os.path.join(libpath, "__init__.py") +@@ -50,8 +87,8 @@ import sys + sys.path.append(os.path.dirname(__file__)) + + # import pysgpp_swig and extensions +-from pysgpp_swig import * +-import pysgpp.extensions ++from .pysgpp_swig import * ++from . import extensions + """) + + if len(moduleFolders) > 0: +@@ -59,9 +96,13 @@ if len(moduleFolders) > 0: + initFile = os.path.join("__init__.py") + with open(initFile, "w") as f: + for moduleFolder, _ in pythonModuleFolders: +- f.write("import %s\n" % moduleFolder) ++ f.write("from . import %s\n" % moduleFolder) + +- dataFiles += [(os.path.join("pysgpp", "extensions"), [initFile])] ++ try: ++ shutil.copy2(initFile, os.path.join(extensionspath, initFile)) ++ except shutil.SameFileError as e: ++ pass ++ + + # if the current system is windows we need to rename the dll to pyd + dllLibs = [filename for filename in os.listdir(libpath) +@@ -74,20 +115,27 @@ for dllLib in dllLibs: + + # setup pysgpp + setup(name='pysgpp', +- version="1.0.0", +- url='sgpp.sparsegrids.org', +- author="Fabian Franzelin", +- description='', +- license='', ++ version="0.0.0", ++ url='https://github.com/SGpp/SGpp', ++ author="Dirk.Pflueger@ipvs.uni-stuttgart.de", ++ description='''The sparse grids toolkit SG++ ++ SG++ is a collection of numerical algorithms for sparse grids. It ++ contains modules for interpolation, quadrature, data mining ++ (regression, classification, clustering), optimization, PDEs, and ++ more. SG++ implements algorithms for spatially adaptive grids and ++ also provides a module for the combination technique. 
Many of the ++ implemented algorithms are also available as a high-performance ++ version, often orders of magnitude faster than standard ++ implementations.''', ++ license='BSD-style license', + long_description="README", +- platforms='any', + zip_safe=False, + package_dir={'': 'lib'}, +- packages=['pysgpp'], +- package_data={'pysgpp': ['*.so', '*.lib', '*.pyd']}, +- data_files=dataFiles ++ packages=find_packages(where='lib', include=['pysgpp', 'pysgpp.extensions*']), ++ package_data={'pysgpp': ['_pysgpp_swig.so', '*.lib', '*.pyd']}, + ) + + # cleanup + if len(moduleFolders) > 0 and os.path.exists(initFile): + os.remove(initFile) ++ shutil.rmtree(extensionspath, ignore_errors=True) diff --git a/var/spack/repos/builtin/packages/sgpp/ocl.patch b/var/spack/repos/builtin/packages/sgpp/ocl.patch new file mode 100644 index 00000000000..fbdca6ff188 --- /dev/null +++ b/var/spack/repos/builtin/packages/sgpp/ocl.patch @@ -0,0 +1,13 @@ +diff --git a/datadriven/examplesOCL/multiEvalPerformance.cpp b/datadriven/examplesOCL/multiEvalPerformance.cpp +index 60c281353..373e8509d 100644 +--- a/datadriven/examplesOCL/multiEvalPerformance.cpp ++++ b/datadriven/examplesOCL/multiEvalPerformance.cpp +@@ -17,7 +17,7 @@ int main(int argc, char** argv) { + std::string fileName = "debugging.arff"; + + sgpp::datadriven::ARFFTools arffTools; +- sgpp::datadriven::Dataset dataset = arffTools.readARFFFromeFile(fileName); ++ sgpp::datadriven::Dataset dataset = arffTools.readARFFFromFile(fileName); + + // sgpp::base::DataVector *classes = dataset.getClasses(); + sgpp::base::DataMatrix& trainingData = dataset.getData(); diff --git a/var/spack/repos/builtin/packages/sgpp/package.py b/var/spack/repos/builtin/packages/sgpp/package.py new file mode 100644 index 00000000000..7e5b907374e --- /dev/null +++ b/var/spack/repos/builtin/packages/sgpp/package.py @@ -0,0 +1,205 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Sgpp(SConsPackage): + """SGpp is a library and framework for sparse grids in different flavors. + SGpp supports both hierarchical spatially-adaptive sparse grids and the + dimensionally-adaptive sparse grid combination technique.""" + + homepage = "https://sgpp.sparsegrids.org" + url = "https://github.com/SGpp/SGpp/archive/v3.2.0.tar.gz" + git = "https://github.com/SGpp/SGpp.git" + + maintainers = ['G-071', 'leiterrl', 'pfluegdk'] + + # Versions with Python 3 bindings: + version('master', branch='master') + version('3.3.0', sha256='ca4d5b79f315b425ce69b04940c141451a76848bf1bd7b96067217304c68e2d4') + version('3.2.0', sha256='dab83587fd447f92ed8546eacaac6b8cbe65b8db5e860218c0fa2e42f776962d') + # Versions with Python 2 bindings: + version('3.1.0', sha256='6b46bc5b3966e92567d6754130666bdffb7be1d1d2c1b427d7ce964b8eaab526') + version('3.0.0', sha256='4dd9049e664abd7db78c355fea5e192167812f443115d4bf686a51bb1e9bda9c') + + # Patches with bugfixes that are necessary to build old SGpp versions + # with spack. 
Patches are submitted upstream, but need to applied + # for versions too old to include them as they will not be + # backported for old releases: + + # Patch that ensures libraries will actually + # be copied into prefix/lib upon installation + # (otherwise it would be prefix/lib/sgpp) + # Fixed in SGpp in PR https://github.com/SGpp/SGpp/pull/222 + patch('directory.patch', when='@:3.2.0') + # Fix faulty setup.py introduced in 3.2.0 + # Fixed in SGpp in version 3.3.0 + patch('fix-setup-py.patch', when='@3.2.0') + # Fix compilation issue with opencl introduced in 3.2.0 + # Fixed in SGpp in PR https://github.com/SGpp/SGpp/pull/219 + patch('ocl.patch', when='@3.2.0+opencl') + # Fixes compilation with AVX512 and datadriven + # Fixed in SGpp in PR https://github.com/SGpp/SGpp/pull/229 + patch('avx512_datadriven_compilation.patch', when='@:3.3.0+datadriven') + + variant('python', default=True, + description='Provide Python bindings for SGpp') + variant('optimization', default=True, + description='Builds the optimization module of SGpp') + variant('pde', default=True, + description='Builds the datadriven module of SGpp') + variant('quadrature', default=True, + description='Builds the datadriven module of SGpp') + variant('datadriven', default=False, + description='Builds the datadriven module of SGpp') + variant('misc', default=False, + description='Builds the misc module of SGpp') + variant('combigrid', default=False, + description='Builds the combigrid module of SGpp') + variant('solver', default=True, + description='Builds the solver module of SGpp') + variant('opencl', default=False, + description='Enables support for OpenCL accelerated operations') + variant('mpi', default=False, + description='Enables support for MPI-distributed operations') + + # Java variant deactivated due to spack issue #987 + # variant('java', default=False, + # description='Provide Java bindings for SGpp') + # depends_on('swig@3:', when='+java', type=('build')) + # extends('openjdk', 
when='+java') + + # Mandatory dependencies + depends_on('scons@2.5.1', when='@:3.1.0', type=('build')) + depends_on('scons@3:', when='@3.2.0:', type=('build')) + depends_on('zlib', type=('link')) + # Python dependencies + extends('python', when='+python') + depends_on('py-setuptools', when='+python', type=('build')) + # Python 3 support was added in version 3.2.0 + depends_on('python@2.7:2.8', when='@:3.1.0+python', type=('build', 'run')) + depends_on('python@3:', when='@3.2.0:+python', type=('build', 'run')) + depends_on('swig@3:', when='+python', type=('build')) + # Python libraries (version depends on whether we use Python 2 or 3) + depends_on('py-numpy@:1.16', when='@:3.1.0+python', type=('build', 'run')) + depends_on('py-numpy@1.17:', when='@3.2.0:+python', type=('build', 'run')) + depends_on('py-scipy@:1.2.3', when='@:3.1.0+python', type=('build', 'run')) + depends_on('py-scipy@1.3.0:', when='@3.2.0:+python', type=('build', 'run')) + # OpenCL dependency + depends_on('opencl@1.1:', when='+opencl', type=('build', 'run')) + # MPI dependency + depends_on('mpi', when='+mpi', type=('build', 'run')) + # Testing requires boost test + depends_on('boost+test', type=('test')) + + # Compiler with C++11 support is required + conflicts('%gcc@:4.8.4', msg='Compiler with c++11 support is required!') + conflicts('%clang@:3.2', msg='Compiler with c++11 support is required!') + conflicts('%intel@:14', msg='Compiler with c++11 support is required!') + # Solver python bindings are actually using the pde module at one point: + conflicts('-pde', when='+python+solver') + # some modules depend on each other (notably datadriven and misc) + conflicts('+pde', when='-solver') + # Datadriven module requirements + conflicts('+datadriven', when='-solver') + conflicts('+datadriven', when='-optimization') + conflicts('+datadriven', when='-pde') + # Misc module requirements + conflicts('+misc', when='-datadriven') + conflicts('+misc', when='-solver') + conflicts('+misc', when='-optimization') 
+ conflicts('+misc', when='-pde') + conflicts('+misc', when='@:3.1.0', + msg='The misc module was introduced in version 3.2.0') + # Combigrid module requirements (for 3.2.0 or older) + # newer combigrids have no dependencies + conflicts('+combigrid', when='@:3.2.0~optimization') + conflicts('+combigrid', when='@:3.2.0~pde') + conflicts('+combigrid', when='@:3.2.0~solver') + conflicts('+combigrid', when='@:3.2.0~quadrature') + + def build_args(self, spec, prefix): + # Testing parameters + if self.run_tests: + self.args = ['COMPILE_BOOST_TESTS=1', + 'RUN_BOOST_TESTS=1'] + if ('+python' in spec): + self.args.append('RUN_PYTHON_TESTS=1') + if spec.satisfies('@:3.2.0'): + self.args.append('RUN_CPPLINT=1') + else: # argument was renamed after 3.2.0 + self.args.append('CHECK_STYLE=1') + else: + self.args = ['COMPILE_BOOST_TESTS=0', + 'RUN_BOOST_TESTS=0', + 'RUN_PYTHON_TESTS=0'] + if spec.satisfies('@:3.2.0'): + self.args.append('RUN_CPPLINT=0') + else: # argument was renamed after 3.2.0 + self.args.append('CHECK_STYLE=0') + + # Install direction + self.args.append('PREFIX={0}'.format(prefix)) + + # Generate swig bindings? + self.args.append('SG_PYTHON={0}'.format( + '1' if '+python' in spec else '0')) + + # Java variant deactivated due to spack issue #987 + # self.args.append('SG_JAVA={0}'.format( + # '1' if '+java' in spec else '0')) + self.args.append('SG_JAVA=0') + + # Which modules to build? 
+ self.args.append('SG_OPTIMIZATION={0}'.format( + '1' if '+optimization' in spec else '0')) + self.args.append('SG_QUADRATURE={0}'.format( + '1' if '+quadrature' in spec else '0')) + self.args.append('SG_PDE={0}'.format( + '1' if '+pde' in spec else '0')) + self.args.append('SG_DATADRIVEN={0}'.format( + '1' if '+datadriven' in spec else '0')) + self.args.append('SG_COMBIGRID={0}'.format( + '1' if '+combigrid' in spec else '0')) + self.args.append('SG_SOLVER={0}'.format( + '1' if '+solver' in spec else '0')) + + # Misc flag did not exist in older versions + if spec.satisfies('@3.2.0:'): + self.args.append('SG_MISC={0}'.format( + '1' if '+misc' in spec else '0')) + + # SIMD scons parameter (pick according to simd spec) + if 'avx512' in self.spec.target: + self.args.append('ARCH=avx512') + elif 'avx2' in self.spec.target: + self.args.append('ARCH=avx2') + elif 'avx' in self.spec.target: + self.args.append('ARCH=avx') + elif 'fma4' in self.spec.target: + self.args.append('ARCH=fma4') + elif 'sse42' in self.spec.target: + self.args.append('ARCH=sse42') + elif 'sse3' in self.spec.target: + self.args.append('ARCH=sse3') + + # OpenCL Flags + self.args.append('USE_OCL={0}'.format( + '1' if '+opencl' in spec else '0')) + + # Get the mpicxx compiler from the Spack spec + # (makes certain we use the one from spack): + if ('+mpi' in spec): + self.args.append('CXX={0}'.format( + self.spec['mpi'].mpicxx)) + + return self.args + + def install_args(self, spec, prefix): + # Everything is already built, time to install our python bindings: + if '+python' in spec: + setup_py('install', '--prefix={0}'.format(prefix)) + return self.args diff --git a/var/spack/repos/builtin/packages/shengbte/package.py b/var/spack/repos/builtin/packages/shengbte/package.py new file mode 100644 index 00000000000..59697a51cee --- /dev/null +++ b/var/spack/repos/builtin/packages/shengbte/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project 
Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack import * + + +class Shengbte(MakefilePackage): + """ShengBTE is a software package for solving the Boltzmann Transport + Equation for phonons.""" + + homepage = "www.shengbte.org" + url = "www.shengbte.org/downloads/ShengBTE-v1.1.1-8a63749.tar.bz2" + + version('1.1.1-8a63749', sha256='43920740d19ae854c8ecae0b648acfdf1d7726ca4c2b44f1a1684457f2f88522') + + depends_on('mpi') + depends_on('spglib') + depends_on('mkl') + + parallel = False + + build_directory = 'Src' + + def edit(self, spec, prefix): + arch_make = join_path(self.build_directory, 'arch.make') + copy('arch.make.example', arch_make) + filter_file('export FFLAGS=.*', 'export FFLAGS=-debug -O2', arch_make) + filter_file('export LDFLAGS=.*', 'export LDFLAGS=' + spec['spglib']. + libs.ld_flags, arch_make) + + filter_file('export MPIFC=.*', 'export MPIFC=%s' % spec['mpi'].mpifc, + arch_make) + filter_file('LAPACK=.*', 'LAPACK=' + spec['mkl'].libs.ld_flags, + arch_make) + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('ShengBTE', prefix.bin) diff --git a/var/spack/repos/builtin/packages/singularity/package.py b/var/spack/repos/builtin/packages/singularity/package.py index 8931e270da5..9f4c898e619 100644 --- a/var/spack/repos/builtin/packages/singularity/package.py +++ b/var/spack/repos/builtin/packages/singularity/package.py @@ -17,7 +17,10 @@ class Singularity(MakefilePackage): which has a different install base (Autotools). Needs post-install chmod/chown steps to enable full functionality. - See package definition or `spack-build-out.txt` build log for details. + See package definition or `spack-build-out.txt` build log for details, + e.g. 
+ + tail -15 $(spack location -i singularity)/.spack/spack-build-out.txt ''' homepage = "https://www.sylabs.io/singularity/" @@ -37,9 +40,12 @@ class Singularity(MakefilePackage): variant('suid', default=True, description='install SUID binary') variant('network', default=True, description='install network plugins') + + depends_on('pkgconfig', type='build') depends_on('go') depends_on('libuuid') depends_on('libgpg-error') + depends_on('libseccomp') depends_on('squashfs', type='run') depends_on('git', when='@develop') # mconfig uses it for version info depends_on('shadow', type='run', when='@3.3:') diff --git a/var/spack/repos/builtin/packages/slate/package.py b/var/spack/repos/builtin/packages/slate/package.py index 67264d3bc92..cae0a0e8df5 100644 --- a/var/spack/repos/builtin/packages/slate/package.py +++ b/var/spack/repos/builtin/packages/slate/package.py @@ -16,10 +16,10 @@ class Slate(Package): solvers.""" homepage = "https://icl.utk.edu/slate/" - hg = "https://bitbucket.org/icl/slate" - maintainers = ['G-Ragghianti'] + git = "https://bitbucket.org/icl/slate" + maintainers = ['G-Ragghianti', 'mgates3'] - version('develop', hg=hg) + version('develop', submodules=True) variant('cuda', default=True, description='Build with CUDA support.') variant('mpi', default=True, description='Build with MPI support.') @@ -27,7 +27,6 @@ class Slate(Package): depends_on('cuda@9:', when='+cuda') depends_on('intel-mkl') - depends_on('mercurial', type='build') depends_on('mpi', when='+mpi') conflicts('%gcc@:5') diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py index 042a8ecd155..869cceaa59d 100644 --- a/var/spack/repos/builtin/packages/slepc/package.py +++ b/var/spack/repos/builtin/packages/slepc/package.py @@ -18,6 +18,8 @@ class Slepc(Package): maintainers = ['joseeroman', 'balay'] version('master', branch='master') + version('3.13.3', sha256='23d179c22b4b2f22d29fa0ac0a62f5355a964d3bc245a667e9332347c5aa8f81') + 
version('3.13.2', sha256='04cb8306cb5d4d990509710d7f8ae949bdc2c7eb850930b8d0b0b5ca99f6c70d') version('3.13.1', sha256='f4a5ede4ebdee5e15153ce31c1421209c7b794bd94be1430018615fb0838b879') version('3.13.0', sha256='f1f3c2d13a1a6914e7bf4746d38761e107ea866f50927b639e4ad5918dd1e53b') version('3.12.2', sha256='a586ce572a928ed87f04961850992a9b8e741677397cbaa3fb028323eddf4598') diff --git a/var/spack/repos/builtin/packages/slurm/package.py b/var/spack/repos/builtin/packages/slurm/package.py index c0d4cc01a91..ed69d08a774 100644 --- a/var/spack/repos/builtin/packages/slurm/package.py +++ b/var/spack/repos/builtin/packages/slurm/package.py @@ -63,6 +63,15 @@ class Slurm(AutotoolsPackage): depends_on('mariadb', when='+mariadb') depends_on('pmix', when='+pmix') + def flag_handler(self, name, flags): + wrapper_flags = None + + if name == 'cflags': + if self.spec.satisfies('@:20-02-1 %gcc@10:'): + wrapper_flags = ['-fcommon'] + + return (wrapper_flags, None, flags) + def configure_args(self): spec = self.spec diff --git a/var/spack/repos/builtin/packages/smartdenovo/aarch64.patch b/var/spack/repos/builtin/packages/smartdenovo/aarch64.patch new file mode 100644 index 00000000000..998c9b2cdf3 --- /dev/null +++ b/var/spack/repos/builtin/packages/smartdenovo/aarch64.patch @@ -0,0 +1,29 @@ +diff --git a/Makefile b/Makefile +index 0802f65..3816b6e 100644 +--- a/Makefile ++++ b/Makefile +@@ -2,9 +2,9 @@ VERSION=1.0.0 + MINOR_VER=20140314 + CC=gcc + ifdef DEBUG +-CFLAGS=-g3 -W -Wall -O0 -D_FILE_OFFSET_BITS=64 -D_GNU_SOURCE -mpopcnt -mssse3 ++CFLAGS=-g3 -W -Wall -O0 -D_FILE_OFFSET_BITS=64 -D_GNU_SOURCE + else +-CFLAGS=-W -Wall -O4 -D_FILE_OFFSET_BITS=64 -D_GNU_SOURCE -mpopcnt -mssse3 ++CFLAGS=-W -Wall -O4 -D_FILE_OFFSET_BITS=64 -D_GNU_SOURCE + endif + INSTALLDIR=/usr/local/bin + GLIBS=-lm -lpthread +diff --git a/ksw.c b/ksw.c +index 15dd0f2..22641ed 100644 +--- a/ksw.c ++++ b/ksw.c +@@ -25,7 +25,7 @@ + + #include + #include +-#include ++#include "SSE2NEON.h" + #include "ksw.h" + + #ifdef 
USE_MALLOC_WRAPPERS diff --git a/var/spack/repos/builtin/packages/smartdenovo/package.py b/var/spack/repos/builtin/packages/smartdenovo/package.py new file mode 100644 index 00000000000..f18b8583d9d --- /dev/null +++ b/var/spack/repos/builtin/packages/smartdenovo/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Smartdenovo(MakefilePackage): + """SMARTdenovo is a de novo assembler for PacBio and Oxford Nanopore + (ONT) data.""" + + homepage = "https://github.com/ruanjue/smartdenovo" + git = "https://github.com/ruanjue/smartdenovo.git" + + version('master', branch='master') + + depends_on('sse2neon', when='target=aarch64:') + + patch('aarch64.patch', when='target=aarch64:', + sha256='7dd4bca28aafb0680cc1823aa58ac9000819993538e92628554666c4b3acc470') + + def install(self, spec, prefix): + install_files = [ + 'pairaln', 'wtpre', 'wtcyc', 'wtmer', 'wtzmo', 'wtobt', + 'wtclp', 'wtext', 'wtgbo', 'wtlay', 'wtcns', 'wtmsa' + ] + mkdirp(prefix.bin) + for f in install_files: + install(f, prefix.bin) diff --git a/var/spack/repos/builtin/packages/source-highlight/package.py b/var/spack/repos/builtin/packages/source-highlight/package.py index eee706518a6..67d3b368817 100644 --- a/var/spack/repos/builtin/packages/source-highlight/package.py +++ b/var/spack/repos/builtin/packages/source-highlight/package.py @@ -19,3 +19,7 @@ class SourceHighlight(AutotoolsPackage, GNUMirrorPackage): version('3.1.8', sha256='01336a7ea1d1ccc374201f7b81ffa94d0aecb33afc7d6903ebf9fbf33a55ada3') depends_on('boost') + + def configure_args(self): + args = ["--with-boost={0}".format(self.spec['boost'].prefix)] + return args diff --git a/var/spack/repos/builtin/packages/spectrum-mpi/package.py b/var/spack/repos/builtin/packages/spectrum-mpi/package.py index 8cdd82e2ae1..8ec206adae3 100644 --- 
a/var/spack/repos/builtin/packages/spectrum-mpi/package.py +++ b/var/spack/repos/builtin/packages/spectrum-mpi/package.py @@ -56,3 +56,22 @@ def setup_dependent_build_environment(self, env, dependent_spec): env.set('OMPI_F77', spack_f77) env.prepend_path('LD_LIBRARY_PATH', self.prefix.lib) + + def setup_run_environment(self, env): + # Because MPI functions as a compiler we need to setup the compilers + # in the run environment, like any compiler + if '%xl' in self.spec or '%xl_r' in self.spec: + env.set('MPICC', os.path.join(self.prefix.bin, 'mpixlc')) + env.set('MPICXX', os.path.join(self.prefix.bin, 'mpixlC')) + env.set('MPIF77', os.path.join(self.prefix.bin, 'mpixlf')) + env.set('MPIF90', os.path.join(self.prefix.bin, 'mpixlf')) + elif '%pgi' in self.spec: + env.set('MPICC', os.path.join(self.prefix.bin, 'mpipgicc')) + env.set('MPICXX', os.path.join(self.prefix.bin, 'mpipgic++')) + env.set('MPIF77', os.path.join(self.prefix.bin, 'mpipgifort')) + env.set('MPIF90', os.path.join(self.prefix.bin, 'mpipgifort')) + else: + env.set('MPICC', os.path.join(self.prefix.bin, 'mpicc')) + env.set('MPICXX', os.path.join(self.prefix.bin, 'mpic++')) + env.set('MPIF77', os.path.join(self.prefix.bin, 'mpif77')) + env.set('MPIF90', os.path.join(self.prefix.bin, 'mpif90')) diff --git a/var/spack/repos/builtin/packages/spglib/package.py b/var/spack/repos/builtin/packages/spglib/package.py index e832b85eab1..dff137ef036 100644 --- a/var/spack/repos/builtin/packages/spglib/package.py +++ b/var/spack/repos/builtin/packages/spglib/package.py @@ -18,3 +18,8 @@ class Spglib(CMakePackage): version('1.10.3', sha256='43776b5fb220b746d53c1aa39d0230f304687ec05984671392bccaf850d9d696') version('1.10.0', sha256='117fff308731784bea2ddaf3d076f0ecbf3981b31ea1c1bfd5ce4f057a5325b1') + + @property + def libs(self): + return find_libraries(['libsymspg'], root=self.prefix.lib, + recursive=False) diff --git a/var/spack/repos/builtin/packages/sqlite/package.py 
b/var/spack/repos/builtin/packages/sqlite/package.py index 531a9bc81de..ccd8bd8e211 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -14,6 +14,7 @@ class Sqlite(AutotoolsPackage): """ homepage = "https://www.sqlite.org" + version('3.31.1', sha256='62284efebc05a76f909c580ffa5c008a7d22a1287285d68b7825a2b6b51949ae') version('3.30.1', sha256='8c5a50db089bd2a1b08dbc5b00d2027602ca7ff238ba7658fabca454d4298e60') version('3.30.0', sha256='e0a8cf4c7a87455e55e10413d16f358ca121ccec687fe1301eac95e2d340fc58') version('3.29.0', sha256='8e7c1e2950b5b04c5944a981cb31fffbf9d2ddda939d536838ebc854481afd5b') @@ -71,7 +72,9 @@ def url_for_version(self, version): ''.join(['%02d' % v for v in full_version[1:]]) # See https://sqlite.org/chronology.html for version -> year # correspondence. - if version >= Version('3.27.0'): + if version >= Version('3.31.0'): + year = '2020' + elif version >= Version('3.27.0'): year = '2019' elif version >= Version('3.22.0'): year = '2018' diff --git a/var/spack/repos/builtin/packages/sqoop/package.py b/var/spack/repos/builtin/packages/sqoop/package.py new file mode 100644 index 00000000000..d0fcb22d1e2 --- /dev/null +++ b/var/spack/repos/builtin/packages/sqoop/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Sqoop(Package): + """Apache Sqoop is a tool designed for efficiently transferring bulk + data between Apache Hadoop and structured datastores such as relational + databases.""" + + homepage = "http://sqoop.apache.org/" + url = "https://downloads.apache.org/sqoop/1.99.7/sqoop-1.99.7.tar.gz" + list_url = "https://downloads.apache.org/sqoop/" + list_depth = 1 + + version('1.99.7', sha256='caca533554235d9e999435be59a13b5ecae514b3c914ca3b54868fca43a3b74a') + + depends_on('maven', type='build') + depends_on('java@8', type=('build', 'run')) + + def install(self, spec, prefix): + mvn = which('mvn') + mvn('clean', 'install', '-DskipTests') + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/sse2neon/package.py b/var/spack/repos/builtin/packages/sse2neon/package.py new file mode 100644 index 00000000000..5c01d6f0aec --- /dev/null +++ b/var/spack/repos/builtin/packages/sse2neon/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +import glob + + +class Sse2neon(Package): +    """A C/C++ header file that converts Intel SSE intrinsics to ARM NEON + intrinsics.""" + + homepage = "https://github.com/jratcliff63367/sse2neon" + git = "https://github.com/jratcliff63367/sse2neon.git" + + version('master', branch='master') + + def install(self, spec, prefix): + mkdirp(prefix.include) + headers = glob.glob('*.h') + for f in headers: + install(f, prefix.include) diff --git a/var/spack/repos/builtin/packages/ssht/package.py b/var/spack/repos/builtin/packages/ssht/package.py index 7a4cd4857bb..1500871e62e 100644 --- a/var/spack/repos/builtin/packages/ssht/package.py +++ b/var/spack/repos/builtin/packages/ssht/package.py @@ -28,20 +28,20 @@ class Ssht(Package): def install(self, spec, prefix): make('default', 'SSHTDIR=.') - mkdirp(join_path(prefix, 'include')) + mkdirp(join_path(prefix, 'include', 'ssht')) install('src/c/ssht.h', - join_path(prefix, 'include', 'ssht.h')) + join_path(prefix, 'include', 'ssht', 'ssht.h')) install('src/c/ssht_adjoint.h', - join_path(prefix, 'include', 'ssht_adjoint.h')) + join_path(prefix, 'include', 'ssht', 'ssht_adjoint.h')) install('src/c/ssht_core.h', - join_path(prefix, 'include', 'ssht_core.h')) + join_path(prefix, 'include', 'ssht', 'ssht_core.h')) install('src/c/ssht_dl.h', - join_path(prefix, 'include', 'ssht_dl.h')) + join_path(prefix, 'include', 'ssht', 'ssht_dl.h')) install('src/c/ssht_error.h', - join_path(prefix, 'include', 'ssht_error.h')) + join_path(prefix, 'include', 'ssht', 'ssht_error.h')) install('src/c/ssht_sampling.h', - join_path(prefix, 'include', 'ssht_sampling.h')) + join_path(prefix, 'include', 'ssht', 'ssht_sampling.h')) install('src/c/ssht_types.h', - join_path(prefix, 'include', 'ssht_types.h')) + join_path(prefix, 'include', 'ssht', 'ssht_types.h')) install_tree('doc/c', join_path(prefix, 'doc')) install_tree('lib/c', join_path(prefix, 'lib')) diff --git 
a/var/spack/repos/builtin/packages/ssmtp/install.patch b/var/spack/repos/builtin/packages/ssmtp/install.patch new file mode 100644 index 00000000000..89992f7ba0b --- /dev/null +++ b/var/spack/repos/builtin/packages/ssmtp/install.patch @@ -0,0 +1,11 @@ +--- a/Makefile.in ++++ b/Makefile.in +@@ -61,7 +61,7 @@ install-sendmail: install + $(LN_S) ssmtp $(bindir)/sendmail + $(INSTALL) -d -m 755 $(libexecdir) + $(RM) $(libexecdir)/sendmail +- $(LN_S) sendmail /lib/sendmail ++ $(LN_S) $(bindir)/sendmail $(libexecdir)/sendmail + $(RM) $(mandir)/sendmail.8 + $(LN_S) ssmtp.8 $(mandir)/sendmail.8 + diff --git a/var/spack/repos/builtin/packages/ssmtp/package.py b/var/spack/repos/builtin/packages/ssmtp/package.py new file mode 100644 index 00000000000..c1ed9055f2f --- /dev/null +++ b/var/spack/repos/builtin/packages/ssmtp/package.py @@ -0,0 +1,58 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Ssmtp(AutotoolsPackage): + """A program that replaces sendmail on workstations that should send their + mail via the departmental mailhub from which they pick up their mail.""" + + homepage = "https://salsa.debian.org/debian/ssmtp" + url = "http://deb.debian.org/debian/pool/main/s/ssmtp/ssmtp_2.64.orig.tar.bz2" + + version('2.64', sha256='22c37dc90c871e8e052b2cab0ad219d010fa938608cd66b21c8f3c759046fa36') + + variant('ssl', default=True, + description='Enable support for secure connection to mail server') + variant('inet6', default=True, + description='Enable support for IPv6 transport') + variant('md5auth', default=True, + description='Enable support for MD5 authentication') + + depends_on('libnsl') + depends_on('openssl', when='+ssl') + + patch('install.patch') + + @when('+ssl') + def setup_build_environment(self, env): + # The configure script is generated with a very old version of + # autoconf, 
which cannot accept LIBS as a command-line argument + env.set('LIBS', self.spec['openssl'].libs.link_flags) + + def configure_args(self): + args = self.enable_or_disable('ssl') + args += self.enable_or_disable('inet6') + args += self.enable_or_disable('md5auth') + return args + + def install(self, spec, prefix): + install_answers = [ + # Please enter the mail name of your system. + # This is the hostname portion of the address to be shown + # on outgoing news and mail messages headers. + # The default is your system's host name. + # + # Mail name [system.host.name]: + '\n', + # Please enter the SMTP port number [25]: + '\n' + ] + install_answers_filename = 'spack-install.in' + with working_dir(self.build_directory): + with open(install_answers_filename, 'w') as f: + f.writelines(install_answers) + make('install-sendmail', input=install_answers_filename) diff --git a/var/spack/repos/builtin/packages/sst-core/package.py b/var/spack/repos/builtin/packages/sst-core/package.py index b28ae4cea01..4c6dbff0b37 100644 --- a/var/spack/repos/builtin/packages/sst-core/package.py +++ b/var/spack/repos/builtin/packages/sst-core/package.py @@ -7,38 +7,71 @@ class SstCore(AutotoolsPackage): - """The Structural Simulation Toolkit (SST) was developed to explore - innovations in highly concurrent systems where the ISA, microarchitecture, - and memory interact with the programming model and communications system""" + """The Structural Simulation Toolkit (SST) core + provides a parallel discrete event simulation (PDES) + framework for performing architecture simulations + of existing and proposed HPC systems""" - homepage = "http://sst-simulator.org/" - url = "https://github.com/sstsimulator/sst-core/releases/download/v8.0.0_Final/sstcore-8.0.0.tar.gz" - git = "https://github.com/sstsimulator/sst-core.git" + homepage = "https://github.com/sstsimulator" + git = "https://github.com/sstsimulator/sst-core.git" + url = 
"https://github.com/sstsimulator/sst-core/releases/download/v10.0.0_Final/sstcore-10.0.0.tar.gz" - version('develop', branch='devel') - version('8.0.0', sha256='34a62425c3209cf80b6bca99cb0dcc328b67fb84ed92d5e6d6c975ad9319ba8a') + maintainers = ['jjwilke'] - variant('mpi', default=True, description='Support multi-node simulations using MPI') - variant('boost', default=False, description='Use boost') + version('10.0.0', sha256="64cf93a46dfab011fba49244bf0e0efe25ef928c6fbde1d49003220d0eb7735a") + version('9.1.0', sha256="cfeda39bb2ce9f32032480427517df62e852c0b3713797255e3b838075f3614d") + version('develop', branch='devel') + version('master', branch='master') - depends_on('autoconf@1.68:', type='build', when='@develop') - depends_on('automake@1.11.1:', type='build', when='@develop') - depends_on('libtool@1.2.4:', type='build', when='@develop') - depends_on('m4', type='build', when='@develop') + variant("pdes_mpi", default=True, + description="Build support for parallel discrete event simulation") + variant("zoltan", default=False, + description="Use Zoltan for partitioning parallel runs") + variant("hdf5", default=False, + description="Build support for HDF5 statistic output") + variant("zlib", default=False, + description="Build support for ZLIB compression") + variant("preview", default=False, + description="Preview build with deprecated features removed") - depends_on('python@:2') - depends_on('zlib', type='build') - depends_on('mpi', when='+mpi') - depends_on('boost@1.56.0:', type='build', when='+boost') + depends_on("python", type=('build', 'run')) + depends_on("mpi", when="+pdes_mpi") + depends_on("zoltan", when="+zoltan") + depends_on("hdf5", when="+hdf5") + depends_on("zlib", when="+zlib") + + depends_on('autoconf@1.68:', type='build', when='@master:') + depends_on('automake@1.11.1:', type='build', when='@master:') + depends_on('libtool@1.2.4:', type='build', when='@master:') + depends_on('m4', type='build', when='@master:') + + # force out-of-source builds + 
build_directory = 'spack-build' + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('autogen.sh') def configure_args(self): args = [] - spec = self.spec + if "+zoltan" in self.spec: + args.append("--with-zoltan=%s" % self.spec["zoltan"].prefix) + if "+hdf5" in self.spec: + args.append("--with-hdf5=%s" % self.spec["hdf5"].prefix) + if "+zlib" in self.spec: + args.append("--with-zlib=%s" % self.spec["zlib"].prefix) - if '~mpi' in spec: - args.append('--disable-mpi') + if "+pdes_mpi" in self.spec: + args.append("--enable-mpi") + env['CC'] = self.spec['mpi'].mpicc + env['CXX'] = self.spec['mpi'].mpicxx + env['F77'] = self.spec['mpi'].mpif77 + env['FC'] = self.spec['mpi'].mpifc + else: + args.append("--disable-mpi") - if '+boost' in spec: - args.append('--with-boost=%s' % spec['boost'].prefix) + if "+preview" in self.spec: + args.append("--enable-preview-build") + args.append("--with-python=%s" % self.spec["python"].prefix) return args diff --git a/var/spack/repos/builtin/packages/sst-elements/package.py b/var/spack/repos/builtin/packages/sst-elements/package.py new file mode 100644 index 00000000000..89af49373ae --- /dev/null +++ b/var/spack/repos/builtin/packages/sst-elements/package.py @@ -0,0 +1,101 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class SstElements(AutotoolsPackage): + """SST Elements implements a range of components for performing + architecture simulation from node-level to system-level using + the SST discrete event core + """ + + homepage = "https://github.com/sstsimulator" + git = "https://github.com/sstsimulator/sst-elements.git" + url = "https://github.com/sstsimulator/sst-elements/releases/download/v10.0.0_Final/sstelements-10.0.0.tar.gz" + + maintainers = ['jjwilke'] + + version('10.0.0', sha256="ecf28ef97b27ea75be7e64cb0acb99d36773a888c1b32ba16034c62174b02693") + version('9.1.0', sha256="e19b05aa6e59728995fc059840c79e476ba866b67887ccde7eaf52a18a1f52ca") + + version('develop', branch='devel') + version('master', branch='master') + + variant("pin", default=False, + description="Enable the Ariel CPU model") + variant("dramsim2", default=False, + description="Build with DRAMSim2 support") + variant("nvdimmsim", default=False, + description="Build with NVDimmSim support") + variant("hybridsim", default=False, + description="Build with HybridSim support") + variant("goblin", default=False, + description="Build with GoblinHMCSim support") + variant("hbm", default=False, + description="Build with HBM DRAMSim2 support") + variant("ramulator", default=False, + description="Build with Ramulator support") + + depends_on("python", type=('build', 'run')) + depends_on("sst-core") + depends_on("sst-core@develop", when="@develop") + depends_on("sst-core@master", when="@master") + + depends_on("intel-pin@2.14", when="+pin") + depends_on("dramsim2@2.2", when="+dramsim2") + depends_on("hybridsim@2.0.1", when="+hybridsim") + depends_on("nvdimmsim@2.0.0", when="+nvdimmsim") + depends_on("goblin-hmc-sim", when="+goblin") + depends_on("ramulator@sst", when="+ramulator") + depends_on("hbm-dramsim2", when="+hbm") + depends_on("dramsim2@2.2.2", when="+hybridsim") + depends_on("nvdimmsim@2.0.0", when="+hybridsim") + + 
depends_on('autoconf@1.68:', type='build', when='@master:') + depends_on('automake@1.11.1:', type='build', when='@master:') + depends_on('libtool@1.2.4:', type='build', when='@master:') + depends_on('m4', type='build', when='@master:') + + # force out-of-source builds + build_directory = 'spack-build' + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('autogen.sh') + + def configure_args(self): + args = [] + if '+pdes_mpi' in self.spec["sst-core"]: + env['CC'] = self.spec['mpi'].mpicc + env['CXX'] = self.spec['mpi'].mpicxx + env['F77'] = self.spec['mpi'].mpif77 + env['FC'] = self.spec['mpi'].mpifc + + if "+pin" in self.spec: + args.append("--with-pin=%s" % self.spec["intel-pin"].prefix) + + if "+dramsim2" in self.spec or "+hybridsim" in self.spec: + args.append("--with-dramsim=%s" % self.spec["dramsim2"].prefix) + + if "+nvdimmsim" in self.spec or "+hybridsim" in self.spec: + args.append("--with-nvdimmsim=%s" % self.spec["nvdimmsim"].prefix) + + if "+hybridsim" in self.spec: + args.append("--with-hybridsim=%s" % self.spec["hybridsim"].prefix) + + if "+goblin" in self.spec: + args.append("--with-goblin-hmcsim=%s" % + self.spec["goblin-hmc-sim"].prefix) + + if "+hbm" in self.spec: + args.append("--with-hbmdramsim=%s" % + self.spec["hbm-dramsim2"].prefix) + + if "+ramulator" in self.spec: + args.append("--with-ramulator=%s" % self.spec["ramulator"].prefix) + + args.append("--with-sst-core=%s" % self.spec["sst-core"].prefix) + return args diff --git a/var/spack/repos/builtin/packages/sst-macro/package.py b/var/spack/repos/builtin/packages/sst-macro/package.py index e7333c9a5bd..6a04e44dba7 100644 --- a/var/spack/repos/builtin/packages/sst-macro/package.py +++ b/var/spack/repos/builtin/packages/sst-macro/package.py @@ -16,48 +16,56 @@ class SstMacro(AutotoolsPackage): """ homepage = "http://sst.sandia.gov/about_sstmacro.html" - url = "https://github.com/sstsimulator/sst-macro/releases/download/v6.1.0_Final/sstmacro-6.1.0.tar.gz" - git = 
"https://github.com/sstsimulator/sst-macro.git" + git = "https://github.com/sstsimulator/sst-macro.git" + url = "https://github.com/sstsimulator/sst-macro/releases/download/v10.0.0_Final/sstmacro-10.0.0.tar.gz" + maintainers = ['jjwilke'] + + version('10.0.0', sha256='064b732256f3bec9b553e00bcbc9a1d82172ec194f2b69c8797f585200b12566') + version('master', branch='master') version('develop', branch='devel') - version('8.0.0', sha256='8618a259e98ede9a1a2ce854edd4930628c7c5a770c3915858fa840556c1861f') - version('6.1.0', sha256='930b67313b594148d6356e550ca370214a9283858235321d3ef974191eb028d6') - depends_on('boost@1.59:', when='@:6.1.0') - - depends_on('autoconf@1.68:', type='build', when='@develop') - depends_on('automake@1.11.1:', type='build', when='@develop') - depends_on('libtool@1.2.4:', type='build', when='@develop') - depends_on('m4', type='build', when='@develop') + depends_on('autoconf@1.68:', type='build', when='@master:') + depends_on('automake@1.11.1:', type='build', when='@master:') + depends_on('libtool@1.2.4:', type='build', when='@master:') + depends_on('m4', type='build', when='@master:') depends_on('binutils', type='build') depends_on('zlib', type=('build', 'link')) depends_on('otf2', when='+otf2') - depends_on('llvm+clang@:5.99.99', when='+skeletonizer') - depends_on('mpi', when='+mpi') - depends_on('sst-core@8.0.0', when='@8.0.0 +core') - depends_on('sst-core@develop', when='@develop +core') + depends_on('llvm+clang@5:9', when='+skeletonizer') + depends_on('mpi', when='+pdes_mpi') + depends_on('sst-core@develop', when='@develop+core') + depends_on('sst-core@master', when='@master+core') + + variant('pdes_threads', default=True, + description='Enable thread-parallel PDES simulation') + variant('pdes_mpi', default=False, + description='Enable distributed PDES simulation') + variant('core', default=False, description='Use SST Core for PDES') + variant('otf2', default=False, + description='Enable OTF2 trace emission and replay support') + 
variant('skeletonizer', default=False, + description='Enable Clang source-to-source autoskeletonization') - variant('core', default=False, description='Use SST Core for PDES') - variant('mpi', default=True, description='Enable distributed PDES simulation') - variant('otf2', default=False, description='Enable OTF2 trace emission and replay support') - variant('shared', default=True, description='Build shared libraries') - variant('skeletonizer', default=False, description='Enable Clang source-to-source autoskeletonization') variant('static', default=True, description='Build static libraries') - variant('threaded', default=False, description='Enable thread-parallel PDES simulation') + variant('shared', default=True, description='Build shared libraries') - @run_before('autoreconf') - def bootstrap(self): - if '@develop' in self.spec: - Executable('./bootstrap.sh')() + variant('werror', default=False, + description='Build with all warnings as errors') + variant('warnings', default=False, + description='Build with all possible warnings') + + # force out-of-source builds + build_directory = 'spack-build' + + def autoreconf(self, spec, prefix): + bash = which('bash') + bash('./bootstrap.sh') def configure_args(self): args = ['--disable-regex'] - # Set CFLAGS and CXXFLAGS so they won't automatically insert '-g' - env['CFLAGS'] = '-O2' - env['CXXFLAGS'] = '-O2' - spec = self.spec args.append( '--enable-static=%s' % ('yes' if '+static' in spec else 'no')) @@ -66,19 +74,30 @@ def configure_args(self): if spec.satisfies("@8.0.0:"): args.extend([ - '--%sable-otf2' % ('en' if '+otf2' in spec else 'dis'), - '--%sable-multithread' % ( - 'en' if '+threaded' in spec else 'dis') + '--%sable-otf2' % + ('en' if '+otf2' in spec else 'dis'), + '--%sable-multithread' % + ('en' if '+pdes_threads' in spec else 'dis') ]) if '+skeletonizer' in spec: args.append('--with-clang=' + spec['llvm'].prefix) + if spec.satisfies("@10:"): + if "+warnings" in spec: + args.append("--with-warnings") + if 
"+werror" in spec: + args.append("--with-werror") + if '+core' in spec: args.append('--with-sst-core=%s' % spec['sst-core'].prefix) # Optional MPI support - if '+mpi' in spec: + need_core_mpi = False + if "+core" in spec: + if "+pdes_mpi" in spec["sst-core"]: + need_core_mpi = True + if '+pdes_mpi' in spec or need_core_mpi: env['CC'] = spec['mpi'].mpicc env['CXX'] = spec['mpi'].mpicxx env['F77'] = spec['mpi'].mpif77 diff --git a/var/spack/repos/builtin/packages/sst-transports/package.py b/var/spack/repos/builtin/packages/sst-transports/package.py new file mode 100644 index 00000000000..39a6e99d058 --- /dev/null +++ b/var/spack/repos/builtin/packages/sst-transports/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class SstTransports(CMakePackage): + """Provides transports like uGNI and verbs + that run in the simulator""" + + homepage = "https://github.com/sstsimulator" + git = "https://github.com/jjwilke/sst-transports.git" + + maintainers = ['jjwilke'] + + version('master', branch='master') + + depends_on("sst-macro") + + def cmake_args(self): + args = [] + args.append("-DSSTMacro_ROOT=%s" % self.spec["sst-macro"].prefix) + return args diff --git a/var/spack/repos/builtin/packages/stata/package.py b/var/spack/repos/builtin/packages/stata/package.py index 59dee6cf139..263fc60cd06 100644 --- a/var/spack/repos/builtin/packages/stata/package.py +++ b/var/spack/repos/builtin/packages/stata/package.py @@ -27,10 +27,11 @@ class Stata(Package): homepage = "https://www.stata.com/" # url = "stata" + version('16', 'a13a6a92558eeb3c6cb3013c458a6777e54c21af43599df6b0a924f5f5c2d5d2') version('15', '2486f4c7db1e7b453004c7bd3f8da40ba1e30be150613065c7b82b1915259016') - # V15 depends on libpng v12 and fails with other versions of libpng - depends_on('libpng@1.2.57') + 
depends_on('libpng@1.2.57', when='@15', type='run') + depends_on('libpng@1.6:1.6.99', when='@16', type='run') # STATA is downloaded from user/pass protected ftp as Stata15Linux64.tar.gz def url_for_version(self, version): @@ -81,6 +82,11 @@ def install(self, spec, prefix): bash = which('bash') tar = which('tar') + res_dir = 'unix/linux64/' + + if self.spec.satisfies('@16:'): + res_dir = 'unix/linux64p/' + # Step 1. x = datetime.now() with open("installed.150", "w") as fh: @@ -89,11 +95,11 @@ def install(self, spec, prefix): # Step 2. instlist = ['ado.taz', 'base.taz', 'bins.taz', 'docs.taz'] for instfile in instlist: - tar('-x', '-z', '-f', 'unix/linux64/' + instfile) + tar('-x', '-z', '-f', res_dir + instfile) # Step 3. - install('unix/linux64/setrwxp', 'setrwxp') - install('unix/linux64/inst2', 'inst2') + install(res_dir + 'setrwxp', 'setrwxp') + install(res_dir + 'inst2', 'inst2') # Step 4. Since the install script calls out specific permissions and # could change in the future (or old versions) I thought it best to diff --git a/var/spack/repos/builtin/packages/stinger/package.py b/var/spack/repos/builtin/packages/stinger/package.py new file mode 100644 index 00000000000..fe78dc015c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/stinger/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Stinger(CMakePackage): + """The STINGER in-memory graph store and dynamic graph analysis + platform. 
Millions to billions of vertices and edges at thousands + to millions of updates per second.""" + + homepage = "http://www.stingergraph.com/" + git = "https://github.com/stingergraph/stinger.git" + + version('master', branch='master') + + parallel = False + + def install(self, spec, prefix): + with working_dir(self.build_directory): + install_tree('./bin', prefix.bin) + install_tree('./lib', prefix.lib) + install_tree('./include', prefix.include) + install_tree('./share', prefix.share) diff --git a/var/spack/repos/builtin/packages/strace/package.py b/var/spack/repos/builtin/packages/strace/package.py index f1b282e5305..a46a98b2bc5 100644 --- a/var/spack/repos/builtin/packages/strace/package.py +++ b/var/spack/repos/builtin/packages/strace/package.py @@ -17,6 +17,11 @@ class Strace(AutotoolsPackage): conflicts('platform=darwin', msg='strace runs only on Linux.') + version('5.7', sha256='b284b59f9bcd95b9728cea5bd5c0edc5ebe360af73dc76fbf6334f11c777ccd8') + version('5.6', sha256='189968eeae06ed9e20166ec55a830943c84374676a457c9fe010edc7541f1b01') + version('5.5', sha256='9f58958c8e59ea62293d907d10572e352b582bd7948ed21aa28ebb47e5bf30ff') + version('5.4', sha256='f7d00514d51290b6db78ad7a9de709baf93caa5981498924cbc9a744cfd2a741') + version('5.3', sha256='6c131198749656401fe3efd6b4b16a07ea867e8f530867ceae8930bbc937a047') version('5.2', sha256='d513bc085609a9afd64faf2ce71deb95b96faf46cd7bc86048bc655e4e4c24d2') version('5.1', sha256='f5a341b97d7da88ee3760626872a4899bf23cf8dee56901f114be5b1837a9a8b') version('5.0', sha256='3b7ad77eb2b81dc6078046a9cc56eed5242b67b63748e7fc28f7c2daf4e647da') diff --git a/var/spack/repos/builtin/packages/su2/package.py b/var/spack/repos/builtin/packages/su2/package.py new file mode 100644 index 00000000000..3aa51e5828d --- /dev/null +++ b/var/spack/repos/builtin/packages/su2/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Su2(MesonPackage): + """SU2 is a suite of open-source software tools written in C++ for + the numerical solution of partial differential equations (PDE) and + performing PDE constrained optimization.""" + + homepage = "https://su2code.github.io" + url = "https://github.com/su2code/SU2/archive/v7.0.3.tar.gz" + + version('7.0.3', sha256='7fc01deaad9baabbe0ccd162a4b565172d49e573e79abcb65433b51ff29bda06') + version('7.0.2', sha256='69e51d52c5a84fb572bd6a83faf8f9fd04471fbf7d5b70d967c7306c1d4e17d9') + version('7.0.1', sha256='eb0550c82ccaef8cb71e4a8775aa71d2020ef085ec3dd19dfafff5d301034f6f') + version('7.0.0', sha256='6207dcca15eaebc11ce12b2866c937b4ad9b93274edf6f23d0487948ac3963b8') + version('6.2.0', sha256='ffc953326e8432a1a6534556a5f6cf086046d3149cfcec6b4e7390eebe30ce2e') + + depends_on('python@3:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py index da2cc222aef..eef38fefae9 100644 --- a/var/spack/repos/builtin/packages/suite-sparse/package.py +++ b/var/spack/repos/builtin/packages/suite-sparse/package.py @@ -102,7 +102,7 @@ def install(self, spec, prefix): # GraphBLAS/Demo/Program/wildtype_demo.c. For many compilers this is # not an issue because c11 or newer is their default. However, for some # compilers (e.g. xlc) the c11 flag is necessary. - if spec.satisfies('@5.4:'): + if spec.satisfies('@5.4:5.7.1') and ('%xl' in spec or '%xl_r' in spec): make_args += ['CFLAGS+=%s' % self.compiler.c11_flag] # 64bit blas in UMFPACK: @@ -142,7 +142,7 @@ def install(self, spec, prefix): # not possible, mainly because of GraphBLAS. Thus compile first and # install in a second run. 
if '@5.4.0:' in self.spec: - make('default', *make_args) + make('library', *make_args) make_args.append('INSTALL=%s' % prefix) make('install', *make_args) diff --git a/var/spack/repos/builtin/packages/sumo/package.py b/var/spack/repos/builtin/packages/sumo/package.py new file mode 100644 index 00000000000..0fbd58303d8 --- /dev/null +++ b/var/spack/repos/builtin/packages/sumo/package.py @@ -0,0 +1,55 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Sumo(CMakePackage): + """Eclipse SUMO is an open source, highly portable, microscopic and + continuous road traffic simulation package designed to handle large road + networks. It allows for intermodal simulation including pedestrians and + comes with a large set of tools for scenario creation.""" + + homepage = "http://projects.eclipse.org/projects/technology.sumo" + url = "https://github.com/eclipse/sumo/archive/v1_5_0.tar.gz" + + version('1.5.0', sha256='be6ba0361b487a5e71c81e60b4c07a67826d5e170500c10c37374c1086ac2cb6') + + variant('gdal', + default=True, + description='gdal support, for arcgis') + variant('ffmpeg', + default=False, + description='ffmpeg support, for video output') + variant('openscenegraph', + default=False, + description='openscenegraph support, for experimental 3D GUI') + variant('gl2ps', + default=False, + description='gl2ps support') + variant('eigen', + default=False, + description='eigen support') + + extends('python') + depends_on('py-setuptools', type='build') + depends_on('googletest', type='test') + depends_on('xerces-c') + depends_on('proj') + depends_on('fox@1.6.57+opengl') + depends_on('swig', type='build') + depends_on('java', type=('build', 'run')) + depends_on('gdal', when='+gdal') + depends_on('ffmpeg', when='+ffmpeg') + depends_on('openscenegraph', when='+openscenegraph') + depends_on('gl2ps', 
when='+gl2ps') + depends_on('eigen', when='+eigen') + + def url_for_version(self, version): + url = "https://github.com/eclipse/sumo/archive/v{0}.tar.gz" + return url.format(version.underscored) + + def setup_run_environment(self, env): + env.set('SUMO_HOME', self.prefix) diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py index 104307fa19a..e374a4bbcad 100644 --- a/var/spack/repos/builtin/packages/sundials/package.py +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -21,6 +21,8 @@ class Sundials(CMakePackage): # Versions # ========================================================================== version('develop', branch='develop') + version('5.3.0', sha256='88dff7e11a366853d8afd5de05bf197a8129a804d9d4461fb64297f1ef89bca7') + version('5.2.0', sha256='95f058acce5bd66e654de65acdbb1c9f44c90cf1b4e28f8d933cdb4415ebba3e') version('5.1.0', sha256='fb22d14fad42203809dc46d046b001149ec4e901b23882bd4a80619157fd9b21') version('5.0.0', sha256='345141ec01c641d0bdfb3476c478b7e74fd6a7192a478a27cafe75d9da2d7dd3') version('4.1.0', sha256='280de1c27b2360170a6f46cb3799b2aee9dff3bddbafc8b08c291a47ab258aa5') diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index 799b413f7d3..9bf0a240f6a 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -18,7 +18,9 @@ class SuperluDist(CMakePackage): version('develop', branch='master') version('xsdk-0.2.0', tag='xsdk-0.2.0') + version('6.3.1', sha256='3787c2755acd6aadbb4d9029138c293a7570a2ed228806676edcc7e1d3f5a1d3') version('6.3.0', sha256='daf3264706caccae2b8fd5a572e40275f1e128fa235cb7c21ee2f8051c11af95') + version('6.2.0', sha256='15ad1badd81b41e37941dd124d06d3b92e51c4f0ff532ad23fb09c4ebfe6eb9e') version('6.1.1', sha256='35d25cff592c724439870444ed45e1d1d15ca2c65f02ccd4b83a6d3c9d220bd1') version('6.1.0', 
sha256='92c6d1424dd830ee2d1e7396a418a5f6645160aea8472e558c4e4bfe006593c4') version('6.0.0', sha256='ff6cdfa0263d595708bbb6d11fb780915d8cfddab438db651e246ea292f37ee4') diff --git a/var/spack/repos/builtin/packages/swfft/include-stdio_h.patch b/var/spack/repos/builtin/packages/swfft/include-stdio_h.patch new file mode 100644 index 00000000000..86c9390cae1 --- /dev/null +++ b/var/spack/repos/builtin/packages/swfft/include-stdio_h.patch @@ -0,0 +1,11 @@ +--- a/TimingStats.h ++++ b/TimingStats.h +@@ -56,7 +56,7 @@ + #define HACC_TIMINGSTATS_H + + #include +- ++#include + #include + + // lightweight timing statistics from MPI_Wtime() calls diff --git a/var/spack/repos/builtin/packages/swfft/package.py b/var/spack/repos/builtin/packages/swfft/package.py index f6bfae90ec6..3d4766f77b0 100644 --- a/var/spack/repos/builtin/packages/swfft/package.py +++ b/var/spack/repos/builtin/packages/swfft/package.py @@ -21,6 +21,10 @@ class Swfft(MakefilePackage): depends_on('mpi') depends_on('fftw') + # fix error + # TimingStats.h:94:35: error: 'printf' was not declared in this scope + patch('include-stdio_h.patch') + tags = ['proxy-app', 'ecp-proxy-app'] @property diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py index c6c9824a63a..59a535fb745 100644 --- a/var/spack/repos/builtin/packages/swig/package.py +++ b/var/spack/repos/builtin/packages/swig/package.py @@ -43,6 +43,9 @@ class Swig(AutotoolsPackage, SourceforgePackage): depends_on('autoconf', type='build', when=_version) depends_on('automake', type='build', when=_version) depends_on('libtool', type='build', when=_version) + # Need newer 'automake' to support newer platforms + for _target in ['ppc64le', 'aarch64']: + depends_on('automake@1.15:', type='build', when='target={0}:'.format(_target)) depends_on('pkgconfig', type='build') build_directory = 'spack-build' diff --git a/var/spack/repos/builtin/packages/swipl/package.py 
b/var/spack/repos/builtin/packages/swipl/package.py index 6cba487a7d2..da3e161c2c6 100644 --- a/var/spack/repos/builtin/packages/swipl/package.py +++ b/var/spack/repos/builtin/packages/swipl/package.py @@ -24,6 +24,7 @@ class Swipl(CMakePackage): maintainers = ['alexrobomind'] + version('8.2.0', sha256='d8c9f3adb9cd997a5fed7b5f5dbfe971d2defda969b9066ada158e4202c09c3c') version('8.0.3', sha256='cee59c0a477c8166d722703f6e52f962028f3ac43a5f41240ecb45dbdbe2d6ae') variant('gmp', default=True, description='bignum and rational number support') @@ -32,10 +33,14 @@ class Swipl(CMakePackage): variant('zlib', default=True, description='Compressed streams support') variant('odbc', default=True, description='ODBC database access') variant('unwind', default=True, description='Build with stack traces in crash reports') + variant('html', default=True, description='Install the HTML documentation') + variant('pdfdoc', default=False, description='Build the PDF documentation') depends_on('uuid') depends_on('readline') + depends_on('libarchive', when='+html') + depends_on('gmp', when='+gmp') depends_on('unwind', when='+unwind') depends_on('unixodbc', when='+odbc') @@ -65,6 +70,8 @@ def append_switch(variant, cmake_flag): append_switch('+gmp', 'USE_GMP') append_switch('+xpce', 'SWIPL_PACKAGES_X') append_switch('+odbc', 'SWIPL_PACKAGES_ODBC') + append_switch('+html', 'INSTALL_DOCUMENTATION') + append_switch('+pdfdoc', 'BUILD_PDF_DOCUMENTATION') # The variants ssl and zlib are implicitly set up by CMake diff --git a/var/spack/repos/builtin/packages/sz/ctags-only-if-requested.patch b/var/spack/repos/builtin/packages/sz/ctags-only-if-requested.patch new file mode 100644 index 00000000000..0b3494f11e1 --- /dev/null +++ b/var/spack/repos/builtin/packages/sz/ctags-only-if-requested.patch @@ -0,0 +1,42 @@ +From 3637a87f986cb64ddc1bde3551e29894ab18500b Mon Sep 17 00:00:00 2001 +From: Robert Underwood +Date: Wed, 3 Jun 2020 12:48:33 -0400 +Subject: [PATCH] Don't require ctags unless we ask for it 
+ +Previously this caused build failures if the user had a bad version of +ctags. Don't build with it. +--- + CMakeLists.txt | 17 ++++++++++------- + 1 file changed, 10 insertions(+), 7 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 4fd4014..d764a44 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -36,13 +36,16 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) + + #generate tags for the project if tags exist +-find_program(TAGS ctags) +-if(TAGS) +- add_custom_target(tags ALL +- COMMAND ${TAGS} --exclude=${CMAKE_BINARY_DIR} -f ${CMAKE_BINARY_DIR}/tags --c++-kinds=+p --fields=+iaS -R +- COMMENT Generating Tag files +- WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} +- ) ++option(BUILD_CTAGS "enable ctags generation target" OFF) ++if(BUILD_CTAGS) ++ find_program(TAGS ctags) ++ if(TAGS) ++ add_custom_target(tags ALL ++ COMMAND ${TAGS} --exclude=${CMAKE_BINARY_DIR} -f ${CMAKE_BINARY_DIR}/tags --c++-kinds=+p --fields=+iaS -R ++ COMMENT Generating Tag files ++ WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} ++ ) ++ endif() + endif() + + option(BUILD_SHARED_LIBS "build shared libraries over static libraries" ON) +-- +2.24.2 (Apple Git-127) + diff --git a/var/spack/repos/builtin/packages/sz/package.py b/var/spack/repos/builtin/packages/sz/package.py index b0d92dd51c5..897aa57a1c7 100644 --- a/var/spack/repos/builtin/packages/sz/package.py +++ b/var/spack/repos/builtin/packages/sz/package.py @@ -54,6 +54,8 @@ class Sz(CMakePackage): depends_on('hdf5', when="+hdf5") depends_on('netcdf-c', when="+netcdf") + patch('ctags-only-if-requested.patch', when='@2.1.8.1:2.1.8.3') + @property def build_directory(self): """autotools needs a different build directory to work""" diff --git a/var/spack/repos/builtin/packages/tasmanian/package.py b/var/spack/repos/builtin/packages/tasmanian/package.py index 87ddc99db44..f1214b17773 100644 --- a/var/spack/repos/builtin/packages/tasmanian/package.py +++ 
b/var/spack/repos/builtin/packages/tasmanian/package.py @@ -12,13 +12,14 @@ class Tasmanian(CMakePackage): interpolation as well as parameter calibration.""" homepage = 'http://tasmanian.ornl.gov' - url = 'https://github.com/ORNL/TASMANIAN/archive/v7.0.tar.gz' + url = 'https://github.com/ORNL/TASMANIAN/archive/v7.1.tar.gz' git = 'https://github.com/ORNL/TASMANIAN.git' maintainers = ['mkstoyanov'] version('develop', branch='master') + version('7.1', sha256='9c24a591506a478745b802f1fa5c557da7bc80b12d8070855de6bc7aaca7547a') version('7.0', sha256='4094ba4ee2f1831c575d00368c8471d3038f813398be2e500739cef5c7c4a47b') # use for xsdk-0.5.0 version('6.0', sha256='ceab842e9fbce2f2de971ba6226967caaf1627b3e5d10799c3bd2e7c3285ba8b') # use for xsdk-0.4.0 version('5.1', sha256='b0c1be505ce5f8041984c63edca9100d81df655733681858f5cc10e8c0c72711') @@ -67,6 +68,7 @@ class Tasmanian(CMakePackage): depends_on('mpi', when="+mpi", type=('build', 'run')) # openmpi 2 and 3 tested depends_on('blas', when="+blas", type=('build', 'run')) # openblas 0.2.18 or newer + depends_on('lapack', when="+blas @7.1:", type=('build', 'run')) # lapack used since 7.1 depends_on('cuda@8.0.61:', when='+cuda', type=('build', 'run')) depends_on('cuda@8.0.61:', when='+magma', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py index 132bcb7e3db..1d2dea580b7 100644 --- a/var/spack/repos/builtin/packages/tau/package.py +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -89,7 +89,7 @@ class Tau(Package): depends_on('libdwarf', when='+libdwarf') depends_on('libelf', when='+libdwarf') # TAU requires the ELF header support, libiberty and demangle. 
- depends_on('binutils@:2.33.1+libiberty+headers~nls', when='+binutils') + depends_on('binutils@:2.33.1+libiberty+headers', when='+binutils') depends_on('python@2.7:', when='+python') depends_on('libunwind', when='+libunwind') depends_on('mpi', when='+mpi', type=('build', 'run', 'link')) @@ -107,6 +107,8 @@ class Tau(Package): conflicts('+adios2', when='@:2.29.1') conflicts('+sqlite', when='@:2.29.1') + patch('unwind.patch', when="@2.29") + def set_compiler_options(self, spec): useropt = ["-O2 -g", self.rpath_args] @@ -144,6 +146,7 @@ def set_compiler_options(self, spec): def setup_build_environment(self, env): env.prepend_path('LIBRARY_PATH', self.spec['zlib'].prefix.lib) + env.prepend_path('LIBRARY_PATH', self.spec['hwloc'].prefix.lib) def install(self, spec, prefix): # TAU isn't happy with directories that have '@' in the path. Sigh. diff --git a/var/spack/repos/builtin/packages/tau/unwind.patch b/var/spack/repos/builtin/packages/tau/unwind.patch new file mode 100644 index 00000000000..1d9faa4cc6b --- /dev/null +++ b/var/spack/repos/builtin/packages/tau/unwind.patch @@ -0,0 +1,34 @@ +From 3b60cbcab167f8f1cf450f0319850bfa88f39d3a Mon Sep 17 00:00:00 2001 +From: eugeneswalker +Date: Thu, 4 Jun 2020 06:12:41 -0700 +Subject: [PATCH] check for unwind_dir/lib and unwind_dir/lib64 + +--- + configure | 11 +++++++++-- + 1 file changed, 9 insertions(+), 2 deletions(-) + +diff --git a/configure b/configure +index 3bd7430..b673cdf 100755 +--- a/configure ++++ b/configure +@@ -1662,8 +1662,15 @@ for arg in "$@"; do + -unwind=*) + unwind_dir=`echo $arg | sed -e 's/-unwind=//'` + if [ $unwind_dir != "/usr" ]; then +- unwind_inc="$unwind_dir/include" +- unwind_lib="$unwind_dir/lib" ++ unwind_inc="$unwind_dir/include" ++ unwind_lib="" ++ if [[ -d "$unwind_dir/lib" ]] ; then ++ unwind_lib="$unwind_dir/lib" ++ elif [[ -d "$unwind_dir/lib64" ]] ; then ++ unwind_lib="$unwind_dir/lib64" ++ else ++ exit 1 ++ fi + fi + # So that the user doesn't have to specify the unwinder, + # use 
libunwind as the default. +-- +2.24.2 (Apple Git-127) + diff --git a/var/spack/repos/builtin/packages/tauola/package.py b/var/spack/repos/builtin/packages/tauola/package.py new file mode 100644 index 00000000000..e850ce4dcc6 --- /dev/null +++ b/var/spack/repos/builtin/packages/tauola/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Tauola(AutotoolsPackage): + """ Tauola is a event generator for tau decays.""" + + homepage = "https://tauolapp.web.cern.ch/tauolapp/" + url = "https://tauolapp.web.cern.ch/tauolapp/resources/TAUOLA.1.1.8/TAUOLA.1.1.8-LHC.tar.gz" + + version('1.1.8', sha256='3f734e8a967682869cca2c1ffebd3e055562613c40853cc81820d8b666805ed5') + + maintainers = ['vvolkl'] + + depends_on('hepmc@:2.99.99') + + def configure_args(self): + args = [] + + args.append('--with-hepmc=%s' % self.spec["hepmc"].prefix) + args.append('--without-hepmc3') + return args diff --git a/var/spack/repos/builtin/packages/tbl2asn/package.py b/var/spack/repos/builtin/packages/tbl2asn/package.py new file mode 100644 index 00000000000..8f1c0ba74c7 --- /dev/null +++ b/var/spack/repos/builtin/packages/tbl2asn/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * +from os import chmod + + +class Tbl2asn(Package): + """Tbl2asn is a command-line program that automates the creation of + sequence records for submission to GenBank.""" + + homepage = "https://www.ncbi.nlm.nih.gov/genbank/tbl2asn2/" + + version('2020-03-01', sha256='7cc1119d3cfcbbffdbd4ecf33cef8bbdd44fc5625c72976bee08b1157625377e') + + def url_for_version(self, ver): + return "https://ftp.ncbi.nih.gov/toolbox/ncbi_tools/converters/by_program/tbl2asn/linux.tbl2asn.gz" + + def install(self, spec, prefix): + mkdirp(prefix.bin) + install('../linux.tbl2asn', prefix.bin.tbl2asn) + chmod(prefix.bin.tbl2asn, 0o775) diff --git a/var/spack/repos/builtin/packages/tcsh/package.py b/var/spack/repos/builtin/packages/tcsh/package.py index 90625e352d2..7cdd8bb4120 100644 --- a/var/spack/repos/builtin/packages/tcsh/package.py +++ b/var/spack/repos/builtin/packages/tcsh/package.py @@ -27,16 +27,17 @@ def fedora_patch(commit, file, **kwargs): # noqa patch('{0}{1}'.format(prefix, file), **kwargs) # Upstream patches - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-000-add-all-flags-for-gethost-build.patch', when='@6.20.00', sha256='f8266916189ebbdfbad5c2c28ac00ed25f07be70f054d9830eb84ba84b3d03ef') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-001-delay-arginp-interpreting.patch', when='@6.20.00', sha256='57c7a9b0d94dd41e4276b57b0a4a89d91303d36180c1068b9e3ab8f6149b18dd') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-002-type-of-read-in-prompt-confirm.patch', when='@6.20.00', sha256='837a6a82f815c0905cf7ea4c4ef0112f36396fc8b2138028204000178a1befa5') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-003-fix-out-of-bounds-read.patch', when='@6.20.00', sha256='f973bd33a7fd8af0002a9b8992216ffc04fdf2927917113e42e58f28b702dc14') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 
'tcsh-6.20.00-004-do-not-use-old-pointer-tricks.patch', when='@6.20.00', sha256='333e111ed39f7452f904590b47b996812590b8818f1c51ad68407dc05a1b18b0') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-005-reset-fixes-numbering.patch', when='@6.20.00', sha256='d1b54b5c5432faed9791ffde813560e226896a68fc5933d066172bcf3b2eb8bd') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-006-cleanup-in-readme-files.patch', when='@6.20.00', sha256='b4e7428ac6c2918beacc1b73f33e784ac520ef981d87e98285610b1bfa299d7b') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-007-look-for-tgetent-in-libtinfo.patch', when='@6.20.00', sha256='e6c88ffc291c9d4bda4d6bedf3c9be89cb96ce7dc245163e251345221fa77216') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-008-guard-ascii-only-reversion.patch', when='@6.20.00', sha256='7ee195e4ce4c9eac81920843b4d4d27254bec7b43e0b744f457858a9f156e621') - fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-009-fix-regexp-for-backlash-quoting-tests.patch', when='@6.20.00', sha256='d2358c930d5ab89e5965204dded499591b42a22d0a865e2149b8c0f1446fac34') + fedora_patch('96b95844cc685b11ed0cc215137e394da4505d41', 'tcsh-6.22.02-avoid-gcc-to-fail.patch', when='@:6.22.02', sha256='392615011adb7afeb0010152409a37b150f03dbde5b534503e9cd7363b742a19') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-000-add-all-flags-for-gethost-build.patch', when='@6.20.00', sha256='f8266916189ebbdfbad5c2c28ac00ed25f07be70f054d9830eb84ba84b3d03ef') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-001-delay-arginp-interpreting.patch', when='@6.20.00', sha256='57c7a9b0d94dd41e4276b57b0a4a89d91303d36180c1068b9e3ab8f6149b18dd') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-002-type-of-read-in-prompt-confirm.patch', when='@6.20.00', sha256='837a6a82f815c0905cf7ea4c4ef0112f36396fc8b2138028204000178a1befa5') + 
fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-003-fix-out-of-bounds-read.patch', when='@6.20.00', sha256='f973bd33a7fd8af0002a9b8992216ffc04fdf2927917113e42e58f28b702dc14') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-004-do-not-use-old-pointer-tricks.patch', when='@6.20.00', sha256='333e111ed39f7452f904590b47b996812590b8818f1c51ad68407dc05a1b18b0') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-005-reset-fixes-numbering.patch', when='@6.20.00', sha256='d1b54b5c5432faed9791ffde813560e226896a68fc5933d066172bcf3b2eb8bd') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-006-cleanup-in-readme-files.patch', when='@6.20.00', sha256='b4e7428ac6c2918beacc1b73f33e784ac520ef981d87e98285610b1bfa299d7b') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-007-look-for-tgetent-in-libtinfo.patch', when='@6.20.00', sha256='e6c88ffc291c9d4bda4d6bedf3c9be89cb96ce7dc245163e251345221fa77216') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-008-guard-ascii-only-reversion.patch', when='@6.20.00', sha256='7ee195e4ce4c9eac81920843b4d4d27254bec7b43e0b744f457858a9f156e621') + fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-009-fix-regexp-for-backlash-quoting-tests.patch', when='@6.20.00', sha256='d2358c930d5ab89e5965204dded499591b42a22d0a865e2149b8c0f1446fac34') # Downstream patches fedora_patch('8a6066c901fb4fc75013dd488ba958387f00c74d', 'tcsh-6.20.00-manpage-memoryuse.patch', sha256='3a4e60fe56a450632140c48acbf14d22850c1d72835bf441e3f8514d6c617a9f') # noqa: E501 diff --git a/var/spack/repos/builtin/packages/tealeaf/package.py b/var/spack/repos/builtin/packages/tealeaf/package.py index 53c4c7c0907..807d9565fe8 100644 --- a/var/spack/repos/builtin/packages/tealeaf/package.py +++ b/var/spack/repos/builtin/packages/tealeaf/package.py @@ -16,7 +16,7 @@ class Tealeaf(MakefilePackage): """ homepage = 
"http://uk-mac.github.io/TeaLeaf/" - url = "https://github.com/Mantevo/mantevo.github.io/raw/master/download_files/TeaLeaf-1.0.tar.gz" + url = "http://downloads.mantevo.org/releaseTarballs/miniapps/TeaLeaf/TeaLeaf-1.0.tar.gz" tags = ['proxy-app'] diff --git a/var/spack/repos/builtin/packages/testdfsio/package.py b/var/spack/repos/builtin/packages/testdfsio/package.py new file mode 100644 index 00000000000..890c17cc05c --- /dev/null +++ b/var/spack/repos/builtin/packages/testdfsio/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Testdfsio(Package): + """A corrected and enhanced version of Apache Hadoop TestDFSIO""" + + homepage = "https://github.com/tthx/testdfsio" + url = "https://github.com/tthx/testdfsio/archive/0.0.1.tar.gz" + + version('0.0.1', sha256='fe8cc47260ffb3e3ac90e0796ebfe73eb4dac64964ab77671e5d32435339dd09') + + depends_on('maven', type='build') + depends_on('java@8', type=('build', 'run')) + depends_on('hadoop@3.2.1:', type='run') + + def install(self, spec, prefix): + mvn = which('mvn') + mvn('clean', 'package', '-Dmaven.test.skip=true') + install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/timemory/package.py b/var/spack/repos/builtin/packages/timemory/package.py index 53dada7f342..8561ca07f31 100644 --- a/var/spack/repos/builtin/packages/timemory/package.py +++ b/var/spack/repos/builtin/packages/timemory/package.py @@ -6,6 +6,7 @@ # ---------------------------------------------------------------------------- from spack import * +from sys import platform class Timemory(CMakePackage): @@ -22,18 +23,70 @@ class Timemory(CMakePackage): version('3.0.0', commit='b36b1673b2c6b7ff3126d8261bef0f8f176c7beb', submodules=True) + linux = False if platform == 'darwin' else True + + variant('shared', default=True, description='Build 
shared libraries') + variant('static', default=False, description='Build static libraries') variant('python', default=True, description='Enable Python support') - variant('mpi', default=False, description='Enable MPI support') - variant('tau', default=True, description='Enable TAU support') - variant('papi', default=True, description='Enable PAPI support') - variant('cuda', default=True, description='Enable CUDA support') - variant('cupti', default=True, description='Enable CUPTI support') + variant('mpi', default=True, description='Enable MPI support') + variant('tau', default=False, description='Enable TAU support') + variant('papi', default=linux, description='Enable PAPI support') + variant('cuda', default=linux, description='Enable CUDA support') + variant('cupti', default=linux, description='Enable CUPTI support') + variant('tools', default=True, description='Build/install extra tools') + variant('vtune', default=False, description='Enable VTune support') variant('upcxx', default=False, description='Enable UPC++ support') - variant('gotcha', default=True, description='Enable GOTCHA support') - variant('likwid', default=True, description='Enable LIKWID support') - variant('caliper', default=True, description='Enable Caliper support') + variant('gotcha', default=linux, description='Enable GOTCHA support') + variant('likwid', default=linux, description='Enable LIKWID support') + variant('caliper', default=False, description='Enable Caliper support') + variant('dyninst', default=linux, + description='Build dynamic instrumentation tools') + variant('examples', default=False, description='Build/install examples') variant('gperftools', default=True, description='Enable gperftools support') + variant('kokkos_tools', default=True, + description=('Build generic kokkos-tools libraries, e.g. ' + 'kp_timemory, kp_timemory_filter')) + variant('kokkos_build_config', default=False, + description=('Build pre-configured (i.e. dedicated) kokkos-tools ' + 'libraries, e.g. 
kp_timemory_cpu_flops')) + variant('cuda_arch', default='auto', description='CUDA architecture name', + values=('auto', 'kepler', 'tesla', 'maxwell', 'pascal', + 'volta', 'turing'), multi=False) + variant('cpu_target', default='auto', + description=('Build for specific cpu architecture (specify ' + 'cpu-model)')) + variant('use_arch', default=False, + description=('Build all of timemory w/ cpu_target architecture ' + 'flags (default: roofline toolkit only)')) + variant('tls_model', default='global-dynamic', + description='Thread-local static model', multi=False, + values=('global-dynamic', 'local-dynamic', 'initial-exec', + 'local-exec')) + variant('lto', default=False, + description='Build w/ link-time optimization') + variant('statistics', default=True, + description=('Build components w/ support for statistics ' + '(min/max/stddev)')) + variant('extra_optimizations', default=True, + description='Build timemory with extra optimization flags') + variant('cxxstd', default='14', description='C++ language standard', + values=('14', '17', '20'), multi=False) + variant('mpip_library', default=linux, + description='Build stand-alone timemory-mpip GOTCHA library') + variant('ompt', default=True, description=('Enable OpenMP tools support')) + variant('ompt_standalone', default=True, + description=('Enable OpenMP tools support via drop-in ' + 'replacement of libomp/libgomp/libiomp5')) + variant('ompt_llvm', default=False, + description='Enable OpenMP tools support as part of llvm build') + variant('ompt_library', default=True, + description='Build stand-alone timemory-ompt library') + variant('allinea_map', default=False, + description='Enable Allinea ARM-MAP support') + variant('require_packages', default=False, + description=('find_package(...) 
resulting in NOTFOUND ' + 'generates error')) depends_on('cmake@3.11:', type='build') @@ -48,77 +101,110 @@ class Timemory(CMakePackage): depends_on('cuda', when='+cuda') depends_on('cuda', when='+cupti') depends_on('upcxx', when='+upcxx') - depends_on('gotcha', when='+gotcha') depends_on('likwid', when='+likwid') + depends_on('gotcha', when='+gotcha') depends_on('caliper', when='+caliper') + depends_on('dyninst', when='+dyninst') depends_on('gperftools', when='+gperftools') + depends_on('intel-parallel-studio', when='+vtune') + depends_on('llvm-openmp-ompt+standalone', when='+ompt_standalone') + depends_on('llvm-openmp-ompt~standalone', when='+ompt_llvm') + depends_on('arm-forge', when='+allinea_map') + conflicts('+python', when='~shared', + msg='+python requires building shared libraries') conflicts('+cupti', when='~cuda', msg='CUPTI requires CUDA') + conflicts('+kokkos_tools', when='~tools', + msg='+kokkos_tools requires +tools') + conflicts('+kokkos_build_config', when='~tools', + msg='+kokkos_build_config requires +tools') + conflicts('+kokkos_build_config', when='~kokkos_tools', + msg='+kokkos_build_config requires +kokkos_tools') + conflicts('tls_model=local-dynamic', when='+python', + msg='+python require tls_model=global-dynamic') + conflicts('tls_model=initial-exec', when='+python', + msg='+python require tls_model=global-dynamic') + conflicts('tls_model=local-exec', when='+python', + msg='+python require tls_model=global-dynamic') + conflicts('+mpip_library', when='~mpi', msg='+mpip_library requires +mpi') + conflicts('+mpip_library', when='~gotcha', + msg='+mpip_library requires +gotcha') + conflicts('+mpip_library', when='~shared', + msg='+mpip_library requires building shared libraries') + conflicts('+ompt_standalone', when='~ompt', + msg='+ompt_standalone requires +ompt') + conflicts('+ompt_llvm', when='~ompt', + msg='+ompt_llvm requires +ompt') + conflicts('+ompt_library', when='~ompt', + msg='+ompt_library requires +ompt') + 
conflicts('+ompt_library', when='~shared~static', + msg='+ompt_library requires building shared or static libraries') + conflicts('+ompt_standalone+ompt_llvm', + msg=('+ompt_standalone and +ompt_llvm are not compatible. Use ' + '+ompt_llvm~ompt_standalone if building LLVM, use ' + '~ompt_llvm+ompt_standalone if ompt.h is not provided by ' + 'the compiler')) def cmake_args(self): spec = self.spec - # Use spack install of Caliper and/or GOTCHA - # instead of internal submodule build args = [ - '-DTIMEMORY_BUILD_GOTCHA=OFF', - '-DTIMEMORY_BUILD_CALIPER=OFF', - '-DTIMEMORY_BUILD_TOOLS=ON', + '-DTIMEMORY_BUILD_PYTHON=ON', '-DTIMEMORY_BUILD_TESTING=OFF', - '-DTIMEMORY_BUILD_EXTRA_OPTIMIZATIONS=ON', '-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=ON', ] + cxxstd = spec.variants['cxxstd'].value + args.append('-DCMAKE_CXX_STANDARD={0}'.format(cxxstd)) + + tls = spec.variants['tls_model'].value + args.append('-DTIMEMORY_TLS_MODEL={0}'.format(tls)) + if '+python' in spec: args.append('-DPYTHON_EXECUTABLE={0}'.format( spec['python'].command.path)) - args.append('-DTIMEMORY_USE_PYTHON=ON') - args.append('-DTIMEMORY_BUILD_PYTHON=ON') - args.append('-DTIMEMORY_TLS_MODEL=global-dynamic') - else: - args.append('-DTIMEMORY_USE_PYTHON=OFF') - args.append('-DTIMEMORY_BUILD_PYTHON=OFF') - - if '+caliper' in spec: - args.append('-DTIMEMORY_USE_CALIPER=ON') - else: - args.append('-DTIMEMORY_USE_CALIPER=OFF') - - if '+tau' in spec: - args.append('-DTIMEMORY_USE_TAU=ON') - else: - args.append('-DTIMEMORY_USE_TAU=OFF') - - if '+likwid' in spec: - args.append('-DTIMEMORY_USE_LIKWID=ON') - else: - args.append('-DTIMEMORY_USE_LIKWID=OFF') - - if '+papi' in spec: - args.append('-DTIMEMORY_USE_PAPI=ON') - args.append('-DPAPI_ROOT_DIR={0}'.format(spec['papi'].prefix)) - else: - args.append('-DTIMEMORY_USE_PAPI=OFF') if '+mpi' in spec: + args.append('-DTIMEMORY_USE_MPI_LINK_FLAGS=OFF') args.append('-DMPI_C_COMPILER={0}'.format(spec['mpi'].mpicc)) 
args.append('-DMPI_CXX_COMPILER={0}'.format(spec['mpi'].mpicxx)) - else: - args.append('-DTIMEMORY_USE_MPI=OFF') - - if '+gotcha' in spec: - args.append('-DTIMEMORY_USE_GOTCHA=ON') - else: - args.append('-DTIMEMORY_USE_GOTCHA=OFF') if '+cuda' in spec: - args.append('-DTIMEMORY_USE_CUDA=ON') - else: - args.append('-DTIMEMORY_USE_CUDA=OFF') + targ = spec.variants['cuda_arch'].value + key = '' if spec.satisfies('@:3.0.1') else 'TIMEMORY_' + # newer versions use 'TIMEMORY_CUDA_ARCH' + args.append('-D{0}CUDA_ARCH={1}'.format(key, targ)) - if '+cupti' in spec: - args.append('-DTIMEMORY_USE_CUPTI=ON') - else: - args.append('-DTIMEMORY_USE_CUPTI=OFF') + cpu_target = spec.variants['cpu_target'].value + if cpu_target == 'auto': + args.append('-DCpuArch_TARGET={0}'.format(cpu_target)) + + # forced disabling of submodule builds + for dep in ('caliper', 'gotcha', 'ompt'): + args.append('-DTIMEMORY_BUILD_{0}=OFF'.format(dep.upper())) + + # spack options which translate to TIMEMORY_