Merge branch 'develop' into eschnett/sympol

This commit is contained in:
Erik Schnetter
2016-08-23 09:45:46 -04:00
929 changed files with 28519 additions and 6367 deletions

View File

@@ -19,5 +19,5 @@
# - F999: name may be undefined, or defined from star imports.
#
[flake8]
ignore = E221,E241,E731,F403,F821,F999
ignore = E129,E221,E241,E272,E731,F403,F821,F999,F405
max-line-length = 79

.gitignore
View File

@@ -1,10 +1,14 @@
/var/spack/stage
/var/spack/cache
/var/spack/repos/*/index.yaml
/var/spack/repos/*/lock
*.pyc
/opt/
/opt
*~
.DS_Store
.idea
/etc/spack/*
/etc/spack/licenses
/etc/spack/*.yaml
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules

View File

@@ -1,7 +1,17 @@
language: python
python:
- "2.6"
- "2.7"
env:
- TEST_TYPE=unit
- TEST_TYPE=flake8
# Exclude flake8 from python 2.6
matrix:
exclude:
- python: "2.6"
env: TEST_TYPE=flake8
# Use new Travis infrastructure (Docker can't sudo yet)
sudo: false
@@ -20,20 +30,13 @@ before_install:
- git fetch origin develop:develop
script:
# Regular spack setup and tests
- . share/spack/setup-env.sh
- spack compilers
- spack config get compilers
- spack install -v libdwarf
# Run unit tests with code coverage
- coverage run bin/spack test
# Run unit tests with code coverage plus install libdwarf
- 'if [ "$TEST_TYPE" = "unit" ]; then share/spack/qa/run-unit-tests; fi'
# Run flake8 code style checks.
- share/spack/qa/run-flake8
- 'if [ "$TEST_TYPE" = "flake8" ]; then share/spack/qa/run-flake8; fi'
after_success:
- coveralls
- 'if [ "$TEST_TYPE" = "unit" ] && [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then coveralls; fi'
notifications:
email:

View File

@@ -58,17 +58,24 @@ can join it here:
### Contributions
At the moment, contributing to Spack is relatively simple. Just send us
a [pull request](https://help.github.com/articles/using-pull-requests/).
Contributing to Spack is relatively easy. Just send us a
[pull request](https://help.github.com/articles/using-pull-requests/).
When you send your request, make ``develop`` the destination branch on the
[Spack repository](https://github.com/LLNL/spack).
Your contribution will need to pass all the tests run by the `spack test`
command, as well as the formatting checks in `share/spack/qa/run-flake8`.
You should run both of these before submitting your pull request, to
ensure that the online checks succeed.
Before you send a PR, your code should pass the following checks:
Spack is using a rough approximation of the [Git
* Your contribution will need to pass the `spack test` command.
Run this before submitting your PR.
* Also run the `share/spack/qa/run-flake8` script to check for PEP8 compliance.
To encourage contributions and readability by a broad audience,
Spack uses the [PEP8](https://www.python.org/dev/peps/pep-0008/) coding
standard with [a few exceptions](https://github.com/LLNL/spack/blob/develop/.flake8).
We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
Spack uses a rough approximation of the [Git
Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the

View File

@@ -79,6 +79,15 @@
# Obviously, for this to work, `sbang` needs to have a short enough
# path that *it* will run without hitting OS limits.
#
# For Lua scripts, the second line can't start with #!, as # is not
# the comment character in lua (even though lua ignores #! on the
# *first* line of a script). So, instrument a lua script like this,
# using -- instead of # on the second line:
#
# 1 #!/bin/bash /path/to/sbang
# 2 --!/long/path/to/lua with arguments
# 3
# 4 print "success!"
#
# How it works
# -----------------------------
@@ -95,6 +104,8 @@ lines=0
while read line && ((lines < 2)) ; do
if [[ "$line" = '#!'* ]]; then
interpreter="${line#\#!}"
elif [[ "$line" = '--!'*lua* ]]; then
interpreter="${line#--!}"
fi
lines=$((lines+1))
done < "$script"

View File

@@ -1,4 +1,5 @@
#!/usr/bin/env python
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -24,9 +25,10 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
if not sys.version_info[:2] >= (2,6):
if not sys.version_info[:2] >= (2, 6):
v_info = sys.version_info[:3]
sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info)
sys.exit("Spack requires Python 2.6 or higher. "
"This is Python %d.%d.%d." % v_info)
import os
@@ -62,7 +64,8 @@ for pyc_file in orphaned_pyc_files:
try:
os.remove(pyc_file)
except OSError as e:
print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
print ("WARNING: Spack may fail mysteriously. "
"Couldn't remove orphaned .pyc file: %s" % pyc_file)
# If there is no working directory, use the spack prefix.
try:
@@ -77,7 +80,7 @@ import llnl.util.tty as tty
from llnl.util.tty.color import *
import spack
from spack.error import SpackError
from external import argparse
import argparse
# Command parsing
parser = argparse.ArgumentParser(
@@ -128,6 +131,7 @@ if len(sys.argv) == 1:
# actually parse the args.
args = parser.parse_args()
def main():
# Set up environment based on args.
tty.set_verbose(args.verbose)
@@ -138,6 +142,9 @@ def main():
import spack.util.debug as debug
debug.register_interrupt_handler()
from spack.yaml_version_check import check_yaml_versions
check_yaml_versions()
spack.spack_working_dir = working_dir
if args.mock:
from spack.repository import RepoPath
@@ -145,7 +152,7 @@ def main():
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure, which does not check SSL certificates.")
tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
spack.curl.add_default_arg('-k')
# Try to load the particular command asked for and run it
@@ -164,7 +171,8 @@ def main():
elif isinstance(return_val, int):
sys.exit(return_val)
else:
tty.die("Bad return value from command %s: %s" % (args.command, return_val))
tty.die("Bad return value from command %s: %s"
% (args.command, return_val))
if args.profile:
import cProfile

View File

@@ -0,0 +1,40 @@
# -------------------------------------------------------------------------
# This is the default configuration for Spack's module file generation.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/modules.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules:
enable:
- tcl
- dotkit
prefix_inspections:
bin:
- PATH
man:
- MANPATH
share/man:
- MANPATH
lib:
- LIBRARY_PATH
- LD_LIBRARY_PATH
lib64:
- LIBRARY_PATH
- LD_LIBRARY_PATH
include:
- CPATH
lib/pkgconfig:
- PKG_CONFIG_PATH
lib64/pkgconfig:
- PKG_CONFIG_PATH
'':
- CMAKE_PREFIX_PATH
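To make the mapping above concrete: each key under ``prefix_inspections`` is a
subdirectory of an install prefix, and each value lists the environment
variables to which that subdirectory should be prepended when it exists. A
minimal, hypothetical sketch of that logic (not Spack's actual implementation)
could look like:

.. code-block:: python

    import os

    # Hypothetical mirror of (part of) the prefix_inspections table above.
    PREFIX_INSPECTIONS = {
        'bin': ['PATH'],
        'man': ['MANPATH'],
        'lib': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
        'lib/pkgconfig': ['PKG_CONFIG_PATH'],
        '': ['CMAKE_PREFIX_PATH'],
    }

    def inspect_prefix(prefix):
        """Yield (variable, path) pairs to prepend for one install prefix."""
        for subdir, variables in PREFIX_INSPECTIONS.items():
            path = os.path.join(prefix, subdir) if subdir else prefix
            if os.path.isdir(path):
                for var in variables:
                    yield var, path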

View File

@@ -0,0 +1,21 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
all:
providers:
mpi: [openmpi, mpich]
blas: [openblas]
lapack: [openblas]

View File

@@ -0,0 +1,14 @@
# -------------------------------------------------------------------------
# This is the default spack repository configuration. It includes the
# builtin spack package repository.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/repos.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/repos.yaml
# -------------------------------------------------------------------------
repos:
- $spack/var/spack/repos/builtin

View File

@@ -1,29 +0,0 @@
# -------------------------------------------------------------------------
# This is the default spack module files generation configuration.
#
# Changes to this file will affect all users of this spack install,
# although users can override these settings in their ~/.spack/modules.yaml.
# -------------------------------------------------------------------------
modules:
enable:
- tcl
- dotkit
prefix_inspections:
bin:
- PATH
man:
- MANPATH
lib:
- LIBRARY_PATH
- LD_LIBRARY_PATH
lib64:
- LIBRARY_PATH
- LD_LIBRARY_PATH
include:
- CPATH
lib/pkgconfig:
- PKGCONFIG
lib64/pkgconfig:
- PKGCONFIG
'':
- CMAKE_PREFIX_PATH

View File

@@ -1,8 +0,0 @@
# -------------------------------------------------------------------------
# This is the default spack repository configuration.
#
# Changes to this file will affect all users of this spack install,
# although users can override these settings in their ~/.spack/repos.yaml.
# -------------------------------------------------------------------------
repos:
- $spack/var/spack/repos/builtin

View File

@@ -24,12 +24,29 @@ Spack can install:
.. command-output:: spack list
The packages are listed by name in alphabetical order. You can also
do wildcard searches using ``*``:
The packages are listed by name in alphabetical order. If you specify a
pattern to match, it will follow this set of rules. A pattern with no
wildcards, ``*`` or ``?``, will be treated as though it started and ended with
``*``, so ``util`` is equivalent to ``*util*``. A pattern with no capital
letters will be treated as case-insensitive. You can also add the ``-i`` flag
to specify a case-insensitive search, or ``-d`` to search the description of
the package in addition to the name. Some examples:
.. command-output:: spack list m*
All packages whose names contain "sql", case-insensitively:
.. command-output:: spack list *util*
.. command-output:: spack list sql
All packages whose names start with a capital M:
.. command-output:: spack list 'M*'
All packages whose names or descriptions contain Documentation:
.. command-output:: spack list -d Documentation
All packages whose names or descriptions contain documentation, case-insensitively:
.. command-output:: spack list -d documentation
.. _spack-info:
@@ -97,13 +114,13 @@ that the packages is installed:
$ spack install mpileaks
==> Installing mpileaks
==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> mpich is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4.
==> callpath is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318.
==> adept-utils is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23.
==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23/mpileaks-1.0.tar.gz
==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
@@ -111,7 +128,7 @@ that the packages is installed:
==> Successfully installed mpileaks.
Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
[+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
[+] /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpileaks@1.0-59f6ad23
The last line, with the ``[+]``, indicates where the package is
installed.
@@ -166,7 +183,7 @@ To uninstall a package and every package that depends on it, you may give the
spack uninstall --dependents mpich
will display a list of all the packages that depends on `mpich` and, upon confirmation,
will display a list of all the packages that depend on `mpich` and, upon confirmation,
will uninstall them in the right order.
A line like
@@ -213,7 +230,7 @@ Running ``spack find`` with no arguments lists installed packages:
$ spack find
==> 74 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
@@ -239,7 +256,7 @@ Running ``spack find`` with no arguments lists installed packages:
lcms@2.6 pixman@0.32.6 xz@5.2.0
libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
-- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
-- linux-debian7-x86_64 / gcc@4.9.2 --------------------------------
libelf@0.8.10 mpich@3.0.4
Packages are divided into groups according to their architecture and
@@ -262,7 +279,7 @@ in more detail using ``spack find -d``, and by asking only to show
$ spack find --deps libdwarf
==> 2 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
^libelf@0.8.12
libdwarf@20130729-b52fac98
@@ -278,7 +295,7 @@ want to know whether two packages' dependencies differ, you can use
$ spack find -l libdwarf
==> 2 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
Now the ``libdwarf`` installs have hashes after their names. These are
@@ -292,14 +309,14 @@ use ``spack find -p``:
$ spack find -p
==> 74 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
adept-utils@1.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da
atk@2.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/atk@2.14.0-3d09ac09
boost@1.55.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/boost@1.55.0
bzip2@1.0.6 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/bzip2@1.0.6
cairo@1.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/cairo@1.14.0-fcc2ab44
callpath@1.0.2 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318
...
And, finally, you can restrict your search to a particular package
@@ -308,10 +325,10 @@ by supplying its name:
.. code-block:: sh
$ spack find -p libelf
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libelf@0.8.11 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
@@ -321,7 +338,7 @@ package. If you want to find only libelf versions greater than version
.. code-block:: sh
$ spack find libelf@0.8.12:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libelf@0.8.12 libelf@0.8.13
Finding just the versions of libdwarf built with a particular version
@@ -331,7 +348,7 @@ of libelf would look like this:
$ spack find -l libdwarf ^libelf@0.8.12
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
We can also search for packages that have a certain attribute. For example,
@@ -342,6 +359,7 @@ will find every installed package with a 'debug' compile-time option enabled.
The full spec syntax is discussed in detail in :ref:`sec-specs`.
.. _compiler-config:
Compiler configuration
-----------------------------------
@@ -428,7 +446,7 @@ If you want to see specifics on a particular compiler, you can run
fc = /usr/local/bin/ifort-15.0.090
This shows which C, C++, and Fortran compilers were detected by Spack.
Notice also that we didn't have to be too specific about the
version. We just said ``intel@15``, and information about the only
matching Intel compiler was displayed.
@@ -443,19 +461,17 @@ editing your ``~/.spack/compilers.yaml`` file. You can do this by running
Each compiler configuration in the file looks like this::
...
chaos_5_x86_64_ib:
...
intel@15.0.0:
compilers:
- compiler:
modules: []
operating_system: OS
paths:
cc: /usr/local/bin/icc-15.0.024-beta
cxx: /usr/local/bin/icpc-15.0.024-beta
f77: /usr/local/bin/ifort-15.0.024-beta
fc: /usr/local/bin/ifort-15.0.024-beta
...
The chaos_5_x86_64_ib string is an architecture string, and multiple
compilers can be listed underneath an architecture. The architecture
string may be replaced with the string 'all' to signify compilers that
work on all architectures.
spec: intel@15.0.0
For compilers, like ``clang``, that do not support Fortran, put
``None`` for ``f77`` and ``fc``::
@@ -471,10 +487,11 @@ list displayed by ``spack compilers``.
You can also add compiler flags to manually configured compilers. The
valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``,
``ldflags``, and ``ldlibs``. For example,::
``ldflags``, and ``ldlibs``. For example::
...
chaos_5_x86_64_ib:
compilers:
- compiler:
...
intel@15.0.0:
cc: /usr/local/bin/icc-15.0.024-beta
@@ -501,10 +518,10 @@ Spack, that descriptor is called a *spec*. Spack uses specs to refer
to a particular build configuration (or configurations) of a package.
Specs are more than a package name and a version; you can use them to
specify the compiler, compiler version, architecture, compile options,
and dependency options for a build. In this section, we'll go over
the full syntax of specs.
Here is an example of a much longer spec than we've seen thus far::
mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
@@ -526,10 +543,11 @@ More formally, a spec consists of the following pieces:
* ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
``-qt``, or ``~qt``) for boolean variants
* ``name=<value>`` Optional variant specifiers that are not restricted to
boolean variants
* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
* ``arch=<value>`` Optional architecture specifier (``arch=bgq_os``)
``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
* ``target=<value> os=<value>`` Optional architecture specifier
(``target=haswell os=CNL10``)
* ``^`` Dependency specs (``^callpath@1.1``)
There are two things to notice here. The first is that specs are
@@ -609,7 +627,7 @@ compilers, variants, and architectures just like any other spec.
Specifiers are associated with the nearest package name to their left.
For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
``-qt``, and ``arch=bgq_os`` all associate with the ``mpileaks`` package.
``-qt``, and ``target=haswell os=CNL10`` all associate with the ``mpileaks`` package.
In the diagram above, ``mpileaks`` depends on ``mpich`` with an
unspecified version, but packages can depend on other packages with
@@ -741,14 +759,18 @@ in gnu autotools. If all flags are set, the order is
Architecture specifiers
~~~~~~~~~~~~~~~~~~~~~~~
.. Note::
The architecture can be specified by using the reserved
words ``target`` and/or ``os`` (``target=x86-64 os=debian7``). You can also
use the triplet form of platform, operating system and processor.
Architecture specifiers are part of specs but are not yet
functional. They will be in Spack version 1.0, due in Q3 2015.
.. code-block:: sh
The architecture specifier looks identical to a variant specifier for a
non-boolean variant. The architecture can be specified only using the
reserved name ``arch`` (``arch=bgq_os``).
spack install libelf arch=cray_xc-CNL10-haswell
Users on non-Cray systems won't have to worry about specifying the architecture.
Spack will autodetect what kind of operating system is on your machine as well
as the processor. For more information on how the architecture can be
used on Cray machines, see :ref:`spack-cray`.
.. _sec-virtual-dependencies:
@@ -968,7 +990,7 @@ of installed packages.
$ module avail
------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
------- /home/gamblin2/spack/share/spack/modules/linux-debian7-x86_64 --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
@@ -1039,7 +1061,7 @@ Spack. For example, this will add the ``mpich`` package built with
$ spack use mpich %gcc@4.4.7
Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
$ which mpicc
~/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4/bin/mpicc
~/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc
Or, similarly with modules, you could type:
@@ -1072,8 +1094,8 @@ than one installed package matches it), then Spack will warn you:
$ spack load libelf
==> Error: Multiple matches for spec libelf. Choose one:
libelf@0.8.13%gcc@4.4.7 arch=chaos_5_x86_64_ib
libelf@0.8.13%intel@15.0.0 arch=chaos_5_x86_64_ib
libelf@0.8.13%gcc@4.4.7 arch=linux-debian7-x86_64
libelf@0.8.13%intel@15.0.0 arch=linux-debian7-x86_64
You can either type the ``spack load`` command again with a fully
qualified argument, or you can add just enough extra constraints to
@@ -1114,6 +1136,80 @@ of module files:
"""Set up the compile and runtime environments for a package."""
pass
Recursive Modules
``````````````````
In some cases, it is desirable to load not just a module, but also all
the modules it depends on. This is not required for most modules
because Spack builds binaries with RPATH support. However, not all
packages use RPATH to find their dependencies: this can be true in
particular for Python extensions, which are currently *not* built with
RPATH.
Modules may be loaded recursively with the ``load`` command's
``--dependencies`` or ``-r`` argument:
.. code-block:: sh
$ spack load --dependencies <spec> ...
More than one spec may be placed on the command line here.
Module Commands for Shell Scripts
``````````````````````````````````
Although Spack is flexible, the ``module`` command is much faster.
This could become an issue when emitting a series of ``spack load``
commands inside a shell script. By adding the ``--shell`` flag,
``spack module find`` may also be used to generate code that can be
cut-and-pasted into a shell script. For example:
.. code-block:: sh
$ spack module find tcl --dependencies --shell py-numpy git
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
# ncurses@6.0%gcc@4.9.3=linux-x86_64
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
# readline@6.3%gcc@4.9.3=linux-x86_64
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
# python@3.5.1%gcc@4.9.3=linux-x86_64
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
# curl@7.47.1%gcc@4.9.3=linux-x86_64
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
# autoconf@2.69%gcc@4.9.3=linux-x86_64
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
# expat@2.1.0%gcc@4.9.3=linux-x86_64
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
The script may be further edited by removing unnecessary modules.
This script may be directly executed in bash via
.. code-block:: sh
source <( spack module find tcl --dependencies --shell py-numpy git )
Regenerating Module files
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
@@ -1378,23 +1474,23 @@ files in the ``cmake`` package while retaining its dependencies.
.. code-block:: sh
$ spack view -v symlink myview cmake@3.5.2
==> Linking package: "ncurses"
==> Linking package: "zlib"
==> Linking package: "openssl"
==> Linking package: "cmake"
$ ls myview/
bin doc etc include lib share
$ ls myview/bin/
captoinfo clear cpack ctest infotocap openssl tabs toe tset
ccmake cmake c_rehash infocmp ncurses6-config reset tic tput
$ spack view -v -d false rm myview cmake@3.5.2
==> Removing package: "cmake"
$ ls myview/bin/
captoinfo c_rehash infotocap openssl tabs toe tset
clear infocmp ncurses6-config reset tic tput
@@ -1404,7 +1500,7 @@ Limitations of Filesystem Views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This section describes some limitations that should be considered in
using filesystem views.
Filesystem views are merely organizational. The binary executable
programs, shared libraries and other build products found in a view
@@ -1453,7 +1549,7 @@ an *extension*. Suppose you have Python installed like so:
$ spack find python
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
.. _spack-extensions:
@@ -1466,7 +1562,7 @@ You can find extensions for your Python installation like this:
.. code-block:: sh
$ spack extensions python
==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1478,7 +1574,7 @@ You can find extensions for your Python installation like this:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
@@ -1494,8 +1590,8 @@ prefixes, and you can see this with ``spack find -p``:
$ spack find -p py-numpy
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
@@ -1556,9 +1652,9 @@ installation:
.. code-block:: sh
$ spack activate py-numpy
==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
@@ -1573,7 +1669,7 @@ packages listed as activated:
.. code-block:: sh
$ spack extensions python
==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1585,14 +1681,14 @@ packages listed as activated:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> 3 currently activated:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
@@ -1621,7 +1717,7 @@ dependencies, you can use ``spack activate -f``:
.. code-block:: sh
$ spack activate -f py-numpy
==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
.. _spack-deactivate:
@@ -1653,7 +1749,7 @@ Spack currently needs to be run from a filesystem that supports
``flock`` locking semantics. Nearly all local filesystems and recent
versions of NFS support this, but parallel filesystems may be mounted
without ``flock`` support enabled. You can determine how your
filesystems are mounted with ``mount -p``. The output for a Lustre
filesystem might look like this:
.. code-block:: sh
@@ -1674,7 +1770,7 @@ This issue typically manifests with the error below:
Traceback (most recent call last):
File "./spack", line 176, in <module>
main()
File "./spack", line 154, in main
File "./spack", line 154,' in main
return_val = command(parser, args)
File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
specs = set(spack.installed_db.query(**q_args))
@@ -1692,6 +1788,146 @@ This issue typically manifests with the error below:
A nicer error message is TBD in future versions of Spack.
.. _spack-cray:
Spack on Cray
-----------------------------
Spack differs slightly when used on a Cray system. The architecture spec
can differentiate between the front-end and back-end processor and operating system.
For example, on Edison at NERSC, the back-end target processor
is \"Ivy Bridge\", so you can specify to use the back-end this way:
.. code-block:: sh
spack install zlib target=ivybridge
You can also use the operating system to build against the back-end:
.. code-block:: sh
spack install zlib os=CNL10
Notice that the name includes both the operating system name and the major
version number concatenated together.
Alternatively, if you want to build something for the front-end,
you can specify the front-end target processor. The processor for a login node
on Edison is \"Sandy bridge\" so we specify on the command line like so:
.. code-block:: sh
spack install zlib target=sandybridge
And the front-end operating system is:
.. code-block:: sh
spack install zlib os=SuSE11
Cray compiler detection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Spack can detect compilers using two methods. For the front-end, we treat
everything the same. The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl module avail command.
Once it detects the compiler, it writes the appropriate PrgEnv and compiler
module name to compilers.yaml and sets the paths to each compiler with Cray's
compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.
The compilers.yaml config file will also differ. There is a
modules section that is filled with the compiler's Programming Environment
and module name. On other systems, this field is empty []::
...
- compiler:
modules:
- PrgEnv-intel
- intel/15.0.109
...
As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names::
...
paths:
cc: cc
cxx: CC
f77: ftn
fc: ftn
...
The wrapper names are used instead of explicit paths to the compiler
executables. This allows Spack to call the Cray compiler wrappers during
build time.
For more on compiler configuration, check out :ref:`compiler-config`.
Spack sets the default Cray link type to dynamic, to better match
other platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the -static flag.
Setting defaults and using Cray modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If you want to use the default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.
Here's an example of an external configuration for Cray modules:
.. code-block:: yaml
packages:
mpi:
modules:
mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich
mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich
This tells Spack that for any package that depends on mpi, it should load the
cray-mpich module into the environment. You can then use whatever
environment variables, libraries, etc., are brought into the environment
via module load.
You can set the default compiler that Spack can use for each compiler type.
If you want to use the Cray defaults, then set them under *all:* in packages.yaml.
In the compiler field, set the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.
Here is an example of a full ``packages.yaml`` used at NERSC:
.. code-block:: yaml
packages:
mpi:
modules:
mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich
buildable: False
netcdf:
modules:
netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf
netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf
buildable: False
hdf5:
modules:
hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5
hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5
buildable: False
all:
compiler: [gcc@5.2.0, intel@16.0.0.109]
Here we tell Spack to use gcc 5.2.0 whenever we build with gcc, and
intel 16.0.0.109 whenever we build with the Intel compilers. We add a spec
for each compiler type to each of the Cray modules. This ensures that for each
compiler on our system we can use the corresponding external module.
For more on external packages, check out the section :ref:`sec-external_packages`.
Getting Help
-----------------------

View File

@@ -51,7 +51,8 @@
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
spack_version = subprocess.Popen(
['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.')
[spack_root + '/bin/spack', '-V'],
stderr=subprocess.PIPE).communicate()[1].strip().split('.')
# Set an environment variable so that colify will print output like it would to
# a terminal.

View File

@@ -53,6 +53,7 @@ in the first directory it finds to which it has write access. Add
more elements to the list to indicate where your own site's temporary
directory is.
.. _sec-external_packages:
External Packages
----------------------------
@@ -70,20 +71,20 @@ directory. Here's an example of an external configuration:
packages:
openmpi:
paths:
openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
This example lists three installations of OpenMPI, one built with gcc,
one built with gcc and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
the given directory.
the given directory. ``packages.yaml`` can also be used to specify modules.
Each ``packages.yaml`` begins with a ``packages:`` token, followed
by a list of package names. To specify externals, add a ``paths``
by a list of package names. To specify externals, add a ``paths`` or ``modules``
token under the package name, which lists externals in a
``spec : /path`` format. Each spec should be as
``spec: /path`` or ``spec: module-name`` format. Each spec should be as
well-defined as reasonably possible. If a
package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based
@@ -108,9 +109,9 @@ be:
packages:
openmpi:
paths:
openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
buildable: False
The addition of the ``buildable`` flag tells Spack that it should never build
@@ -118,6 +119,9 @@ its own version of OpenMPI, and it will instead always rely on a pre-built
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
a package name.
If an external module is specified as not buildable, then Spack will load the
external module into the build environment, where it can be used for linking.
The ``buildable`` flag does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
@@ -138,7 +142,7 @@ Here's an example packages.yaml file that sets preferred packages:
.. code-block:: sh
packages:
dyninst:
opencv:
compiler: [gcc@4.9]
variants: +debug
gperftools:
@@ -150,10 +154,10 @@ Here's an example packages.yaml file that sets preferred packages:
At a high level, this example is specifying how packages should be
concretized. The dyninst package should prefer using gcc 4.9 and
concretized. The opencv package should prefer using gcc 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich for
its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.
@@ -182,7 +186,6 @@ concretization rules. A provider lists a value that packages may
dependency.
Profiling
------------------

View File

@@ -80,10 +80,11 @@ with a high level view of Spack's directory structure::
var/
spack/ <- build & stage directories
repos/ <- contains package repositories
builtin/ <- pkg repository that comes with Spack
repo.yaml <- descriptor for the builtin repository
packages/ <- directories under here contain packages
repos/ <- contains package repositories
builtin/ <- pkg repository that comes with Spack
repo.yaml <- descriptor for the builtin repository
packages/ <- directories under here contain packages
cache/ <- saves resources downloaded during installs
opt/
spack/ <- packages are installed here

View File

@@ -214,3 +214,21 @@ Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``::
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.
.. _caching:
Local Default Cache
----------------------------
Spack caches resources that are downloaded as part of installs. The cache is
a valid spack mirror: it uses the same directory structure and naming scheme
as other Spack mirrors (so it can be copied anywhere and referenced with a URL
like other mirrors). The mirror is maintained locally (within the Spack
installation directory) at :file:`var/spack/cache/`. It is always enabled (and
is always searched first when attempting to retrieve files for an installation)
but can be cleared with :ref:`purge <spack-purge>`; the cache directory can also
be deleted manually without issue.
Caching includes retrieved tarball archives and source control repositories, but
only resources with an associated digest or commit ID (e.g. a revision number
for SVN) will be cached.

View File

@@ -36,10 +36,11 @@ Creating & editing packages
``spack create``
~~~~~~~~~~~~~~~~~~~~~
The ``spack create`` command generates a boilerplate package template
from a URL. The URL should point to a tarball or other software
archive. In most cases, ``spack create`` plus a few modifications is
all you need to get a package working.
The ``spack create`` command creates a directory with the package name and
generates a ``package.py`` file with a boilerplate package template from a URL.
The URL should point to a tarball or other software archive. In most cases,
``spack create`` plus a few modifications is all you need to get a package
working.
Here's an example:
@@ -47,12 +48,16 @@ Here's an example:
$ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
Spack examines the tarball URL and tries to figure out the name of the
package to be created. It also tries to determine what version strings
look like for this package. Using this information, it will try to
find *additional* versions by spidering the package's webpage. If it
finds multiple versions, Spack prompts you to tell it how many
versions you want to download and checksum:
Spack examines the tarball URL and tries to figure out the name of the package
to be created. Once the name is determined, a directory in the appropriate
repository is created with that name. Spack prefers, but does not require, that
names be lower case, so the directory name will be lower case when ``spack
create`` generates it. In cases where mixed or upper case is desired, simply
rename the directory. Spack also tries to determine what version
strings look like for this package. Using this information, it will try to find
*additional* versions by spidering the package's webpage. If it finds multiple
versions, Spack prompts you to tell it how many versions you want to download
and checksum:
.. code-block:: sh
@@ -297,9 +302,10 @@ directories or files (like patches) that it needs to build.
Package Names
~~~~~~~~~~~~~~~~~~
Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which
Packages are named after the directory containing ``package.py``. It is
preferred, but not required, that the directory, and thus the package name, be
lower case. So, ``libelf``'s ``package.py`` lives in a directory called
``libelf``. The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
@@ -377,6 +383,8 @@ add a line like this in the package class:
version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
...
Versions should be listed with the newest version first.
Version URLs
~~~~~~~~~~~~~~~~~
@@ -385,8 +393,21 @@ in the package. For example, Spack is smart enough to download
version ``8.2.1.`` of the ``Foo`` package above from
``http://example.com/foo-8.2.1.tar.gz``.
If spack *cannot* extrapolate the URL from the ``url`` field, or if
the package doesn't have a ``url`` field, you can add a URL explicitly
If spack *cannot* extrapolate the URL from the ``url`` field by
default, you can write your own URL generation algorithm in place of
the ``url`` declaration. For example:
.. code-block:: python
:linenos:
class Foo(Package):
def url_for_version(self, version):
return 'http://example.com/version_%s/foo-%s.tar.gz' \
% (version, version)
version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
...
If a URL cannot be derived systematically, you can add an explicit URL
for a particular version:
.. code-block:: python
@@ -446,14 +467,25 @@ to use based on the hash length.
``spack md5``
^^^^^^^^^^^^^^^^^^^^^^
If you have a single file to checksum, you can use the ``spack md5``
command to do it. Here's how you might download an archive and get a
checksum for it:
If you have one or more files to checksum, you can use the ``spack md5``
command to do it:
.. code-block:: sh
$ curl -O http://example.com/foo-8.2.1.tar.gz
$ spack md5 foo-8.2.1.tar.gz
$ spack md5 foo-8.2.1.tar.gz foo-8.2.2.tar.gz
==> 2 MD5 checksums:
4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
1586b70a49dfe05da5fcc29ef239dce0 foo-8.2.2.tar.gz
``spack md5`` also accepts one or more URLs and automatically downloads
the files for you:
.. code-block:: sh
$ spack md5 http://example.com/foo-8.2.1.tar.gz
==> Trying to fetch from http://example.com/foo-8.2.1.tar.gz
######################################################################## 100.0%
==> 1 MD5 checksum:
4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
Doing this for lots of files, or whenever a new package version is
@@ -547,7 +579,7 @@ The package author is responsible for coming up with a sensible name
for each version to be fetched from a repository. For example, if
you're fetching from a tag like ``v1.0``, you might call that ``1.0``.
If you're fetching a nameless git commit or an older subversion
revision, you might give the commit an intuitive name, like ``dev``
revision, you might give the commit an intuitive name, like ``develop``
for a development version, or ``some-fancy-new-feature`` if you want
to be more specific.
@@ -557,6 +589,17 @@ branches move forward over time and you aren't guaranteed to get the
same thing every time you fetch a particular version. Life isn't
always simple, though, so this is not strictly enforced.
When fetching from the branch corresponding to the development version
(often called ``master``, ``trunk``, or ``dev``), it is recommended to
call this version ``develop``. Spack has special treatment for this version so
that ``@develop`` will satisfy dependencies like
``depends_on(abc, when="@x.y.z:")``. In other words, ``@develop`` is
greater than any other version. The rationale is that certain features or
options first appear in the development branch. Therefore, if a package author
wants to keep the package on the bleeding edge and provide support for new
features, it is advised to use ``develop`` for such a version, which will
greatly simplify writing dependencies and version-related conditionals.
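The ordering described above can be sketched with Spack's ``Version`` class,
assuming (as the text states) that ``develop`` compares greater than any
numeric release:

.. code-block:: python

    from spack.version import Version

    # 'develop' sorts above every numeric version, so a spec at @develop
    # satisfies any open-ended range such as @x.y.z: ...
    assert Version('develop') > Version('8.2.1')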
In some future release, Spack may support extrapolating repository
versions as it does for tarball URLs, but currently this is not
supported.
@@ -572,6 +615,7 @@ Git fetching is enabled with the following parameters to ``version``:
* ``tag``: name of a tag to fetch.
* ``branch``: name of a branch to fetch.
* ``commit``: SHA hash (or prefix) of a commit to fetch.
* ``submodules``: Also fetch submodules when checking out this repository.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -582,7 +626,7 @@ Default branch
class Example(Package):
...
version('dev', git='https://github.com/example-project/example.git')
version('develop', git='https://github.com/example-project/example.git')
This is not recommended, as the contents of the default branch
change over time.
@@ -628,6 +672,17 @@ Commits
could just use the abbreviated commit hash. It's up to the package
author to decide what makes the most sense.
Submodules
^^^^^^^^^^
You can supply ``submodules=True`` to cause Spack to fetch submodules
along with the repository at fetch time.
.. code-block:: python
version('1.0.1', git='https://github.com/example-project/example.git',
tag='v1.0.1', submodules=True)
Installing
^^^^^^^^^^^^^^
@@ -655,7 +710,7 @@ Default
.. code-block:: python
version('hg-head', hg='https://jay.grs.rwth-aachen.de/hg/example')
version('develop', hg='https://jay.grs.rwth-aachen.de/hg/example')
Note that this is not recommended; try to fetch a particular
revision instead.
@@ -687,7 +742,7 @@ Fetching the head
.. code-block:: python
version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
This is not recommended, as the head will move forward over time.
@@ -697,12 +752,19 @@ Fetching a revision
.. code-block:: python
version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
revision=128)
Subversion branches are handled as part of the directory structure, so
you can check out a branch or tag by changing the ``url``.
Automatic caching of files fetched during installation
------------------------------------------------------
Spack maintains a cache (described :ref:`here <caching>`) which saves files
retrieved during package installations to avoid re-downloading in the case that
a package is installed with a different specification (but the same version) or
reinstalled on account of a change in the hashing scheme.
.. _license:
@@ -776,7 +838,7 @@ Spack will create a global license file located at
file using the editor set in ``$EDITOR``, or vi if unset. It will look like
this:
.. code-block::
.. code-block:: sh
# A license is required to use pgi.
#
@@ -807,7 +869,7 @@ You can add your license directly to this file, or tell FlexNet to use a
license stored on a separate license server. Here is an example that
points to a license server called licman1:
.. code-block::
.. code-block:: sh
SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
USE_SERVER
@@ -1235,6 +1297,31 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
Additionally, dependencies may be specified for specific use cases:
.. code-block:: python
depends_on("cmake", type="build")
depends_on("libelf", type=("build", "link"))
depends_on("python", type="run")
The dependency types are:
* **"build"**: made available during the project's build. The package will
be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``.
Other projects which depend on this one will not have these modified
(building project X doesn't need project Y's build dependencies).
* **"link"**: the project is linked to by the project. The package will be
added to the current package's ``rpath``.
* **"run"**: the project is used by the project at runtime. The package will
be added to ``PATH`` and ``PYTHONPATH``.
If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
common case for compiled language usage. Also available are the aliases
``"alldeps"`` for all dependency types and ``"nolink"`` (``("build", "run")``)
for use by dependencies which are not expressed via a linker (e.g., Python or
Lua module loading).
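As a brief illustration of the aliases (the package names here are
hypothetical, and it is assumed that an alias may be passed as the ``type``
string):

.. code-block:: python

    # 'nolink' = ("build", "run"): suits Python-level dependencies,
    # which are imported at runtime rather than linked against.
    depends_on('py-six', type='nolink')

    # 'alldeps': the dependency participates in build, link, and run.
    depends_on('foo', type='alldeps')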
.. _setup-dependent-environment:
``setup_dependent_environment()``
@@ -1339,6 +1426,19 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
activate``. When it is activated, all the files in its prefix will be
symbolically linked into the prefix of the python package.
Many packages produce Python extensions for *some* variants, but not
others: they should extend ``python`` only if the appropriate
variant(s) are selected. This may be accomplished with conditional
``extends()`` declarations:
.. code-block:: python
class FooLib(Package):
variant('python', default=True,
        description='Build the Python extension module')
extends('python', when='+python')
...
Sometimes, certain files in one package will conflict with those in
another, which means they cannot both be activated (symlinked) at the
same time. In this case, you can tell Spack to ignore those files
@@ -1625,21 +1725,21 @@ the user runs ``spack install`` and the time the ``install()`` method
is called. The concretized version of the spec above might look like
this::
mpileaks@2.3%gcc@4.7.3 arch=linux-ppc64
^callpath@1.0%gcc@4.7.3+debug arch=linux-ppc64
^dyninst@8.1.2%gcc@4.7.3 arch=linux-ppc64
^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
^libelf@0.8.11%gcc@4.7.3 arch=linux-ppc64
^mpich@3.0.4%gcc@4.7.3 arch=linux-ppc64
mpileaks@2.3%gcc@4.7.3 arch=linux-debian7-x86_64
^callpath@1.0%gcc@4.7.3+debug arch=linux-debian7-x86_64
^dyninst@8.1.2%gcc@4.7.3 arch=linux-debian7-x86_64
^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
^libelf@0.8.11%gcc@4.7.3 arch=linux-debian7-x86_64
^mpich@3.0.4%gcc@4.7.3 arch=linux-debian7-x86_64
.. graphviz::
digraph {
"mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
"mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
"callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64"
"dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
"dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
"mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
"mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
"callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
"dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
"dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
}
Here, all versions, compilers, and platforms are filled in, and there
@@ -1668,9 +1768,9 @@ running ``spack spec``. For example:
^libdwarf
^libelf
dyninst@8.0.1%gcc@4.7.3 arch=linux-ppc64
^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
^libelf@0.8.13%gcc@4.7.3 arch=linux-ppc64
dyninst@8.0.1%gcc@4.7.3 arch=linux-debian7-x86_64
^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
^libelf@0.8.13%gcc@4.7.3 arch=linux-debian7-x86_64
This is useful when you want to know exactly what Spack will do when
you ask for a particular spec.
@@ -1844,7 +1944,7 @@ discover its dependencies.
If you want to see the environment that a package will build with, or
if you want to run commands in that environment to test them out, you
can use the :ref:```spack env`` <spack-env>` command, documented
can use the :ref:`spack env <spack-env>` command, documented
below.
.. _compiler-wrappers:
@@ -1950,6 +2050,19 @@ instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for
the reasons outlined above.
Blas and Lapack libraries
~~~~~~~~~~~~~~~~~~~~~~~~~
Different packages provide implementations of the ``Blas`` and ``Lapack``
routines. The names of the resulting static and/or shared libraries differ
from package to package. To make the ``install()`` method indifferent to the
choice of ``Blas`` implementation, each package which provides it sets up
``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib`` to point to
the shared and static ``Blas`` libraries, respectively. The same applies to
packages which provide ``Lapack``. Package developers are advised to use
these variables, for example ``spec['blas'].blas_shared_lib`` instead of
hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``.
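
For example, a minimal sketch of an ``install()`` that uses this variable
(the ``--with-blas`` configure flag is hypothetical):

.. code-block:: python

   def install(self, spec, prefix):
       # Link against whichever Blas implementation was concretized.
       blas = spec['blas'].blas_shared_lib
       configure('--prefix=%s' % prefix, '--with-blas=%s' % blas)
       make()
       make('install')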
Forking ``install()``
~~~~~~~~~~~~~~~~~~~~~
@@ -2162,12 +2275,12 @@ example:
def install(self, prefix):
# Do default install
@when('arch=chaos_5_x86_64_ib')
@when('arch=linux-debian7-x86_64')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's sys_type() is chaos_5_x86_64_ib.
@when('arch=bgqos_0")
@when('arch=linux-debian7-x86_64")
def install(self, prefix):
# This will be executed if the package's sys_type is bgqos_0
@@ -2295,7 +2408,7 @@ build system.
.. _sanity-checks:
Sanity checking an intallation
Sanity checking an installation
--------------------------------
By default, Spack assumes that a build has failed if nothing is
@@ -2511,6 +2624,59 @@ File functions
.. _package-lifecycle:
Coding Style Guidelines
---------------------------
The following guidelines are provided, in the interests of making
Spack packages work in a consistent manner:
Variant Names
~~~~~~~~~~~~~~
Spack packages with variants similar to already-existing Spack
packages should use the same name for their variants. Standard
variant names are:
======= ======== ========================
Name    Default  Description
======= ======== ========================
shared  True     Build shared libraries
static           Build static libraries
mpi              Use MPI
python           Build Python extension
======= ======== ========================
If a default is specified in this table, it should be used when
declaring the variant.
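
For example, following the table above:

.. code-block:: python

   variant('shared', default=True, description='Build shared libraries')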
Version Lists
~~~~~~~~~~~~~~
Spack packages should list supported versions with the newest first.
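
For example (hypothetical versions, checksums elided):

.. code-block:: python

   version('8.1.2', '<md5 checksum>')   # newest first
   version('8.0.1', '<md5 checksum>')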
Special Versions
~~~~~~~~~~~~~~~~~
The following *special* version names may be used when building a package:
* *@system*: Indicates a hook to the OS-installed version of the
package. This is useful, for example, to tell Spack to use the
OS-installed version in ``packages.yaml``::
openssl:
paths:
openssl@system: /usr
buildable: False
Certain Spack internals look for the *@system* version and do
appropriate things in that case.
* *@local*: Indicates the version was built manually from some source
tree of unknown provenance (see ``spack setup``).
Packaging workflow commands
---------------------------------
@@ -2605,11 +2771,16 @@ build process will start from scratch.
``spack purge``
~~~~~~~~~~~~~~~~~
Cleans up all of Spack's temporary files. Use this to recover disk
space if temporary files from interrupted or failed installs
accumulate in the staging area. This is equivalent to running ``spack
clean`` for every package you have fetched or staged.
Cleans up all of Spack's temporary and cached files. This can be used to
recover disk space if temporary files from interrupted or failed installs
accumulate in the staging area.
When called with ``--stage`` or ``--all`` (or without arguments, in which case
the default is ``--all``) this removes all staged files; this is equivalent to
running ``spack clean`` for every package you have fetched or staged.
When called with ``--cache`` or ``--all`` this will clear all resources
:ref:`cached <caching>` during installs.
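
For example:

.. code-block:: bash

   spack purge --stage   # remove staged files only
   spack purge --cache   # clear cached downloads and resources
   spack purge --all     # both; same as plain 'spack purge'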
Keeping the stage directory on success
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2757,11 +2928,11 @@ build it:
$ spack stage libelf
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
######################################################################## 100.0%
==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13.tar.gz
==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64.
==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13.tar.gz
==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64.
$ spack cd libelf
$ pwd
/Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13
/Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13
``spack cd`` here changed the current working directory to the
directory containing the expanded ``libelf`` source code. There are a
@@ -2826,3 +2997,109 @@ might write:
DWARF_PREFIX = $(shell spack location -i libdwarf)
CXXFLAGS += -I$(DWARF_PREFIX)/include
CXXFLAGS += -L$(DWARF_PREFIX)/lib
Build System Configuration Support
----------------------------------
Imagine a developer creating a CMake-based (or Autotools) project in a local
directory, which depends on libraries A-Z. Once Spack has installed
those dependencies, one would like to run ``cmake`` with appropriate
command line and environment so CMake can find them. The ``spack
setup`` command does this conveniently, producing a CMake
configuration that is essentially the same as how Spack *would have*
configured the project. This can be demonstrated with a usage
example:
.. code-block:: bash
cd myproject
spack setup myproject@local
mkdir build; cd build
../spconfig.py ..
make
make install
Notes:
* Spack must have ``myproject/package.py`` in its repository for
this to work.
* ``spack setup`` produces the executable script ``spconfig.py`` in
the local directory, and also creates the module file for the
package. ``spconfig.py`` is normally run from the user's
out-of-source build directory.
* The version number given to ``spack setup`` is arbitrary, just
like ``spack diy``. ``myproject/package.py`` does not need to
have any valid downloadable versions listed (typical when a
project is new).
* ``spconfig.py`` produces a CMake configuration that *does not* use the
Spack wrappers. Any resulting binaries *will not* use RPATH,
unless the user has enabled it. This is recommended for
development purposes, not production.
* ``spconfig.py`` is human readable, and can serve as a developer
reference of what dependencies are being used.
* ``make install`` installs the package into the Spack repository,
where it may be used by other Spack packages.
* CMake-generated makefiles re-run CMake in some circumstances. Use
of ``spconfig.py`` breaks this behavior, requiring the developer
to manually re-run ``spconfig.py`` when a ``CMakeLists.txt`` file
has changed.
CMakePackage
~~~~~~~~~~~~
In order to enable ``spack setup`` functionality, the author of
``myproject/package.py`` must subclass from ``CMakePackage`` instead
of the standard ``Package`` superclass. Because CMake is
standardized, the packager does not need to tell Spack how to run
``cmake; make; make install``. Instead the packager only needs to
create (optional) methods ``configure_args()`` and ``configure_env()``, which
provide the arguments (as a list) and extra environment variables (as
a dict) to provide to the ``cmake`` command. Usually, these will
translate variant flags into CMake definitions. For example:
.. code-block:: python
def configure_args(self):
spec = self.spec
return [
'-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
'-DBUILD_PYTHON=%s' % ('YES' if '+python' in spec else 'NO'),
'-DBUILD_GRIDGEN=%s' % ('YES' if '+gridgen' in spec else 'NO'),
'-DBUILD_COUPLER=%s' % ('YES' if '+coupler' in spec else 'NO'),
'-DUSE_PISM=%s' % ('YES' if '+pism' in spec else 'NO')]
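
Similarly, a minimal sketch of ``configure_env()`` (the environment variable
and the ``netcdf`` dependency are hypothetical):

.. code-block:: python

   def configure_env(self):
       # Extra environment variables for the cmake invocation.
       return {'NETCDF_ROOT': self.spec['netcdf'].prefix}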
If needed, a packager may also override methods defined in
``StagedPackage`` (see below).
StagedPackage
~~~~~~~~~~~~~
``CMakePackage`` is implemented by subclassing the ``StagedPackage``
superclass, which breaks down the standard ``Package.install()``
method into several sub-stages: ``setup``, ``configure``, ``build``
and ``install``. Details:
* Instead of implementing the standard ``install()`` method, package
  authors implement the methods for the sub-stages
  ``install_setup()``, ``install_configure()``,
  ``install_build()``, and ``install_install()`` (sketched below).
* The ``spack install`` command runs the sub-stages ``configure``,
``build`` and ``install`` in order. (The ``setup`` stage is
not run by default; see below).
* The ``spack setup`` command runs the sub-stages ``setup``
and a dummy install (to create the module file).
* The sub-stage install methods take no arguments (other than
``self``). The arguments ``spec`` and ``prefix`` to the standard
``install()`` method may be accessed via ``self.spec`` and
``self.prefix``.
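
A minimal sketch of such a subclass (the package name and build commands are
illustrative, not a definitive implementation):

.. code-block:: python

   class Myproject(StagedPackage):
       def install_setup(self):
           pass  # e.g., generate spconfig.py for out-of-source builds

       def install_configure(self):
           cmake = which('cmake')
           cmake(self.stage.source_path,
                 '-DCMAKE_INSTALL_PREFIX=%s' % self.prefix)

       def install_build(self):
           make()

       def install_install(self):
           make('install')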
GNU Autotools
~~~~~~~~~~~~~
The ``setup`` functionality is currently only available for
CMake-based packages. Extending this functionality to GNU
Autotools-based packages would be easy (and should be done by a
developer who actively uses Autotools). Packages that use
non-standard build systems can gain ``setup`` functionality by
subclassing ``StagedPackage`` directly.
52
lib/spack/env/cc vendored
View File
@@ -110,13 +110,13 @@ case "$command" in
comp="CXX"
lang_flags=CXX
;;
f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
lang_flags=F
;;
f77|gfortran|ifort|pgfortran|xlf|nagfor)
f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
@@ -174,6 +174,28 @@ if [[ -z $command ]]; then
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
#
IFS=':' read -ra env_path <<< "$PATH"
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
spack_env_dirs+=("" ".")
PATH=""
for dir in "${env_path[@]}"; do
addpath=true
for env_dir in "${spack_env_dirs[@]}"; do
if [[ $dir == $env_dir ]]; then
addpath=false
break
fi
done
if $addpath; then
PATH="${PATH:+$PATH:}$dir"
fi
done
export PATH
if [[ $mode == vcheck ]]; then
exec ${command} "$@"
fi
@@ -286,28 +308,6 @@ unset LD_LIBRARY_PATH
unset LD_RUN_PATH
unset DYLD_LIBRARY_PATH
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
#
IFS=':' read -ra env_path <<< "$PATH"
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
spack_env_dirs+=("" ".")
PATH=""
for dir in "${env_path[@]}"; do
addpath=true
for env_dir in "${spack_env_dirs[@]}"; do
if [[ $dir == $env_dir ]]; then
addpath=false
break
fi
done
if $addpath; then
PATH="${PATH:+$PATH:}$dir"
fi
done
export PATH
full_command=("$command" "${args[@]}")
# In test command mode, write out full command for Spack tests.
@@ -324,8 +324,8 @@ fi
if [[ $SPACK_DEBUG == TRUE ]]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
echo "[$mode] $command $input_command" >> $input_log
echo "[$mode] ${full_command[@]}" >> $output_log
echo "[$mode] $command $input_command" >> "$input_log"
echo "[$mode] ${full_command[@]}" >> "$output_log"
fi
exec "${full_command[@]}"
1
lib/spack/env/cray/CC vendored Symbolic link
View File
@@ -0,0 +1 @@
../cc
1
lib/spack/env/cray/cc vendored Symbolic link
View File
@@ -0,0 +1 @@
../cc
1
lib/spack/env/cray/ftn vendored Symbolic link
View File
@@ -0,0 +1 @@
../cc
1
lib/spack/env/craype/CC vendored Symbolic link
View File
@@ -0,0 +1 @@
../cc
1
lib/spack/env/craype/cc vendored Symbolic link
View File
@@ -0,0 +1 @@
../cc
1
lib/spack/env/craype/ftn vendored Symbolic link
View File
@@ -0,0 +1 @@
../cc
View File
@@ -22,33 +22,34 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode',
'remove_dead_links', 'remove_linked_tree', 'find_library_path',
'fix_darwin_install_name']
import os
import glob
import sys
import re
import shutil
import stat
import errno
import getpass
from contextlib import contextmanager, closing
from tempfile import NamedTemporaryFile
from contextlib import contextmanager
import subprocess
import fileinput
import llnl.util.tty as tty
from spack.util.compression import ALLOWED_ARCHIVE_TYPES
__all__ = ['set_install_permissions', 'install', 'install_tree',
'traverse_tree',
'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
'force_remove', 'join_path', 'ancestor', 'can_access',
'filter_file',
'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
'set_executable', 'copy_mode', 'unset_executable_mode',
'remove_dead_links', 'remove_linked_tree', 'find_library_path',
'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
def filter_file(regex, repl, *filenames, **kwargs):
"""Like sed, but uses python regular expressions.
Filters every line of file through regex and replaces the file
Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
As with re.sub, ``repl`` can be either a string or a callable.
@@ -59,7 +60,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
Keyword Options:
string[=False] If True, treat regex as a plain string.
backup[=True] Make a backup files suffixed with ~
backup[=True] Make backup file(s) suffixed with ~
ignore_absent[=False] Ignore any files that don't exist.
"""
string = kwargs.get('string', False)
@@ -69,6 +70,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
@@ -79,30 +81,32 @@ def groupid_to_group(x):
regex = re.escape(regex)
for filename in filenames:
backup = filename + "~"
backup_filename = filename + "~"
if ignore_absent and not os.path.exists(filename):
continue
shutil.copy(filename, backup)
# Create backup file. Don't overwrite an existing backup
# file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
try:
with closing(open(backup)) as infile:
with closing(open(filename, 'w')) as outfile:
for line in infile:
foo = re.sub(regex, repl, line)
outfile.write(foo)
for line in fileinput.input(filename, inplace=True):
print(re.sub(regex, repl, line.rstrip('\n')))
except:
# clean up the original file on failure.
shutil.move(backup, filename)
shutil.move(backup_filename, filename)
raise
finally:
if not backup:
shutil.rmtree(backup, ignore_errors=True)
os.remove(backup_filename)
class FileFilter(object):
"""Convenience class for calling filter_file a lot."""
def __init__(self, *filenames):
self.filenames = filenames
@@ -113,7 +117,7 @@ def filter(self, regex, repl, **kwargs):
def change_sed_delimiter(old_delim, new_delim, *filenames):
"""Find all sed search/replace commands and change the delimiter.
e.g., if the file contains seds that look like 's///', you can
call change_sed_delimeter('/', '@', file) to change the
call change_sed_delimiter('/', '@', file) to change the
delimiter to '@'.
NOTE that this routine will fail if the delimiter is ' or ".
@@ -157,9 +161,12 @@ def set_install_permissions(path):
def copy_mode(src, dest):
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR
if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP
if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH
if src_mode & stat.S_IXUSR:
dest_mode |= stat.S_IXUSR
if src_mode & stat.S_IXGRP:
dest_mode |= stat.S_IXGRP
if src_mode & stat.S_IXOTH:
dest_mode |= stat.S_IXOTH
os.chmod(dest, dest_mode)
@@ -175,7 +182,7 @@ def install(src, dest):
"""Manually install a file to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
# Expand dsst to its eventual full path if it is a directory.
# Expand dest to its eventual full path if it is a directory.
if os.path.isdir(dest):
dest = join_path(dest, os.path.basename(src))
@@ -185,7 +192,7 @@ def install(src, dest):
def install_tree(src, dest, **kwargs):
"""Manually install a file to a particular location."""
"""Manually install a directory tree to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
shutil.copytree(src, dest, **kwargs)
@@ -215,7 +222,7 @@ def mkdirp(*paths):
if not os.path.exists(path):
os.makedirs(path)
elif not os.path.isdir(path):
raise OSError(errno.EEXIST, "File alredy exists", path)
raise OSError(errno.EEXIST, "File already exists", path)
def force_remove(*paths):
@@ -224,9 +231,10 @@ def force_remove(*paths):
for path in paths:
try:
os.remove(path)
except OSError, e:
except OSError:
pass
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
@@ -240,7 +248,7 @@ def working_dir(dirname, **kwargs):
def touch(path):
"""Creates an empty file at the specified path."""
with open(path, 'a') as file:
with open(path, 'a'):
os.utime(path, None)
@@ -253,7 +261,7 @@ def touchp(path):
def force_symlink(src, dest):
try:
os.symlink(src, dest)
except OSError as e:
except OSError:
os.remove(dest)
os.symlink(src, dest)
@@ -275,7 +283,7 @@ def ancestor(dir, n=1):
def can_access(file_name):
"""True if we have read/write access to the file."""
return os.access(file_name, os.R_OK|os.W_OK)
return os.access(file_name, os.R_OK | os.W_OK)
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
@@ -304,7 +312,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
Optional args:
order=[pre|post] -- Whether to do pre- or post-order traveral.
order=[pre|post] -- Whether to do pre- or post-order traversal.
ignore=<predicate> -- Predicate indicating which files to ignore.
@@ -343,13 +351,15 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
# Treat as a directory
if os.path.isdir(source_child) and (
follow_links or not os.path.islink(source_child)):
follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
for t in tuples: yield t
tuples = traverse_tree(
source_root, dest_root, rel_child, **kwargs)
for t in tuples:
yield t
# Treat as a file.
elif not ignore(os.path.join(rel_path, f)):
@@ -379,6 +389,7 @@ def remove_dead_links(root):
if not os.path.exists(real_path):
os.unlink(path)
def remove_linked_tree(path):
"""
Removes a directory and its contents. If the directory is a
@@ -402,28 +413,53 @@ def fix_darwin_install_name(path):
Fix install name of dynamic libraries on Darwin to have full path.
There are two parts of this task:
(i) use install_name('-id',...) to change install name of a single lib;
(ii) use install_name('-change',...) to change the cross linking between libs.
The function assumes that all libraries are in one folder and currently won't
follow subfolders.
(ii) use install_name('-change',...) to change the cross linking between
libs. The function assumes that all libraries are in one folder and
currently won't follow subfolders.
Args:
path: directory in which .dylib files are alocated
path: directory in which .dylib files are located
"""
libs = glob.glob(join_path(path,"*.dylib"))
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0]
long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n')
subprocess.Popen(
["install_name_tool", "-id", lib, lib],
stdout=subprocess.PIPE).communicate()[0]
long_deps = subprocess.Popen(
["otool", "-L", lib],
stdout=subprocess.PIPE).communicate()[0].split('\n')
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
if dep == os.path.basename(loc):
subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0]
subprocess.Popen(
["install_name_tool", "-change", dep, loc, lib],
stdout=subprocess.PIPE).communicate()[0]
break
def to_lib_name(library):
"""Transforms a path to the library /path/to/lib<name>.xyz into <name>
"""
# Assume libXYZ.suffix
return os.path.basename(library)[3:].split(".")[0]
def to_link_flags(library):
"""Transforms a path to a <library> into linking flags -L<dir> -l<name>.
Return:
A string of linking flags.
"""
dir = os.path.dirname(library)
name = to_lib_name(library)
res = '-L%s -l%s' % (dir, name)
return res
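# Hypothetical usage sketch:
#   to_lib_name('/usr/lib/libiconv.dylib')   -> 'iconv'
#   to_link_flags('/usr/lib/libiconv.dylib') -> '-L/usr/lib -liconv'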
def find_library_path(libname, *paths):
"""Searches for a file called <libname> in each path.
View File
@@ -24,7 +24,6 @@
##############################################################################
import os
import re
import sys
import functools
import collections
import inspect
@@ -39,14 +38,15 @@ def index_by(objects, *funcs):
Values are used as keys. For example, suppose you have four
objects with attributes that look like this:
a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
list_of_specs = [a,b,c,d]
index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
index2 = index_by(list_of_specs, lambda s: s.compiler)
list_of_specs = [a,b,c,d]
index1 = index_by(list_of_specs, lambda s: s.arch,
lambda s: s.compiler)
index2 = index_by(list_of_specs, lambda s: s.compiler)
``index1'' now has two levels of dicts, with lists at the
leaves, like this:
@@ -137,7 +137,7 @@ def get_calling_module_name():
finally:
del stack
if not '__module__' in caller_locals:
if '__module__' not in caller_locals:
raise RuntimeError("Must invoke get_calling_module_name() "
"from inside a class definition!")
@@ -173,11 +173,11 @@ def has_method(cls, name):
class memoized(object):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# Not hashable, so just call the function.
@@ -187,12 +187,10 @@ def __call__(self, *args):
self.cache[args] = self.func(*args)
return self.cache[args]
def __get__(self, obj, objtype):
"""Support instance methods."""
return functools.partial(self.__call__, obj)
def clear(self):
"""Expunge cache so that self.func will be called again."""
self.cache.clear()
@@ -237,13 +235,21 @@ def setter(name, value):
if not has_method(cls, '_cmp_key'):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
setter('__eq__',
lambda s, o:
(s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter('__lt__',
lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
setter('__le__',
lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
setter('__ne__',
lambda s, o:
(s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
setter('__gt__',
lambda s, o: o is None or s._cmp_key() > o._cmp_key())
setter('__ge__',
lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
setter('__hash__', lambda self: hash(self._cmp_key()))
@@ -254,10 +260,10 @@ def setter(name, value):
class HashableMap(dict):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
def _cmp_key(self):
return tuple(sorted(self.values()))
def copy(self):
"""Type-agnostic clone method. Preserves subclass type."""
# Construct a new dict of my type
@@ -336,24 +342,39 @@ def match(string):
return match
def DictWrapper(dictionary):
"""Returns a class that wraps a dictionary and enables it to be used
like an object."""
class wrapper(object):
def __getattr__(self, name): return dictionary[name]
def __setattr__(self, name, value): dictionary[name] = value
def setdefault(self, *args): return dictionary.setdefault(*args)
def get(self, *args): return dictionary.get(*args)
def keys(self): return dictionary.keys()
def values(self): return dictionary.values()
def items(self): return dictionary.items()
def __iter__(self): return iter(dictionary)
def __getattr__(self, name):
return dictionary[name]
def __setattr__(self, name, value):
dictionary[name] = value
def setdefault(self, *args):
return dictionary.setdefault(*args)
def get(self, *args):
return dictionary.get(*args)
def keys(self):
return dictionary.keys()
def values(self):
return dictionary.values()
def items(self):
return dictionary.items()
def __iter__(self):
return iter(dictionary)
return wrapper()
class RequiredAttributeError(ValueError):
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
View File
@@ -23,12 +23,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
__all__ = ['LinkTree']
import os
import shutil
from llnl.util.filesystem import *
__all__ = ['LinkTree']
empty_file_name = '.spack-empty'
@@ -43,13 +44,13 @@ class LinkTree(object):
modified.
"""
def __init__(self, source_root):
if not os.path.exists(source_root):
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root
def find_conflict(self, dest_root, **kwargs):
"""Returns the first file in dest that conflicts with src"""
kwargs['follow_nonexisting'] = False
@@ -61,9 +62,9 @@ def find_conflict(self, dest_root, **kwargs):
return dest
return None
def merge(self, dest_root, **kwargs):
"""Link all files in src into dest, creating directories if necessary."""
"""Link all files in src into dest, creating directories
if necessary."""
kwargs['order'] = 'pre'
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
@@ -83,7 +84,6 @@ def merge(self, dest_root, **kwargs):
assert(not os.path.exists(dest))
os.symlink(src, dest)
def unmerge(self, dest_root, **kwargs):
"""Unlink all files in dest that exist in src.
View File
@@ -28,6 +28,9 @@
import time
import socket
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
'LockError']
# Default timeout in seconds, after which locks will raise exceptions.
_default_timeout = 60
@@ -36,13 +39,21 @@
class Lock(object):
def __init__(self,file_path):
"""This is an implementation of a filesystem lock using Python's lockf.
In Python, `lockf` actually calls `fcntl`, so this should work with any
filesystem implementation that supports locking through the fcntl calls.
This includes distributed filesystems like Lustre (when flock is enabled)
and recent NFS versions.
"""
def __init__(self, file_path):
self._file_path = file_path
self._fd = None
self._reads = 0
self._writes = 0
def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
@@ -63,7 +74,9 @@ def _lock(self, op, timeout):
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
os.write(
self._fd,
"pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return
except IOError as error:
@@ -75,7 +88,6 @@ def _lock(self, op, timeout):
raise LockError("Timed out waiting for lock.")
def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
@@ -83,11 +95,10 @@ def _unlock(self):
be masquerading as write locks, but this removes either.
"""
fcntl.lockf(self._fd,fcntl.LOCK_UN)
fcntl.lockf(self._fd, fcntl.LOCK_UN)
os.close(self._fd)
self._fd = None
def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
@@ -107,7 +118,6 @@ def acquire_read(self, timeout=_default_timeout):
self._reads += 1
return False
def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
@@ -127,7 +137,6 @@ def acquire_write(self, timeout=_default_timeout):
self._writes += 1
return False
def release_read(self):
"""Releases a read lock.
@@ -148,7 +157,6 @@ def release_read(self):
self._reads -= 1
return False
def release_write(self):
"""Releases a write lock.
@@ -170,6 +178,70 @@ def release_write(self):
return False
class LockTransaction(object):
"""Simple nested transaction context manager that uses a file lock.
This class can trigger actions when the lock is acquired for the
first time and released for the last.
If the acquire_fn returns a value, it is used as the return value for
__enter__, allowing it to be passed as the `as` argument of a `with`
statement.
If acquire_fn returns a context manager, *its* `__enter__` function will be
called in `__enter__` after acquire_fn, and its `__exit__` function will be
called before `release_fn` in `__exit__`, allowing you to nest a context
manager to be used along with the lock.
Timeout for lock is customizable.
"""
def __init__(self, lock, acquire_fn=None, release_fn=None,
timeout=_default_timeout):
self._lock = lock
self._timeout = timeout
self._acquire_fn = acquire_fn
self._release_fn = release_fn
self._as = None
def __enter__(self):
if self._enter() and self._acquire_fn:
self._as = self._acquire_fn()
if hasattr(self._as, '__enter__'):
return self._as.__enter__()
else:
return self._as
def __exit__(self, type, value, traceback):
suppress = False
if self._exit():
if self._as and hasattr(self._as, '__exit__'):
if self._as.__exit__(type, value, traceback):
suppress = True
if self._release_fn:
if self._release_fn(type, value, traceback):
suppress = True
return suppress
class ReadTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_read(self._timeout)
def _exit(self):
return self._lock.release_read()
class WriteTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_write(self._timeout)
def _exit(self):
return self._lock.release_write()
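# A hypothetical usage sketch (the file path is illustrative):
#
#   lock = Lock('/var/spack/cache.lock')
#   with WriteTransaction(lock, acquire_fn=None, release_fn=None):
#       pass  # exclusive access here; the lock is released on exit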
class LockError(Exception):
"""Raised when an attempt to acquire a lock times out."""
pass
View File
@@ -36,6 +36,7 @@
_verbose = False
indent = " "
def is_verbose():
return _verbose
@@ -64,12 +65,14 @@ def info(message, *args, **kwargs):
format = kwargs.get('format', '*b')
stream = kwargs.get('stream', sys.stdout)
wrap = kwargs.get('wrap', False)
break_long_words = kwargs.get('break_long_words', False)
cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
for arg in args:
if wrap:
lines = textwrap.wrap(
str(arg), initial_indent=indent, subsequent_indent=indent)
str(arg), initial_indent=indent, subsequent_indent=indent,
break_long_words=break_long_words)
for line in lines:
stream.write(line + '\n')
else:
@@ -146,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs):
elif default_value is False:
prompt += ' [y/N] '
else:
raise ValueError("default for get_yes_no() must be True, False, or None.")
raise ValueError(
"default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
@@ -172,8 +176,9 @@ def hline(label=None, **kwargs):
char = kwargs.pop('char', '-')
max_width = kwargs.pop('max_width', 64)
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function."
% next(kwargs.iterkeys()))
raise TypeError(
"'%s' is an invalid keyword argument for this function."
% next(kwargs.iterkeys()))
rows, cols = terminal_size()
if not cols:
@@ -198,7 +203,8 @@ def terminal_size():
"""Gets the dimensions of the console: (rows, cols)."""
def ioctl_GWINSZ(fd):
try:
rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
rc = struct.unpack('hh', fcntl.ioctl(
fd, termios.TIOCGWINSZ, '1234'))
except:
return
return rc
View File
@@ -27,15 +27,14 @@
"""
import os
import sys
import fcntl
import termios
import struct
from StringIO import StringIO
from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen, cextra
class ColumnConfig:
def __init__(self, cols):
self.cols = cols
self.line_length = 0
@@ -43,7 +42,8 @@ def __init__(self, cols):
self.widths = [0] * cols # does not include ansi colors
def __repr__(self):
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
attrs = [(a, getattr(self, a))
for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
@@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0):
max_cols = min(len(elts), max_cols)
# Range of column counts to try. If forced, use the supplied value.
col_range = [cols] if cols else xrange(1, max_cols+1)
col_range = [cols] if cols else xrange(1, max_cols + 1)
# Determine the most columns possible for the console width.
configs = [ColumnConfig(c) for c in col_range]
@@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
# 'clen' ignores length of ansi color sequences.
max_len = max(clen(e) for e in elts) + padding
max_clen = max(len(e) for e in elts) + padding
if cols == 0:
cols = max(1, console_width / max_len)
cols = min(len(elts), cols)
@@ -130,17 +129,19 @@ def colify(elts, **options):
output=<stream> A file object to write to. Default is sys.stdout.
indent=<int> Optionally indent all columns by some number of spaces.
padding=<int> Spaces between columns. Default is 2.
width=<int> Width of the output. Default is 80 if tty is not detected.
width=<int> Width of the output. Default is 80 if tty not detected.
cols=<int> Force number of columns. Default is to size to terminal,
or single-column if no tty
tty=<bool> Whether to attempt to write to a tty. Default is to
autodetect a tty. Set to False to force single-column output.
autodetect a tty. Set to False to force
single-column output.
method=<string> Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the
same width and fit less data on the screen.
method=<string> Method to use to fit columns. Options are variable or
uniform. Variable-width columns are tighter, uniform
columns are all the same width and fit less data on
the screen.
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)
@@ -152,8 +153,9 @@ def colify(elts, **options):
console_cols = options.pop("width", None)
if options:
raise TypeError("'%s' is an invalid keyword argument for this function."
% next(options.iterkeys()))
raise TypeError(
"'%s' is an invalid keyword argument for this function."
% next(options.iterkeys()))
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
@@ -167,7 +169,8 @@ def colify(elts, **options):
r, c = env_size.split('x')
console_rows, console_cols = int(r), int(c)
tty = True
except: pass
except:
pass
# Use only one column if not a tty.
if not tty:
@@ -198,8 +201,13 @@ def colify(elts, **options):
for col in xrange(cols):
elt = col * rows + row
width = config.widths[col] + cextra(elts[elt])
fmt = '%%-%ds' % width
output.write(fmt % elts[elt])
if col < cols - 1:
fmt = '%%-%ds' % width
output.write(fmt % elts[elt])
else:
# Don't pad the rightmost column (spaces can wrap on
# small terminals if one line is overlong)
output.write(elts[elt])
output.write("\n")
row += 1
@@ -223,6 +231,7 @@ def colify_table(table, **options):
raise ValueError("Table is empty in colify_table!")
columns = len(table[0])
def transpose():
for i in xrange(columns):
for row in table:
View File
@@ -75,25 +75,27 @@
import re
import sys
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
def __init__(self, message):
super(ColorParseError, self).__init__(message)
# Text styles for ansi codes
styles = {'*' : '1', # bold
'_' : '4', # underline
None : '0' } # plain
styles = {'*': '1', # bold
'_': '4', # underline
None: '0'} # plain
# Dim and bright ansi colors
colors = {'k' : 30, 'K' : 90, # black
'r' : 31, 'R' : 91, # red
'g' : 32, 'G' : 92, # green
'y' : 33, 'Y' : 93, # yellow
'b' : 34, 'B' : 94, # blue
'm' : 35, 'M' : 95, # magenta
'c' : 36, 'C' : 96, # cyan
'w' : 37, 'W' : 97 } # white
colors = {'k': 30, 'K': 90, # black
'r': 31, 'R': 91, # red
'g': 32, 'G': 92, # green
'y': 33, 'Y': 93, # yellow
'b': 34, 'B': 94, # blue
'm': 35, 'M': 95, # magenta
'c': 36, 'C': 96, # cyan
'w': 37, 'W': 97} # white
# Regex to be used for color formatting
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
@@ -104,6 +106,7 @@ def __init__(self, message):
class match_to_ansi(object):
def __init__(self, color=True):
self.color = color
@@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None):
"""Same as cwrite, but writes a trailing newline to the stream."""
cwrite(string + "\n", stream, color)
def cescape(string):
"""Replace all @ with @@ in the string provided."""
return str(string).replace('@', '@@')
class ColorStream(object):
def __init__(self, stream, color=None):
self._stream = stream
self._color = color
@@ -196,7 +201,7 @@ def write(self, string, **kwargs):
color = self._color
if self._color is None:
if raw:
color=True
color = True
else:
color = self._stream.isatty() or _force_color
raw_write(colorize(string, color=color))
View File
@@ -36,6 +36,7 @@
# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')
def _strip(line):
"""Strip color and control characters from a line."""
return _escape.sub('', line)
@@ -58,10 +59,10 @@ class keyboard_input(object):
When the with block completes, this will restore settings before
canonical and echo were disabled.
"""
def __init__(self, stream):
self.stream = stream
def __enter__(self):
self.old_cfg = None
@@ -86,10 +87,9 @@ def __enter__(self):
# Apply new settings for terminal
termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg)
except Exception, e:
except Exception:
pass # Some OS's do not support termios, so ignore.
def __exit__(self, exc_type, exception, traceback):
# If termios was available, restore old settings after the
# with block
@@ -114,6 +114,7 @@ class log_output(object):
Closes the provided stream when done with the block.
If echo is True, also prints the output to stdout.
"""
def __init__(self, stream, echo=False, force_color=False, debug=False):
self.stream = stream
@@ -122,7 +123,7 @@ def __init__(self, stream, echo=False, force_color=False, debug=False):
self.force_color = force_color
self.debug = debug
# Default is to try file-descriptor reassignment unless the system
# Default is to try file-descriptor reassignment unless the system
# out/err streams do not have an associated file descriptor
self.directAssignment = False
@@ -130,7 +131,6 @@ def trace(self, frame, event, arg):
"""Jumps to __exit__ on the child process."""
raise _SkipWithBlock()
def __enter__(self):
"""Redirect output from the with block to a file.
@@ -154,7 +154,8 @@ def __enter__(self):
with self.stream as log_file:
with keyboard_input(sys.stdin):
while True:
rlist, w, x = select.select([read_file, sys.stdin], [], [])
rlist, w, x = select.select(
[read_file, sys.stdin], [], [])
if not rlist:
break
@@ -211,7 +212,6 @@ def __enter__(self):
if self.debug:
tty._debug = True
def __exit__(self, exc_type, exception, traceback):
"""Exits on child, handles skipping the with block on parent."""
# Child should just exit here.
@@ -235,7 +235,7 @@ def __exit__(self, exc_type, exception, traceback):
sys.stderr = self._stderr
else:
os.dup2(self._stdout, sys.stdout.fileno())
os.dup2(self._stderr, sys.stderr.fileno())
os.dup2(self._stderr, sys.stderr.fileno())
return False
View File
@@ -1,3 +1,4 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -39,13 +40,26 @@
lib_path = join_path(spack_root, "lib", "spack")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
platform_path = join_path(module_path, 'platforms')
compilers_path = join_path(module_path, "compilers")
operating_system_path = join_path(module_path, 'operating_systems')
test_path = join_path(module_path, "test")
hooks_path = join_path(module_path, "hooks")
var_path = join_path(spack_root, "var", "spack")
stage_path = join_path(var_path, "stage")
repos_path = join_path(var_path, "repos")
share_path = join_path(spack_root, "share", "spack")
cache_path = join_path(var_path, "cache")
# User configuration location
user_config_path = os.path.expanduser('~/.spack')
import spack.fetch_strategy
fetch_cache = spack.fetch_strategy.FsCache(cache_path)
from spack.file_cache import FileCache
user_cache_path = join_path(user_config_path, 'cache')
user_cache = FileCache(user_cache_path)
prefix = spack_root
opt_path = join_path(prefix, "opt")
@@ -134,7 +148,7 @@
_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
for path in _tmp_candidates:
# don't add a second username if it's already unique by user.
if not _tmp_user in path:
if _tmp_user not in path:
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
else:
tmp_dirs.append(join_path(path, 'spack-stage'))
@@ -166,13 +180,17 @@
# Spack internal code should call 'import spack' and accesses other
# variables (spack.repo, paths, etc.) directly.
#
# TODO: maybe this should be separated out and should go in build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in packages
# should live. This file is overloaded for spack core vs. for packages.
# TODO: maybe this should be separated out to build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in
# packages should live. This file is overloaded for spack core vs.
# for packages.
#
__all__ = ['Package', 'Version', 'when', 'ver']
__all__ = ['Package', 'StagedPackage', 'CMakePackage',
'Version', 'when', 'ver', 'alldeps', 'nolink']
from spack.package import Package, ExtensionConflictError
from spack.package import StagedPackage, CMakePackage
from spack.version import Version, ver
from spack.spec import DependencySpec, alldeps, nolink
from spack.multimethod import when
import llnl.util.filesystem
@@ -188,8 +206,8 @@
__all__ += spack.util.executable.__all__
from spack.package import \
install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
InstallError, ExternalPackageError
install_dependency_symlinks, flatten_dependencies, \
DependencyConflictError, InstallError, ExternalPackageError
__all__ += [
'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
'InstallError', 'ExternalPackageError']
'install_dependency_symlinks', 'flatten_dependencies',
'DependencyConflictError', 'InstallError', 'ExternalPackageError']
View File
@@ -30,14 +30,15 @@
from spack.util.executable import Executable, ProcessError
from llnl.util.lang import memoized
class ABI(object):
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(self, parent, child):
"""Returns true iff the parent and child specs have ABI compatible architectures."""
return not parent.architecture or not child.architecture or parent.architecture == child.architecture
"""Return true if parent and child have ABI compatible targets."""
return not parent.architecture or not child.architecture or \
parent.architecture == child.architecture
@memoized
def _gcc_get_libstdcxx_version(self, version):
@@ -60,8 +61,9 @@ def _gcc_get_libstdcxx_version(self, version):
else:
return None
try:
output = rungcc("--print-file-name=%s" % libname, return_output=True)
except ProcessError, e:
output = rungcc("--print-file-name=%s" % libname,
return_output=True)
except ProcessError:
return None
if not output:
return None
@@ -70,7 +72,6 @@ def _gcc_get_libstdcxx_version(self, version):
return None
return os.path.basename(libpath)
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
@@ -81,7 +82,6 @@ def _gcc_compiler_compare(self, pversion, cversion):
return False
return plib == clib
def _intel_compiler_compare(self, pversion, cversion):
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
@@ -91,9 +91,8 @@ def _intel_compiler_compare(self, pversion, cversion):
return False
return pversion.version[:2] == cversion.version[:2]
def compiler_compatible(self, parent, child, **kwargs):
"""Returns true iff the compilers for parent and child specs are ABI compatible"""
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
@@ -108,8 +107,8 @@ def compiler_compatible(self, parent, child, **kwargs):
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons. Otherwise
# match on version match.
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.satisfies(cversion):
return True
elif (parent.compiler.name == "gcc" and
@@ -120,9 +119,8 @@ def compiler_compatible(self, parent, child, **kwargs):
return True
return False
def compatible(self, parent, child, **kwargs):
"""Returns true iff a parent and child spec are ABI compatible"""
loosematch = kwargs.get('loose', False)
return self.architecture_compatible(parent, child) and \
self.compiler_compatible(parent, child, loose=loosematch)
self.compiler_compatible(parent, child, loose=loosematch)
View File
@@ -22,68 +22,523 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
import platform
"""
This module contains all the elements that are required to create an
architecture object: the target processor, the operating system, and the
platform (e.g. cray, darwin, linux, bgq) classes.
from llnl.util.lang import memoized
On a multiple architecture machine, the architecture spec field can be set to
build a package against any target and operating system that is present on the
platform. On Cray platforms or any other architecture that has different front
and back end environments, the operating system will determine the method
of compiler detection.
There are two different types of compiler detection:
1. Through the $PATH env variable (front-end detection)
2. Through the tcl module system (back-end detection).
Depending on which operating system is specified, the compiler will be detected
using one of those methods.
For platforms such as linux and darwin, the operating system is autodetected
and the target is set to be x86_64.
The command line syntax for specifying an architecture is as follows:
target=<Target name> os=<OperatingSystem name>
If the user wishes to use the defaults, either target or os can be left out
of the command line and Spack will concretize using the defaults. These
defaults are set in the 'platforms/' directory, which contains the platform
subclasses. If the machine has multiple architectures, the user can also
enter ``frontend``/``fe`` or ``backend``/``be``; these aliases concretize to
the respective front-end and back-end targets and operating systems.
Additional platforms can be added by creating a subclass of Platform
and adding it inside the platform directory.
Platform is an abstract class extended by subclasses. To add a new type of
platform (such as cray_xe), create a subclass and set the class attributes
such as priority, front_target, back_target, front_os, and back_os. The
priority class attribute controls detection order: a lower number signifies
higher priority. These numbers are set arbitrarily and rarely need to change
unless a new platform is added and should be detected first.
Targets are created inside the platform subclasses. Most architectures (like
linux and darwin) have only one target (x86_64), but Cray machines have both
front-end and back-end processors. The platform subclass specifies which
targets are present on the front-end and back-end architectures.
Depending on the platform, operating systems are either auto-detected or set
explicitly via the class attributes front_os and back_os. As described
earlier, the operating system is responsible for compiler detection.
"""
import os
import inspect
from llnl.util.lang import memoized, list_modules, key_ordering
from llnl.util.filesystem import join_path
import llnl.util.tty as tty
import spack
import spack.compilers
from spack.util.naming import mod_to_class
from spack.util.environment import get_path
from spack.util.multiproc import parmap
import spack.error as serr
class InvalidSysTypeError(serr.SpackError):
def __init__(self, sys_type):
super(InvalidSysTypeError,
self).__init__("Invalid sys_type value for Spack: " + sys_type)
class NoPlatformError(serr.SpackError):
class NoSysTypeError(serr.SpackError):
def __init__(self):
super(NoSysTypeError,
self).__init__("Could not determine sys_type for this machine.")
super(NoPlatformError, self).__init__(
"Could not determine a platform for this machine.")
def get_sys_type_from_spack_globals():
"""Return the SYS_TYPE from spack globals, or None if it isn't set."""
if not hasattr(spack, "sys_type"):
return None
elif hasattr(spack.sys_type, "__call__"):
return spack.sys_type()
@key_ordering
class Target(object):
""" Target is the processor of the host machine.
The host machine may have different front-end and back-end targets,
especially if it is a Cray machine. The target will have a name and
also the module_name (e.g. craype-compiler). Targets will also
recognize which platform they came from using the set_platform method.
Targets will have compiler finding strategies.
"""
def __init__(self, name, module_name=None):
self.name = name  # e.g. "ivybridge" on Cray, or "x86_64"
self.module_name = module_name  # e.g. craype-ivybridge
# Sets only the platform name to avoid recursiveness
def _cmp_key(self):
return (self.name, self.module_name)
def __repr__(self):
return self.__str__()
def __str__(self):
return self.name
@key_ordering
class Platform(object):
""" Abstract class that each type of Platform will subclass.
Each subclass holds the targets and operating systems
available on that platform.
"""
priority = None # Subclass sets number. Controls detection order
front_end = None
back_end = None
default = None  # The default back end target (on Cray, e.g. ivybridge)
front_os = None
back_os = None
default_os = None
def __init__(self, name):
self.targets = {}
self.operating_sys = {}
self.name = name
def add_target(self, name, target):
"""Used by the platform specific subclass to list available targets.
Raises an error if the platform specifies a name
that is reserved by spack as an alias.
"""
if name in ['frontend', 'fe', 'backend', 'be', 'default_target']:
raise ValueError(
"%s is a spack reserved alias "
"and cannot be the name of a target" % name)
self.targets[name] = target
def target(self, name):
"""This is a getter method for the target dictionary
that handles defaulting based on the values provided by default,
front-end, and back-end. This can be overwritten
by a subclass for which we want to provide further aliasing options.
"""
if name == 'default_target':
name = self.default
elif name == 'frontend' or name == 'fe':
name = self.front_end
elif name == 'backend' or name == 'be':
name = self.back_end
return self.targets.get(name, None)
def add_operating_system(self, name, os_class):
""" Add the operating_system class object into the
platform.operating_sys dictionary
"""
if name in ['frontend', 'fe', 'backend', 'be', 'default_os']:
raise ValueError(
"%s is a spack reserved alias "
"and cannot be the name of an OS" % name)
self.operating_sys[name] = os_class
def operating_system(self, name):
if name == 'default_os':
name = self.default_os
if name == 'frontend' or name == "fe":
name = self.front_os
if name == 'backend' or name == 'be':
name = self.back_os
return self.operating_sys.get(name, None)
@classmethod
def setup_platform_environment(self, pkg, env):
""" Subclass can override this method if it requires any
platform-specific build environment modifications.
"""
pass
@classmethod
def detect(self):
""" Subclass is responsible for implementing this method.
Returns True if the Platform class detects that
it is the current platform
and False if it's not.
"""
raise NotImplementedError()
def __repr__(self):
return self.__str__()
def __str__(self):
return self.name
def _cmp_key(self):
t_keys = ''.join(str(t._cmp_key()) for t in
sorted(self.targets.values()))
o_keys = ''.join(str(o._cmp_key()) for o in
sorted(self.operating_sys.values()))
return (self.name,
self.default,
self.front_end,
self.back_end,
self.default_os,
self.front_os,
self.back_os,
t_keys,
o_keys)
@key_ordering
class OperatingSystem(object):
""" Operating System will be like a class similar to platform extended
by subclasses for the specifics. Operating System will contain the
compiler finding logic. Instead of calling two separate methods to
find compilers we call find_compilers method for each operating system
"""
def __init__(self, name, version):
self.name = name
self.version = version
def __str__(self):
return self.name + self.version
def __repr__(self):
return self.__str__()
def _cmp_key(self):
return (self.name, self.version)
def find_compilers(self, *paths):
"""
Return a list of compilers found in the supplied paths.
This invokes the find() method for each Compiler class,
and appends the compilers detected to a list.
"""
if not paths:
paths = get_path('PATH')
# Make sure path elements exist, and include /bin directories
# under prefixes.
filtered_path = []
for p in paths:
# Eliminate symlinks and just take the real directories.
p = os.path.realpath(p)
if not os.path.isdir(p):
continue
filtered_path.append(p)
# Check for a bin directory, add it if it exists
bin = join_path(p, 'bin')
if os.path.isdir(bin):
filtered_path.append(os.path.realpath(bin))
# Once the paths are cleaned up, do a search for each type of
# compiler. We can spawn a bunch of parallel searches to reduce
# the overhead of spelunking all these directories.
types = spack.compilers.all_compiler_types()
compiler_lists = parmap(lambda cmp_cls:
self.find_compiler(cmp_cls, *filtered_path),
types)
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
clist = reduce(lambda x, y: x + y, compiler_lists)
return clist
def find_compiler(self, cmp_cls, *path):
"""Try to find the given type of compiler in the user's
environment. For each set of compilers found, this returns
compiler objects with the cc, cxx, f77, fc paths and the
version filled in.
This will search for compilers with the names in cc_names,
cxx_names, etc. and it will group them if they have common
prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
be grouped with g++-mp-4.7 and gfortran-mp-4.7.
"""
dicts = parmap(
lambda t: cmp_cls._find_matches_in_path(*t),
[(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
(cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
(cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
(cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])
all_keys = set()
for d in dicts:
all_keys.update(d)
compilers = {}
for k in all_keys:
ver, pre, suf = k
# Skip compilers with unknown version.
if ver == 'unknown':
continue
paths = tuple(pn[k] if k in pn else None for pn in dicts)
spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
if ver in compilers:
prev = compilers[ver]
# prefer the one with more compilers.
prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
newcount = len([p for p in paths if p is not None])
prevcount = len([p for p in prev_paths if p is not None])
# Don't add if it's not an improvement over prev compiler.
if newcount <= prevcount:
continue
compilers[ver] = cmp_cls(spec, self, paths)
return list(compilers.values())
def to_dict(self):
d = {}
d['name'] = self.name
d['version'] = self.version
return d
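    # For example:
    #
    #     OperatingSystem('SuSE', '11').to_dict()
    #     # -> {'name': 'SuSE', 'version': '11'}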
@key_ordering
class Arch(object):
"""Architecture is now a class to help with setting attributes.
TODO: refactor so that we don't need this class.
"""
def __init__(self, plat=None, os=None, target=None):
self.platform = plat
if plat and os:
os = self.platform.operating_system(os)
self.platform_os = os
if plat and target:
target = self.platform.target(target)
self.target = target
# Hooks for parser to use when platform is set after target or os
self.target_string = None
self.os_string = None
@property
def concrete(self):
return all((self.platform is not None,
isinstance(self.platform, Platform),
self.platform_os is not None,
isinstance(self.platform_os, OperatingSystem),
self.target is not None, isinstance(self.target, Target)))
def __str__(self):
if self.platform or self.platform_os or self.target:
if self.platform.name == 'darwin':
os_name = self.platform_os.name if self.platform_os else "None"
else:
os_name = str(self.platform_os)
return (str(self.platform) + "-" +
os_name + "-" + str(self.target))
else:
return ''
def __contains__(self, string):
return string in str(self)
# TODO: make this unnecessary: don't include an empty arch on *every* spec.
def __nonzero__(self):
return (self.platform is not None or
self.platform_os is not None or
self.target is not None)
__bool__ = __nonzero__
def _cmp_key(self):
if isinstance(self.platform, Platform):
platform = self.platform.name
else:
platform = self.platform
if isinstance(self.platform_os, OperatingSystem):
platform_os = self.platform_os.name
else:
platform_os = self.platform_os
if isinstance(self.target, Target):
target = self.target.name
else:
target = self.target
return (platform, platform_os, target)
def to_dict(self):
d = {}
d['platform'] = str(self.platform) if self.platform else None
d['platform_os'] = str(self.platform_os) if self.platform_os else None
d['target'] = str(self.target) if self.target else None
return d
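    # Illustrative output (a sketch; actual values depend on the machine):
    #
    #     Arch(platform(), 'default_os', 'default_target').to_dict()
    #     # -> {'platform': 'linux', 'platform_os': 'ubuntu14.04', 'target': 'x86_64'}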
def _target_from_dict(target_name, plat=None):
""" Creates new instance of target and assigns all the attributes of
that target from the dictionary
"""
if not plat:
plat = platform()
return plat.target(target_name)
def _operating_system_from_dict(os_name, plat=None):
""" uses platform's operating system method to grab the constructed
operating systems that are valid on the platform.
"""
if not plat:
plat = platform()
if isinstance(os_name, dict):
name = os_name['name']
version = os_name['version']
return plat.operating_system(name + version)
    else:
        return plat.operating_system(os_name)
def get_sys_type_from_environment():
"""Return $SYS_TYPE or None if it's not defined."""
return os.environ.get('SYS_TYPE')
def _platform_from_dict(platform_name):
""" Constructs a platform from a dictionary. """
platform_list = all_platforms()
for p in platform_list:
if platform_name.replace("_", "").lower() == p.__name__.lower():
return p()
def get_sys_type_from_platform():
"""Return the architecture from Python's platform module."""
sys_type = platform.system() + '-' + platform.machine()
sys_type = re.sub(r'[^\w-]', '_', sys_type)
return sys_type.lower()
def arch_from_dict(d):
""" Uses _platform_from_dict, _operating_system_from_dict, _target_from_dict
helper methods to recreate the arch tuple from the dictionary read from
a yaml file
"""
arch = Arch()
if isinstance(d, basestring):
# We have an old spec using a string for the architecture
arch.platform = Platform('spack_compatibility')
arch.platform_os = OperatingSystem('unknown', '')
arch.target = Target(d)
arch.os_string = None
arch.target_string = None
else:
if d is None:
return None
platform_name = d['platform']
os_name = d['platform_os']
target_name = d['target']
if platform_name:
arch.platform = _platform_from_dict(platform_name)
else:
arch.platform = None
if target_name:
arch.target = _target_from_dict(target_name, arch.platform)
else:
arch.target = None
if os_name:
arch.platform_os = _operating_system_from_dict(os_name,
arch.platform)
else:
arch.platform_os = None
arch.os_string = None
arch.target_string = None
return arch
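# Illustrative round trip (a sketch; exact names depend on the machine):
#
#     d = Arch(platform(), 'default_os', 'default_target').to_dict()
#     arch_from_dict(d)         # -> equivalent Arch, rebuilt from the dict
#     arch_from_dict('x86_64')  # old string-style arch -> compatibility Arch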
@memoized
def all_platforms():
classes = []
mod_path = spack.platform_path
parent_module = "spack.platforms"
for name in list_modules(mod_path):
mod_name = '%s.%s' % (parent_module, name)
class_name = mod_to_class(name)
mod = __import__(mod_name, fromlist=[class_name])
if not hasattr(mod, class_name):
tty.die('No class %s defined in %s' % (class_name, mod_name))
cls = getattr(mod, class_name)
if not inspect.isclass(cls):
tty.die('%s.%s is not a class' % (mod_name, class_name))
classes.append(cls)
return classes
@memoized
def platform():
"""Detects the platform for this machine.
    Gathers a list of all available platform subclasses and sorts it
    according to their priority. Priority is an arbitrarily set number.
    Detects the platform either using uname or a file path (/opt/cray...).
"""
# Try to create a Platform object using the config file FIRST
platform_list = all_platforms()
platform_list.sort(key=lambda a: a.priority)
for platform_cls in platform_list:
if platform_cls.detect():
return platform_cls()
@memoized
def sys_type():
"""Returns a SysType for the current machine."""
methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
get_sys_type_from_platform]
"""Print out the "default" platform-os-target tuple for this machine.
# search for a method that doesn't return None
sys_type = None
for method in methods:
sys_type = method()
if sys_type:
break
On machines with only one target OS/target, prints out the
platform-os-target for the frontend. For machines with a frontend
and a backend, prints the default backend.
# Couldn't determine the sys_type for this machine.
if sys_type is None:
return "unknown_arch"
TODO: replace with use of more explicit methods to get *all* the
backends, as client code should really be aware of cross-compiled
architectures.
if not isinstance(sys_type, basestring):
raise InvalidSysTypeError(sys_type)
return sys_type
"""
arch = Arch(platform(), 'default_os', 'default_target')
return str(arch)
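# Illustrative output (machine-dependent), following Arch.__str__'s
# platform-os-target layout:
#
#     sys_type()   # -> e.g. 'linux-ubuntu14.04-x86_64'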

View File

@@ -75,20 +75,19 @@
# set_build_environment_variables and used to pass parameters to
# Spack's compiler wrappers.
#
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_INSTALL = 'SPACK_INSTALL'
SPACK_DEBUG = 'SPACK_DEBUG'
SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
# Platform-specific library suffix.
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
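# Example: packages can form platform-correct shared-library names with this,
# e.g. 'libfoo.' + dso_suffix -> 'libfoo.so' on Linux, 'libfoo.dylib' on Darwin.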
class MakeExecutable(Executable):
"""Special callable executable object for make so the user can
specify parallel or not on a per-invocation basis. Using
@@ -99,6 +98,7 @@ class MakeExecutable(Executable):
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
everything.
"""
def __init__(self, name, jobs):
super(MakeExecutable, self).__init__(name)
self.jobs = jobs
@@ -114,32 +114,95 @@ def __call__(self, *args, **kwargs):
return super(MakeExecutable, self).__call__(*args, **kwargs)
def load_module(mod):
"""Takes a module name and removes modules until it is possible to
load that module. It then loads the provided module. Depends on the
modulecmd implementation of modules used in cray and lmod.
"""
# Create an executable of the module command that will output python code
modulecmd = which('modulecmd')
modulecmd.add_default_arg('python')
# Read the module and remove any conflicting modules
# We do this without checking that they are already installed
# for ease of programming because unloading a module that is not
# loaded does nothing.
text = modulecmd('show', mod, output=str, error=str).split()
for i, word in enumerate(text):
if word == 'conflict':
exec(compile(modulecmd('unload', text[i + 1], output=str,
error=str), '<string>', 'exec'))
# Load the module now that there are no conflicts
load = modulecmd('load', mod, output=str, error=str)
exec(compile(load, '<string>', 'exec'))
def get_path_from_module(mod):
"""Inspects a TCL module for entries that indicate the absolute path
at which the library supported by said module can be found.
"""
# Create a modulecmd executable
modulecmd = which('modulecmd')
modulecmd.add_default_arg('python')
# Read the module
text = modulecmd('show', mod, output=str, error=str).split('\n')
# If it lists its package directory, return that
for line in text:
if line.find(mod.upper() + '_DIR') >= 0:
words = line.split()
return words[2]
# If it lists a -rpath instruction, use that
for line in text:
rpath = line.find('-rpath/')
if rpath >= 0:
return line[rpath + 6:line.find('/lib')]
# If it lists a -L instruction, use that
for line in text:
L = line.find('-L/')
if L >= 0:
return line[L + 2:line.find('/lib')]
# If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that
for line in text:
if line.find('LD_LIBRARY_PATH') >= 0:
words = line.split()
path = words[2]
return path[:path.find('/lib')]
# Unable to find module path
return None
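# Hedged examples of lines this parses (for a hypothetical module 'foo'):
#
#     'setenv FOO_DIR /opt/foo/1.0'                    -> '/opt/foo/1.0'
#     'prepend-path LD_LIBRARY_PATH /opt/foo/1.0/lib'  -> '/opt/foo/1.0'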
def set_compiler_environment_variables(pkg, env):
assert pkg.spec.concrete
assert(pkg.spec.concrete)
compiler = pkg.compiler
flags = pkg.spec.compiler_flags
# Set compiler variables used by CMake and autotools
assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
assert all(key in compiler.link_paths for key in (
'cc', 'cxx', 'f77', 'fc'))
# Populate an object with the list of environment modifications
# and return it
# TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
# TODO : add additional kwargs for better diagnostics, like requestor,
# ttyout, ttyerr, etc.
link_dir = spack.build_env_path
env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
# Set SPACK compiler variables so that our wrapper knows what to call
if compiler.cc:
env.set('SPACK_CC', compiler.cc)
env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
if compiler.cxx:
env.set('SPACK_CXX', compiler.cxx)
env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
if compiler.f77:
env.set('SPACK_F77', compiler.f77)
env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
if compiler.fc:
env.set('SPACK_FC', compiler.fc)
env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
@@ -154,12 +217,19 @@ def set_compiler_environment_variables(pkg, env):
env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
for mod in compiler.modules:
load_module(mod)
return env
def set_build_environment_variables(pkg, env):
def set_build_environment_variables(pkg, env, dirty=False):
"""
This ensures a clean install environment when we build packages
This ensures a clean install environment when we build packages.
Arguments:
dirty -- skip unsetting the user's environment settings.
"""
# Add spack build environment path with compiler wrappers first in
# the path. We add both spack.env_path, which includes default
@@ -172,7 +242,8 @@ def set_build_environment_variables(pkg, env):
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
compiler_specific = join_path(spack.build_env_path, pkg.compiler.name)
for item in [spack.build_env_path, compiler_specific]:
env_paths.append(item)
ci = join_path(item, 'case-insensitive')
if os.path.isdir(ci):
@@ -183,9 +254,11 @@ def set_build_environment_variables(pkg, env):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
dep_prefixes = [d.prefix
for d in pkg.spec.traverse(root=False, deptype='build')]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
# Install prefix
env.set(SPACK_PREFIX, pkg.prefix)
@@ -193,15 +266,30 @@ def set_build_environment_variables(pkg, env):
# Install root prefix
env.set(SPACK_INSTALL, spack.install_path)
# Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
env.unset('LD_LIBRARY_PATH')
env.unset('LD_RUN_PATH')
env.unset('DYLD_LIBRARY_PATH')
    # Stuff in here sanitizes the build environment to eliminate
# anything the user has set that may interfere.
if not dirty:
# Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
env.unset('LD_LIBRARY_PATH')
env.unset('LIBRARY_PATH')
env.unset('CPATH')
env.unset('LD_RUN_PATH')
env.unset('DYLD_LIBRARY_PATH')
# Remove any macports installs from the PATH. The macports ld can
# cause conflicts with the built-in linker on el capitan. Solves
# assembler issues, e.g.:
# suffix or operands invalid for `movq'"
path = get_path('PATH')
for p in path:
if '/macports/' in p:
env.remove_path('PATH', p)
# Add bin directories from dependencies to the PATH for the build.
bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
bin_dirs = reversed(
filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
for item in bin_dirs:
env.prepend_path('PATH', item)
@@ -212,13 +300,14 @@ def set_build_environment_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
# Add any pkgconfig directories to PKG_CONFIG_PATH
pkg_config_dirs = []
for p in dep_prefixes:
for maybe in ('lib', 'lib64', 'share'):
pcdir = join_path(p, maybe, 'pkgconfig')
for pre in dep_prefixes:
for directory in ('lib', 'lib64', 'share'):
pcdir = join_path(pre, directory, 'pkgconfig')
if os.path.isdir(pcdir):
pkg_config_dirs.append(pcdir)
env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
env.prepend_path('PKG_CONFIG_PATH', pcdir)
if pkg.spec.architecture.target.module_name:
load_module(pkg.spec.architecture.target.module_name)
return env
@@ -227,7 +316,7 @@ def set_module_variables_for_package(pkg, module):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
# number of jobs spack will to build with.
# number of jobs spack will build with.
jobs = multiprocessing.cpu_count()
if not pkg.parallel:
jobs = 1
@@ -238,8 +327,9 @@ def set_module_variables_for_package(pkg, module):
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
    m.make = MakeExecutable('make', jobs)
m.gmake = MakeExecutable('gmake', jobs)
m.scons = MakeExecutable('scons', jobs)
# easy shortcut to os.environ
m.env = os.environ
@@ -248,11 +338,8 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
# TODO: shouldn't really use "which" here. Consider adding notion
# TODO: of build dependencies, as opposed to link dependencies.
# TODO: Currently, everything is a link dependency, but tools like
# TODO: this shouldn't be.
m.cmake = Executable('cmake')
m.ctest = Executable('ctest')
# standard CMake arguments
m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
@@ -262,33 +349,34 @@ def set_module_variables_for_package(pkg, module):
# Set up CMake rpath
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' %
":".join(get_rpaths(pkg)))
# Put spack compiler paths in module scope.
link_dir = spack.build_env_path
    m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
    m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
    m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
    m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
# Emulate some shell commands for convenience
    m.pwd = os.getcwd
    m.cd = os.chdir
    m.mkdir = os.mkdir
    m.makedirs = os.makedirs
    m.remove = os.remove
    m.removedirs = os.removedirs
    m.symlink = os.symlink
    m.mkdirp = mkdirp
    m.install = install
m.install_tree = install_tree
    m.rmtree = shutil.rmtree
    m.move = shutil.move
# Useful directories within the prefix are encapsulated in
# a Prefix object.
    m.prefix = pkg.prefix
# Platform-specific library suffix.
m.dso_suffix = dso_suffix
@@ -297,30 +385,45 @@ def set_module_variables_for_package(pkg, module):
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
deps = pkg.spec.dependencies(deptype='link')
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
return rpaths
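# Illustrative result (hypothetical prefixes; for dependencies, only lib
# directories that actually exist are included):
#
#     ['/spack/opt/foo-1.0/lib', '/spack/opt/foo-1.0/lib64',
#      '/spack/opt/zlib-1.2.8/lib']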
def parent_class_modules(cls):
"""Get list of super class modules that are all descend from spack.Package"""
"""
Get list of super class modules that are all descend from spack.Package
"""
if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
return []
result = []
module = sys.modules.get(cls.__module__)
if module:
result = [ module ]
result = [module]
for c in cls.__bases__:
result.extend(parent_class_modules(c))
return result
def setup_package(pkg):
def load_external_modules(pkg):
""" traverse the spec list and find any specs that have external modules.
"""
for dep in list(pkg.spec.traverse()):
if dep.external_module:
load_module(dep.external_module)
def setup_package(pkg, dirty=False):
"""Execute all environment setup routines."""
spack_env = EnvironmentModifications()
    run_env = EnvironmentModifications()
# Before proceeding, ensure that specs and packages are consistent
#
@@ -336,14 +439,16 @@ def setup_package(pkg):
# throwaway environment, but it is kind of dirty.
#
# TODO: Think about how to avoid this fix and do something cleaner.
for s in pkg.spec.traverse(): s.package.spec = s
for s in pkg.spec.traverse():
s.package.spec = s
set_compiler_environment_variables(pkg, spack_env)
set_build_environment_variables(pkg, spack_env)
set_build_environment_variables(pkg, spack_env, dirty)
pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env)
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
for dspec in pkg.spec.traverse(order='post', root=False):
for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,
@@ -368,7 +473,7 @@ def setup_package(pkg):
spack_env.apply_modifications()
def fork(pkg, function):
def fork(pkg, function, dirty=False):
"""Fork a child process to do part of a spack build.
Arguments:
@@ -376,6 +481,7 @@ def fork(pkg, function):
    pkg -- pkg whose environment we should set up the
forked process for.
function -- arg-less function to run in the child process.
dirty -- If True, do NOT clean the environment before building.
Usage:
def child_fun():
@@ -399,7 +505,7 @@ def child_fun():
if pid == 0:
# Give the child process the package's build environment.
setup_package(pkg)
setup_package(pkg, dirty=dirty)
try:
# call the forked function.
@@ -424,7 +530,9 @@ def child_fun():
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
raise InstallError("Installation process had nonzero exit code.".format(str(returncode)))
message = "Installation process had nonzero exit code : {code}"
strcode = str(returncode)
raise InstallError(message.format(code=strcode))
class InstallError(spack.error.SpackError):

View File

@@ -27,16 +27,18 @@
import sys
import llnl.util.tty as tty
from llnl.util.lang import attr_setdefault
import spack
import spack.spec
import spack.config
import spack.spec
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
#
# Settings for commands that modify configuration
#
# Commands that modify configuration by default modify the *highest* priority scope.
# Commands that modify configuration by default modify the *highest*
# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name
# Commands that list configuration list *all* scopes by default.
default_list_scope = None
@@ -48,7 +50,7 @@
ignore_files = r'^\.|^__init__.py$|^#'
SETUP_PARSER = "setup_parser"
DESCRIPTION = "description"
DESCRIPTION = "description"
command_path = os.path.join(spack.lib_path, "spack", "cmd")
@@ -71,7 +73,7 @@ def get_module(name):
module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
level=0)
    attr_setdefault(module, SETUP_PARSER, lambda *args: None)  # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
@@ -101,17 +103,17 @@ def parse_specs(args, **kwargs):
specs = spack.spec.parse(args)
for spec in specs:
if concretize:
                spec.concretize()  # implies normalize
elif normalize:
spec.normalize()
return specs
except spack.parse.ParseError, e:
except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1)
except spack.spec.SpecError, e:
except spack.spec.SpecError as e:
tty.error(e.message)
sys.exit(1)
@@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10):
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
return line_list[:max_num-1] + ['...'] + line_list[-1:]
return line_list[:max_num - 1] + ['...'] + line_list[-1:]
else:
return line_list
@@ -138,10 +140,104 @@ def disambiguate_spec(spec):
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
args = ["%s matches multiple packages." % spec,
"Matching packages:"]
args = ["%s matches multiple packages." % spec,
"Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
return matching_specs[0]
def ask_for_confirmation(message):
while True:
tty.msg(message + '[y/n]')
choice = raw_input().lower()
if choice == 'y':
break
elif choice == 'n':
raise SystemExit('Operation aborted')
tty.warn('Please reply either "y" or "n"')
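# Example: blocks until the user answers 'y' (returns) or 'n' (raises
# SystemExit):
#
#     ask_for_confirmation('Really uninstall all matching packages? ')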
def gray_hash(spec, length):
return colorize('@K{%s}' % spec.dag_hash(length))
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
namespace = kwargs.get('namespace', False)
flags = kwargs.get('show_flags', False)
variants = kwargs.get('variants', False)
hlen = 7
if kwargs.get('very_long', False):
hashes = True
hlen = None
nfmt = '.' if namespace else '_'
ffmt = '$%+' if flags else ''
vfmt = '$+' if variants else ''
format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
print
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
architecture, spack.spec.compiler_color,
compiler)
tty.hline(colorize(header), char='-')
specs = index[(architecture, compiler)]
specs.sort()
abbreviated = [s.format(format_string, color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %%-%ds%%s" % width
for abbrv, spec in zip(abbreviated, specs):
if hashes:
                    print gray_hash(spec, hlen),
print(format % (abbrv, spec.prefix))
elif mode == 'deps':
for spec in specs:
print(spec.tree(
format=format_string,
color=True,
indent=4,
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
elif mode == 'short':
# Print columns of output if not printing flags
if not flags:
def fmt(s):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
return string
colify(fmt(s) for s in specs)
# Print one entry per line if including flags
else:
for spec in specs:
# Print the hash if necessary
hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
print(hsh + spec.format(format_string, color=True) + '\n')
else:
raise ValueError(
"Invalid mode for display_specs: %s. Must be one of (paths,"
"deps, short)." % mode)

View File

@@ -29,12 +29,14 @@
description = "Activate a package extension."
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
help="Activate without first activating dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
'spec', nargs=argparse.REMAINDER,
help="spec of package extension to activate.")
def activate(parser, args):

View File

@@ -22,14 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
import spack.architecture as architecture
description = "Print the architecture for this machine"
def arch(parser, args):
configured_sys_type = architecture.get_sys_type_from_spack_globals()
if not configured_sys_type:
configured_sys_type = "autodetect"
print "Configured sys_type: %s" % configured_sys_type
print "Autodetected default sys_type: %s" % architecture.sys_type()
print architecture.sys_type()

View File

@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from subprocess import check_call
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
@@ -31,26 +30,49 @@
import spack
from spack.util.executable import which, ProcessError
_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
description = "Create a new installation of spack in another prefix"
def setup_parser(subparser):
    subparser.add_argument('prefix', help="name of the prefix where we should install spack")
subparser.add_argument(
'-r', '--remote', action='store', dest='remote',
help="name of the remote to bootstrap from", default='origin')
subparser.add_argument(
'prefix',
help="names of prefix where we should install spack")
def get_origin_url():
def get_origin_info(remote):
git_dir = join_path(spack.prefix, '.git')
git = which('git', required=True)
origin_url = git(
'--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
output=str)
return origin_url.strip()
try:
branch = git('symbolic-ref', '--short', 'HEAD', output=str)
except ProcessError:
branch = 'develop'
tty.warn('No branch found; using default branch: %s' % branch)
if remote == 'origin' and \
branch not in ('master', 'develop'):
branch = 'develop'
tty.warn('Unknown branch found; using default branch: %s' % branch)
try:
origin_url = git(
'--git-dir=%s' % git_dir,
'config', '--get', 'remote.%s.url' % remote,
output=str)
except ProcessError:
origin_url = _SPACK_UPSTREAM
tty.warn('No git repository found; '
'using default upstream URL: %s' % origin_url)
return (origin_url.strip(), branch.strip())
def bootstrap(parser, args):
origin_url = get_origin_url()
origin_url, branch = get_origin_info(args.remote)
prefix = args.prefix
tty.msg("Fetching spack from origin: %s" % origin_url)
tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))
if os.path.isfile(prefix):
tty.die("There is already a file at %s" % prefix)
@@ -62,7 +84,8 @@ def bootstrap(parser, args):
files_in_the_way = os.listdir(prefix)
if files_in_the_way:
tty.die("There are already files there! Delete these files before boostrapping spack.",
tty.die("There are already files there! "
"Delete these files before boostrapping spack.",
*files_in_the_way)
tty.msg("Installing:",
@@ -73,8 +96,10 @@ def bootstrap(parser, args):
git = which('git', required=True)
git('init', '--shared', '-q')
git('remote', 'add', 'origin', origin_url)
git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q')
git('reset', '--hard', 'origin/master', '-q')
git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
'-n', '-q')
git('reset', '--hard', 'origin/%s' % branch, '-q')
git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
tty.msg("Successfully created a new spack in %s" % prefix,
"Run %s/bin/spack to use this installation." % prefix)

View File

@@ -25,7 +25,8 @@
import spack.cmd.location
import spack.modules
description="cd to spack directories in the shell."
description = "cd to spack directories in the shell."
def setup_parser(subparser):
"""This is for decoration -- spack cd is used through spack's

View File

@@ -42,7 +42,8 @@ def setup_parser(subparser):
'--keep-stage', action='store_true', dest='keep_stage',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
'versions', nargs=argparse.REMAINDER,
help='Versions to generate checksums for')
def get_checksums(versions, urls, **kwargs):
@@ -59,10 +60,10 @@ def get_checksums(versions, urls, **kwargs):
with Stage(url, keep=keep_stage) as stage:
stage.fetch()
if i == 0 and first_stage_function:
first_stage_function(stage)
first_stage_function(stage, url)
hashes.append((version,
spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
hashes.append((version, spack.util.crypto.checksum(
hashlib.md5, stage.archive_file)))
i += 1
except FailedDownloadError as e:
tty.msg("Failed to fetch %s" % url)
@@ -79,12 +80,12 @@ def checksum(parser, args):
# If the user asked for specific versions, use those.
if args.versions:
versions = {}
for v in args.versions:
v = ver(v)
if not isinstance(v, Version):
for version in args.versions:
version = ver(version)
if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or " +
"version ranges. Use unambiguous versions.")
versions[v] = pkg.url_for_version(v)
versions[version] = pkg.url_for_version(version)
else:
versions = pkg.fetch_remote_versions()
if not versions:
@@ -111,5 +112,7 @@ def checksum(parser, args):
if not version_hashes:
tty.die("Could not fetch any versions for %s" % pkg.name)
version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
version_lines = [
" version('%s', '%s')" % (v, h) for v, h in version_hashes
]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

View File

@@ -31,6 +31,7 @@
description = "Remove build stage and source tarball for packages."
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")

View File

@@ -0,0 +1,24 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################

View File

@@ -0,0 +1,96 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import spack.modules
from spack.util.pattern import Bunch
__all__ = ['add_common_arguments']
_arguments = {}
def add_common_arguments(parser, list_of_arguments):
for argument in list_of_arguments:
if argument not in _arguments:
            message = 'Trying to add a non-existent argument "{0}" to a command'
raise KeyError(message.format(argument))
x = _arguments[argument]
parser.add_argument(*x.flags, **x.kwargs)
class ConstraintAction(argparse.Action):
"""Constructs a list of specs based on a constraint given on the command line
An instance of this class is supposed to be used as an argument action
in a parser. It will read a constraint and will attach a list of matching
specs to the namespace
"""
qualifiers = {}
def __call__(self, parser, namespace, values, option_string=None):
# Query specs from command line
d = self.qualifiers.get(namespace.subparser_name, {})
specs = [s for s in spack.installed_db.query(**d)]
values = ' '.join(values)
if values:
specs = [x for x in specs if x.satisfies(values, strict=True)]
namespace.specs = specs
parms = Bunch(
flags=('constraint',),
kwargs={
'nargs': '*',
'help': 'Constraint to select a subset of installed packages',
'action': ConstraintAction
})
_arguments['constraint'] = parms
parms = Bunch(
flags=('-m', '--module-type'),
kwargs={
'help': 'Type of module files',
'default': 'tcl',
'choices': spack.modules.module_types
})
_arguments['module_type'] = parms
parms = Bunch(
flags=('-y', '--yes-to-all'),
kwargs={
'action': 'store_true',
'dest': 'yes_to_all',
'help': 'Assume "yes" is the answer to every confirmation request.'
})
_arguments['yes_to_all'] = parms
parms = Bunch(
flags=('-r', '--dependencies'),
kwargs={
'action': 'store_true',
'dest': 'recurse_dependencies',
'help': 'Recursively traverse spec dependencies'
})
_arguments['recurse_dependencies'] = parms
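# Hedged usage sketch for a command's setup_parser (the keys are the ones
# registered above):
#
#     def setup_parser(subparser):
#         add_common_arguments(subparser, ['constraint', 'module_type',
#                                          'yes_to_all'])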

View File

@@ -37,6 +37,7 @@
description = "Manage compilers"
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
@@ -44,43 +45,58 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes
# Find
find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
find_parser = sp.add_parser(
'find', aliases=['add'],
help='Search the system for compilers to add to Spack configuration.')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
find_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser = sp.add_parser(
'remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser.add_argument(
'-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
'-a', '--all', action='store_true',
help='Remove ALL compilers that match spec.')
remove_parser.add_argument('compiler_spec')
remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
remove_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# List
list_parser = sp.add_parser('list', help='list available compilers')
list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
list_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
# Info
info_parser = sp.add_parser('info', help='Show compiler paths.')
info_parser.add_argument('compiler_spec')
info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
info_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
help="Configuration scope to read from.")
def compiler_find(args):
"""Search either $PATH or a list of paths for compilers and add them
to Spack's configuration."""
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
paths = args.add_paths
if not paths:
paths = get_path('PATH')
compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
# Don't initialize compilers config via compilers.get_compiler_config.
# Just let compiler_find do the
# entire process and return an empty config from all_compilers
# Default for any other process is init_config=True
compilers = [c for c in spack.compilers.find_compilers(*paths)
if c.spec not in spack.compilers.all_compilers(
scope=args.scope, init_config=False)]
if compilers:
spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
spack.compilers.add_compilers_to_config(compilers, scope=args.scope,
init_config=False)
n = len(compilers)
s = 's' if n > 1 else ''
filename = spack.config.get_config_filename(args.scope, 'compilers')
@@ -93,17 +109,17 @@ def compiler_find(args):
def compiler_remove(args):
cspec = CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
if not compilers:
tty.die("No compilers match spec %s" % cspec)
elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
for compiler in compilers:
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
spack.compilers.remove_compiler_from_config(
compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec)
@@ -121,13 +137,16 @@ def compiler_info(args):
print "\tcxx = %s" % c.cxx
print "\tf77 = %s" % c.f77
print "\tfc = %s" % c.fc
print "\tmodules = %s" % c.modules
print "\toperating system = %s" % c.operating_system
def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
for i, (name, compilers) in enumerate(index.items()):
if i >= 1: print
if i >= 1:
print
cname = "%s{%s}" % (spack.spec.compiler_color, name)
tty.hline(colorize(cname), char='-')
@@ -135,10 +154,10 @@ def compiler_list(args):
def compiler(parser, args):
action = { 'add' : compiler_find,
'find' : compiler_find,
'remove' : compiler_remove,
'rm' : compiler_remove,
'info' : compiler_info,
'list' : compiler_list }
action = {'add': compiler_find,
'find': compiler_find,
'remove': compiler_remove,
'rm': compiler_remove,
'info': compiler_info,
'list': compiler_list}
action[args.compiler_command](args)

View File

@@ -22,18 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack
from spack.cmd.compiler import compiler_list
description = "List available compilers. Same as 'spack compiler list'."
def setup_parser(subparser):
subparser.add_argument('--scope', choices=spack.config.config_scopes,
help="Configuration scope to read/modify.")
def compilers(parser, args):
compiler_list(args)

View File

@@ -22,15 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import argparse
import llnl.util.tty as tty
import spack.config
description = "Get and set configuration options."
def setup_parser(subparser):
# User can only choose one
scope_group = subparser.add_mutually_exclusive_group()
@@ -64,6 +60,6 @@ def config_edit(args):
def config(parser, args):
action = { 'get' : config_get,
'edit' : config_edit }
action = {'get': config_get,
'edit': config_edit}
action[args.config_command](args)

View File

@@ -1,4 +1,3 @@
_copyright = """\
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -23,10 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
import string
import os
import hashlib
import re
from ordereddict_backport import OrderedDict
@@ -41,16 +38,37 @@
from spack.spec import Spec
from spack.util.naming import *
from spack.repository import Repo, RepoError
import spack.util.crypto as crypto
from spack.util.executable import which
from spack.stage import Stage
description = "Create a new package file from an archive URL"
package_template = string.Template(
_copyright + """
package_template = string.Template("""\
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
@@ -68,26 +86,106 @@
#
from spack import *
class ${class_name}(Package):
""\"FIXME: put a proper description of your package here.""\"
# FIXME: add a proper url for your package's homepage here.
""\"FIXME: Put a proper description of your package here.""\"
# FIXME: Add a proper url for your package's homepage here.
homepage = "http://www.example.com"
url = "${url}"
${versions}
# FIXME: Add dependencies if this package requires them.
# depends_on("foo")
${dependencies}
def install(self, spec, prefix):
# FIXME: Modify the configure line to suit your build system here.
${configure}
# FIXME: Add logic to build and install here
make()
make("install")
${install}
""")
# Build dependencies and extensions
dependencies_dict = {
'autotools': """\
# FIXME: Add dependencies if required.
# depends_on('foo')""",
'cmake': """\
# FIXME: Add additional dependencies if required.
depends_on('cmake', type='build')""",
'scons': """\
# FIXME: Add additional dependencies if required.
depends_on('scons', type='build')""",
'python': """\
extends('python')
# FIXME: Add additional dependencies if required.
# depends_on('py-foo', type=nolink)""",
'R': """\
extends('R')
# FIXME: Add additional dependencies if required.
# depends_on('r-foo', type=nolink)""",
'octave': """\
extends('octave')
# FIXME: Add additional dependencies if required.
# depends_on('octave-foo', type=nolink)""",
'unknown': """\
# FIXME: Add dependencies if required.
# depends_on('foo')"""
}
# Default installation instructions
install_dict = {
'autotools': """\
# FIXME: Modify the configure line to suit your build system here.
configure('--prefix={0}'.format(prefix))
# FIXME: Add logic to build and install here.
make()
make('install')""",
'cmake': """\
with working_dir('spack-build', create=True):
# FIXME: Modify the cmake line to suit your build system here.
cmake('..', *std_cmake_args)
# FIXME: Add logic to build and install here.
make()
make('install')""",
'scons': """\
# FIXME: Add logic to build and install here.
scons('prefix={0}'.format(prefix))
scons('install')""",
'python': """\
# FIXME: Add logic to build and install here.
setup_py('install', '--prefix={0}'.format(prefix))""",
'R': """\
# FIXME: Add logic to build and install here.
R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
self.stage.source_path)""",
'octave': """\
# FIXME: Add logic to build and install here.
octave('--quiet', '--norc',
'--built-in-docstrings-file=/dev/null',
'--texi-macros-file=/dev/null',
'--eval', 'pkg prefix {0}; pkg install {1}'.format(
prefix, self.stage.archive_file))""",
'unknown': """\
# FIXME: Unknown build system
make()
make('install')"""
}
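# A sketch of how these tables feed the template above in create(): for an
# archive that matched the 'cmake' clues, the dict entries replace the
# ${dependencies} and ${install} placeholders (vhtuples is a hypothetical
# list of (version, md5) pairs):
#
#     package_template.substitute(
#         name=name, class_name=mod_to_class(name), url=url,
#         versions=make_version_calls(vhtuples),
#         dependencies=dependencies_dict['cmake'],
#         install=install_dict['cmake'])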
def make_version_calls(ver_hash_tuples):
"""Adds a version() call to the package for each version found."""
@@ -118,41 +216,53 @@ def setup_parser(subparser):
setup_parser.subparser = subparser
class ConfigureGuesser(object):
def __call__(self, stage):
"""Try to guess the type of build system used by the project, and return
an appropriate configure line.
"""
autotools = "configure('--prefix=%s' % prefix)"
cmake = "cmake('.', *std_cmake_args)"
python = "python('setup.py', 'install', '--prefix=%s' % prefix)"
r = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)"
        config_lines = ((r'/configure$', 'autotools', autotools),
                        (r'/CMakeLists.txt$', 'cmake', cmake),
                        (r'/setup.py$', 'python', python),
                        (r'/NAMESPACE$', 'r', r))
        # Peek inside the tarball.
        tar = which('tar')
        output = tar(
            "--exclude=*/*/*", "-tf", stage.archive_file, output=str)
        lines = output.split("\n")
        # Set the configure line to the one that matched.
        for pattern, bs, cl in config_lines:
            if any(re.search(pattern, l) for l in lines):
                config_line = cl
                build_system = bs
                break
        else:
            # None matched -- just put both, with cmake commented out
            config_line = "# FIXME: Spack couldn't guess one, so here are some options:\n"
            config_line += " # " + autotools + "\n"
            config_line += " # " + cmake
            build_system = 'unknown'
        self.configure = config_line
        self.build_system = build_system
class BuildSystemGuesser(object):
    def __call__(self, stage, url):
        """Try to guess the type of build system used by a project based on
        the contents of its archive or the URL it was downloaded from."""
        # Most octave extensions are hosted on Octave-Forge:
        #     http://octave.sourceforge.net/index.html
        # They all have the same base URL.
        if 'downloads.sourceforge.net/octave/' in url:
            self.build_system = 'octave'
            return
        # A list of clues that give us an idea of the build system a package
        # uses. If the regular expression matches a file contained in the
        # archive, the corresponding build system is assumed.
        clues = [
            (r'/configure$', 'autotools'),
            (r'/CMakeLists.txt$', 'cmake'),
            (r'/SConstruct$', 'scons'),
            (r'/setup.py$', 'python'),
            (r'/NAMESPACE$', 'R')
        ]
        # Peek inside the compressed file.
        if stage.archive_file.endswith('.zip'):
            try:
                unzip = which('unzip')
                output = unzip('-l', stage.archive_file, output=str)
            except:
                output = ''
        else:
            try:
                tar = which('tar')
                output = tar('--exclude=*/*/*', '-tf',
                             stage.archive_file, output=str)
            except:
                output = ''
        lines = output.split('\n')
        # Determine the build system based on the files contained
        # in the archive.
        build_system = 'unknown'
        for pattern, bs in clues:
            if any(re.search(pattern, l) for l in lines):
                build_system = bs
        self.build_system = build_system
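# Illustrative use (mirrors how create() wires this up later in this file):
# the guesser is handed to get_checksums() as first_stage_function, so it
# runs on the first downloaded archive:
#
#     guesser = BuildSystemGuesser()
#     guesser(stage, url)    # inspects the archive / URL
#     guesser.build_system   # -> e.g. 'cmake'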
@@ -168,7 +278,7 @@ def guess_name_and_version(url, args):
else:
try:
name = spack.url.parse_name(url, version)
except spack.url.UndetectableNameError, e:
except spack.url.UndetectableNameError:
# Use a user-supplied name if one is present
tty.die("Couldn't guess a name for this package. Try running:", "",
"spack create --name <name> <url>")
@@ -182,7 +292,8 @@ def guess_name_and_version(url, args):
def find_repository(spec, args):
# figure out namespace for spec
if spec.namespace and args.namespace and spec.namespace != args.namespace:
tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace))
tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace,
args.namespace))
if not spec.namespace and args.namespace:
spec.namespace = args.namespace
@@ -193,8 +304,8 @@ def find_repository(spec, args):
try:
repo = Repo(repo_path)
if spec.namespace and spec.namespace != repo.namespace:
tty.die("Can't create package with namespace %s in repo with namespace %s"
% (spec.namespace, repo.namespace))
tty.die("Can't create package with namespace %s in repo with "
"namespace %s" % (spec.namespace, repo.namespace))
except RepoError as e:
tty.die(str(e))
else:
@@ -214,11 +325,7 @@ def find_repository(spec, args):
def fetch_tarballs(url, name, version):
"""Try to find versions of the supplied archive by scraping the web.
Prompts the user to select how many to download if many are found.
"""
Prompts the user to select how many to download if many are found."""
versions = spack.util.web.find_versions_of_archive(url)
rkeys = sorted(versions.keys(), reverse=True)
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
@@ -226,11 +333,11 @@ def fetch_tarballs(url, name, version):
archives_to_fetch = 1
if not versions:
# If the fetch failed for some reason, revert to what the user provided
versions = { version : url }
versions = {version: url}
elif len(versions) > 1:
tty.msg("Found %s versions of %s:" % (len(versions), name),
*spack.cmd.elide_list(
["%-10s%s" % (v,u) for v, u in versions.iteritems()]))
["%-10s%s" % (v, u) for v, u in versions.iteritems()]))
print
archives_to_fetch = tty.get_number(
"Include how many checksums in the package file?",
@@ -253,7 +360,7 @@ def create(parser, args):
# Figure out a name and repo for the package.
name, version = guess_name_and_version(url, args)
spec = Spec(name)
name = spec.name # factors out namespace, if any
name = spec.name.lower() # factors out namespace, if any
repo = find_repository(spec, args)
tty.msg("This looks like a URL for %s version %s" % (name, version))
@@ -262,8 +369,8 @@ def create(parser, args):
# Fetch tarballs (prompting user if necessary)
versions, urls = fetch_tarballs(url, name, version)
# Try to guess what configure system is used.
guesser = ConfigureGuesser()
# Try to guess what build system is used.
guesser = BuildSystemGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions, urls,
first_stage_function=guesser,
@@ -272,13 +379,13 @@ def create(parser, args):
if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s" % name)
# Prepend 'py-' to python package names, by convention.
# Add prefix to package name if it is an extension.
if guesser.build_system == 'python':
name = 'py-%s' % name
# Prepend 'r-' to R package names, by convention.
if guesser.build_system == 'r':
name = 'r-%s' % name
name = 'py-{0}'.format(name)
if guesser.build_system == 'R':
name = 'r-{0}'.format(name)
if guesser.build_system == 'octave':
name = 'octave-{0}'.format(name)
# Create a directory for the new package.
pkg_path = repo.filename_for_package_name(name)
@@ -292,10 +399,11 @@ def create(parser, args):
pkg_file.write(
package_template.substitute(
name=name,
configure=guesser.configure,
class_name=mod_to_class(name),
url=url,
versions=make_version_calls(ver_hash_tuples)))
versions=make_version_calls(ver_hash_tuples),
dependencies=dependencies_dict[guesser.build_system],
install=install_dict[guesser.build_system]))
# If everything checks out, go ahead and edit.
spack.editor(pkg_path)

View File

@@ -31,6 +31,7 @@
description = "Deactivate a package extension."
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
@@ -40,7 +41,8 @@ def setup_parser(subparser):
help="Deactivate all extensions of an extendable package, or "
"deactivate an extension AND its dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
'spec', nargs=argparse.REMAINDER,
help="spec of package extension to deactivate.")
def deactivate(parser, args):
@@ -65,7 +67,8 @@ def deactivate(parser, args):
if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec)
tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
tty.msg("Deactivating %s and all dependencies." %
pkg.spec.short_spec)
topo_order = topological_sort(spec)
index = spec.index()
@@ -79,7 +82,9 @@ def deactivate(parser, args):
epkg.do_deactivate(force=args.force)
else:
tty.die("spack deactivate --all requires an extendable package or an extension.")
tty.die(
"spack deactivate --all requires an extendable package "
"or an extension.")
else:
if not pkg.is_extension:

View File

@@ -31,9 +31,11 @@
description = "Show installed packages that depend on another."
def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.")
'spec', nargs=argparse.REMAINDER,
help="specs to list dependencies of.")
def dependents(parser, args):
@@ -42,5 +44,6 @@ def dependents(parser, args):
tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#'
deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
deps = [d.format(fmt, color=True)
for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)

View File

@@ -35,6 +35,7 @@
description = "Do-It-Yourself: build from an existing source directory."
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@@ -50,7 +51,7 @@ def setup_parser(subparser):
help="Do not display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND verison.")
help="specs to use for install. Must contain package AND version.")
def diy(self, args):
@@ -76,14 +77,17 @@ def diy(self, args):
return
if not spec.versions.concrete:
tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?")
tty.die(
"spack diy spec must have a single, concrete version. "
"Did you forget a package version number?")
spec.concretize()
package = spack.repo.get(spec)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this DIY build.")
tty.msg("Uninstall or try adding a version suffix for this "
"DIY build.")
sys.exit(1)
# Forces the build to run out of the current directory.

View File

@@ -25,6 +25,7 @@
description = "Run pydoc from within spack."
def setup_parser(subparser):
subparser.add_argument('entity', help="Run pydoc help on entity")

View File

@@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False):
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK):
if not os.access(path, os.R_OK | os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
elif not force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
@@ -93,19 +93,23 @@ def setup_parser(subparser):
# Various filetypes you can edit directly from the cmd line.
excl_args.add_argument(
'-c', '--command', dest='path', action='store_const',
const=spack.cmd.command_path, help="Edit the command with the supplied name.")
const=spack.cmd.command_path,
help="Edit the command with the supplied name.")
excl_args.add_argument(
'-t', '--test', dest='path', action='store_const',
const=spack.test_path, help="Edit the test with the supplied name.")
excl_args.add_argument(
'-m', '--module', dest='path', action='store_const',
const=spack.module_path, help="Edit the main spack module with the supplied name.")
const=spack.module_path,
help="Edit the main spack module with the supplied name.")
# Options for editing packages
excl_args.add_argument(
'-r', '--repo', default=None, help="Path to repo to edit package in.")
'-r', '--repo', default=None,
help="Path to repo to edit package in.")
excl_args.add_argument(
'-N', '--namespace', default=None, help="Namespace of package to edit.")
'-N', '--namespace', default=None,
help="Namespace of package to edit.")
subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit")

View File

@@ -28,11 +28,13 @@
import spack.cmd
import spack.build_environment as build_env
description = "Run a command with the environment for a particular spec's install."
description = "Run a command with the install environment for a spec."
def setup_parser(subparser):
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.")
'spec', nargs=argparse.REMAINDER,
help="specs of package environment to emulate.")
def env(parser, args):
@@ -47,7 +49,7 @@ def env(parser, args):
if sep in args.spec:
s = args.spec.index(sep)
spec = args.spec[:s]
cmd = args.spec[s+1:]
cmd = args.spec[s + 1:]
else:
spec = args.spec[0]
cmd = args.spec[1:]
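The hunk above splits the trailing arguments at a separator token so everything after it becomes the command to run. A self-contained sketch of that split; the separator value '--' is an assumption, since sep is defined outside this hunk:

args_spec = ['zlib', '--', 'make', '-j8']
sep = '--'  # assumed; the real definition is not shown in this hunk
if sep in args_spec:
    s = args_spec.index(sep)
    spec, cmd = args_spec[:s], args_spec[s + 1:]
else:
    spec, cmd = args_spec[0], args_spec[1:]
print(spec, cmd)  # ['zlib'] ['make', '-j8']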

View File

@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import argparse
import llnl.util.tty as tty
@@ -34,6 +33,7 @@
description = "List extensions for package."
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
@@ -47,7 +47,8 @@ def setup_parser(subparser):
help='Show full dependency DAG of extensions')
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to list extensions for')
def extensions(parser, args):
@@ -85,7 +86,8 @@ def extensions(parser, args):
#
# List specs of installed extensions.
#
installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)]
installed = [
s.spec for s in spack.installed_db.installed_extensions_for(spec)]
print
if not installed:
tty.msg("None installed.")
@@ -102,4 +104,5 @@ def extensions(parser, args):
tty.msg("None activated.")
return
tty.msg("%d currently activated:" % len(activated))
spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long)
spack.cmd.find.display_specs(
activated.values(), mode=args.mode, long=args.long)

View File

@@ -29,16 +29,21 @@
description = "Fetch archives for packages"
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'-m', '--missing', action='store_true', help="Also fetch all missing dependencies")
'-m', '--missing', action='store_true',
help="Also fetch all missing dependencies")
subparser.add_argument(
'-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
'-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
'packages', nargs=argparse.REMAINDER,
help="specs of packages to fetch")
def fetch(parser, args):
if not args.packages:
@@ -50,8 +55,7 @@ def fetch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
if args.missing or args.dependencies:
to_fetch = set()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue

View File

@@ -31,6 +31,7 @@
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
from spack.cmd import display_specs
description = "Find installed spack packages"
@@ -85,6 +86,11 @@ def setup_parser(subparser):
action='store_true',
dest='missing',
help='Show missing dependencies as well as installed specs.')
subparser.add_argument(
'-v', '--variants',
action='store_true',
dest='variants',
help='Show variants in output (can be long)')
subparser.add_argument('-M', '--only-missing',
action='store_true',
dest='only_missing',
@@ -98,88 +104,6 @@ def setup_parser(subparser):
help='optional specs to filter results')
def gray_hash(spec, length):
return colorize('@K{%s}' % spec.dag_hash(length))
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
namespace = kwargs.get('namespace', False)
hlen = 7
if kwargs.get('very_long', False):
hashes = True
hlen = None
nfmt = '.' if namespace else '_'
format_string = '$%s$@$+' % nfmt
flags = kwargs.get('show_flags', False)
if flags:
format_string = '$%s$@$%%+$+' % nfmt
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
print
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
architecture, spack.spec.compiler_color,
compiler)
tty.hline(colorize(header), char='-')
specs = index[(architecture, compiler)]
specs.sort()
abbreviated = [s.format(format_string, color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %%-%ds%%s" % width
for abbrv, spec in zip(abbreviated, specs):
if hashes:
print(gray_hash(spec, hlen), )
print(format % (abbrv, spec.prefix))
elif mode == 'deps':
for spec in specs:
print(spec.tree(
format=format_string,
color=True,
indent=4,
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
elif mode == 'short':
# Print columns of output if not printing flags
if not flags:
def fmt(s):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
string += s.format('$-%s$@$+' % nfmt, color=True)
return string
colify(fmt(s) for s in specs)
# Print one entry per line if including flags
else:
for spec in specs:
# Print the hash if necessary
hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
print(hsh + spec.format(format_string, color=True) + '\n')
else:
raise ValueError(
"Invalid mode for display_specs: %s. Must be one of (paths,"
"deps, short)." % mode) # NOQA: ignore=E501
def query_arguments(args):
# Check arguments
if args.explicit and args.implicit:
@@ -236,4 +160,6 @@ def find(parser, args):
mode=args.mode,
long=args.long,
very_long=args.very_long,
show_flags=args.show_flags)
show_flags=args.show_flags,
namespace=args.namespace,
variants=args.variants)
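The display_specs body removed above (it moved into spack.cmd) starts by grouping specs under (architecture, compiler) keys before printing each group. The grouping idea in miniature, with made-up triples standing in for index_by:

from collections import defaultdict

# Made-up (architecture, compiler, name) triples standing in for specs.
specs = [('linux-x86_64', 'gcc@4.9', 'zlib'),
         ('linux-x86_64', 'clang@3.8', 'hdf5'),
         ('linux-x86_64', 'gcc@4.9', 'mpich')]

index = defaultdict(list)
for arch, compiler, name in specs:
    index[(arch, compiler)].append(name)

for key in sorted(index):
    print(key, sorted(index[key]))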

View File

@@ -30,6 +30,7 @@
description = "Generate graphs of package dependency relationships."
def setup_parser(subparser):
setup_parser.parser = subparser
@@ -42,10 +43,12 @@ def setup_parser(subparser):
help="Generate graph in dot format and print to stdout.")
subparser.add_argument(
'--concretize', action='store_true', help="Concretize specs before graphing.")
'--concretize', action='store_true',
help="Concretize specs before graphing.")
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.")
'specs', nargs=argparse.REMAINDER,
help="specs of packages to graph.")
def graph(parser, args):
@@ -56,11 +59,11 @@ def graph(parser, args):
setup_parser.parser.print_help()
return 1
if args.dot: # Dot graph only if asked for.
if args.dot: # Dot graph only if asked for.
graph_dot(*specs)
elif specs: # ascii is default: user doesn't need to provide it explicitly
elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug)
for spec in specs[1:]:
print # extra line bt/w independent graphs
print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)

View File

@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
description = "Get help on spack and its commands"
def setup_parser(subparser):
subparser.add_argument('help_command', nargs='?', default=None,
help='command to get help on')
def help(parser, args):
if args.help_command:
parser.parse_args([args.help_command, '-h'])

View File

@@ -29,9 +29,11 @@
description = "Get detailed information on a particular package"
def padder(str_list, extra=0):
"""Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra
def pad(string):
string = str(string)
padding = max(0, length - len(string))
@@ -40,7 +42,8 @@ def pad(string):
def setup_parser(subparser):
subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
subparser.add_argument(
'name', metavar="PACKAGE", help="Name of package to get info for.")
def print_text_info(pkg):
@@ -81,12 +84,14 @@ def print_text_info(pkg):
print " " + fmt % (name, default, desc)
print
print "Dependencies:"
if pkg.dependencies:
colify(pkg.dependencies, indent=4)
else:
print " None"
for deptype in ('build', 'link', 'run'):
print
print "%s Dependencies:" % deptype.capitalize()
deps = pkg.dependencies_of_type(deptype)
if deps:
colify(deps, indent=4)
else:
print " None"
print
print "Virtual packages: "

View File

@@ -31,6 +31,7 @@
description = "Build and install packages"
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@@ -52,9 +53,16 @@ def setup_parser(subparser):
help="Display verbose build output while installing.")
subparser.add_argument(
'--fake', action='store_true', dest='fake',
help="Fake install. Just remove the prefix and touch a fake file in it.")
help="Fake install. Just remove prefix and create a fake file.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
'--dirty', action='store_true', dest='dirty',
help="Install a package *without* cleaning the environment.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER,
help="specs of packages to install")
subparser.add_argument(
'--run-tests', action='store_true', dest='run_tests',
help="Run tests during installation of a package.")
def install(parser, args):
@@ -77,6 +85,8 @@ def install(parser, args):
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps,
make_jobs=args.jobs,
run_tests=args.run_tests,
verbose=args.verbose,
fake=args.fake,
dirty=args.dirty,
explicit=True)

View File

@@ -29,36 +29,62 @@
import spack
import fnmatch
import re
description = "List available spack packages"
description ="List available spack packages"
def setup_parser(subparser):
subparser.add_argument(
'filter', nargs=argparse.REMAINDER,
help='Optional glob patterns to filter results.')
subparser.add_argument(
'-i', '--insensitive', action='store_true', default=False,
help='Filtering will be case insensitive.')
'-s', '--sensitive', action='store_true', default=False,
help='Use case-sensitive filtering. Default is case insensitive, '
'unless the query contains a capital letter.')
subparser.add_argument(
'-d', '--search-description', action='store_true', default=False,
help='Filtering will also search the description for a match.')
def list(parser, args):
# Start with all package names.
pkgs = spack.repo.all_package_names()
pkgs = set(spack.repo.all_package_names())
# filter if a filter arg was provided
if args.filter:
def match(p, f):
if args.insensitive:
p = p.lower()
f = f.lower()
return fnmatch.fnmatchcase(p, f)
pkgs = [p for p in pkgs if any(match(p, f) for f in args.filter)]
res = []
for f in args.filter:
if '*' not in f and '?' not in f:
r = fnmatch.translate('*' + f + '*')
else:
r = fnmatch.translate(f)
re_flags = re.I
if any(l.isupper() for l in f) or args.sensitive:
re_flags = 0
rc = re.compile(r, flags=re_flags)
res.append(rc)
if args.search_description:
def match(p, f):
if f.match(p):
return True
pkg = spack.repo.get(p)
if pkg.__doc__:
return f.match(pkg.__doc__)
return False
else:
def match(p, f):
return f.match(p)
pkgs = [p for p in pkgs if any(match(p, f) for f in res)]
# sort before displaying.
sorted_packages = sorted(pkgs, key=lambda s:s.lower())
sorted_packages = sorted(pkgs, key=lambda s: s.lower())
# Print all the package names in columns
indent=0
indent = 0
if sys.stdout.isatty():
tty.msg("%d packages." % len(sorted_packages))
colify(sorted_packages, indent=indent)
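The new filter above compiles each query into a regex and enables case-insensitive matching only when the query is all lowercase and -s is absent (smart case). A self-contained sketch of that behavior:

import fnmatch
import re

def smartcase_pattern(f, sensitive=False):
    # Bare words become substring matches; real globs pass through.
    if '*' not in f and '?' not in f:
        r = fnmatch.translate('*' + f + '*')
    else:
        r = fnmatch.translate(f)
    flags = 0 if (sensitive or any(c.isupper() for c in f)) else re.I
    return re.compile(r, flags=flags)

print(bool(smartcase_pattern('zlib').match('Py-Zlib')))  # True: insensitive
print(bool(smartcase_pattern('Zlib').match('py-zlib')))  # False: capital forces sensitivity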

View File

@@ -25,13 +25,16 @@
import argparse
import spack.modules
description ="Add package to environment using modules."
description = "Add package to environment using modules."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to load with modules.')
'spec', nargs=argparse.REMAINDER,
help="Spec of package to load with modules. "
"(If -, read specs from STDIN)")
def load(parser, args):

View File

@@ -22,8 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
import argparse
import llnl.util.tty as tty
@@ -32,16 +30,19 @@
import spack
import spack.cmd
description="Print out locations of various directories used by Spack"
description = "Print out locations of various directories used by Spack"
def setup_parser(subparser):
global directories
directories = subparser.add_mutually_exclusive_group()
directories.add_argument(
'-m', '--module-dir', action='store_true', help="Spack python module directory.")
'-m', '--module-dir', action='store_true',
help="Spack python module directory.")
directories.add_argument(
'-r', '--spack-root', action='store_true', help="Spack installation root.")
'-r', '--spack-root', action='store_true',
help="Spack installation root.")
directories.add_argument(
'-i', '--install-dir', action='store_true',
@@ -53,15 +54,19 @@ def setup_parser(subparser):
'-P', '--packages', action='store_true',
help="Top-level packages directory for Spack.")
directories.add_argument(
'-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
'-s', '--stage-dir', action='store_true',
help="Stage directory for a spec.")
directories.add_argument(
'-S', '--stages', action='store_true', help="Top level Stage directory.")
'-S', '--stages', action='store_true',
help="Top level Stage directory.")
directories.add_argument(
'-b', '--build-dir', action='store_true',
help="Checked out or expanded source directory for a spec (requires it to be staged first).")
help="Checked out or expanded source directory for a spec "
"(requires it to be staged first).")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.")
'spec', nargs=argparse.REMAINDER,
help="spec of package to fetch directory for.")
def location(parser, args):
@@ -104,9 +109,9 @@ def location(parser, args):
if args.stage_dir:
print pkg.stage.path
else: # args.build_dir is the default.
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:",
tty.die("Build directory does not exist yet. "
"Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path

View File

@@ -36,7 +36,7 @@
def setup_parser(subparser):
setup_parser.parser = subparser
subparser.add_argument('files', nargs=argparse.REMAINDER,
help="Files to checksum.")
help="Files/urls to checksum.")
def compute_md5_checksum(url):
@@ -67,6 +67,7 @@ def md5(parser, args):
tty.warn("%s" % e)
# Dump the MD5s at last without interleaving them with downloads
tty.msg("%d MD5 checksums:" % len(results))
checksum = 'checksum' if len(results) == 1 else 'checksums'
tty.msg("%d MD5 %s:" % (len(results), checksum))
for checksum, url in results:
print "%s %s" % (checksum, url)

View File

@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from datetime import datetime
import argparse
@@ -40,6 +39,7 @@
description = "Manage mirrors."
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
@@ -61,8 +61,9 @@ def setup_parser(subparser):
'-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
create_parser.add_argument(
'-o', '--one-version-per-spec', action='store_const', const=1, default=0,
help="Only fetch one 'preferred' version per spec, not all known versions.")
'-o', '--one-version-per-spec', action='store_const',
const=1, default=0,
help="Only fetch one 'preferred' version per spec, not all known.")
scopes = spack.config.config_scopes
@@ -70,7 +71,7 @@ def setup_parser(subparser):
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument(
'url', help="URL of mirror directory created by 'spack mirror create'.")
'url', help="URL of mirror directory from 'spack mirror create'.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
@@ -107,7 +108,7 @@ def mirror_add(args):
tty.die("Mirror with url %s already exists." % url)
# should only be one item per mirror dict.
items = [(n,u) for n,u in mirrors.items()]
items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url))
mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope)
@@ -121,7 +122,7 @@ def mirror_remove(args):
if not mirrors:
mirrors = syaml_dict()
if not name in mirrors:
if name not in mirrors:
tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name)
@@ -152,7 +153,7 @@ def _read_specs_from_file(filename):
s.package
specs.append(s)
except SpackError, e:
tty.die("Parse error in %s, line %d:" % (args.file, i+1),
tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
">>> " + string, str(e))
return specs
@@ -179,7 +180,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)
@@ -214,10 +215,10 @@ def mirror_create(args):
def mirror(parser, args):
action = { 'create' : mirror_create,
'add' : mirror_add,
'remove' : mirror_remove,
'rm' : mirror_remove,
'list' : mirror_list }
action = {'create': mirror_create,
'add': mirror_add,
'remove': mirror_remove,
'rm': mirror_remove,
'list': mirror_list}
action[args.mirror_command](args)
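This dict is the dispatch-table idiom the commit reformats in several commands (mirror, pkg, repo): map sub-command names to handlers, then index by the parsed name. The pattern in isolation, with stand-in handlers:

def mirror_create(args):
    print('create', args)

def mirror_list(args):
    print('list', args)

action = {'create': mirror_create, 'list': mirror_list}
action['create']('<parsed args>')  # dispatches on the sub-command name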

View File

@@ -22,83 +22,241 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import print_function
import collections
import os
import shutil
import sys
import llnl.util.tty as tty
import spack.cmd
from llnl.util.filesystem import mkdirp
import spack.cmd.common.arguments as arguments
import llnl.util.filesystem as filesystem
from spack.modules import module_types
from spack.util.string import *
description = "Manipulate modules and dotkits."
description = "Manipulate module files"
# Dictionary that will be populated with the list of sub-commands
# Each sub-command must be callable and accept 3 arguments :
# - mtype : the type of the module file
# - specs : the list of specs to be processed
# - args : namespace containing the parsed command line arguments
callbacks = {}
def subcommand(subparser_name):
"""Registers a function in the callbacks dictionary"""
def decorator(callback):
callbacks[subparser_name] = callback
return callback
return decorator
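A self-contained sketch of the registration scheme the comments above describe: the decorator fills the callbacks dict, and dispatch later indexes it by sub-command name (the sample callback and data are illustrative):

callbacks = {}

def subcommand(name):
    def decorator(callback):
        callbacks[name] = callback
        return callback
    return decorator

@subcommand('loads')
def loads(mtype, specs, args):
    # Every callback receives (module type, matched specs, parsed args).
    return ['{0} load {1}'.format(mtype, s) for s in specs]

print(callbacks['loads']('module', ['zlib', 'bzip2'], None))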
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
sp.add_parser('refresh', help='Regenerate all module files.')
# spack module refresh
refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
refresh_parser.add_argument(
'--delete-tree',
help='Delete the module file tree before refresh',
action='store_true'
)
arguments.add_common_arguments(
refresh_parser, ['constraint', 'module_type', 'yes_to_all']
)
find_parser = sp.add_parser('find', help='Find module files for packages.')
find_parser.add_argument('module_type',
help="Type of module to find file for. [" +
'|'.join(module_types) + "]")
find_parser.add_argument('spec',
nargs='+',
help='spec to find a module file for.')
# spack module find
find_parser = sp.add_parser('find', help='Find module files for packages')
arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
# spack module rm
rm_parser = sp.add_parser('rm', help='Remove module files')
arguments.add_common_arguments(
rm_parser, ['constraint', 'module_type', 'yes_to_all']
)
# spack module loads
loads_parser = sp.add_parser(
'loads',
help='Print the list of modules associated with a constraint'
)
loads_parser.add_argument(
'--input-only', action='store_false', dest='shell',
help='Generate input for module command (instead of a shell script)'
)
loads_parser.add_argument(
'-p', '--prefix', dest='prefix', default='',
help='Prepend to module names when issuing module load commands'
)
arguments.add_common_arguments(
loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
)
def module_find(mtype, spec_array):
"""Look at all installed packages and see if the spec provided
matches any. If it does, check whether there is a module file
of type <mtype> there, and print out the name that the user
should type to use that package's module.
class MultipleMatches(Exception):
pass
class NoMatch(Exception):
pass
@subcommand('loads')
def loads(mtype, specs, args):
"""Prompt the list of modules associated with a list of specs"""
# Get a comprehensive list of specs
if args.recurse_dependencies:
specs_from_user_constraint = specs[:]
specs = []
# FIXME : during module file creation nodes seem to be visited
# FIXME : multiple times even if cover='nodes' is given. This
# FIXME : workaround ensures we get a unique list of specs anyway.
# FIXME : (same problem as in spack/modules.py)
seen = set()
seen_add = seen.add
for spec in specs_from_user_constraint:
specs.extend(
[item for item in spec.traverse(order='post', cover='nodes')
if not (item in seen or seen_add(item))]
)
module_cls = module_types[mtype]
modules = [(spec, module_cls(spec).use_name)
for spec in specs if os.path.exists(module_cls(spec).file_name)]
module_commands = {
'tcl': 'module load ',
'dotkit': 'dotkit use '
}
d = {
'command': '' if not args.shell else module_commands[mtype],
'prefix': args.prefix
}
prompt_template = '{comment}{command}{prefix}{name}'
for spec, mod in modules:
d['comment'] = '' if not args.shell else '# {0}\n'.format(
spec.format())
d['name'] = mod
print(prompt_template.format(**d))
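The seen/seen_add pattern in loads above is an order-preserving de-duplication idiom; stripped to its essentials:

items = ['zlib', 'mpich', 'zlib', 'hdf5', 'mpich']
seen = set()
seen_add = seen.add  # bound once so the loop avoids repeated lookups
unique = [x for x in items if not (x in seen or seen_add(x))]
print(unique)  # ['zlib', 'mpich', 'hdf5']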
@subcommand('find')
def find(mtype, specs, args):
"""
Look at all installed packages and see if the spec provided
matches any. If it does, check whether there is a module file
of type <mtype> there, and print out the name that the user
should type to use that package's module.
"""
if mtype not in module_types:
tty.die("Invalid module type: '%s'. Options are %s" %
(mtype, comma_or(module_types)))
specs = spack.cmd.parse_specs(spec_array)
if len(specs) > 1:
tty.die("You can only pass one spec.")
spec = specs[0]
specs = spack.installed_db.query(spec)
if len(specs) == 0:
tty.die("No installed packages match spec %s" % spec)
raise NoMatch()
if len(specs) > 1:
tty.error("Multiple matches for spec %s. Choose one:" % spec)
for s in specs:
sys.stderr.write(s.tree(color=True))
sys.exit(1)
raise MultipleMatches()
mt = module_types[mtype]
mod = mt(specs[0])
spec = specs.pop()
mod = module_types[mtype](spec)
if not os.path.isfile(mod.file_name):
tty.die("No %s module is installed for %s" % (mtype, spec))
print(mod.use_name)
def module_refresh():
"""Regenerate all module files for installed packages known to
spack (some packages may no longer exist)."""
specs = [s for s in spack.installed_db.query(installed=True, known=True)]
@subcommand('rm')
def rm(mtype, specs, args):
"""Deletes module files associated with items in specs"""
module_cls = module_types[mtype]
specs_with_modules = [
spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
modules = [module_cls(spec) for spec in specs_with_modules]
for name, cls in module_types.items():
tty.msg("Regenerating %s module files." % name)
if os.path.isdir(cls.path):
shutil.rmtree(cls.path, ignore_errors=False)
mkdirp(cls.path)
for spec in specs:
cls(spec).write()
if not modules:
tty.msg('No module file matches your query')
raise SystemExit(1)
# Ask for confirmation
if not args.yes_to_all:
tty.msg(
'You are about to remove {0} module files for the following specs:\n'
.format(mtype))
spack.cmd.display_specs(specs_with_modules, long=True)
print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
# Remove the module files
for s in modules:
s.remove()
@subcommand('refresh')
def refresh(mtype, specs, args):
"""Regenerate module files for item in specs"""
# Print a message to the user about what is going to change
if not specs:
tty.msg('No package matches your query')
return
if not args.yes_to_all:
tty.msg(
'You are about to regenerate {name} module files for:\n'
.format(name=mtype))
spack.cmd.display_specs(specs, long=True)
print('')
spack.cmd.ask_for_confirmation('Do you want to proceed ? ')
cls = module_types[mtype]
# Detect name clashes
writers = [cls(spec) for spec in specs]
file2writer = collections.defaultdict(list)
for item in writers:
file2writer[item.file_name].append(item)
if len(file2writer) != len(writers):
message = 'Name clashes detected in module files:\n'
for filename, writer_list in file2writer.items():
if len(writer_list) > 1:
message += '\nfile : {0}\n'.format(filename)
for x in writer_list:
message += 'spec : {0}\n'.format(x.spec.format(color=True))
tty.error(message)
tty.error('Operation aborted')
raise SystemExit(1)
# Proceed regenerating module files
tty.msg('Regenerating {name} module files'.format(name=mtype))
if os.path.isdir(cls.path) and args.delete_tree:
shutil.rmtree(cls.path, ignore_errors=False)
filesystem.mkdirp(cls.path)
for x in writers:
x.write(overwrite=True)
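The clash detection above buckets module writers by target filename and flags any bucket with more than one entry; the same shape in miniature (the filenames are made up):

import collections

filenames = ['zlib.lua', 'hdf5.lua', 'zlib.lua']
file2writer = collections.defaultdict(list)
for i, fn in enumerate(filenames):
    file2writer[fn].append(i)

clashes = {fn: ws for fn, ws in file2writer.items() if len(ws) > 1}
print(clashes)  # {'zlib.lua': [0, 2]}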
def module(parser, args):
if args.module_command == 'refresh':
module_refresh()
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
'refresh': {
'installed': True,
'known': True
},
}
arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)
elif args.module_command == 'find':
module_find(args.module_type, args.spec)
module_type = args.module_type
constraint = args.constraint
try:
callbacks[args.subparser_name](module_type, args.specs, args)
except MultipleMatches:
message = ('the constraint \'{query}\' matches multiple packages, '
'and this is not allowed in this context')
tty.error(message.format(query=constraint))
for s in args.specs:
sys.stderr.write(s.format(color=True) + '\n')
raise SystemExit(1)
except NoMatch:
message = ('the constraint \'{query}\' matches no package, '
'and this is not allowed in this context')
tty.die(message.format(query=constraint))

View File

@@ -22,10 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import cgi
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.colify import *
import spack
@@ -34,21 +32,22 @@
def github_url(pkg):
"""Link to a package file on github."""
return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" %
pkg.name)
url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py"
return (url % pkg.name)
def rst_table(elts):
"""Print out a RST-style table."""
cols = StringIO()
ncol, widths = colify(elts, output=cols, tty=True)
header = " ".join("=" * (w-1) for w in widths)
header = " ".join("=" * (w - 1) for w in widths)
return "%s\n%s%s" % (header, cols.getvalue(), header)
def print_rst_package_list():
"""Print out information on all packages in restructured text."""
pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())
pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower())
pkg_names = [p.name for p in pkgs]
print ".. _package-list:"
print
@@ -62,7 +61,7 @@ def print_rst_package_list():
print "Spack currently has %d mainline packages:" % len(pkgs)
print
print rst_table("`%s`_" % p.name for p in pkgs)
print rst_table("`%s`_" % p for p in pkg_names)
print
print "-----"
@@ -79,12 +78,17 @@ def print_rst_package_list():
print
if pkg.versions:
print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
if pkg.dependencies:
print "Dependencies"
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in pkg.dependencies)
print
print " " + ", ".join(str(v) for v in
reversed(sorted(pkg.versions)))
for deptype in spack.alldeps:
deps = pkg.dependencies_of_type(deptype)
if deps:
print "%s Dependencies" % deptype.capitalize()
print " " + ", ".join("%s_" % d if d in pkg_names
else d for d in deps)
print
print "Description:"
print pkg.format_doc(indent=2)
print

View File

@@ -29,14 +29,16 @@
import spack
description="Patch expanded archive sources in preparation for install"
description = "Patch expanded archive sources in preparation for install"
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
'packages', nargs=argparse.REMAINDER,
help="specs of packages to stage")
def patch(parser, args):

View File

@@ -33,6 +33,7 @@
description = "Query packages associated with particular git revisions."
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
@@ -46,22 +47,28 @@ def setup_parser(subparser):
help="Revision to list packages for.")
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
diff_parser.add_argument('rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
diff_parser.add_argument('rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
diff_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
diff_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
add_parser = sp.add_parser('added', help=pkg_added.__doc__)
add_parser.add_argument('rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
add_parser.add_argument('rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
add_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
add_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
rm_parser.add_argument('rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
rm_parser.add_argument('rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
rm_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="Revision to compare against.")
rm_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="Revision to compare to rev1 (default is HEAD).")
def get_git():
@@ -88,7 +95,8 @@ def pkg_add(args):
for pkg_name in args.packages:
filename = spack.repo.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
tty.die("No such package: %s. Path does not exist:" %
pkg_name, filename)
git = get_git()
git('-C', spack.packages_path, 'add', filename)
@@ -112,7 +120,8 @@ def pkg_diff(args):
if u1:
print "%s:" % args.rev1
colify(sorted(u1), indent=4)
if u1: print
if u1:
print
if u2:
print "%s:" % args.rev2
@@ -122,19 +131,21 @@ def pkg_diff(args):
def pkg_removed(args):
"""Show packages removed since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u1: colify(sorted(u1))
if u1:
colify(sorted(u1))
def pkg_added(args):
"""Show packages added since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u2: colify(sorted(u2))
if u2:
colify(sorted(u2))
def pkg(parser, args):
action = { 'add' : pkg_add,
'diff' : pkg_diff,
'list' : pkg_list,
'removed' : pkg_removed,
'added' : pkg_added }
action = {'add': pkg_add,
'diff': pkg_diff,
'list': pkg_list,
'removed': pkg_removed,
'added': pkg_added}
action[args.pkg_command](args)

View File

@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import argparse
from llnl.util.tty.colify import colify
@@ -30,11 +29,13 @@
import spack
import spack.cmd
description ="List packages that provide a particular virtual package"
description = "List packages that provide a particular virtual package"
def setup_parser(subparser):
subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
help='Find packages that provide this virtual package')
subparser.add_argument(
'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
help='Find packages that provide this virtual package')
def providers(parser, args):

View File

@@ -22,9 +22,37 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
import spack.stage as stage
description = "Remove all temporary build files and downloaded archives"
description = "Remove temporary build files and/or downloaded archives"
def setup_parser(subparser):
subparser.add_argument(
'-s', '--stage', action='store_true', default=True,
help="Remove all temporary build stages (default).")
subparser.add_argument(
'-d', '--downloads', action='store_true',
help="Remove cached downloads.")
subparser.add_argument(
'-u', '--user-cache', action='store_true',
help="Remove caches in user home directory. Includes virtual indices.")
subparser.add_argument(
'-a', '--all', action='store_true',
help="Remove all of the above.")
def purge(parser, args):
stage.purge()
# Special case: no flags.
if not any((args.stage, args.downloads, args.user_cache, args.all)):
stage.purge()
return
# handle other flags with fall through.
if args.stage or args.all:
stage.purge()
if args.downloads or args.all:
spack.fetch_cache.destroy()
if args.user_cache or args.all:
spack.user_cache.destroy()
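The flag handling above is a fall-through pattern: no flags means the default action runs once, otherwise each selected step (or --all) runs independently. A minimal standalone sketch; note it uses default=False on --stage, whereas the hunk's default=True makes args.stage always truthy:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-s', '--stage', action='store_true')
parser.add_argument('-d', '--downloads', action='store_true')
parser.add_argument('-a', '--all', action='store_true')
args = parser.parse_args(['-d'])

if not any((args.stage, args.downloads, args.all)):
    print('default: purge stage')  # no flags given at all
if args.stage or args.all:
    print('purge stage')
if args.downloads or args.all:
    print('purge downloads')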

View File

@@ -30,18 +30,22 @@
import spack
def setup_parser(subparser):
subparser.add_argument(
'-c', dest='python_command', help='Command to execute.')
subparser.add_argument(
'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.")
'python_args', nargs=argparse.REMAINDER,
help="File to run plus arguments.")
description = "Launch an interpreter as spack would launch a command"
def python(parser, args):
# Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({'__name__' : '__main__',
'spack' : spack})
console = code.InteractiveConsole({'__name__': '__main__',
'spack': spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]

View File

@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
import spack
description = "Rebuild Spack's package database."
def reindex(parser, args):
spack.installed_db.reindex(spack.install_layout)

View File

@@ -23,20 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import re
import shutil
from external import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
import spack.spec
import spack.config
from spack.util.environment import get_path
from spack.repository import *
description = "Manage package source repositories."
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.config_scopes
@@ -57,13 +53,15 @@ def setup_parser(subparser):
# Add
add_parser = sp.add_parser('add', help=repo_add.__doc__)
add_parser.add_argument('path', help="Path to a Spack package repository directory.")
add_parser.add_argument(
'path', help="Path to a Spack package repository directory.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser = sp.add_parser(
'remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser.add_argument(
'path_or_namespace',
help="Path or namespace of a Spack package repository.")
@@ -100,7 +98,8 @@ def repo_add(args):
# If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope)
if not repos: repos = []
if not repos:
repos = []
if repo.root in repos or path in repos:
tty.die("Repository is already registered with Spack: %s" % path)
@@ -135,7 +134,7 @@ def repo_remove(args):
tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace))
return
except RepoError as e:
except RepoError:
continue
tty.die("No repository with path or namespace: %s"
@@ -149,7 +148,7 @@ def repo_list(args):
for r in roots:
try:
repos.append(Repo(r))
except RepoError as e:
except RepoError:
continue
msg = "%d package repositor" % len(repos)
@@ -166,9 +165,9 @@ def repo_list(args):
def repo(parser, args):
action = { 'create' : repo_create,
'list' : repo_list,
'add' : repo_add,
'remove' : repo_remove,
'rm' : repo_remove}
action = {'create': repo_create,
'list': repo_list,
'add': repo_add,
'remove': repo_remove,
'rm': repo_remove}
action[args.repo_command](args)

View File

@@ -31,6 +31,7 @@
description = "Revert checked out package source code."
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to restage")

View File

@@ -0,0 +1,94 @@
##############################################################################
# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Elizabeth Fischer
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import os
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
from spack.cmd.edit import edit_package
from spack.stage import DIYStage
description = "Create a configuration script and module, but don't build."
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
help="Display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
help="specs to use for install. Must contain package AND version.")
def setup(self, args):
if not args.spec:
tty.die("spack setup requires a package spec argument.")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
tty.die("spack setup only takes one spec.")
# Take a write lock before checking for existence.
with spack.installed_db.write_transaction():
spec = specs[0]
if not spack.repo.exists(spec.name):
tty.warn("No such package: %s" % spec.name)
create = tty.get_yes_or_no("Create this package?", default=False)
if not create:
tty.msg("Exiting without creating.")
sys.exit(1)
else:
tty.msg("Running 'spack edit -f %s'" % spec.name)
edit_package(spec.name, spack.repo.first_repo(), None, True)
return
if not spec.versions.concrete:
tty.die(
"spack setup spec must have a single, concrete version. "
"Did you forget a package version number?")
spec.concretize()
package = spack.repo.get(spec)
# It's OK if the package is already installed.
# Forces the build to run out of the current directory.
package.stage = DIYStage(os.getcwd())
# TODO: make this an argument, not a global.
spack.do_checksum = False
package.do_install(
keep_prefix=True, # Don't remove install directory
ignore_deps=args.ignore_deps,
verbose=args.verbose,
keep_stage=True, # don't remove source dir for SETUP.
install_phases=set(['setup', 'provenance']))

View File

@@ -25,23 +25,22 @@
import argparse
import spack.cmd
import llnl.util.tty as tty
import spack
import spack.url as url
description = "print out abstract and concrete versions of a spec."
def setup_parser(subparser):
subparser.add_argument('-i', '--ids', action='store_true',
help="show numerical ids for dependencies.")
subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages")
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages")
def spec(parser, args):
kwargs = { 'ids' : args.ids,
'indent' : 2,
'color' : True }
kwargs = {'ids': args.ids,
'indent': 2,
'color': True}
for spec in spack.cmd.parse_specs(args.specs):
print "Input spec"

View File

@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description="Expand downloaded archive in preparation for install"
description = "Expand downloaded archive in preparation for install"
def setup_parser(subparser):
subparser.add_argument(

View File

@@ -36,25 +36,25 @@
from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError
description = "Run package installation as a unit test, output formatted results."
description = "Run package install as a unit test, output formatted results."
def setup_parser(subparser):
subparser.add_argument('-j',
'--jobs',
action='store',
type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument(
'-j', '--jobs', action='store', type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument('-n',
'--no-checksum',
action='store_true',
dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
subparser.add_argument(
'-o', '--output', action='store',
help="test output goes in this file")
subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
subparser.add_argument(
'package', nargs=argparse.REMAINDER,
help="spec of package to install")
class TestResult(object):
@@ -65,6 +65,7 @@ class TestResult(object):
class TestSuite(object):
def __init__(self, filename):
self.filename = filename
self.root = ET.Element('testsuite')
@@ -75,14 +76,17 @@ def __enter__(self):
def append(self, item):
if not isinstance(item, TestCase):
raise TypeError('only TestCase instances may be appended to a TestSuite instance')
raise TypeError(
'only TestCase instances may be appended to TestSuite')
self.tests.append(item) # Append the item to the list of tests
def __exit__(self, exc_type, exc_val, exc_tb):
# Prepare the header for the entire test suite
number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
number_of_errors = sum(
x.result_type == TestResult.ERRORED for x in self.tests)
self.root.set('errors', str(number_of_errors))
number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
number_of_failures = sum(
x.result_type == TestResult.FAILED for x in self.tests)
self.root.set('failures', str(number_of_failures))
self.root.set('tests', str(len(self.tests)))
@@ -112,7 +116,8 @@ def __init__(self, classname, name, time=None):
self.element.set('time', str(time))
self.result_type = None
def set_result(self, result_type, message=None, error_type=None, text=None):
def set_result(self, result_type,
message=None, error_type=None, text=None):
self.result_type = result_type
result = TestCase.results[self.result_type]
if result is not None and result is not TestResult.PASSED:
@@ -133,7 +138,12 @@ def fetch_log(path):
def failed_dependencies(spec):
return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
def get_deps(deptype):
return set(item for item in spec.dependencies(deptype)
if not spack.repo.get(item).installed)
link_deps = get_deps('link')
run_deps = get_deps('run')
return link_deps.union(run_deps)
def get_top_spec_or_die(args):
@@ -150,13 +160,19 @@ def install_single_spec(spec, number_of_jobs):
# If it is already installed, skip the test
if spack.repo.get(spec).installed:
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
testcase.set_result(
TestResult.SKIPPED,
message='Skipped [already installed]',
error_type='already_installed')
return testcase
# If it relies on dependencies that did not install, skip
if failed_dependencies(spec):
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
testcase.set_result(
TestResult.SKIPPED,
message='Skipped [failed dependencies]',
error_type='dep_failed')
return testcase
# Otherwise try to install the spec
@@ -172,26 +188,30 @@ def install_single_spec(spec, number_of_jobs):
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.PASSED)
except InstallError:
# An InstallError is considered a failure (the recipe didn't work correctly)
# An InstallError is considered a failure (the recipe didn't work
# correctly)
duration = time.time() - start_time
# Try to get the log
lines = fetch_log(package.build_log_path)
text = '\n'.join(lines)
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
testcase.set_result(TestResult.FAILED,
message='Installation failure', text=text)
except FetchError:
# A FetchError is considered an error (we didn't even start building)
duration = time.time() - start_time
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
testcase.set_result(TestResult.ERRORED,
message='Unable to fetch package')
return testcase
def get_filename(args, top_spec):
if not args.output:
fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(
x=top_spec, hash=top_spec.dag_hash())
output_directory = join_path(os.getcwd(), 'test-output')
if not os.path.exists(output_directory):
os.mkdir(output_directory)

View File

@@ -23,33 +23,55 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from pprint import pprint
from llnl.util.filesystem import join_path, mkdirp
from llnl.util.tty.colify import colify
from llnl.util.lang import list_modules
import spack
import spack.test
from spack.fetch_strategy import FetchError
description = "Run unit tests"
description ="Run unit tests"
def setup_parser(subparser):
subparser.add_argument(
'names', nargs='*', help="Names of tests to run.")
subparser.add_argument(
'-l', '--list', action='store_true', dest='list', help="Show available tests")
'-l', '--list', action='store_true', dest='list',
help="Show available tests")
subparser.add_argument(
'--createXmlOutput', action='store_true', dest='createXmlOutput',
'--createXmlOutput', action='store_true', dest='createXmlOutput',
help="Create JUnit XML from test results")
subparser.add_argument(
'--xmlOutputDir', dest='xmlOutputDir',
'--xmlOutputDir', dest='xmlOutputDir',
help="Nose creates XML files in this directory")
subparser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
help="verbose output")
class MockCache(object):
def store(self, copyCmd, relativeDst):
pass
def fetcher(self, targetPath, digest):
return MockCacheFetcher()
class MockCacheFetcher(object):
def set_stage(self, stage):
pass
def fetch(self):
raise FetchError("Mock cache always fails for tests")
def __str__(self):
return "[mock fetcher]"
def test(parser, args):
if args.list:
print "Available tests:"
@@ -63,7 +85,8 @@ def test(parser, args):
outputDir = join_path(os.getcwd(), "test-output")
else:
outputDir = os.path.abspath(args.xmlOutputDir)
if not os.path.exists(outputDir):
mkdirp(outputDir)
spack.fetch_cache = MockCache()
spack.test.run(args.names, outputDir, args.verbose)

View File

@@ -30,7 +30,6 @@
import spack
import spack.cmd
import spack.repository
from spack.cmd.find import display_specs
description = "Remove an installed package"
@@ -39,51 +38,54 @@
b) use spack uninstall -a to uninstall ALL matching specs.
"""
def ask_for_confirmation(message):
while True:
tty.msg(message + '[y/n]')
choice = raw_input().lower()
if choice == 'y':
break
elif choice == 'n':
raise SystemExit('Operation aborted')
tty.warn('Please reply either "y" or "n"')
# Arguments for display_specs when we find ambiguity
display_args = {
'long': True,
'show_flags': True,
'variants': True
}
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Remove regardless of whether other packages depend on this one.")
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="USE CAREFULLY. Remove ALL installed packages that match each " +
"supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
"libelf are uninstalled. This is both useful and dangerous, like rm -r.")
help="USE CAREFULLY. Remove ALL installed packages that match each "
"supplied spec. i.e., if you say uninstall libelf, ALL versions "
"of libelf are uninstalled. This is both useful and dangerous, "
"like rm -r.")
subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents',
help='Also uninstall any packages that depend on the ones given via command line.'
)
help='Also uninstall any packages that depend on the ones given '
'via command line.')
subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
help='Assume "yes" is the answer to every confirmation asked to the user.'
help='Assume "yes" is the answer to every confirmation requested')
)
subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
subparser.add_argument(
'packages',
nargs=argparse.REMAINDER,
help="specs of packages to uninstall")
def concretize_specs(specs, allow_multiple_matches=False, force=False):
"""
Returns a list of specs matching the non necessarily concretized specs given from cli
"""Returns a list of specs matching the non necessarily
concretized specs given from cli
Args:
specs: list of specs to be matched against installed packages
allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
allow_multiple_matches : if True multiple matches are admitted
Return:
list of specs
"""
specs_from_cli = [] # List of specs that match expressions given via command line
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
for spec in specs:
matching = spack.installed_db.query(spec)
@@ -92,7 +94,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
display_specs(matching, long=True, show_flags=True)
spack.cmd.display_specs(matching, **display_args)
print()
has_errors = True
@@ -109,8 +111,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
def installed_dependents(specs):
"""
Returns a dictionary that maps a spec with a list of its installed dependents
"""Returns a dictionary that maps a spec with a list of its
installed dependents
Args:
specs: list of specs to be checked for dependents
@@ -140,7 +142,7 @@ def do_uninstall(specs, force):
try:
# should work if package is known to spack
packages.append(item.package)
except spack.repository.UnknownPackageError as e:
except spack.repository.UnknownPackageError:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(item).do_uninstall(force=True)
@@ -162,17 +164,20 @@ def uninstall(parser, args):
with spack.installed_db.write_transaction():
specs = spack.cmd.parse_specs(args.packages)
# Gets the list of installed specs that match the ones given via cli
uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli
dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
# takes care of the case in which '-a' is given in the cli
uninstall_list = concretize_specs(specs, args.all, args.force)
dependent_list = installed_dependents(
uninstall_list) # takes care of '-d'
# Process dependent_list and update uninstall_list
has_error = False
if dependent_list and not args.dependents and not args.force:
for spec, lst in dependent_list.items():
tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
tty.error("Will not uninstall %s" %
spec.format("$_$@$%@$#", color=True))
print('')
print("The following packages depend on it:")
display_specs(lst, long=True)
spack.cmd.display_specs(lst, **display_args)
print('')
has_error = True
elif args.dependents:
@@ -181,14 +186,15 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list))
if has_error:
tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
tty.die('You can use spack uninstall --dependents '
'to uninstall these dependencies as well')
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print('')
display_specs(uninstall_list, long=True, show_flags=True)
spack.cmd.display_specs(uninstall_list, **display_args)
print('')
ask_for_confirmation('Do you want to proceed ? ')
spack.cmd.ask_for_confirmation('Do you want to proceed? ')
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
View File
@@ -25,13 +25,15 @@
import argparse
import spack.modules
description ="Remove package from environment using module."
description = "Remove package from environment using module."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to unload with modules.')
def unload(parser, args):
View File
@@ -25,13 +25,15 @@
import argparse
import spack.modules
description ="Remove package from environment using dotkit."
description = "Remove package from environment using dotkit."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to unuse with dotkit.')
def unuse(parser, args):
View File
@@ -22,28 +22,28 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import llnl.util.tty as tty
import spack
import spack.url
from spack.util.web import find_versions_of_archive
description = "Show parsing of a URL, optionally spider web for other versions."
description = "Show parsing of a URL, optionally spider web for versions."
def setup_parser(subparser):
subparser.add_argument('url', help="url of a package archive")
subparser.add_argument(
'-s', '--spider', action='store_true', help="Spider the source page for versions.")
'-s', '--spider', action='store_true',
help="Spider the source page for versions.")
def print_name_and_version(url):
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
underlines = [" "] * max(ns+nl, vs+vl)
for i in range(ns, ns+nl):
underlines = [" "] * max(ns + nl, vs + vl)
for i in range(ns, ns + nl):
underlines[i] = '-'
for i in range(vs, vs+vl):
for i in range(vs, vs + vl):
underlines[i] = '~'
print " %s" % url
View File
@@ -22,12 +22,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
def setup_parser(subparser):
subparser.add_argument(
'-c', '--color', action='store_true',
@@ -53,6 +53,7 @@ def urls(parser, args):
for url in sorted(urls):
if args.color or args.extrapolation:
print spack.url.color_url(url, subs=args.extrapolation, errors=True)
print spack.url.color_url(
url, subs=args.extrapolation, errors=True)
else:
print url
View File
@@ -25,13 +25,15 @@
import argparse
import spack.modules
description ="Add package to environment using dotkit."
description = "Add package to environment using dotkit."
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
'spec', nargs=argparse.REMAINDER,
help='Spec of package to use with dotkit.')
def use(parser, args):
View File
@@ -22,15 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
description ="List available versions of a package"
description = "List available versions of a package"
def setup_parser(subparser):
subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
subparser.add_argument('package', metavar='PACKAGE',
help='Package to list versions for')
def versions(parser, args):
View File
@@ -25,21 +25,20 @@
import os
import re
import itertools
from datetime import datetime
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.filesystem import join_path
import spack.error
import spack.spec
import spack.architecture
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.util.environment import get_path
from spack.version import Version
__all__ = ['Compiler', 'get_compiler_version']
def _verify_executables(*paths):
for path in paths:
if not os.path.isfile(path) and os.access(path, os.X_OK):
@@ -48,8 +47,9 @@ def _verify_executables(*paths):
_version_cache = {}
def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
if not compiler_path in _version_cache:
if compiler_path not in _version_cache:
compiler = Executable(compiler_path)
output = compiler(version_arg, output=str, error=str)
@@ -107,19 +107,27 @@ def f77_rpath_arg(self):
@property
def fc_rpath_arg(self):
return '-Wl,-rpath,'
# Cray PrgEnv name that can be used to load this compiler
PrgEnv = None
# Name of module used to switch versions of this compiler
PrgEnv_compiler = None
def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
def __init__(self, cspec, operating_system,
paths, modules=[], alias=None, **kwargs):
def check(exe):
if exe is None:
return None
_verify_executables(exe)
return exe
self.cc = check(cc)
self.cxx = check(cxx)
self.f77 = check(f77)
self.fc = check(fc)
self.cc = check(paths[0])
self.cxx = check(paths[1])
if len(paths) > 2:
self.f77 = check(paths[2])
if len(paths) == 3:
self.fc = self.f77
else:
self.fc = check(paths[3])
# Unfortunately have to make sure these params are accepted
# in the same order they are returned by sorted(flags)
@@ -130,8 +138,10 @@ def check(exe):
if value is not None:
self.flags[flag] = value.split()
self.operating_system = operating_system
self.spec = cspec
self.modules = modules
self.alias = alias
@property
def version(self):
@@ -142,31 +152,30 @@ def version(self):
@property
def openmp_flag(self):
# If it is not overridden, assume it is not supported and warn the user
tty.die("The compiler you have chosen does not currently support OpenMP.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
tty.die(
"The compiler you have chosen does not currently support OpenMP.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++11 is supported by that compiler
@property
def cxx11_flag(self):
# If it is not overridden, assume it is not supported and warn the user
tty.die("The compiler you have chosen does not currently support C++11.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
tty.die(
"The compiler you have chosen does not currently support C++11.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++14 is supported by that compiler
@property
def cxx14_flag(self):
# If it is not overridden, assume it is not supported and warn the user
tty.die("The compiler you have chosen does not currently support C++14.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
tty.die(
"The compiler you have chosen does not currently support C++14.",
"If you think it should, please edit the compiler subclass and",
"submit a pull request or issue.")
#
# Compiler classes have methods for querying the version of
@@ -175,7 +184,6 @@ def cxx14_flag(self):
# Compiler *instances* are just data objects, and can only be
# constructed from an actual set of executables.
#
@classmethod
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
@@ -242,91 +250,46 @@ def check(key):
version = detect_version(full_path)
return (version, prefix, suffix, full_path)
except ProcessError, e:
tty.debug("Couldn't get version for compiler %s" % full_path, e)
tty.debug(
"Couldn't get version for compiler %s" % full_path, e)
return None
except Exception, e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
tty.debug("Error while executing candidate compiler %s" % full_path,
"%s: %s" %(e.__class__.__name__, e))
tty.debug("Error while executing candidate compiler %s"
% full_path,
"%s: %s" % (e.__class__.__name__, e))
return None
successful = [key for key in parmap(check, checks) if key is not None]
successful = [k for k in parmap(check, checks) if k is not None]
# The 'successful' list is ordered like the input paths.
# Reverse it here so that the dict creation (last insert wins)
# does not spoil the intended precedence.
successful.reverse()
return dict(((v, p, s), path) for v, p, s, path in successful)
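A quick standalone illustration of why the reverse matters (the paths are hypothetical):

# dict() keeps the *last* value inserted for a duplicate key, so
# reversing the list makes the first-found path win:
pairs = [(('4.7', '', ''), '/usr/bin/gcc-4.7'),    # found first
         (('4.7', '', ''), '/opt/bin/gcc-4.7')]    # found later
pairs.reverse()
assert dict(pairs)[('4.7', '', '')] == '/usr/bin/gcc-4.7'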
@classmethod
def find(cls, *path):
"""Try to find this type of compiler in the user's
environment. For each set of compilers found, this returns
compiler objects with the cc, cxx, f77, fc paths and the
version filled in.
This will search for compilers with the names in cc_names,
cxx_names, etc. and it will group them if they have common
prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
be grouped with g++-mp-4.7 and gfortran-mp-4.7.
"""
dicts = parmap(
lambda t: cls._find_matches_in_path(*t),
[(cls.cc_names, cls.cc_version) + tuple(path),
(cls.cxx_names, cls.cxx_version) + tuple(path),
(cls.f77_names, cls.f77_version) + tuple(path),
(cls.fc_names, cls.fc_version) + tuple(path)])
all_keys = set()
for d in dicts:
all_keys.update(d)
compilers = {}
for k in all_keys:
ver, pre, suf = k
# Skip compilers with unknown version.
if ver == 'unknown':
continue
paths = tuple(pn[k] if k in pn else None for pn in dicts)
spec = spack.spec.CompilerSpec(cls.name, ver)
if ver in compilers:
prev = compilers[ver]
# prefer the one with more compilers.
prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
newcount = len([p for p in paths if p is not None])
prevcount = len([p for p in prev_paths if p is not None])
# Don't add if it's not an improvement over prev compiler.
if newcount <= prevcount:
continue
compilers[ver] = cls(spec, *paths)
return list(compilers.values())
def __repr__(self):
"""Return a string representation of the compiler toolchain."""
return self.__str__()
def __str__(self):
"""Return a string representation of the compiler toolchain."""
return "%s(%s)" % (
self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
self.name, '\n '.join((str(s) for s in (
self.cc, self.cxx, self.f77, self.fc, self.modules,
str(self.operating_system)))))
class CompilerAccessError(spack.error.SpackError):
def __init__(self, path):
super(CompilerAccessError, self).__init__(
"'%s' is not a valid compiler." % path)
class InvalidCompilerError(spack.error.SpackError):
def __init__(self):
super(InvalidCompilerError, self).__init__(
"Compiler has no executables.")
View File
@@ -26,10 +26,9 @@
system and configuring Spack to use multiple compilers.
"""
import imp
import os
import platform
from llnl.util.lang import memoized, list_modules
from llnl.util.lang import list_modules
from llnl.util.filesystem import join_path
import spack
@@ -38,14 +37,12 @@
import spack.config
import spack.architecture
from spack.util.multiproc import parmap
from spack.compiler import Compiler
from spack.util.executable import which
from spack.util.naming import mod_to_class
from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_other_instance_vars = ['modules', 'operating_system']
_cache_config_file = []
# TODO: customize order in config file
if platform.system() == 'Darwin':
@@ -64,107 +61,111 @@ def converter(cspec_like, *args, **kwargs):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
return {
str(compiler.spec) : dict(
(attr, getattr(compiler, attr, None))
for attr in _required_instance_vars)
}
d = {}
d['spec'] = str(compiler.spec)
d['paths'] = dict((attr, getattr(compiler, attr, None))
for attr in _path_instance_vars)
d['operating_system'] = str(compiler.operating_system)
d['modules'] = compiler.modules if compiler.modules else []
if compiler.alias:
d['alias'] = compiler.alias
return {'compiler': d}
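For orientation, a single entry produced by _to_dict now has roughly this shape (the paths, version, and operating system shown are illustrative):

# {'compiler': {'spec': 'gcc@4.8.5',
#               'paths': {'cc': '/usr/bin/gcc',
#                         'cxx': '/usr/bin/g++',
#                         'f77': '/usr/bin/gfortran',
#                         'fc': '/usr/bin/gfortran'},
#               'operating_system': 'centos7',
#               'modules': []}}   # 'alias' appears only when set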
def get_compiler_config(arch=None, scope=None):
def get_compiler_config(scope=None, init_config=True):
"""Return the compiler configuration for the specified architecture.
"""
# Check whether we're on a front-end (native) architecture.
my_arch = spack.architecture.sys_type()
if arch is None:
arch = my_arch
def init_compiler_config():
"""Compiler search used when Spack has no compilers."""
config[arch] = {}
compilers = find_compilers(*get_path('PATH'))
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
config[arch].update(_to_dict(compiler))
spack.config.update_config('compilers', config, scope=scope)
compilers_dict.append(_to_dict(compiler))
spack.config.update_config('compilers', compilers_dict, scope=scope)
config = spack.config.get_config('compilers', scope=scope)
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
if arch == my_arch and arch not in config:
if not config and init_config:
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
config = spack.config.get_config('compilers', scope=scope)
elif scope == 'user':
# Check the site config and update the user config if
# nothing is configured at the site level.
site_config = spack.config.get_config('compilers', scope='site')
if not site_config:
init_compiler_config()
return config[arch] if arch in config else {}
config = spack.config.get_config('compilers', scope=scope)
return config
elif config:
return config
else:
return [] # Return empty list which we will later append to.
def add_compilers_to_config(compilers, arch=None, scope=None):
def add_compilers_to_config(compilers, scope=None, init_config=True):
"""Add compilers to the config for the specified architecture.
Arguments:
- compilers: a list of Compiler objects.
- arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
if arch is None:
arch = spack.architecture.sys_type()
compiler_config = get_compiler_config(arch, scope)
compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
compiler_config[str(compiler.spec)] = dict(
(c, getattr(compiler, c, "None"))
for c in _required_instance_vars)
update = { arch : compiler_config }
spack.config.update_config('compilers', update, scope)
compiler_config.append(_to_dict(compiler))
global _cache_config_file
_cache_config_file = compiler_config
spack.config.update_config('compilers', compiler_config, scope)
@_auto_compiler_spec
def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec.
Arguments:
- compiler_spec: a CompilerSpec object.
- arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
if arch is None:
arch = spack.architecture.sys_type()
# Need a better way for this
global _cache_config_file
compiler_config = get_compiler_config(arch, scope)
del compiler_config[str(compiler_spec)]
update = { arch : compiler_config }
compiler_config = get_compiler_config(scope)
config_length = len(compiler_config)
spack.config.update_config('compilers', update, scope)
filtered_compiler_config = [
comp for comp in compiler_config
if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
# Update the cache for changes
_cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed
raise CompilerSpecInsufficientlySpecificError(compiler_spec)
spack.config.update_config('compilers', filtered_compiler_config, scope)
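A minimal sketch of the removal filter above (the spec strings are illustrative):

import spack.spec
config = [{'compiler': {'spec': 'gcc@4.7'}},
          {'compiler': {'spec': 'clang@3.5'}}]
target = spack.spec.CompilerSpec('gcc@4.7')
kept = [c for c in config
        if spack.spec.CompilerSpec(c['compiler']['spec']) != target]
assert len(kept) == 1   # only the clang entry survives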
def all_compilers_config(arch=None, scope=None):
def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
# Get compilers for this architecture.
arch_config = get_compiler_config(arch, scope)
# Merge 'all' compilers with arch-specific ones.
# Arch-specific compilers have higher precedence.
merged_config = get_compiler_config('all', scope=scope)
merged_config = spack.config._merge_yaml(merged_config, arch_config)
return merged_config
# Create a cache of the config file so we don't load it every time.
global _cache_config_file
if not _cache_config_file:
_cache_config_file = get_compiler_config(scope, init_config)
return _cache_config_file
else:
return _cache_config_file
def all_compilers(arch=None, scope=None):
def all_compilers(scope=None, init_config=True):
# Return compiler specs from the merged config.
return [spack.spec.CompilerSpec(s)
for s in all_compilers_config(arch, scope)]
return [spack.spec.CompilerSpec(s['compiler']['spec'])
for s in all_compilers_config(scope, init_config)]
def default_compiler():
@@ -179,36 +180,18 @@ def default_compiler():
return sorted(versions)[-1]
def find_compilers(*path):
def find_compilers(*paths):
"""Return a list of compilers found in the suppied paths.
This invokes the find() method for each Compiler class,
and appends the compilers detected to a list.
This invokes the find_compilers() method for each operating
system associated with the host platform, and appends
the compilers detected to a list.
"""
# Make sure path elements exist, and include /bin directories
# under prefixes.
filtered_path = []
for p in path:
# Eliminate symlinks and just take the real directories.
p = os.path.realpath(p)
if not os.path.isdir(p):
continue
filtered_path.append(p)
# Check for a bin directory, add it if it exists
bin = join_path(p, 'bin')
if os.path.isdir(bin):
filtered_path.append(os.path.realpath(bin))
# Once the paths are cleaned up, do a search for each type of
# compiler. We can spawn a bunch of parallel searches to reduce
# the overhead of spelunking all these directories.
types = all_compiler_types()
compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
clist = reduce(lambda x,y: x+y, compiler_lists)
return clist
# Find compilers for each operating system class
oss = all_os_classes()
compiler_lists = []
for o in oss:
compiler_lists.extend(o.find_compilers(*paths))
return compiler_lists
def supported_compilers():
@@ -227,51 +210,85 @@ def supported(compiler_spec):
@_auto_compiler_spec
def find(compiler_spec, arch=None, scope=None):
def find(compiler_spec, scope=None):
"""Return specs of available compilers that match the supplied
compiler spec. Return an list if nothing found."""
return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]
@_auto_compiler_spec
def compilers_for_spec(compiler_spec, arch=None, scope=None):
def compilers_for_spec(compiler_spec, scope=None, **kwargs):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(arch, scope)
platform = kwargs.get("platform", None)
config = all_compilers_config(scope)
def get_compiler(cspec):
items = config[str(cspec)]
def get_compilers(cspec):
compilers = []
if not all(n in items for n in _required_instance_vars):
raise InvalidCompilerConfigurationError(cspec)
for items in config:
if items['compiler']['spec'] != str(cspec):
continue
items = items['compiler']
cls = class_for_compiler_name(cspec.name)
compiler_paths = []
for c in _required_instance_vars:
compiler_path = items[c]
if compiler_path != "None":
compiler_paths.append(compiler_path)
if not ('paths' in items and
all(n in items['paths'] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)
cls = class_for_compiler_name(cspec.name)
compiler_paths = []
for c in _path_instance_vars:
compiler_path = items['paths'][c]
if compiler_path != "None":
compiler_paths.append(compiler_path)
else:
compiler_paths.append(None)
mods = items.get('modules')
if mods == 'None':
mods = []
if 'operating_system' in items:
os = spack.architecture._operating_system_from_dict(
items['operating_system'], platform)
else:
compiler_paths.append(None)
os = None
flags = {}
for f in spack.spec.FlagMap.valid_compiler_flags():
if f in items:
flags[f] = items[f]
return cls(cspec, *compiler_paths, **flags)
alias = items['alias'] if 'alias' in items else None
matches = find(compiler_spec, arch, scope)
return [get_compiler(cspec) for cspec in matches]
flags = {}
for f in spack.spec.FlagMap.valid_compiler_flags():
if f in items:
flags[f] = items[f]
compilers.append(
cls(cspec, os, compiler_paths, mods, alias, **flags))
return compilers
matches = set(find(compiler_spec, scope))
compilers = []
for cspec in matches:
compilers.extend(get_compilers(cspec))
return compilers
@_auto_compiler_spec
def compiler_for_spec(compiler_spec):
def compiler_for_spec(compiler_spec, arch):
"""Get the compiler that satisfies compiler_spec. compiler_spec must
be concrete."""
operating_system = arch.platform_os
assert(compiler_spec.concrete)
compilers = compilers_for_spec(compiler_spec)
assert(len(compilers) == 1)
compilers = [
c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
if c.operating_system == operating_system]
if len(compilers) < 1:
raise NoCompilerForSpecError(compiler_spec, operating_system)
if len(compilers) > 1:
raise CompilerSpecInsufficientlySpecificError(compiler_spec)
return compilers[0]
@@ -289,18 +306,47 @@ def class_for_compiler_name(compiler_name):
return cls
def all_os_classes():
"""
Return the list of classes for all operating systems available on
this platform
"""
classes = []
platform = spack.architecture.platform()
for os_class in platform.operating_sys.values():
classes.append(os_class)
return classes
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):
super(InvalidCompilerConfigurationError, self).__init__(
"Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
% _required_instance_vars)
% _path_instance_vars)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
super(NoCompilersError, self).__init__("Spack could not find any compilers!")
super(NoCompilersError, self).__init__(
"Spack could not find any compilers!")
class NoCompilerForSpecError(spack.error.SpackError):
def __init__(self, compiler_spec, target):
super(NoCompilerForSpecError, self).__init__(
"No compilers for operating system %s satisfy spec %s"
% (target, compiler_spec))
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
def __init__(self, compiler_spec):
super(CompilerSpecInsufficientlySpecificError, self).__init__(
"Multiple compilers satisfy spec %s" % compiler_spec)
View File
@@ -0,0 +1,55 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
class Cce(Compiler):
"""Cray compiler environment compiler."""
# Subclasses use possible names of C compiler
cc_names = ['cc']
# Subclasses use possible names of C++ compiler
cxx_names = ['CC']
# Subclasses use possible names of Fortran 77 compiler
f77_names = ['ftn']
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['ftn']
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
suffixes = [r'-mp-\d\.\d']
PrgEnv = 'PrgEnv-cray'
PrgEnv_compiler = 'cce'
link_paths = {'cc': 'cc',
'cxx': 'c++',
'f77': 'f77',
'fc': 'fc'}
@classmethod
def default_version(cls, comp):
return get_compiler_version(comp, '-V', r'[Vv]ersion.*(\d+(\.\d+)+)')
View File
@@ -29,6 +29,7 @@
import llnl.util.tty as tty
from spack.version import ver
class Clang(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['clang']
@@ -43,11 +44,12 @@ class Clang(Compiler):
fc_names = []
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'clang/clang',
'cxx' : 'clang/clang++',
# Use default wrappers for fortran, in case provided in compilers.yaml
'f77' : 'f77',
'fc' : 'f90' }
link_paths = {'cc': 'clang/clang',
'cxx': 'clang/clang++',
# Use default wrappers for fortran, in case provided in
# compilers.yaml
'f77': 'f77',
'fc': 'f90'}
@property
def is_apple(self):
@@ -73,7 +75,7 @@ def cxx11_flag(self):
return "-std=c++11"
@classmethod
def default_version(self, comp):
def default_version(cls, comp):
"""The '--version' option works for clang compilers.
On most platforms, output looks like this::
View File
@@ -26,6 +26,7 @@
from spack.compiler import *
from spack.version import ver
class Gcc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['gcc']
@@ -44,10 +45,13 @@ class Gcc(Compiler):
suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
# Named wrapper links within spack.build_env_path
link_paths = {'cc' : 'gcc/gcc',
'cxx' : 'gcc/g++',
'f77' : 'gcc/gfortran',
'fc' : 'gcc/gfortran' }
link_paths = {'cc': 'gcc/gcc',
'cxx': 'gcc/g++',
'f77': 'gcc/gfortran',
'fc': 'gcc/gfortran'}
PrgEnv = 'PrgEnv-gnu'
PrgEnv_compiler = 'gcc'
@property
def openmp_flag(self):
@@ -76,7 +80,6 @@ def fc_version(cls, fc):
# older gfortran versions don't have simple dumpversion output.
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
View File
@@ -26,6 +26,7 @@
import llnl.util.tty as tty
from spack.version import ver
class Intel(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['icc']
@@ -40,10 +41,13 @@ class Intel(Compiler):
fc_names = ['ifort']
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'intel/icc',
'cxx' : 'intel/icpc',
'f77' : 'intel/ifort',
'fc' : 'intel/ifort' }
link_paths = {'cc': 'intel/icc',
'cxx': 'intel/icpc',
'f77': 'intel/ifort',
'fc': 'intel/ifort'}
PrgEnv = 'PrgEnv-intel'
PrgEnv_compiler = 'intel'
@property
def openmp_flag(self):
@@ -61,7 +65,6 @@ def cxx11_flag(self):
else:
return "-std=c++11"
@classmethod
def default_version(cls, comp):
"""The '--version' option seems to be the most consistent one
View File
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty
class Nag(Compiler):
# Subclasses use possible names of C compiler
@@ -39,11 +39,12 @@ class Nag(Compiler):
fc_names = ['nagfor']
# Named wrapper links within spack.build_env_path
link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
'cc' : 'cc',
'cxx' : 'c++',
'f77' : 'nag/nagfor',
'fc' : 'nag/nagfor' }
# Use default wrappers for C and C++, in case provided in compilers.yaml
link_paths = {
'cc': 'cc',
'cxx': 'c++',
'f77': 'nag/nagfor',
'fc': 'nag/nagfor'}
@property
def openmp_flag(self):
@@ -71,9 +72,8 @@ def default_version(self, comp):
"""The '-V' option works for nag compilers.
Output looks like this::
NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
Product NPL6A60NA for x86-64 Linux
Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K.
NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
Product NPL6A60NA for x86-64 Linux
"""
return get_compiler_version(
comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)')
View File
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
import llnl.util.tty as tty
class Pgi(Compiler):
# Subclasses use possible names of C compiler
@@ -39,10 +39,13 @@ class Pgi(Compiler):
fc_names = ['pgfortran', 'pgf95', 'pgf90']
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'pgi/pgcc',
'cxx' : 'pgi/pgc++',
'f77' : 'pgi/pgfortran',
'fc' : 'pgi/pgfortran' }
link_paths = {'cc': 'pgi/pgcc',
'cxx': 'pgi/pgc++',
'f77': 'pgi/pgfortran',
'fc': 'pgi/pgfortran'}
PrgEnv = 'PrgEnv-pgi'
PrgEnv_compiler = 'pgi'
@property
def openmp_flag(self):
@@ -52,7 +55,6 @@ def openmp_flag(self):
def cxx11_flag(self):
return "-std=c++11"
@classmethod
def default_version(cls, comp):
"""The '-V' option works for all the PGI compilers.
View File
@@ -26,24 +26,26 @@
import llnl.util.tty as tty
from spack.version import ver
class Xl(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['xlc','xlc_r']
cc_names = ['xlc', 'xlc_r']
# Subclasses use possible names of C++ compiler
cxx_names = ['xlC','xlC_r','xlc++','xlc++_r']
cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']
# Subclasses use possible names of Fortran 77 compiler
f77_names = ['xlf','xlf_r']
f77_names = ['xlf', 'xlf_r']
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']
# Named wrapper links within spack.build_env_path
link_paths = { 'cc' : 'xl/xlc',
'cxx' : 'xl/xlc++',
'f77' : 'xl/xlf',
'fc' : 'xl/xlf90' }
link_paths = {'cc': 'xl/xlc',
'cxx': 'xl/xlc++',
'f77': 'xl/xlf',
'fc': 'xl/xlf90'}
@property
def openmp_flag(self):
@@ -57,7 +59,7 @@ def cxx11_flag(self):
return "-qlanglvl=extended0x"
@classmethod
def default_version(self, comp):
def default_version(cls, comp):
"""The '-qversion' is the standard option fo XL compilers.
Output looks like this::
@@ -81,28 +83,28 @@ def default_version(self, comp):
"""
return get_compiler_version(
comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')
@classmethod
def fc_version(cls, fc):
"""The fortran and C/C++ versions of the XL compiler are always two units apart.
By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1.
Having such a difference in version number is confusing spack quite a lot.
Most notably if you keep the versions as is the default xl compiler will only
have fortran and no C/C++.
So we associate the Fortran compiler with the version associated to the C/C++
compiler.
One last stumble. Version numbers over 10 have at least a .1 those under 10
a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can
such a compiler mix and possibly older version of AIX and linux on power.
"""The fortran and C/C++ versions of the XL compiler are always
two units apart. By this we mean that the fortran release that
goes with XL C/C++ 11.1 is 13.1. Having such a difference in
version number is confusing spack quite a lot. Most notably
if you keep the versions as is the default xl compiler will
only have fortran and no C/C++. So we associate the Fortran
compiler with the version associated with the C/C++ compiler.
One last stumble. Version numbers over 10 have at least a .1;
those under 10, a .0. There is no xlf 9.x or under currently
available. BG/P and BG/L can have such a compiler mix, and
possibly older versions of AIX and Linux on POWER.
"""
fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])')
fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
cver = float(fver) - 2
if cver < 10 :
cver = cver - 0.1
if cver < 10:
cver = cver - 0.1
return str(cver)
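Worked through, the arithmetic above maps an xlf reporting 13.1 to C/C++ 11.1; a Fortran version that lands under 10 additionally loses 0.1 so it ends in .0 rather than .1. A standalone sketch of the same rule:

def xl_c_version_for(fver):
    # the C/C++ release is two units behind the Fortran release
    cver = float(fver) - 2
    if cver < 10:
        cver = cver - 0.1   # releases under 10 end in .0, not .1
    return str(cver)

assert xl_c_version_for('13.1') == '11.1'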
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
View File
@@ -40,12 +40,12 @@
import spack.error
from spack.version import *
from functools import partial
from spec import DependencyMap
from itertools import chain
from spack.config import *
class DefaultConcretizer(object):
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
default concretization strategies, or you can override all of them.
@@ -61,14 +61,19 @@ def _valid_virtuals_and_externals(self, spec):
if not providers:
raise UnsatisfiableProviderSpecError(providers[0], spec)
spec_w_preferred_providers = find_spec(
spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name))
spec,
lambda x: spack.pkgsort.spec_has_preferred_provider(
x.name, spec.name))
if not spec_w_preferred_providers:
spec_w_preferred_providers = spec
provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name)
provider_cmp = partial(spack.pkgsort.provider_compare,
spec_w_preferred_providers.name,
spec.name)
candidates = sorted(providers, cmp=provider_cmp)
# For each candidate package, if it has externals, add those to the usable list.
# if it's not buildable, then *only* add the externals.
# For each candidate package, if it has externals, add those
# to the usable list. If it's not buildable, then *only* add
# the externals.
usable = []
for cspec in candidates:
if is_spec_buildable(cspec):
@@ -84,7 +89,8 @@ def _valid_virtuals_and_externals(self, spec):
raise NoBuildError(spec)
def cmp_externals(a, b):
if a.name != b.name:
if a.name != b.name and (not a.external or a.external_module and
not b.external and b.external_module):
# We're choosing between different providers, so
# maintain order from provider sort
return candidates.index(a) - candidates.index(b)
@@ -102,7 +108,7 @@ def cmp_externals(a, b):
usable.sort(cmp=cmp_externals)
return usable
# XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@@ -113,26 +119,26 @@ def choose_virtual_or_external(self, spec):
# Find the nearest spec in the dag that has a compiler. We'll
# use that spec to calibrate compiler compatibility.
abi_exemplar = find_spec(spec, lambda(x): x.compiler)
abi_exemplar = find_spec(spec, lambda x: x.compiler)
if not abi_exemplar:
abi_exemplar = spec.root
# Make a list including ABI compatibility of specs with the exemplar.
strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates]
loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates]
loose = [spack.abi.compatible(c, abi_exemplar, loose=True)
for c in candidates]
keys = zip(strict, loose, candidates)
# Sort candidates from most to least compatibility.
# Note:
# 1. We reverse because True > False.
# 2. Sort is stable, so c's keep their order.
keys.sort(key=lambda k:k[:2], reverse=True)
keys.sort(key=lambda k: k[:2], reverse=True)
# Pull the candidates back out and return them in order
candidates = [c for s,l,c in keys]
candidates = [c for s, l, c in keys]
return candidates
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
the preferred version from spackconfig, and default to the package's
@@ -165,7 +171,12 @@ def prefer_key(v):
valid_versions.sort(key=prefer_key, reverse=True)
if valid_versions:
spec.versions = ver([valid_versions[0]])
# Disregard @develop and take the next valid version
if ver(valid_versions[0]) == ver('develop') and \
len(valid_versions) > 1:
spec.versions = ver([valid_versions[1]])
else:
spec.versions = ver([valid_versions[0]])
else:
# We don't know of any SAFE versions that match the given
# spec. Grab the spec's versions and grab the highest
@@ -187,45 +198,87 @@ def prefer_key(v):
return True # Things changed
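A small sketch of the @develop rule above (the version list is illustrative; valid_versions is sorted most-preferred first):

valid_versions = ['develop', '2.1.0', '2.0.0']
if valid_versions[0] == 'develop' and len(valid_versions) > 1:
    chosen = valid_versions[1]   # skip @develop when a release exists
else:
    chosen = valid_versions[0]
assert chosen == '2.1.0'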
def concretize_architecture(self, spec):
"""If the spec already had an architecture, return. Otherwise if
the root of the DAG has an architecture, then use that.
Otherwise take the system's default architecture.
Intuition: Architectures won't be set a lot, and generally you
want the host system's architecture. When architectures are
mixed in a spec, it is likely because the tool requires a
cross-compiled component, e.g. for tools that run on BlueGene
or Cray machines. These constraints will likely come directly
from packages, so require the user to be explicit if they want
to mess with the architecture, and revert to the default when
they're not explicit.
"""
if spec.architecture is not None:
def _concretize_operating_system(self, spec):
if spec.architecture.platform_os is not None and isinstance(
spec.architecture.platform_os,
spack.architecture.OperatingSystem):
return False
if spec.root.architecture:
spec.architecture = spec.root.architecture
if spec.root.architecture and spec.root.architecture.platform_os:
if isinstance(spec.root.architecture.platform_os,
spack.architecture.OperatingSystem):
spec.architecture.platform_os = \
spec.root.architecture.platform_os
else:
spec.architecture = spack.architecture.sys_type()
spec.architecture.platform_os = \
spec.architecture.platform.operating_system('default_os')
return True # changed
assert(spec.architecture is not None)
return True # changed
def _concretize_target(self, spec):
if spec.architecture.target is not None and isinstance(
spec.architecture.target, spack.architecture.Target):
return False
if spec.root.architecture and spec.root.architecture.target:
if isinstance(spec.root.architecture.target,
spack.architecture.Target):
spec.architecture.target = spec.root.architecture.target
else:
spec.architecture.target = spec.architecture.platform.target(
'default_target')
return True # changed
def _concretize_platform(self, spec):
if spec.architecture.platform is not None and isinstance(
spec.architecture.platform, spack.architecture.Platform):
return False
if spec.root.architecture and spec.root.architecture.platform:
if isinstance(spec.root.architecture.platform,
spack.architecture.Platform):
spec.architecture.platform = spec.root.architecture.platform
else:
spec.architecture.platform = spack.architecture.platform()
return True # changed?
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
architecture is not a basestring, then check if either the platform,
target or operating system are concretized. If any of the fields are
changed then return True. If everything is concretized (i.e the
architecture attribute is a namedtuple of classes) then return False.
If the target is a string type, then convert the string into a
concretized architecture. If it has no architecture and the root of the
DAG has an architecture, then use the root otherwise use the defaults
on the platform.
"""
if spec.architecture is None:
# Set the architecture to all defaults
spec.architecture = spack.architecture.Arch()
return True
# Concretize the operating_system and target based of the spec
ret = any((self._concretize_platform(spec),
self._concretize_operating_system(spec),
self._concretize_target(spec)))
return ret
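One subtlety in the any() call above: because the argument is a tuple rather than a generator, all three _concretize_* methods run before any() evaluates, so every architecture field gets a chance to concretize. A standalone sketch:

ran = []
def step(n, changed):
    ran.append(n)
    return changed
result = any((step(1, True), step(2, False)))   # tuple: both run
assert ran == [1, 2] and result is True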
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
the default variants from the package specification.
the user preferences from packages.yaml or the default variants from
the package specification.
"""
changed = False
preferred_variants = spack.pkgsort.spec_preferred_variants(
spec.package_class.name)
for name, variant in spec.package_class.variants.items():
if name not in spec.variants:
spec.variants[name] = spack.spec.VariantSpec(name, variant.default)
changed = True
if name in preferred_variants:
spec.variants[name] = preferred_variants.get(name)
else:
spec.variants[name] = \
spack.spec.VariantSpec(name, variant.default)
return changed
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler
@@ -238,15 +291,35 @@ def concretize_compiler(self, spec):
build with the compiler that will be used by libraries that
link to this one, to maximize compatibility.
"""
# Pass on concretizing the compiler if the target is not yet determined
if not spec.architecture.platform_os:
# Although returning True usually means something changed, here
# it signals that concretization must await other changes
return True
# Only use a matching compiler if it is of the proper style
# Takes advantage of the proper logic already existing in
# compiler_for_spec. Should think about whether this can be more
# efficient
def _proper_compiler_style(cspec, arch):
platform = arch.platform
compilers = spack.compilers.compilers_for_spec(cspec,
platform=platform)
return filter(lambda c: c.operating_system ==
arch.platform_os, compilers)
# return compilers
all_compilers = spack.compilers.all_compilers()
if (spec.compiler and
spec.compiler.concrete and
spec.compiler in all_compilers):
spec.compiler in all_compilers):
return False
#Find the another spec that has a compiler, or the root if none do
other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
# Find another spec that has a compiler, or the root if none does
other_spec = spec if spec.compiler else find_spec(
spec, lambda x: x.compiler)
if not other_spec:
other_spec = spec.root
other_compiler = other_spec.compiler
@@ -257,68 +330,97 @@ def concretize_compiler(self, spec):
spec.compiler = other_compiler.copy()
return True
# Filter the compilers into a sorted list based on the compiler_order from spackconfig
compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler)
cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name)
# Filter the compilers into a sorted list based on the compiler_order
# from spackconfig
compiler_list = all_compilers if not other_compiler else \
spack.compilers.find(other_compiler)
cmp_compilers = partial(
spack.pkgsort.compiler_compare, other_spec.name)
matches = sorted(compiler_list, cmp=cmp_compilers)
if not matches:
raise UnavailableCompilerVersionError(other_compiler)
arch = spec.architecture
raise UnavailableCompilerVersionError(other_compiler,
arch.platform_os)
# copy concrete version into other_compiler
spec.compiler = matches[0].copy()
index = 0
while not _proper_compiler_style(matches[index], spec.architecture):
index += 1
if index == len(matches) - 1:
arch = spec.architecture
raise UnavailableCompilerVersionError(spec.compiler,
arch.platform_os)
spec.compiler = matches[index].copy()
assert(spec.compiler.concrete)
return True # things changed.
def concretize_compiler_flags(self, spec):
"""
The compiler flags are updated to match those of the spec whose
compiler is used, defaulting to no compiler flags in the spec.
Default specs set at the compiler level will still be added later.
"""
if not spec.architecture.platform_os:
# Although returning True usually means something changed, here
# it signals that concretization must await other changes
return True
ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags():
try:
nearest = next(p for p in spec.traverse(direction='parents')
if ((p.compiler == spec.compiler and p is not spec)
and flag in p.compiler_flags))
if ((not flag in spec.compiler_flags) or
sorted(spec.compiler_flags[flag]) != sorted(nearest.compiler_flags[flag])):
if ((p.compiler == spec.compiler and
p is not spec) and
flag in p.compiler_flags))
if flag not in spec.compiler_flags or \
not (sorted(spec.compiler_flags[flag]) >=
sorted(nearest.compiler_flags[flag])):
if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
set(nearest.compiler_flags[flag]))
spec.compiler_flags[flag] = list(
set(spec.compiler_flags[flag]) |
set(nearest.compiler_flags[flag]))
else:
spec.compiler_flags[flag] = nearest.compiler_flags[flag]
spec.compiler_flags[
flag] = nearest.compiler_flags[flag]
ret = True
except StopIteration:
if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or
sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))):
if (flag in spec.root.compiler_flags and
((flag not in spec.compiler_flags) or
sorted(spec.compiler_flags[flag]) !=
sorted(spec.root.compiler_flags[flag]))):
if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
set(spec.root.compiler_flags[flag]))
spec.compiler_flags[flag] = list(
set(spec.compiler_flags[flag]) |
set(spec.root.compiler_flags[flag]))
else:
spec.compiler_flags[flag] = spec.root.compiler_flags[flag]
spec.compiler_flags[
flag] = spec.root.compiler_flags[flag]
ret = True
else:
if not flag in spec.compiler_flags:
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = []
# Include the compiler flag defaults from the config files
# This ensures that spack will detect conflicts that stem from a change
# in default compiler flags.
compiler = spack.compilers.compiler_for_spec(spec.compiler)
compiler = spack.compilers.compiler_for_spec(
spec.compiler, spec.architecture)
for flag in compiler.flags:
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = compiler.flags[flag]
if compiler.flags[flag] != []:
ret = True
else:
if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and
(not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))):
if ((sorted(spec.compiler_flags[flag]) !=
sorted(compiler.flags[flag])) and
(not set(spec.compiler_flags[flag]) >=
set(compiler.flags[flag]))):
ret = True
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
set(compiler.flags[flag]))
spec.compiler_flags[flag] = list(
set(spec.compiler_flags[flag]) |
set(compiler.flags[flag]))
return ret
@@ -327,8 +429,10 @@ def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
dagiter = chain(spec.traverse(direction='parents', root=False),
spec.traverse(direction='children', root=False))
deptype = ('build', 'link')
dagiter = chain(
spec.traverse(direction='parents', deptype=deptype, root=False),
spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
@@ -336,9 +440,11 @@ def find_spec(spec, condition):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
for relative in spec.root.traverse():
if relative is spec: continue
if id(relative) in visited: continue
for relative in spec.root.traverse(deptypes=spack.alldeps):
if relative is spec:
continue
if id(relative) in visited:
continue
if condition(relative):
return relative
@@ -385,25 +491,33 @@ def cmp_specs(lhs, rhs):
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
compiler spec."""
def __init__(self, compiler_spec):
def __init__(self, compiler_spec, operating_system):
super(UnavailableCompilerVersionError, self).__init__(
"No available compiler version matches '%s'" % compiler_spec,
"No available compiler version matches '%s' on operating_system %s"
% (compiler_spec, operating_system),
"Run 'spack compilers' to see available compiler Options.")
class NoValidVersionError(spack.error.SpackError):
"""Raised when there is no way to have a concrete version for a
particular spec."""
def __init__(self, spec):
super(NoValidVersionError, self).__init__(
"There are no valid versions for %s that match '%s'" % (spec.name, spec.versions))
"There are no valid versions for %s that match '%s'"
% (spec.name, spec.versions))
class NoBuildError(spack.error.SpackError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found"""
def __init__(self, spec):
super(NoBuildError, self).__init__(
"The spec '%s' is configured as not buildable, and no matching external installs were found" % spec.name)
msg = ("The spec '%s' is configured as not buildable, "
"and no matching external installs were found")
super(NoBuildError, self).__init__(msg % spec.name)
View File
@@ -1,4 +1,3 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -123,251 +122,31 @@
import re
import sys
import jsonschema
import llnl.util.tty as tty
import spack
import yaml
from jsonschema import Draft4Validator, validators
from llnl.util.filesystem import mkdirp
from ordereddict_backport import OrderedDict
from spack.error import SpackError
import jsonschema
from yaml.error import MarkedYAMLError
from jsonschema import Draft4Validator, validators
from ordereddict_backport import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack
from spack.error import SpackError
import spack.schema
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
from spack.build_environment import get_path_from_module
"""Dict from section names -> schema for that section."""
section_schemas = {
'compilers': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack compiler configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
'compilers:?': { # optional colon for overriding site config.
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': { # architecture
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*@\w[\w-]*': { # compiler spec
'type': 'object',
'additionalProperties': False,
'required': ['cc', 'cxx', 'f77', 'fc'],
'properties': {
'cc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cxx': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'f77': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'fc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'fflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cppflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'cxxflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'ldflags': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
'ldlibs': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
},},},},},},},},
'mirrors': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack mirror configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'mirrors:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type': 'string'},},},},},
'repos': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack repository configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'repos:?': {
'type': 'array',
'default': [],
'items': {
'type': 'string'},},},},
'packages': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack package configuration file schema',
'type': 'object',
'additionalProperties': False,
'patternProperties': {
r'packages:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': { # package name
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'version': {
'type' : 'array',
'default' : [],
'items' : { 'anyOf' : [ { 'type' : 'string' },
{ 'type' : 'number'}]}}, #version strings
'compiler': {
'type' : 'array',
'default' : [],
'items' : { 'type' : 'string' } }, #compiler specs
'buildable': {
'type': 'boolean',
'default': True,
},
'providers': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type' : 'array',
'default' : [],
'items' : { 'type' : 'string' },},},},
'paths': {
'type' : 'object',
'default' : {},
}
},},},},},},
'modules': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack module file configuration file schema',
'type': 'object',
'additionalProperties': False,
'definitions': {
'array_of_strings': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
},
'dictionary_of_strings': {
'type': 'object',
'patternProperties': {
r'\w[\w-]*': { # key
'type': 'string'
}
}
},
'dependency_selection': {
'type': 'string',
'enum': ['none', 'direct', 'all']
},
'module_file_configuration': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'filter': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'environment_blacklist': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
}
}
},
'autoload': {'$ref': '#/definitions/dependency_selection'},
'prerequisites': {'$ref': '#/definitions/dependency_selection'},
'conflict': {'$ref': '#/definitions/array_of_strings'},
'environment': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'set': {'$ref': '#/definitions/dictionary_of_strings'},
'unset': {'$ref': '#/definitions/array_of_strings'},
'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'},
'append_path': {'$ref': '#/definitions/dictionary_of_strings'}
}
}
}
},
'module_type_configuration': {
'type': 'object',
'default': {},
'anyOf': [
{
'properties': {
'whitelist': {'$ref': '#/definitions/array_of_strings'},
'blacklist': {'$ref': '#/definitions/array_of_strings'},
'naming_scheme': {
'type': 'string' # Can we be more specific here?
}
}
},
{
'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}}
}
]
}
},
'patternProperties': {
r'modules:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'prefix_inspections': {
'type': 'object',
'patternProperties': {
r'\w[\w-]*': { # path to be inspected for existence (relative to prefix)
'$ref': '#/definitions/array_of_strings'
}
}
},
'enable': {
'type': 'array',
'default': [],
'items': {
'type': 'string',
'enum': ['tcl', 'dotkit']
}
},
'tcl': {
'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration
{} # Specific tcl extensions
]
},
'dotkit': {
'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration
{} # Specific dotkit extensions
]
},
}
},
},
},
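These inline schemas are easier to read next to the data they accept. Below is a minimal sketch of sample payloads for each section (all package names, versions, paths, URLs, and module names are illustrative, not defaults shipped with Spack), assuming the schema dicts above are bound to hypothetical names like mirrors_schema:

import jsonschema  # the library Spack validates configs with

# mirrors.yaml: mirror names mapped to URL strings.
mirrors_data = {'mirrors': {'local_cache': 'file:///tmp/spack-mirror'}}

# repos.yaml: an ordered list of package repository paths.
repos_data = {'repos': ['/home/me/my_spack_repo']}

# packages.yaml: per-package preferences, externals, and virtual providers.
packages_data = {
    'packages': {
        'mpich': {
            'version': ['3.2', '3.1.4'],          # preferred versions, in order
            'buildable': False,                   # only use the external below
            'paths': {'mpich@3.2': '/opt/mpich-3.2'},
        },
        'all': {
            'providers': {'mpi': ['mpich']},      # preferred virtual providers
        },
    },
}

# modules.yaml: which generators run and how module files are written.
modules_data = {
    'modules': {
        'enable': ['tcl'],                        # schema allows 'tcl', 'dotkit'
        'prefix_inspections': {'lib': ['LD_LIBRARY_PATH']},
        'tcl': {
            'whitelist': ['gcc'],
            'all': {'autoload': 'direct',         # 'none', 'direct', or 'all'
                    'environment': {'set': {'MY_VAR': 'my_value'}}},
        },
    },
}

# e.g. jsonschema.validate(mirrors_data, mirrors_schema) raises a
# ValidationError if the payload does not match the schema.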
'compilers': spack.schema.compilers.schema,
'mirrors': spack.schema.mirrors.schema,
'repos': spack.schema.repos.schema,
'packages': spack.schema.packages.schema,
'targets': spack.schema.targets.schema,
'modules': spack.schema.modules.schema,
}
"""OrderedDict of config scopes keyed by name.
@@ -384,7 +163,7 @@ def validate_section_name(section):
def extend_with_default(validator_class):
"""Add support for the 'default' attribute for properties and patternProperties.
"""Add support for the 'default' attr for properties and patternProperties.
jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs
@@ -393,13 +172,15 @@ def extend_with_default(validator_class):
"""
validate_properties = validator_class.VALIDATORS["properties"]
validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
validate_pattern_properties = validator_class.VALIDATORS[
"patternProperties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
for err in validate_properties(validator, properties, instance, schema):
for err in validate_properties(
validator, properties, instance, schema):
yield err
def set_pp_defaults(validator, properties, instance, schema):
@@ -410,7 +191,8 @@ def set_pp_defaults(validator, properties, instance, schema):
if re.match(property, key) and val is None:
instance[key] = subschema["default"]
for err in validate_pattern_properties(validator, properties, instance, schema):
for err in validate_pattern_properties(
validator, properties, instance, schema):
yield err
return validators.extend(validator_class, {
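For context, the pattern above is the standard jsonschema recipe for a defaulting validator. A standalone, runnable sketch of the same idea (hypothetical names; Spack's real version also patches patternProperties, as shown above):

from jsonschema import Draft4Validator, validators

def defaulting_validator(validator_class):
    validate_properties = validator_class.VALIDATORS['properties']

    def set_defaults(validator, properties, instance, schema):
        # Fill in defaults first, then run the stock 'properties' check.
        for prop, subschema in properties.items():
            if 'default' in subschema:
                instance.setdefault(prop, subschema['default'])
        for err in validate_properties(validator, properties, instance, schema):
            yield err

    return validators.extend(validator_class, {'properties': set_defaults})

data = {}
defaulting_validator(Draft4Validator)(
    {'properties': {'x': {'default': 42}}}).validate(data)
assert data == {'x': 42}  # validation also filled in the default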
@@ -475,15 +257,23 @@ def write_section(self, section):
except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data)
except (yaml.YAMLError, IOError) as e:
raise ConfigFileError("Error writing to config file: '%s'" % str(e))
raise ConfigFileError(
"Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
self.sections = {}
"""Default configuration scope is the lowest-level scope. These are
versioned with Spack and can be overridden by sites or users."""
ConfigScope('defaults', os.path.join(spack.etc_path, 'spack', 'defaults'))
ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
ConfigScope('user', os.path.expanduser('~/.spack'))
"""Site configuration is per spack instance, for sites or projects.
No site-level configs should be checked into spack by default."""
ConfigScope('site', os.path.join(spack.etc_path, 'spack'))
"""User configuration can override both spack defaults and site config."""
ConfigScope('user', spack.user_config_path)
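The net effect is a fixed precedence chain. A sketch of that ordering (paths illustrative; the real scopes live in an OrderedDict keyed by name):

from collections import OrderedDict

# Later scopes override earlier ones when config sections are merged.
config_scopes = OrderedDict([
    ('defaults', 'etc/spack/defaults'),   # shipped with Spack
    ('site',     'etc/spack'),            # per Spack instance
    ('user',     '~/.spack'),             # per user, wins last
])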
def highest_precedence_scope():
@@ -575,8 +365,7 @@ def they_are(t):
# Source list is prepended (for precedence)
if they_are(list):
seen = set(source)
dest[:] = source + [x for x in dest if x not in seen]
dest[:] = source + [x for x in dest if x not in source]
return dest
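One likely motivation for testing membership against the list itself rather than a set(): YAML configs can contain unhashable items (dicts, lists), which set(source) would reject with a TypeError. A quick illustration with made-up values:

# Source (higher precedence) is prepended; entries already in source
# are dropped from dest. Works even when items are unhashable dicts.
source = ['user-repo', {'nested': 'value'}]
dest = ['site-repo', 'user-repo']
dest[:] = source + [x for x in dest if x not in source]
assert dest == ['user-repo', {'nested': 'value'}, 'site-repo']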
# Source dict is merged into dest.
@@ -667,7 +456,7 @@ def print_section(section):
data = syaml.syaml_dict()
data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False)
except (yaml.YAMLError, IOError) as e:
except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
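A sketch of the output shape this produces (plain PyYAML shown here for illustration; Spack's syaml wrapper adds key ordering and line-number marks on top, and the path is made up):

import yaml

data = {'repos': ['/home/me/my_spack_repo']}
print(yaml.dump(data, default_flow_style=False))
# repos:
# - /home/me/my_spack_repo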
@@ -679,7 +468,8 @@ def spec_externals(spec):
external_specs = []
pkg_paths = allpkgs.get(name, {}).get('paths', None)
if not pkg_paths:
pkg_modules = allpkgs.get(name, {}).get('modules', None)
if (not pkg_paths) and (not pkg_modules):
return []
for external_spec, path in pkg_paths.iteritems():
@@ -690,6 +480,18 @@ def spec_externals(spec):
external_spec = spack.spec.Spec(external_spec, external=path)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
for external_spec, module in pkg_modules.iteritems():
if not module:
continue
path = get_path_from_module(module)
external_spec = spack.spec.Spec(
external_spec, external=path, external_module=module)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
return external_specs
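With this hunk, spec_externals() accepts externals declared by module as well as by path. A sketch of the two packages.yaml shapes it consumes, written as the parsed dict (spec strings, path, and module name are illustrative):

allpkgs = {
    'mpich': {
        'buildable': False,
        'paths':   {'mpich@3.2%gcc@4.9.3': '/opt/mpich-3.2'},  # prefix known
        'modules': {'mpich@3.1.4%intel': 'mpich/3.1.4'},       # prefix from module
    },
}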
@@ -720,6 +522,7 @@ def get_path(path, data):
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
def __init__(self, validation_error, data):
# Try to get line number from erroneous instance and its parent
instance_mark = getattr(validation_error.instance, '_start_mark', None)

Some files were not shown because too many files have changed in this diff