Merge branch 'develop' of github.com:kev40293/spack into develop

This commit is contained in:
Kevin Brandstatter 2015-02-26 14:42:12 -06:00
commit 2f2eab0e15
115 changed files with 4343 additions and 817 deletions

View File

@ -36,16 +36,19 @@ Authors
----------------
Spack was written by Todd Gamblin, tgamblin@llnl.gov.
Significant contributions were also made by the following awesome
people:
Significant contributions were also made by:
* David Beckingsale
* David Boehme
* Alfredo Gimenez
* Luc Jaulmes
* Matt Legendre
* Greg Lee
* Adam Moody
* Saravan Pantham
* Joachim Protze
* Bob Robey
* Justin Too
Release
----------------

View File

@ -58,14 +58,16 @@ parser = argparse.ArgumentParser(
description='Spack: the Supercomputing PACKage Manager.')
parser.add_argument('-V', '--version', action='version',
version="%s" % spack.spack_version)
parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
parser.add_argument('-v', '--verbose', action='store_true',
help="Print additional output during builds")
parser.add_argument('-d', '--debug', action='store_true', dest='debug',
parser.add_argument('-d', '--debug', action='store_true',
help="Write out debug logs during compile")
parser.add_argument('-k', '--insecure', action='store_true', dest='insecure',
parser.add_argument('-k', '--insecure', action='store_true',
help="Do not check ssl certificates when downloading archives.")
parser.add_argument('-m', '--mock', action='store_true', dest='mock',
parser.add_argument('-m', '--mock', action='store_true',
help="Use mock packages instead of real ones.")
parser.add_argument('-p', '--profile', action='store_true',
help="Profile execution using cProfile.")
# each command module implements a parser() function, to which we pass its
# subparser for setup.
@ -85,42 +87,49 @@ if len(sys.argv) == 1:
# actually parse the args.
args = parser.parse_args()
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
spack.debug = args.debug
def main():
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
spack.debug = args.debug
spack.spack_working_dir = working_dir
if args.mock:
from spack.packages import PackageDB
spack.db = PackageDB(spack.mock_packages_path)
spack.spack_working_dir = working_dir
if args.mock:
from spack.packages import PackageDB
spack.db = PackageDB(spack.mock_packages_path)
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.")
spack.curl.add_default_arg('-k')
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.")
spack.curl.add_default_arg('-k')
# Try to load the particular command asked for and run it
command = spack.cmd.get_command(args.command)
try:
return_val = command(parser, args)
except SpackError, e:
if spack.debug:
# In debug mode, raise with a full stack trace.
raise
elif e.long_message:
tty.die(e.message, e.long_message)
# Try to load the particular command asked for and run it
command = spack.cmd.get_command(args.command)
try:
return_val = command(parser, args)
except SpackError, e:
if spack.debug:
# In debug mode, raise with a full stack trace.
raise
elif e.long_message:
tty.die(e.message, e.long_message)
else:
tty.die(e.message)
except KeyboardInterrupt:
sys.stderr.write('\n')
tty.die("Keyboard interrupt.")
# Allow commands to return values if they want to exit with some other code.
if return_val is None:
sys.exit(0)
elif isinstance(return_val, int):
sys.exit(return_val)
else:
tty.die(e.message)
tty.die("Bad return value from command %s: %s" % (args.command, return_val))
except KeyboardInterrupt:
sys.stderr.write('\n')
tty.die("Keyboard interrupt.")
# Allow commands to return values if they want to exit with some other code.
if return_val is None:
sys.exit(0)
elif isinstance(return_val, int):
sys.exit(return_val)
if args.profile:
import cProfile
cProfile.run('main()', sort='tottime')
else:
tty.die("Bad return value from command %s: %s" % (args.command, return_val))
main()

View File

@ -1,3 +1,4 @@
package_list.rst
command_index.rst
spack*.rst
_build

View File

@ -27,6 +27,18 @@ all: html
package_list:
spack package-list > package_list.rst
#
# Generate a command index
#
command_index:
cp command_index.in command_index.rst
echo >> command_index.rst
grep -ho '.. _spack-.*:' *rst \
| perl -pe 's/.. _([^:]*):/ * :ref:`\1`/' \
| sort >> command_index.rst
custom_targets: package_list command_index
#
# This creates a git repository and commits generated html docs.
# It them pushes the new branch into THIS repository as gh-pages.
@ -77,10 +89,10 @@ help:
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
-rm -f package_list.rst
-rm -f package_list.rst command_index.rst
-rm -rf $(BUILDDIR)/* $(APIDOC_FILES)
html: apidoc package_list
html: apidoc custom_targets
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

View File

@ -4,18 +4,19 @@ Basic usage
=====================
Spack is implemented as a single command (``spack``) with many
*subcommands*, much like ``git``, ``svn``, ``yum``, or ``apt-get``.
Only a small subset of commands are needed for typical usage.
This section covers a small set of subcommands that should cover most
general use cases for Spack.
*subcommands*. Only a small subset of commands is needed for typical
usage.
Listing available packages
------------------------------
The first thing you will likely want to do with spack is find out what
software is available to install. There are a few relevant commands.
The first thing you likely want to do with spack is to install some
software. Before that, you need to know what's available. You can
see available package names either using the :ref:`package-list`, or
using the commands below.
.. _spack-list:
``spack list``
~~~~~~~~~~~~~~~~
@ -26,45 +27,292 @@ Spack can install:
.. command-output:: spack list
The packages are listed by name in alphabetical order. You can also
do wildcard searches using ``*``:
do wildcard searches using ``*``:
.. command-output:: spack list m*
.. command-output:: spack list *util*
.. _spack-info:
``spack info``
~~~~~~~~~~~~~~~~
To get information on a particular package from the full list, run
``spack info <package name>``. For example, for ``mpich`` the output
looks like this:
To get more information on a particular package from ``spack list``, use
``spack info``. Just supply the name of a package:
.. command-output:: spack info mpich
This includes basic information about the package: where to download
it, its dependencies, virtual packages it provides (e.g. an MPI
implementation will provide the MPI interface), and a text
description, if one is available. :ref:`Dependencies
<sec-specs>` and :ref:`virtual dependencies
<sec-virtual-dependencies>` are described in more detail later.
Most of the information is self-explanatory. *Safe versions* are
versions for which Spack has a checksum, and Spack will use the
checksum to ensure they were downloaded without errors or tampering.
:ref:`Dependencies <sec-specs>` and :ref:`virtual
dependencies <sec-virtual-dependencies>` are described in more detail
later.
.. _spack-versions:
``spack versions``
~~~~~~~~~~~~~~~~~~~~~~~~
To see available versions of a package, run ``spack versions``, for
example:
To see *more* available versions of a package, run ``spack versions``,
for example:
.. command-output:: spack versions libelf
Since it has to manage many different software packages, Spack doesn't
place many restrictions on what a package version has to look like.
Packages like ``mpich`` use traditional version numbers like
``3.0.4``. Other packages, like ``libdwarf`` use date-stamp versions
like ``20130729``. Versions can contain numbers, letters, dashes,
underscores, and periods.
There are two sections in the output. *Safe versions* are ones that
have already been checksummed. Spack goes a step further, though, and
also shows you what versions are available out on the web---these are
*remote versions*. Spack gets this information by scraping it
directly from web pages. Depending on the package, Spack may or may
not be able to find any remote versions.
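Conceptually, finding remote versions is just pattern matching on a
package's download page. A minimal sketch of the idea follows; this is
*not* Spack's implementation, and the HTML snippet and regular
expression are made up for illustration:

.. code-block:: python

   import re

   # Hypothetical listing page for libelf tarballs (illustration only).
   page = '''
   <a href="libelf-0.8.12.tar.gz">libelf-0.8.12.tar.gz</a>
   <a href="libelf-0.8.13.tar.gz">libelf-0.8.13.tar.gz</a>
   '''

   # Anything that looks like a version in a tarball link is a candidate.
   versions = sorted(set(re.findall(r'libelf-([\d.]+)\.tar\.gz', page)))
   print(versions)   # ['0.8.12', '0.8.13']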
Compiler Configuration
Installing and uninstalling
------------------------------
Now that you know how to list available packages and versions, you're
ready to start installing things.
.. _spack-install:
``spack install``
~~~~~~~~~~~~~~~~~~~~~
``spack install`` will install any package shown by ``spack list``.
To install the latest version of a package, along with all of its
dependencies, simply give it a package name:
.. code-block:: sh
$ spack install mpileaks
If ``mpileaks`` depends on other packages, Spack will install those
first. It then fetches the tarball for ``mpileaks``, expands it,
verifies that it was downloaded without errors, builds it, and
installs it in its own directory under ``$SPACK_HOME/opt``. You'll see
a number of messages from spack, a lot of build output, and a message
that the package is installed:
.. code-block:: sh
$ spack install mpileaks
==> Installing mpileaks
==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
... build output ...
==> Successfully installed mpileaks.
Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
[+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
The last line, with the ``[+]``, indicates where the package is
installed.
Building a specific version
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Spack can also build *specific versions* of a package. To do this,
just add ``@`` after the package name, followed by a version:
.. code-block:: sh
$ spack install mpich@3.0.4
Any number of versions of the same package can be installed at once
without interfering with each other. This is good for multi-user
sites, as installing a version that one user needs will not disrupt
existing installations for other users.
In addition to different versions, Spack can customize the compiler,
compile-time options (variants), and platform (for cross compiles) of
an installation. Spack is unique in that it can also configure the
*dependencies* a package is built with. For example, two
configurations of the same version of a package, one built with boost
1.39.0, and the other version built with version 1.43.0, can coexist.
This can all be done on the command line using special syntax. Spack
calls the descriptor used to refer to a particular package
configuration a **spec**. In the command lines above, both
``mpileaks`` and ``mpileaks@3.0.4`` are specs. Specs are described in
detail in :ref:`sec-specs`.
.. _spack-uninstall:
``spack uninstall``
~~~~~~~~~~~~~~~~~~~~~
To uninstall a package, type ``spack uninstall <package>``. This will
completely remove the directory in which the package was installed.
.. code-block:: sh
spack uninstall mpich
If there are still installed packages that depend on the package to be
uninstalled, spack will refuse to uninstall it. You can override this
behavior with ``spack uninstall -f <package>``, but you risk breaking
other installed packages. In general, it is safer to remove dependent
packages *before* removing their dependencies.
A line like ``spack uninstall mpich`` may be ambiguous, if multiple
``mpich`` configurations are installed. For example, if both
``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer
to either one. Because it cannot determine which one to uninstall,
Spack will ask you to provide a version number to remove the
ambiguity. As an example, ``spack uninstall mpich@3.1`` is
unambiguous in this scenario.
Seeing installed packages
-----------------------------------
We know that ``spack list`` shows you the names of available packages,
but how do you figure out which are installed?
.. _spack-find:
``spack find``
~~~~~~~~~~~~~~~~~~~~~~
``spack find`` shows the *specs* of installed packages. A spec is
like a name, but it has a version, compiler, architecture, and build
options associated with it. In spack, you can have many installations
of the same package with different specs.
Running ``spack find`` with no arguments lists installed packages:
.. code-block:: sh
$ spack find
==> 74 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
boost@1.55.0 libelf@0.8.13 py-nose@1.3.4
bzip2@1.0.6 libffi@3.1 py-numpy@1.9.1
cairo@1.14.0 libmng@2.0.2 py-pygments@2.0.1
callpath@1.0.2 libpng@1.6.16 py-pyparsing@2.0.3
cmake@3.0.2 libtiff@4.0.3 py-pyside@1.2.2
dbus@1.8.6 libtool@2.4.2 py-pytz@2014.10
dbus@1.9.0 libxcb@1.11 py-setuptools@11.3.1
dyninst@8.1.2 libxml2@2.9.2 py-six@1.9.0
fontconfig@2.11.1 libxml2@2.9.2 python@2.7.8
freetype@2.5.3 llvm@3.0 qhull@1.0
gdk-pixbuf@2.31.2 memaxes@0.5 qt@4.8.6
glib@2.42.1 mesa@8.0.5 qt@5.4.0
graphlib@2.0.0 mpich@3.0.4 readline@6.3
gtkplus@2.24.25 mpileaks@1.0 sqlite@3.8.5
harfbuzz@0.9.37 mrnet@4.1.0 stat@2.1.0
hdf5@1.8.13 ncurses@5.9 tcl@8.6.3
icu@54.1 netcdf@4.3.3 tk@src
jpeg@9a openssl@1.0.1h vtk@6.1.0
launchmon@1.0.1 pango@1.36.8 xcb-proto@1.11
lcms@2.6 pixman@0.32.6 xz@5.2.0
libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
-- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
libelf@0.8.10 mpich@3.0.4
Packages are divided into groups according to their architecture and
compiler. Within each group, Spack tries to keep the view simple, and
only shows the version of installed packages.
In some cases, there may be different configurations of the *same*
version of a package installed. For example, there are two
installations of ``libdwarf@20130729`` above. We can look at them
in more detail using ``spack find -d``, and by asking only to show
``libdwarf`` packages:
.. code-block:: sh
$ spack find --deps libdwarf
==> 2 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
^libelf@0.8.12
libdwarf@20130729-b52fac98
^libelf@0.8.13
Now we see that the two instances of ``libdwarf`` depend on
*different* versions of ``libelf``: 0.8.12 and 0.8.13. This view can
become complicated for packages with many dependencies. If you just
want to know whether two packages' dependencies differ, you can use
``spack find -l``:
.. code-block:: sh
$ spack find -l libdwarf
==> 2 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
Now the ``libdwarf`` installs have hashes after their names. These are
hashes over all of the dependencies of each package. If the hashes
are the same, then the packages have the same dependency configuration.
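The exact hash Spack uses is an implementation detail, but the idea is
easy to sketch: hash a canonical description of the dependency
configuration and keep a short prefix, so equal configurations get
equal suffixes. The helper below is purely illustrative and is not
Spack's code:

.. code-block:: python

   import hashlib

   def config_hash(deps):
       """Short hash of a dependency configuration, e.g. {'libelf': '0.8.12'}."""
       canonical = ','.join('%s@%s' % item for item in sorted(deps.items()))
       return hashlib.sha1(canonical.encode('utf-8')).hexdigest()[:8]

   # Two libdwarf installs that differ only in their libelf dependency
   # get different short hashes; identical configurations would not.
   print(config_hash({'libelf': '0.8.12'}))
   print(config_hash({'libelf': '0.8.13'}))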
If you want to know the path where each package is installed, you can
use ``spack find -p``:
.. code-block:: sh
$ spack find -p
==> 74 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
...
And, finally, you can restrict your search to a particular package
by supplying its name:
.. code-block:: sh
$ spack find -p libelf
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
package. If you want to find only libelf versions 0.8.12 or newer,
you could say:
.. code-block:: sh
$ spack find libelf@0.8.12:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libelf@0.8.12 libelf@0.8.13
Finding just the versions of libdwarf built with a particular version
of libelf would look like this:
.. code-block:: sh
$ spack find -l libdwarf ^libelf@0.8.12
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
The full spec syntax is discussed in detail in :ref:`sec-specs`.
Compiler configuration
-----------------------------------
Spack has the ability to build packages with multiple compilers and
@ -72,6 +320,8 @@ compiler versions. Spack searches for compilers on your machine
automatically the first time it is run. It does this by inspecting
your path.
.. _spack-compilers:
``spack compilers``
~~~~~~~~~~~~~~~~~~~~~~~
@ -98,6 +348,8 @@ compilers`` or ``spack compiler list``::
Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
.. _spack-compiler-add:
``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~
@ -110,15 +362,19 @@ where the compiler is installed. For example::
intel@13.0.079
Or you can run ``spack compiler add`` with no arguments to force
autodetection. This is useful if you do not know where compilers
live, but new compilers have been added to your ``PATH``. For
example, using dotkit, you might do this::
auto-detection. This is useful if you do not know where compilers are
installed, but you know that new compilers have been added to your
``PATH``. For example, using environment modules, you might do this::
$ use gcc-4.9.0
$ module load gcc-4.9.0
$ spack compiler add
==> Added 1 new compiler to /Users/gamblin2/.spackconfig
gcc@4.9.0
This loads the environment module for gcc-4.9.0 to get it into the
``PATH``, and then it adds the compiler to Spack.
.. _spack-compiler-info:
``spack compiler info``
~~~~~~~~~~~~~~~~~~~~~~~
@ -126,20 +382,23 @@ example, using dotkit, you might do this::
If you want to see specifics on a particular compiler, you can run
``spack compiler info`` on it::
$ spack compiler info intel@12.1.3
intel@12.1.3:
cc = /usr/local/bin/icc-12.1.293
cxx = /usr/local/bin/icpc-12.1.293
f77 = /usr/local/bin/ifort-12.1.293
fc = /usr/local/bin/ifort-12.1.293
$ spack compiler info intel@15
intel@15.0.0:
cc = /usr/local/bin/icc-15.0.090
cxx = /usr/local/bin/icpc-15.0.090
f77 = /usr/local/bin/ifort-15.0.090
fc = /usr/local/bin/ifort-15.0.090
This shows which C, C++, and Fortran compilers were detected by Spack.
Notice also that we didn't have to be too specific about the
version. We just said ``intel@15``, and information about the only
matching Intel compiler was displayed.
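The matching behind this can be thought of as a component-wise prefix
test on version numbers. A rough illustration (not Spack's actual
``Version`` class):

.. code-block:: python

   def satisfies(requested, actual):
       """True if `actual` starts with the components of `requested`."""
       req = requested.split('.')
       return actual.split('.')[:len(req)] == req

   print(satisfies('15', '15.0.0'))    # True
   print(satisfies('12.1', '12.1.3'))  # True
   print(satisfies('15', '14.0.2'))    # False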
Manual configuration
~~~~~~~~~~~~~~~~~~~~~~~
Manual compiler configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If autodetection fails, you can manually conigure a compiler by
If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spackconfig`` file. You can do this by running
``spack config edit``, which will open the file in your ``$EDITOR``.
@ -153,8 +412,8 @@ Each compiler configuration in the file looks like this::
fc = /usr/local/bin/ifort-15.0.024-beta
...
For compilers, like ``clang``, that do not support Fortran, you can simply
put ``None`` for ``f77`` and ``fc``::
For compilers, like ``clang``, that do not support Fortran, put
``None`` for ``f77`` and ``fc``::
[compiler "clang@3.3svn"]
cc = /usr/bin/clang
@ -163,180 +422,18 @@ put ``None`` for ``f77`` and ``fc``::
fc = None
Once you save the file, the configured compilers will show up in the
list displayed when you run ``spack compilers``.
Seeing installed packages
-----------------------------------
``spack find``
~~~~~~~~~~~~~~~~~~~~~~
The second thing you're likely to want to do with Spack, and the first
thing users of your system will likely want to do, is to find what
software is already installed and ready to use. You can do that with
``spack find``.
Running ``spack find`` with no arguments will list all the installed
packages:
.. code-block:: sh
$ spack find
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libdwarf@20130207-d9b909
libdwarf@20130729-d9b909
libdwarf@20130729-b52fac
libelf@0.8.11
libelf@0.8.12
libelf@0.8.13
Packages are grouped by architecture, then by the compiler used to
build them, and then by their versions and options. If a package has
dependencies, there will also be a hash at the end of the name
indicating the dependency configuration. Packages with the same hash
have the same dependency configuration. If you want ALL information
about dependencies, as well, then you can supply ``-l`` or ``--long``:
.. code-block:: sh
$ spack find -l
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libdwarf@20130207
^libelf@0.8.12
libdwarf@20130729
^libelf@0.8.12
libdwarf@20130729
^libelf@0.8.13
libelf@0.8.11
libelf@0.8.12
libelf@0.8.13
Now you can see which versions of ``libelf`` each version of
``libdwarf`` was built with.
If you want to know the path where each of these packages is
installed, do ``spack find -p`` or ``--path``:
.. code-block:: sh
$ spack find -p
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libdwarf@20130207-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130207-d9b909
libdwarf@20130729-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-d9b909
libdwarf@20130729-b52fac /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-b52fac
libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
And, finally, you can restrict your search to a particular package
by supplying its name:
.. code-block:: sh
$ spack find -p libelf
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
package. The full spec syntax is discussed in detail in
:ref:`sec-specs`.
Installing and uninstalling
------------------------------
``spack install``
~~~~~~~~~~~~~~~~~~~~~
``spack install`` will install any package that appears in the output
of ``spack list``. To install the latest version of a package and all
of its dependencies, simply run ``spack install <package>``:
.. code-block:: sh
spack install mpileaks
Spack will fetch the tarball for ``mpileaks``, expand it, verify that
it was downloaded without errors, build it, and install it in its own
directory under ``$SPACK_HOME/opt``. If the requested package depends
on other packages in order to build, Spack fetches them as well, and
installs them before it installs the requested package. Like the main
package, each dependency is also installed in its own directory.
Spack can also build *specific* configurations of a package. For
example, to install something with a specific version, add ``@`` after
the package name, followed by a version string:
.. code-block:: sh
spack install mpich@3.0.4
Any number of configurations of the same package can be installed at
once without interfering with each other. This is good for multi-user
sites, as installing a version that one user needs will not disrupt
existing installations for other users.
In addition to version configurations, Spack can customize the
compiler, compile-time options (variants), and platform (for cross
compiles) of an installation. Spack is unique in that it can also
configure the *dependencies* a package is built with. For example,
two configurations of the same version of a package, one built with
boost 1.39.0, and the other version built with version 1.43.0, can
coexist.
This can all be done on the command line using special syntax. Spack
calls the descriptor used to refer to a particular package
configuration a **spec**. In the command lines above, both
``mpileaks`` and ``mpileaks@3.0.4`` are specs. To customize
additional properties, simply add more attributes to the spec. Specs
and their syntax are covered in more detail in :ref:`sec-specs`.
``spack uninstall``
~~~~~~~~~~~~~~~~~~~~~
To uninstall a package, type ``spack uninstall <package>``. This will
completely remove the directory in which the package was installed.
.. code-block:: sh
spack uninstall mpich
If there are still installed packages that depend on the package to be
uninstalled, spack will refuse to uninstall it. If you know what you're
doing, you can override this with ``spack uninstall -f <package>``.
However, running this risks breaking other installed packages. In
general, it is safer to remove dependent packages *before* removing
their dependencies.
A line like ``spack uninstall mpich`` may be ambiguous, if multiple
``mpich`` configurations are installed. For example, if both
``mpich@3.0.2`` and ``mpich@3.1`` are installed, it could refer to
either one, and Spack cannot determine which one to uninstall. Spack
will ask you to provide a version number to remove the ambiguity. For
example, ``spack uninstall mpich@3.1`` is unambiguous in the above
scenario.
list displayed by ``spack compilers``.
.. _sec-specs:
Specs & Dependencies
Specs & dependencies
-------------------------
We now know that ``spack install`` and ``spack uninstall`` both take a
package name with an optional version specifier. In Spack, that
descriptor is called a *spec*. Spack uses specs to refer to a
particular build configuration (or configurations) of a package.
We know that ``spack install``, ``spack uninstall``, and other
commands take a package name with an optional version specifier. In
Spack, that descriptor is called a *spec*. Spack uses specs to refer
to a particular build configuration (or configurations) of a package.
Specs are more than a package name and a version; you can use them to
specify the compiler, compiler version, architecture, compile options,
and dependency options for a build. In this section, we'll go over
@ -499,6 +596,11 @@ based on site policies.
Variants
~~~~~~~~~~~~~~~~~~~~~~~
.. Note::
Variants are not yet supported, but will be in the next Spack
release (0.9), due in Q2 2015.
Variants are named options associated with a particular package, and
they can be turned on or off. For example, above, supplying
``+debug`` causes ``mpileaks`` to be built with debug flags. The
@ -544,6 +646,11 @@ the command line is provided for convenience and legibility.
Architecture specifier
~~~~~~~~~~~~~~~~~~~~~~~
.. Note::
Architecture specifiers are part of specs but are not yet
functional. They will be in Spack version 1.0, due in Q3 2015.
The architecture specifier starts with a ``=`` and also comes after
some package name within a spec. It allows a user to specify a
particular architecture for the package to be built. This is mostly
@ -627,6 +734,8 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
.. _spack-providers:
``spack providers``
~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -646,7 +755,7 @@ versions are now filtered out.
.. _shell-support:
Environment Modules
Environment modules
-------------------------------
.. note::
@ -678,10 +787,6 @@ For ``csh`` and ``tcsh`` run:
You can put the above code in your ``.bashrc`` or ``.cshrc``, and
Spack's shell support will be available on the command line.
-------------------------------
When you install a package with Spack, it automatically generates an
environment module that lets you add the package to your environment.
@ -698,6 +803,7 @@ The directories are automatically added to your ``MODULEPATH`` and
``DK_NODE`` environment variables when you enable Spack's `shell
support <shell-support_>`_.
Using Modules & Dotkits
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -710,7 +816,7 @@ of installed packages.
$ module avail
------- /g/g21/gamblin2/src/spack/share/spack/modules/chaos_5_x86_64_ib --------
------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
@ -845,6 +951,8 @@ if newer, fancier module support is added to Spack at some later date,
you may want to regenerate all the modules to take advantage of these
new features.
.. _spack-module:
``spack module refresh``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -858,9 +966,226 @@ regenerate all module and dotkit files from scratch:
==> Regenerating tcl module files.
==> Regenerating dotkit module files.
.. _extensions:
Extensions & Python support
------------------------------------
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
modules in interpreted languages like `Python
<https://www.python.org>`_ are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.
Spack has support for this type of installation as well. In Spack,
a package that can live inside the prefix of another package is called
an *extension*. Suppose you have Python installed like so:
.. code-block:: sh
$ spack find python
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
python@2.7.8
.. _spack-extensions:
``spack extensions``
~~~~~~~~~~~~~~~~~~~~~~~
You can find extensions for your Python installation like this:
.. code-block:: sh
$ spack extensions python
==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> None activated.
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find -p``:
.. code-block:: sh
$ spack find -p py-numpy
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
.. code-block:: sh
$ spack load python
$ python
Python 2.7.8 (default, Feb 17 2015, 01:35:25)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import numpy
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named numpy
>>>
Extensions & Environment Modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are two ways to get ``numpy`` working in Python. The first is
to use :ref:`shell-support`. You can simply ``use`` or ``load`` the
module for the extension, and it will be added to the ``PYTHONPATH``
in your current shell.
For tcl modules:
.. code-block:: sh
$ spack load python
$ spack load py-numpy
or, for dotkit:
.. code-block:: sh
$ spack use python
$ spack use py-numpy
Now ``import numpy`` will succeed for as long as you keep your current
session open.
Activating Extensions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is often desirable to have certain packages *always* available as
part of a Python installation. Spack offers a more permanent solution
for this case. Instead of requiring users to load particular
environment modules, you can *activate* the package within the Python
installation:
.. _spack-activate:
``spack activate``
^^^^^^^^^^^^^^^^^^^^^^^
.. code-block:: sh
$ spack activate py-numpy
==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
with. Spack knows that ``py-numpy`` depends on ``py-nose`` and
``py-setuptools``, so it activated those packages first. Finally,
once all dependencies were activated in the ``python`` installation,
``py-numpy`` was activated as well.
If we run ``spack extensions`` again, we now see the three new
packages listed as activated:
.. code-block:: sh
$ spack extensions python
==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> 3 currently activated:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
Now, when a user runs python, ``numpy`` will be available for import
*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.
Spack accomplishes this by symbolically linking the *entire* prefix of
the ``py-numpy`` package into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.
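The linking step is conceptually just a recursive symlink of one
install prefix into another. A very rough sketch of the idea is below;
Spack's real implementation (the ``LinkTree`` class added later in this
commit) also handles conflicts, empty directories, and clean removal,
and the paths here are hypothetical:

.. code-block:: python

   import os

   def link_prefix(ext_prefix, py_prefix):
       """Mirror ext_prefix into py_prefix with directories and symlinks."""
       for dirpath, dirnames, filenames in os.walk(ext_prefix):
           rel = os.path.relpath(dirpath, ext_prefix)
           dest_dir = os.path.join(py_prefix, rel)
           if not os.path.isdir(dest_dir):
               os.makedirs(dest_dir)
           for name in filenames:
               src = os.path.join(dirpath, name)
               dest = os.path.join(dest_dir, name)
               if not os.path.exists(dest):
                   os.symlink(src, dest)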
The only limitation of activation is that you can only have a *single*
version of an extension activated at a time. This is because multiple
versions of the same extension would conflict if symbolically linked
into the same prefix. Users who want a different version of a package
can still get it by using environment modules, but they will have to
explicitly load their preferred version.
``spack activate -f``
^^^^^^^^^^^^^^^^^^^^^^^^^
If, for some reason, you want to activate a package *without* its
dependencies, you can use ``spack activate -f``:
.. code-block:: sh
$ spack activate -f py-numpy
==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
.. _spack-deactivate:
``spack deactivate``
^^^^^^^^^^^^^^^^^^^^^^^^^
We've seen how activating an extension can be used to set up a default
version of a Python module. Obviously, you may want to change that at
some point. ``spack deactivate`` is the command for this. There are
several variants:
* ``spack deactivate <extension>`` will deactivate a single
extension. If another activated extension depends on this one,
Spack will warn you and exit with an error.
* ``spack deactivate -f <extension>`` deactivates an extension
regardless of packages that depend on it.
* ``spack deactivate -a <extension>`` deactivates an extension and
all of its dependencies. Use ``-f`` to disregard dependents.
* ``spack deactivate -a <extendee>`` deactivates *all* activated
extensions of a package. For example, to deactivate *all* python
extensions, use::
spack deactivate -a python
Getting Help
-----------------------
.. _spack-help:
``spack help``
~~~~~~~~~~~~~~~~~~~~~~

View File

@ -0,0 +1,10 @@
.. _command_index:
Command index
=================
This is an alphabetical list of commands with links to the places they
appear in the documentation.
.. hlist::
:columns: 3

View File

@ -35,7 +35,9 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import sys
import os
import subprocess
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@ -43,14 +45,16 @@
sys.path.insert(0, os.path.abspath('exts'))
# Add the Spack bin directory to the path so that we can use its output in docs.
os.environ['SPACK_ROOT'] = '../../..'
spack_root = '../../..'
os.environ['SPACK_ROOT'] = spack_root
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
spack_version = subprocess.Popen(
['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.')
# Set an environment variable so that colify will print output like it would to
# a terminal.
os.environ['COLIFY_TTY'] = 'true'
os.environ['COLUMNS'] = '80'
os.environ['LINES'] = '25'
os.environ['COLIFY_SIZE'] = '25x80'
# Enable todo items
todo_include_todos = True
@ -97,9 +101,9 @@
# built documents.
#
# The short X.Y version.
version = '1.0'
version = '.'.join(spack_version[:2])
# The full version, including alpha/beta/rc tags.
release = '1.0'
release = '.'.join(spack_version[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.

View File

@ -50,11 +50,11 @@ as a descriptor for one or more instances of that template. Users
express the configuration they want using a spec, and a package turns
the spec into a complete build.
The obvious difficulty with this design is that users underspecify
The obvious difficulty with this design is that users under-specify
what they want. To build a software package, the package object needs
a *complete* specification. In Spack, if a spec describes only one
instance of a package, then we say it is **concrete**. If a spec
could describe many instances (i.e. it is underspecified in one way
could describe many instances (i.e. it is under-specified in one way
or another), then we say it is **abstract**.
Spack's job is to take an *abstract* spec from the user, find a
@ -92,7 +92,7 @@ with a high level view of Spack's directory structure::
Spack is designed so that it could live within a `standard UNIX
directory hierarchy <http://linux.die.net/man/7/hier>`_, so ``lib``,
``var``, and ``opt`` all contain a ``spack`` subdirectory in case
Spack is installed alongside other software. Most of the insteresting
Spack is installed alongside other software. Most of the interesting
parts of Spack live in ``lib/spack``. Files under ``var`` are created
as needed, so there is no ``var`` directory when you initially clone
Spack from the repository.
@ -123,13 +123,13 @@ Package-related modules
Contains the :class:`Package <spack.package.Package>` class, which
is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle
<pacakge-lifecycle>` and manage the build process.
<package-lifecycle>` and manage the build process.
:mod:`spack.packages`
Contains all of the packages in Spack and methods for managing them.
Functions like :func:`packages.get <spack.packages.get>` and
:func:`class_name_for_package_name
<packages.class_name_for_package_name>` handle mapping packge module
<packages.class_name_for_package_name>` handle mapping package module
names to class names and dynamically instantiating packages by name
from module files.

View File

@ -1,4 +1,4 @@
Feature Overview
Feature overview
==================
This is a high-level overview of features that make Spack different

View File

@ -46,8 +46,10 @@ Table of Contents
getting_started
basic_usage
packaging_guide
mirrors
site_configuration
developer_guide
command_index
package_list
API Docs <spack>

lib/spack/docs/mirrors.rst (new file, 217 lines)
View File

@ -0,0 +1,217 @@
.. _mirrors:
Mirrors
============================
Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they
can get their files. Spack has support for this with *mirrors*. A
mirror is a URL that points to a directory, either on the local
filesystem or on some server, containing tarballs for all of Spack's
packages.
Here's an example of a mirror's directory structure::
mirror/
cmake/
cmake-2.8.10.2.tar.gz
dyninst/
dyninst-8.1.1.tgz
dyninst-8.1.2.tgz
libdwarf/
libdwarf-20130126.tar.gz
libdwarf-20130207.tar.gz
libdwarf-20130729.tar.gz
libelf/
libelf-0.8.12.tar.gz
libelf-0.8.13.tar.gz
libunwind/
libunwind-1.1.tar.gz
mpich/
mpich-3.0.4.tar.gz
mvapich2/
mvapich2-1.9.tgz
The structure is very simple. There is a top-level directory. The
second level directories are named after packages, and the third level
contains tarballs for each package, named after each package.
.. note::
Archives are **not** named exactly as they were in the package's fetch
URL. They have the form ``<name>-<version>.<extension>``, where
``<name>`` is Spack's name for the package, ``<version>`` is the
version of the tarball, and ``<extension>`` is whatever format the
package's fetch URL contains.
In order to make mirror creation reasonably fast, we copy the
tarball in its original format to the mirror directory, but we do
not standardize on a particular compression algorithm, because this
would potentially require expanding and re-compressing each archive.
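Put another way, the location of each archive in the mirror can be
computed from the package name, the version, and the extension of the
original fetch URL. A small sketch of that mapping (illustrative only;
Spack has its own URL-handling utilities for this):

.. code-block:: python

   import os

   KNOWN_EXTENSIONS = ('tar.gz', 'tar.bz2', 'tar.xz', 'tgz', 'zip')

   def archive_extension(url):
       """Return the archive extension used by a fetch URL."""
       for ext in KNOWN_EXTENSIONS:
           if url.endswith('.' + ext):
               return ext
       return url.rsplit('.', 1)[-1]

   def mirror_archive_path(mirror_root, name, version, fetch_url):
       """Build <mirror>/<name>/<name>-<version>.<extension>."""
       filename = '%s-%s.%s' % (name, version, archive_extension(fetch_url))
       return os.path.join(mirror_root, name, filename)

   print(mirror_archive_path('mirror', 'mvapich2', '1.9',
                             'http://example.com/mvapich2-1.9.tgz'))
   # mirror/mvapich2/mvapich2-1.9.tgz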
.. _spack-mirror:
``spack mirror``
----------------------------
Mirrors are managed with the ``spack mirror`` command. The help for
``spack mirror`` looks like this::
$ spack mirror -h
usage: spack mirror [-h] SUBCOMMAND ...
positional arguments:
SUBCOMMAND
create Create a directory to be used as a spack mirror, and fill
it with package archives.
add Add a mirror to Spack.
remove Remove a mirror by name.
list Print out available mirrors to the console.
optional arguments:
-h, --help show this help message and exit
The ``create`` command actually builds a mirror by fetching all of its
packages from the internet and checksumming them.
The other three commands are for managing mirror configuration. They
control the URL(s) from which Spack downloads its packages.
.. _spack-mirror-create:
``spack mirror create``
----------------------------
You can create a mirror using the ``spack mirror create`` command, assuming
you're on a machine where you can access the internet.
The command will iterate through all of Spack's packages and download
the safe ones into a directory structure like the one above. Here is
what it looks like:
.. code-block:: bash
$ spack mirror create libelf libdwarf
==> Created new mirror in spack-mirror-2014-06-24
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
########################################################## 81.6%
==> Checksum passed for libelf@0.8.13
==> Added libelf@0.8.13
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
###################################################################### 98.6%
==> Checksum passed for libelf@0.8.12
==> Added libelf@0.8.12
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz
###################################################################### 97.3%
==> Checksum passed for libdwarf@20130207
==> Added libdwarf@20130207
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz
######################################################## 78.9%
==> Checksum passed for libdwarf@20130126
==> Added libdwarf@20130126
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz
############################################################# 84.7%
==> Added libdwarf@20130729
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror
==> Added python@2.7.8.
==> Successfully updated mirror in spack-mirror-2015-02-24.
Archive stats:
0 already present
5 added
0 failed to fetch.
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.
Custom package sets
~~~~~~~~~~~~~~~~~~~~~~~
Normally, ``spack mirror create`` downloads all the archives it has
checksums for. If you want to only create a mirror for a subset of
packages, you can do that by supplying a list of package specs on the
command line after ``spack mirror create``. For example, this
command::
$ spack mirror create libelf@0.8.12: boost@1.44:
Will create a mirror for libelf versions greater than or equal to
0.8.12 and boost versions greater than or equal to 1.44.
Mirror files
~~~~~~~~~~~~~~~~~~~~~~~
If you have a *very* large number of packages you want to mirror, you
can supply a file with specs in it, one per line::
$ cat specs.txt
libdwarf
libelf@0.8.12:
boost@1.44:
boost@1.39.0
...
$ spack mirror create -f specs.txt
...
This is useful if there is a specific suite of software managed by
your site.
.. _spack-mirror-add:
``spack mirror add``
----------------------------
Once you have a mirror, you need to let spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's
a file, you can use a file URL like this one::
file:///Users/gamblin2/spack-mirror-2014-06-24
That points to the directory on the local filesystem. If it were on a
web server, you could use a URL like this one:
https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
Spack will use the URL as the root for all of the packages it fetches.
You can tell your Spack installation to use that mirror like this:
.. code-block:: bash
$ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
Each mirror has a name so that you can refer to it again later.
.. _spack-mirror-list:
``spack mirror list``
----------------------------
If you want to see all the mirrors Spack knows about you can run ``spack mirror list``::
$ spack mirror list
local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
.. _spack-mirror-remove:
``spack mirror remove``
----------------------------
And, if you want to remove a mirror, just remove it by name::
$ spack mirror remove local_filesystem
$ spack mirror list
==> No mirrors configured.
Mirror precedence
----------------------------
Adding a mirror really just adds a section in ``~/.spackconfig``::
[mirror "local_filesystem"]
url = file:///Users/gamblin2/spack-mirror-2014-06-24
[mirror "remote_server"]
url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.

File diff suppressed because it is too large.

View File

@ -1,208 +1,15 @@
.. _site-configuration:
Site-specific configuration
Site configuration
===================================
.. _mirrors:
Mirrors
----------------------------
Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they
can get their files. Spack has support for this with *mirrors*. A
mirror is a URL that points to a directory, either on the local
filesystem or on some server, containing tarballs for all of Spack's
packages.
Here's an example of a mirror's directory structure::
mirror/
cmake/
cmake-2.8.10.2.tar.gz
dyninst/
DyninstAPI-8.1.1.tgz
DyninstAPI-8.1.2.tgz
libdwarf/
libdwarf-20130126.tar.gz
libdwarf-20130207.tar.gz
libdwarf-20130729.tar.gz
libelf/
libelf-0.8.12.tar.gz
libelf-0.8.13.tar.gz
libunwind/
libunwind-1.1.tar.gz
mpich/
mpich-3.0.4.tar.gz
mvapich2/
mvapich2-1.9.tgz
The structure is very simple. There is a top-level directory. The
second level directories are named after packages, and the third level
contains tarballs for each package, named as they were in the
package's fetch URL.
``spack mirror``
~~~~~~~~~~~~~~~~~~~~~~~
Mirrors are managed with the ``spack mirror`` command. The help for
``spack mirror`` looks like this::
$ spack mirror -h
usage: spack mirror [-h] SUBCOMMAND ...
positional arguments:
SUBCOMMAND
create Create a directory to be used as a spack mirror, and fill
it with package archives.
add Add a mirror to Spack.
remove Remove a mirror by name.
list Print out available mirrors to the console.
optional arguments:
-h, --help show this help message and exit
The ``create`` command actually builds a mirror by fetching all of its
packages from the internet and checksumming them.
The other three commands are for managing mirror configuration. They
control the URL(s) from which Spack downloads its packages.
``spack mirror create``
~~~~~~~~~~~~~~~~~~~~~~~
You can create a mirror using the ``spack mirror create`` command, assuming
you're on a machine where you can access the internet.
The command will iterate through all of Spack's packages and download
the safe ones into a directory structure like the one above. Here is
what it looks like:
.. code-block:: bash
$ spack mirror create libelf libdwarf
==> Created new mirror in spack-mirror-2014-06-24
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
########################################################## 81.6%
==> Checksum passed for libelf@0.8.13
==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.13.tar.gz to mirror
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
###################################################################### 98.6%
==> Checksum passed for libelf@0.8.12
==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.12.tar.gz to mirror
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz
###################################################################### 97.3%
==> Checksum passed for libdwarf@20130207
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130207.tar.gz to mirror
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz
######################################################## 78.9%
==> Checksum passed for libdwarf@20130126
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130126.tar.gz to mirror
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz
############################################################# 84.7%
==> Checksum passed for libdwarf@20130729
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.
Custom package sets
^^^^^^^^^^^^^^^^^^^^^^^^
Normally, ``spack mirror create`` downloads all the archives it has
checksums for. If you want to only create a mirror for a subset of
packages, you can do that by supplying a list of package specs on the
command line after ``spack mirror create``. For example, this
command::
$ spack mirror create libelf@0.8.12: boost@1.44:
Will create a mirror for libelf versions greater than or equal to
0.8.12 and boost versions greater than or equal to 1.44.
Mirror files
^^^^^^^^^^^^^^^^^^^^^^^^
If you have a *very* large number of packages you want to mirror, you
can supply a file with specs in it, one per line::
$ cat specs.txt
libdwarf
libelf@0.8.12:
boost@1.44:
boost@1.39.0
...
$ spack mirror create -f specs.txt
...
This is useful if there is a specific suite of software managed by
your site.
``spack mirror add``
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Once you have a mirror, you need to let spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's
a file, you can use a file URL like this one::
file:///Users/gamblin2/spack-mirror-2014-06-24
That points to the directory on the local filesystem. If it were on a
web server, you could use a URL like this one:
https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
Spack will use the URL as the root for all of the packages it fetches.
You can tell your Spack installation to use that mirror like this:
.. code-block:: bash
$ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
Each mirror has a name so that you can refer to it again later.
``spack mirror list``
~~~~~~~~~~~~~~~~~~~~~~~~~~~
If you want to see all the mirrors Spack knows about you can run ``spack mirror list``::
$ spack mirror list
local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
``spack mirror remove``
~~~~~~~~~~~~~~~~~~~~~~~~~~~
And, if you want to remove a mirror, just remove it by name::
$ spack mirror remove local_filesystem
$ spack mirror list
==> No mirrors configured.
Mirror precedence
~~~~~~~~~~~~~~~~~~~~~~~~~
Adding a mirror really just adds a section in ``~/.spackconfig``::
[mirror "local_filesystem"]
url = file:///Users/gamblin2/spack-mirror-2014-06-24
[mirror "remote_server"]
url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.
.. _temp-space:
Temporary space
----------------------------
.. warning:: Temporary space configuration will be moved to configuration files.
The intructions here are old and refer to ``__init__.py``
The instructions here are old and refer to ``__init__.py``
By default, Spack will try to do all of its building in temporary
space. There are two main reasons for this. First, Spack is designed
@ -286,7 +93,7 @@ the virtual spec to specs for possible implementations, and
later, so there is no need to fully concretize the spec when returning
it.
The ``DefaultConcretizer`` is intendend to provide sensible defaults
The ``DefaultConcretizer`` is intended to provide sensible defaults
for each policy, but there are certain choices that it can't know
about. For example, one site might prefer ``OpenMPI`` over ``MPICH``,
or another might prefer an old version of some packages. These types
@ -327,3 +134,53 @@ Set concretizer to *your own* class instead of the default:
concretizer = MyConcretizer()
The next time you run Spack, your changes should take effect.
Profiling
~~~~~~~~~~~~~~~~~~~~~
Spack has some limited built-in support for profiling, and can report
statistics using standard Python timing tools. To use this feature,
supply ``-p`` to Spack on the command line, before any subcommands.
.. _spack-p:
``spack -p``
^^^^^^^^^^^^^^^^^^
``spack -p`` output looks like this:
.. code-block:: sh
$ spack -p graph dyninst
o dyninst
|\
| |\
| o | libdwarf
|/ /
o | libelf
/
o boost
307670 function calls (305943 primitive calls) in 0.127 seconds
Ordered by: internal time
ncalls tottime percall cumtime percall filename:lineno(function)
853 0.021 0.000 0.066 0.000 inspect.py:472(getmodule)
51197 0.011 0.000 0.018 0.000 inspect.py:51(ismodule)
73961 0.010 0.000 0.010 0.000 {isinstance}
1762 0.006 0.000 0.053 0.000 inspect.py:440(getsourcefile)
32075 0.006 0.000 0.006 0.000 {hasattr}
1760 0.004 0.000 0.004 0.000 {posix.stat}
2240 0.004 0.000 0.004 0.000 {posix.lstat}
2602 0.004 0.000 0.011 0.000 inspect.py:398(getfile)
771 0.004 0.000 0.077 0.000 inspect.py:518(findsource)
2656 0.004 0.000 0.004 0.000 {method 'match' of '_sre.SRE_Pattern' objects}
30772 0.003 0.000 0.003 0.000 {method 'get' of 'dict' objects}
...
The bottom of the output lists the most time-consuming functions,
slowest first.  The profiling support comes from Python's built-in tool,
`cProfile
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
View File
@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
__all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir',
'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe']
import os
@ -204,6 +204,12 @@ def touch(path):
os.utime(path, None)
def touchp(path):
"""Like touch, but creates any parent directories needed for the file."""
mkdirp(os.path.dirname(path))
touch(path)
def join_path(prefix, *args):
path = str(prefix)
for elt in args:
View File
@ -291,6 +291,37 @@ def foo(self, **kwargs):
% (next(kwargs.iterkeys()), fun.__name__))
def match_predicate(*args):
"""Utility function for making string matching predicates.
Each arg can be a:
- regex
- list or tuple of regexes
- predicate that takes a string.
This returns a predicate that is true if:
- any arg regex matches
- any regex in a list or tuple of regexes matches.
- any predicate in args matches.
"""
def match(string):
for arg in args:
if isinstance(arg, basestring):
if re.search(arg, string):
return True
elif isinstance(arg, list) or isinstance(arg, tuple):
if any(re.search(i, string) for i in arg):
return True
elif callable(arg):
if arg(string):
return True
else:
raise ValueError("args to match_predicate must be regex, "
"list of regexes, or callable.")
return False
return match
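# Example usage (illustrative only; the patterns are hypothetical):
#
#     ignore = match_predicate(r'\.pyc$', [r'^build/', r'^\.git/'])
#     ignore('foo.pyc')       # True  -- matches the first regex
#     ignore('build/lib.c')   # True  -- matches a regex in the list
#     ignore('README')        # False -- nothing matches
#
# A callable, e.g. lambda name: name.startswith('tmp'), can be mixed in
# alongside the regexes to express arbitrary matching logic.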
class RequiredAttributeError(ValueError):
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
View File
@ -0,0 +1,197 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
__all__ = ['LinkTree']
import os
import shutil
from llnl.util.filesystem import *
empty_file_name = '.spack-empty'
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
"""Traverse two filesystem trees simultaneously.
Walks the LinkTree directory in pre or post order. Yields each
file in the source directory with a matching path from the dest
directory, along with whether the file is a directory.
e.g., for this tree::
root/
a/
file1
file2
b/
file3
When called on dest, this yields::
('root', 'dest')
('root/a', 'dest/a')
('root/a/file1', 'dest/a/file1')
('root/a/file2', 'dest/a/file2')
('root/b', 'dest/b')
('root/b/file3', 'dest/b/file3')
Optional args:
order=[pre|post] -- Whether to do pre- or post-order traversal.
ignore=<predicate> -- Predicate indicating which files to ignore.
follow_nonexisting -- Whether to descend into directories in
src that do not exist in dest. Default True.
follow_links -- Whether to descend into symlinks in src.
"""
follow_nonexisting = kwargs.get('follow_nonexisting', True)
follow_links = kwargs.get('follow_links', False)
# Yield in pre or post order?
order = kwargs.get('order', 'pre')
if order not in ('pre', 'post'):
raise ValueError("Order must be 'pre' or 'post'.")
# List of relative paths to ignore under the src root.
ignore = kwargs.get('ignore', lambda filename: False)
# Don't descend into ignored directories
if ignore(rel_path):
return
source_path = os.path.join(source_root, rel_path)
dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
if order == 'pre':
yield (source_path, dest_path)
for f in os.listdir(source_path):
source_child = os.path.join(source_path, f)
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# Treat as a directory
if os.path.isdir(source_child) and (
follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
for t in tuples: yield t
# Treat as a file.
elif not ignore(os.path.join(rel_path, f)):
yield (source_child, dest_child)
if order == 'post':
yield (source_path, dest_path)
class LinkTree(object):
"""Class to create trees of symbolic links from a source directory.
LinkTree objects are constructed with a source root. Their
methods allow you to create and delete trees of symbolic links
back to the source tree in specific destination directories.
Trees comprise symlinks only to files; directories are never
symlinked to, to prevent the source directory from ever being
modified.
"""
def __init__(self, source_root):
if not os.path.exists(source_root):
raise IOError("No such file or directory: '%s'" % source_root)
self._root = source_root
def find_conflict(self, dest_root, **kwargs):
"""Returns the first file in dest that conflicts with src"""
kwargs['follow_nonexisting'] = False
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
if os.path.exists(dest) and not os.path.isdir(dest):
return dest
elif os.path.exists(dest):
return dest
return None
def merge(self, dest_root, **kwargs):
"""Link all files in src into dest, creating directories if necessary."""
kwargs['order'] = 'pre'
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
if not os.path.exists(dest):
mkdirp(dest)
continue
if not os.path.isdir(dest):
raise ValueError("File blocks directory: %s" % dest)
# mark empty directories so they aren't removed on unmerge.
if not os.listdir(dest):
marker = os.path.join(dest, empty_file_name)
touch(marker)
else:
assert(not os.path.exists(dest))
os.symlink(src, dest)
def unmerge(self, dest_root, **kwargs):
"""Unlink all files in dest that exist in src.
Unlinks directories in dest if they are empty.
"""
kwargs['order'] = 'post'
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
# Skip non-existing links.
if not os.path.exists(dest):
continue
if not os.path.isdir(dest):
raise ValueError("File blocks directory: %s" % dest)
# remove directory if it is empty.
if not os.listdir(dest):
shutil.rmtree(dest, ignore_errors=True)
# remove empty dir marker if present.
marker = os.path.join(dest, empty_file_name)
if os.path.exists(marker):
os.remove(marker)
elif os.path.exists(dest):
if not os.path.islink(dest):
raise ValueError("%s is not a link tree!" % dest)
os.remove(dest)
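# Example usage (illustrative; the prefixes are hypothetical):
#
#     tree = LinkTree('/prefix/of/extension')
#     if tree.find_conflict('/prefix/of/extendee') is None:
#         tree.merge('/prefix/of/extendee')    # symlink files into the extendee
#
# and, to undo the activation later:
#
#     tree.unmerge('/prefix/of/extendee')      # removes only those symlinks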
View File
@ -169,6 +169,15 @@ def colify(elts, **options):
if not elts:
return (0, ())
# environment size is of the form "<rows>x<cols>"
env_size = os.environ.get('COLIFY_SIZE')
if env_size:
try:
r, c = env_size.split('x')
console_rows, console_cols = int(r), int(c)
tty = True
except: pass
# Use only one column if not a tty.
if not tty:
if tty is False or not output.isatty():
View File
@ -78,7 +78,7 @@
# Version information
from spack.version import Version
spack_version = Version("0.8")
spack_version = Version("0.8.15")
#
# Executables used by Spack
@ -138,7 +138,7 @@
# should live. This file is overloaded for spack core vs. for packages.
#
__all__ = ['Package', 'Version', 'when', 'ver']
from spack.package import Package
from spack.package import Package, ExtensionConflictError
from spack.version import Version, ver
from spack.multimethod import when
View File
@ -28,6 +28,7 @@
calls you can make from within the install() function.
"""
import os
import sys
import shutil
import multiprocessing
import platform
@ -183,6 +184,10 @@ def set_module_variables_for_package(pkg):
if platform.mac_ver()[0]:
m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
# Set up CMake rpath
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
# Emulate some shell commands for convenience
m.pwd = os.getcwd
m.cd = os.chdir
@ -202,8 +207,78 @@ def set_module_variables_for_package(pkg):
m.prefix = pkg.prefix
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
rpaths.extend(d.prefix.lib for d in pkg.spec.traverse(root=False)
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in pkg.spec.traverse(root=False)
if os.path.isdir(d.prefix.lib64))
return rpaths
def setup_package(pkg):
"""Execute all environment setup routines."""
set_compiler_environment_variables(pkg)
set_build_environment_variables(pkg)
set_module_variables_for_package(pkg)
# Allow dependencies to set up environment as well.
for dep_spec in pkg.spec.traverse(root=False):
dep_spec.package.setup_dependent_environment(
pkg.module, dep_spec, pkg.spec)
def fork(pkg, function):
"""Fork a child process to do part of a spack build.
Arguments:
pkg -- pkg whose environment we should set up the
forked process for.
function -- arg-less function to run in the child process.
Usage:
def child_fun():
# do stuff
build_env.fork(pkg, child_fun)
Forked processes are run with the build environment set up by
spack.build_environment. This allows package authors to have
full control over the environment, etc. without affecting
other builds that might be executed in the same spack call.
If something goes wrong, the child process is expected to print
the error and the parent process will exit with error as
well. If things go well, the child exits and the parent
carries on.
"""
try:
pid = os.fork()
except OSError, e:
raise InstallError("Unable to fork build process: %s" % e)
if pid == 0:
# Give the child process the package's build environment.
setup_package(pkg)
try:
# call the forked function.
function()
# Use os._exit here to avoid raising a SystemExit exception,
# which interferes with unit tests.
os._exit(0)
except:
# Child doesn't raise or return to main spack code.
# Just runs default exception handler and exits.
sys.excepthook(*sys.exc_info())
os._exit(1)
else:
# Parent process just waits for the child to complete. If the
# child exited badly, assume it already printed an appropriate
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
sys.exit(1)
View File
@ -121,3 +121,18 @@ def elide_list(line_list, max_num=10):
return line_list[:max_num-1] + ['...'] + line_list[-1:]
else:
return line_list
def disambiguate_spec(spec):
matching_specs = spack.db.get_installed(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
args = ["%s matches multiple packages." % spec,
"Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
return matching_specs[0]
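# Illustrative usage from a command implementation (this mirrors how
# `spack location -i` uses it below; the spec comes from the command line):
#
#     specs = spack.cmd.parse_specs(args.spec)
#     spec = spack.cmd.disambiguate_spec(specs[0])  # dies unless exactly one match
#     print spec.prefix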
View File
@ -0,0 +1,58 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description = "Activate a package extension."
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
help="Activate without first activating dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
def activate(parser, args):
# TODO: shouldn't have to concretize here. Fix DAG issues.
specs = spack.cmd.parse_specs(args.spec, concretize=True)
if len(specs) != 1:
tty.die("activate requires one spec. %d given." % len(specs))
# TODO: remove this hack when DAG info is stored in dir layout.
# This ensures the ext spec is always normalized properly.
spack.db.get(specs[0])
spec = spack.cmd.disambiguate_spec(specs[0])
if not spec.package.is_extension:
tty.die("%s is not an extension." % spec.name)
if spec.package.activated:
tty.die("Package %s is already activated." % specs[0].short_spec)
spec.package.do_activate()
View File
@ -1,5 +1,5 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
@ -28,39 +28,19 @@
import spack
import spack.cmd
import spack.stage as stage
description = "Remove staged files for packages"
description = "Remove build stage and source tarball for packages."
def setup_parser(subparser):
subparser.add_argument('-c', "--clean", action="store_true", dest='clean',
help="run make clean in the build directory (default)")
subparser.add_argument('-w', "--work", action="store_true", dest='work',
help="delete the build directory and re-expand it from its archive.")
subparser.add_argument('-d', "--dist", action="store_true", dest='dist',
help="delete the downloaded archive.")
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")
def clean(parser, args):
if not args.packages:
tty.die("spack clean requires at least one package argument")
tty.die("spack clean requires at least one package spec.")
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.db.get(spec)
if args.dist:
package.do_clean_dist()
tty.msg("Cleaned %s" % package.name)
elif args.work:
package.do_clean_work()
tty.msg("Restaged %s" % package.name)
else:
try:
package.do_clean()
except subprocess.CalledProcessError, e:
tty.warn("Warning: 'make clean' didn't work. Consider 'spack clean --work'.")
tty.msg("Made clean for %s" % package.name)
package.do_clean()
View File
@ -0,0 +1,104 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
from spack.graph import topological_sort
description = "Deactivate a package extension."
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
help="Run deactivation even if spec is NOT currently activated.")
subparser.add_argument(
'-a', '--all', action='store_true',
help="Deactivate all extensions of an extendable pacakge, or "
"deactivate an extension AND its dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
def deactivate(parser, args):
# TODO: shouldn't have to concretize here. Fix DAG issues.
specs = spack.cmd.parse_specs(args.spec, concretize=True)
if len(specs) != 1:
tty.die("deactivate requires one spec. %d given." % len(specs))
# TODO: remove this hack when DAG info is stored properly.
# This ensures the ext spec is always normalized properly.
spack.db.get(specs[0])
spec = spack.cmd.disambiguate_spec(specs[0])
pkg = spec.package
if args.all:
if pkg.extendable:
tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
ext_pkgs = spack.db.installed_extensions_for(spec)
for ext_pkg in ext_pkgs:
ext_pkg.spec.normalize()
if ext_pkg.activated:
ext_pkg.do_deactivate(force=True)
elif pkg.is_extension:
# TODO: store DAG info properly (see above)
spec.normalize()
if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec)
tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
topo_order = topological_sort(spec)
index = spec.index()
for name in topo_order:
espec = index[name]
epkg = espec.package
# TODO: store DAG info properly (see above)
epkg.spec.normalize()
if epkg.extends(pkg.extendee_spec):
if epkg.activated or args.force:
epkg.do_deactivate(force=args.force)
else:
tty.die("spack deactivate --all requires an extendable package or an extension.")
else:
if not pkg.is_extension:
tty.die("spack deactivate requires an extension.",
"Did you mean 'spack deactivate --all'?")
if not args.force and not spec.package.activated:
tty.die("Package %s is not activated." % specs[0].short_spec)
spec.package.do_deactivate(force=args.force)
View File
@ -0,0 +1,98 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from external import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
import spack
import spack.cmd
import spack.cmd.find
description = "List extensions for package."
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
'-l', '--long', action='store_const', dest='mode', const='long',
help='Show dependency hashes as well as versions.')
format_group.add_argument(
'-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to extension install directories')
format_group.add_argument(
'-d', '--deps', action='store_const', dest='mode', const='deps',
help='Show full dependency DAG of extensions')
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
def extensions(parser, args):
if not args.spec:
tty.die("extensions requires a package spec.")
# Checks
spec = spack.cmd.parse_specs(args.spec)
if len(spec) > 1:
tty.die("Can only list extensions for one package.")
if not spec[0].package.extendable:
tty.die("%s is not an extendable package." % spec[0].name)
spec = spack.cmd.disambiguate_spec(spec[0])
if not spec.package.extendable:
tty.die("%s does not have extensions." % spec.short_spec)
if not args.mode:
args.mode = 'short'
# List package names of extensions
extensions = spack.db.extensions_for(spec)
if not extensions:
tty.msg("%s has no extensions." % spec.cshort_spec)
return
tty.msg(spec.cshort_spec)
tty.msg("%d extensions:" % len(extensions))
colify(ext.name for ext in extensions)
# List specs of installed extensions.
installed = [s.spec for s in spack.db.installed_extensions_for(spec)]
print
if not installed:
tty.msg("None installed.")
return
tty.msg("%d installed:" % len(installed))
spack.cmd.find.display_specs(installed, mode=args.mode)
# List specs of activated extensions.
activated = spack.install_layout.extension_map(spec)
print
if not activated:
tty.msg("None activated.")
return
tty.msg("%d currently activated:" % len(activated))
spack.cmd.find.display_specs(activated.values(), mode=args.mode)
View File
@ -41,13 +41,13 @@
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
'-l', '--long', action='store_true', dest='long',
'-l', '--long', action='store_const', dest='mode', const='long',
help='Show dependency hashes as well as versions.')
format_group.add_argument(
'-p', '--paths', action='store_true', dest='paths',
'-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to package install directories')
format_group.add_argument(
'-d', '--deps', action='store_true', dest='full_deps',
'-d', '--deps', action='store_const', dest='mode', const='deps',
help='Show full dependency DAG of installed packages')
subparser.add_argument(
@ -55,6 +55,50 @@ def setup_parser(subparser):
help='optional specs to filter results')
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0: print
header = "%s{%s} / %s{%s}" % (
spack.spec.architecture_color, architecture,
spack.spec.compiler_color, compiler)
tty.hline(colorize(header), char='-')
specs = index[(architecture,compiler)]
specs.sort()
abbreviated = [s.format('$_$@$+', color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %-{}s%s".format(width)
for abbrv, spec in zip(abbreviated, specs):
print format % (abbrv, spec.prefix)
elif mode == 'deps':
for spec in specs:
print spec.tree(indent=4, format='$_$@$+$#', color=True),
elif mode in ('short', 'long'):
fmt = '$-_$@$+'
if mode == 'long':
fmt += '$#'
colify(s.format(fmt, color=True) for s in specs)
else:
raise ValueError(
"Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)
def find(parser, args):
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
@ -76,36 +120,10 @@ def find(parser, args):
results = [set(spack.db.get_installed(qs)) for qs in query_specs]
specs = set.union(*results)
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
if not args.mode:
args.mode = 'short'
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0: print
if sys.stdout.isatty():
tty.msg("%d installed packages." % len(specs))
display_specs(specs, mode=args.mode)
header = "%s{%s} / %s{%s}" % (
spack.spec.architecture_color, architecture,
spack.spec.compiler_color, compiler)
tty.hline(colorize(header), char='-')
specs = index[(architecture,compiler)]
specs.sort()
abbreviated = [s.format('$_$@$+', color=True) for s in specs]
if args.paths:
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %-{}s%s".format(width)
for abbrv, spec in zip(abbreviated, specs):
print format % (abbrv, spec.prefix)
elif args.full_deps:
for spec in specs:
print spec.tree(indent=4, format='$_$@$+', color=True),
else:
fmt = '$-_$@$+'
if args.long:
fmt += '$#'
colify(s.format(fmt, color=True) for s in specs)
View File
@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from external import argparse
import llnl.util.tty as tty
@ -77,37 +78,30 @@ def location(parser, args):
tty.die("You must supply a spec.")
if len(specs) != 1:
tty.die("Too many specs. Supply only one.")
spec = specs[0]
if args.install_dir:
# install_dir command matches against installed specs.
matching_specs = spack.db.get_installed(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
args = ["%s matches multiple packages." % spec,
"Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
print matching_specs[0].prefix
elif args.package_dir:
# This one just needs the spec name.
print join_path(spack.db.root, spec.name)
spec = spack.cmd.disambiguate_spec(specs[0])
print spec.prefix
else:
# These versions need concretized specs.
spec.concretize()
pkg = spack.db.get(spec)
spec = specs[0]
if args.stage_dir:
print pkg.stage.path
if args.package_dir:
# This one just needs the spec name.
print join_path(spack.db.root, spec.name)
else:
# These versions need concretized specs.
spec.concretize()
pkg = spack.db.get(spec)
if args.stage_dir:
print pkg.stage.path
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path
View File
@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import cgi
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.colify import *
@ -49,6 +50,8 @@ def print_rst_package_list():
"""Print out information on all packages in restructured text."""
pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
print ".. _package-list:"
print
print "Package List"
print "=================="
@ -70,9 +73,9 @@ def print_rst_package_list():
print
print pkg.name
print "-" * len(pkg.name)
print "Links"
print " * `Homepage <%s>`__" % pkg.homepage
print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
print "Links:"
print " * `%s <%s>`__" % (cgi.escape(pkg.homepage), pkg.homepage)
print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
print
if pkg.versions:
print "Versions:"
@ -82,7 +85,7 @@ def print_rst_package_list():
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in pkg.dependencies)
print
print "Description"
print "Description:"
print pkg.format_doc(indent=2)
print
print "-----"
View File
@ -0,0 +1,46 @@
##############################################################################
# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description = "Revert checked out package source code."
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to restage")
def restage(parser, args):
if not args.packages:
tty.die("spack restage requires at least one package spec.")
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.db.get(spec)
package.do_restage()
View File
@ -65,8 +65,8 @@ def uninstall(parser, args):
" b) use a more specific spec."]
tty.die(*args)
if len(matching_specs) == 0:
if args.force: continue
tty.die("%s does not match any installed packages." % spec)
for s in matching_specs:
View File
@ -27,9 +27,11 @@
import exceptions
import hashlib
import shutil
import tempfile
from contextlib import closing
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.filesystem import join_path, mkdirp
import spack
@ -53,6 +55,19 @@ def __init__(self, root):
self.root = root
@property
def hidden_file_paths(self):
"""Return a list of hidden files used by the directory layout.
Paths are relative to the root of an install directory.
If the directory layout uses no hidden files to maintain
state, this should return an empty container, e.g. [] or (,).
"""
raise NotImplementedError()
def all_specs(self):
"""To be implemented by subclasses to traverse all specs for which there is
a directory within the root.
@ -71,6 +86,42 @@ def make_path_for_spec(self, spec):
raise NotImplementedError()
def extension_map(self, spec):
"""Get a dict of currently installed extension packages for a spec.
Dict maps { name : extension_spec }
Modifying dict does not affect internals of this layout.
"""
raise NotImplementedError()
def check_extension_conflict(self, spec, ext_spec):
"""Ensure that ext_spec can be activated in spec.
If not, raise ExtensionAlreadyInstalledError or
ExtensionConflictError.
"""
raise NotImplementedError()
def check_activated(self, spec, ext_spec):
"""Ensure that ext_spec can be removed from spec.
If not, raise NoSuchExtensionError.
"""
raise NotImplementedError()
def add_extension(self, spec, ext_spec):
"""Add to the list of currently installed extensions."""
raise NotImplementedError()
def remove_extension(self, spec, ext_spec):
"""Remove from the list of currently installed extensions."""
raise NotImplementedError()
def path_for_spec(self, spec):
"""Return an absolute path from the root to a directory for the spec."""
_check_concrete(spec)
@ -81,12 +132,17 @@ def path_for_spec(self, spec):
def remove_path_for_spec(self, spec):
"""Removes a prefix and any empty parent directories from the root."""
"""Removes a prefix and any empty parent directories from the root.
Raises RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert(path.startswith(self.root))
if os.path.exists(path):
shutil.rmtree(path, True)
try:
shutil.rmtree(path)
except exceptions.OSError, e:
raise RemoveFailedError(spec, path, e)
path = os.path.dirname(path)
while path != self.root:
@ -134,9 +190,18 @@ def __init__(self, root, **kwargs):
"""Prefix size is number of characters in the SHA-1 prefix to use
to make each hash unique.
"""
spec_file_name = kwargs.get('spec_file_name', '.spec')
spec_file_name = kwargs.get('spec_file_name', '.spec')
extension_file_name = kwargs.get('extension_file_name', '.extensions')
super(SpecHashDirectoryLayout, self).__init__(root)
self.spec_file_name = spec_file_name
self.extension_file_name = extension_file_name
# Cache of already written/read extension maps.
self._extension_maps = {}
@property
def hidden_file_paths(self):
return ('.spec', '.extensions')
def relative_path_for_spec(self, spec):
@ -159,6 +224,9 @@ def read_spec(self, path):
if all(spack.db.exists(s.name) for s in spec.traverse()):
copy = spec.copy()
# TODO: It takes a lot of time to normalize every spec on read.
# TODO: Storing graph info with spec files would fix this.
copy.normalize()
if copy.concrete:
return copy # These are specs spack still understands.
@ -212,17 +280,116 @@ def make_path_for_spec(self, spec):
self.write_spec(spec, spec_file_path)
@memoized
def all_specs(self):
if not os.path.isdir(self.root):
return
return []
specs = []
for path in traverse_dirs_at_depth(self.root, 3):
arch, compiler, last_dir = path
spec_file_path = join_path(
self.root, arch, compiler, last_dir, self.spec_file_name)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path)
yield spec
specs.append(spec)
return specs
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file"""
_check_concrete(spec)
return join_path(self.path_for_spec(spec), self.extension_file_name)
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currnetly
installed for this package."""
_check_concrete(spec)
if not spec in self._extension_maps:
path = self.extension_file_path(spec)
if not os.path.exists(path):
self._extension_maps[spec] = {}
else:
exts = {}
with closing(open(path)) as ext_file:
for line in ext_file:
try:
ext_spec = Spec(line.strip())
exts[ext_spec.name] = ext_spec
except spack.error.SpackError, e:
# TODO: do something better here -- should be
# resilient to corrupt files.
raise InvalidExtensionSpecError(str(e))
self._extension_maps[spec] = exts
return self._extension_maps[spec]
def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API."""
return self._extension_map(spec).copy()
def check_extension_conflict(self, spec, ext_spec):
exts = self._extension_map(spec)
if ext_spec.name in exts:
installed_spec = exts[ext_spec.name]
if ext_spec == installed_spec:
raise ExtensionAlreadyInstalledError(spec, ext_spec)
else:
raise ExtensionConflictError(spec, ext_spec, installed_spec)
def check_activated(self, spec, ext_spec):
exts = self._extension_map(spec)
if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
raise NoSuchExtensionError(spec, ext_spec)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
tmp = tempfile.NamedTemporaryFile(
prefix=basename, dir=dirname, delete=False)
# Write temp file.
with closing(tmp):
for extension in sorted(extensions.values()):
tmp.write("%s\n" % extension)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
def add_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
# Check whether it's already installed or if it's a conflict.
exts = self._extension_map(spec)
self.check_extension_conflict(spec, ext_spec)
# do the actual adding.
exts[ext_spec.name] = ext_spec
self._write_extensions(spec, exts)
def remove_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
# Make sure it's installed before removing.
exts = self._extension_map(spec)
self.check_activated(spec, ext_spec)
# do the actual removing.
del exts[ext_spec.name]
self._write_extensions(spec, exts)
class DirectoryLayoutError(SpackError):
@ -239,6 +406,15 @@ def __init__(self, installed_spec, new_spec):
% installed_spec, new_spec)
class RemoveFailedError(DirectoryLayoutError):
"""Raised when a DirectoryLayout cannot remove an install prefix."""
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
'Could not remove prefix %s for %s : %s'
% (prefix, installed_spec.short_spec, error))
self.cause = error
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
def __init__(self, message):
@ -250,3 +426,34 @@ class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")
class InvalidExtensionSpecError(DirectoryLayoutError):
"""Raised when an extension file has a bad spec in it."""
def __init__(self, message):
super(InvalidExtensionSpecError, self).__init__(message)
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
def __init__(self, spec, ext_spec):
super(ExtensionAlreadyInstalledError, self).__init__(
"%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
class ExtensionConflictError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
def __init__(self, spec, ext_spec, conflict):
super(ExtensionConflictError, self).__init__(
"%s cannot be installed in %s because it conflicts with %s."% (
ext_spec.short_spec, spec.short_spec, conflict.short_spec))
class NoSuchExtensionError(DirectoryLayoutError):
"""Raised when an extension isn't there on deactivate."""
def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
"%s cannot be removed from %s because it's not activated."% (
ext_spec.short_spec, spec.short_spec))
View File
@ -41,6 +41,7 @@
Archive a source directory, e.g. for creating a mirror.
"""
import os
import sys
import re
import shutil
from functools import wraps
@ -141,13 +142,19 @@ def fetch(self):
tty.msg("Trying to fetch from %s" % self.url)
curl_args = ['-O', # save file to disk
'-f', # fail on >400 errors
'-D', '-', # print out HTTP headers
'-L', self.url,]
if sys.stdout.isatty():
curl_args.append('-#') # status bar when using a tty
else:
curl_args.append('-sS') # just errors when not.
# Run curl but grab the mime type from the http headers
headers = spack.curl('-#', # status bar
'-O', # save file to disk
'-f', # fail on >400 errors
'-D', '-', # print out HTML headers
'-L', self.url,
return_output=True, fail_on_error=False)
headers = spack.curl(
*curl_args, return_output=True, fail_on_error=False)
if spack.curl.returncode != 0:
# clean up archive on failure.
View File
@ -31,7 +31,9 @@
Currently the following hooks are supported:
* pre_install()
* post_install()
* pre_uninstall()
* post_uninstall()
This can be used to implement support for things like module
@ -70,5 +72,8 @@ def __call__(self, pkg):
#
# Define some functions that can be called to fire off hooks.
#
post_install = HookRunner('post_install')
pre_install = HookRunner('pre_install')
post_install = HookRunner('post_install')
pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')
View File
@ -0,0 +1,36 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
def pre_uninstall(pkg):
# Need to do this b/c uninstall does not automatically do it.
# TODO: store full graph info in stored .spec file.
pkg.spec.normalize()
if pkg.is_extension:
if pkg.activated:
pkg.do_deactivate(force=True)
View File
@ -146,7 +146,7 @@ def create(path, specs, **kwargs):
stage = None
try:
# create a subdirectory for the current package@version
archive_path = join_path(path, mirror_archive_path(spec))
archive_path = os.path.abspath(join_path(path, mirror_archive_path(spec)))
subdir = os.path.dirname(archive_path)
mkdirp(subdir)
View File
@ -49,6 +49,7 @@
import re
import textwrap
import shutil
from glob import glob
from contextlib import closing
import llnl.util.tty as tty
@ -123,6 +124,13 @@ def add_path(path_name, directory):
if os.path.isdir(directory):
add_path(var, directory)
# Add python path unless it's an actual python installation
# TODO: is there a better way to do this?
if self.spec.name != 'python':
site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages"))
if site_packages:
add_path('PYTHONPATH', site_packages[0])
# short description is just the package + version
# TODO: maybe packages can optionally provide it.
self.short_description = self.spec.format("$_ $@")
View File
@ -45,6 +45,7 @@
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.link_tree import LinkTree
from llnl.util.filesystem import *
from llnl.util.lang import *
@ -286,10 +287,9 @@ class SomePackage(Package):
.. code-block:: python
p.do_clean() # runs make clean
p.do_clean_work() # removes the build directory and
p.do_clean() # removes the stage directory entirely
p.do_restage() # removes the build directory and
# re-expands the archive.
p.do_clean_dist() # removes the stage directory entirely
The convention used here is that a do_* function is intended to be called
internally by Spack commands (in spack.cmd). These aren't for package
@ -320,12 +320,21 @@ class SomePackage(Package):
"""Patches to apply to newly expanded source, if any."""
patches = {}
"""Specs of package this one extends, or None.
Currently, packages can extend at most one other package.
"""
extendees = {}
#
# These are default values for instance variables.
#
"""By default we build in parallel. Subclasses can override this."""
parallel = True
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
def __init__(self, spec):
# this determines how the package should be built.
@ -395,6 +404,9 @@ def ensure_has_dict(attr_name):
self._fetch_time = 0.0
self._total_time = 0.0
if self.is_extension:
spack.db.get(self.extendee_spec)._check_extendable()
@property
def version(self):
@ -481,6 +493,47 @@ def fetcher(self, f):
self._fetcher = f
@property
def extendee_spec(self):
"""Spec of the extendee of this package, or None if it is not an extension."""
if not self.extendees:
return None
name = next(iter(self.extendees))
if not name in self.spec:
spec, kwargs = self.extendees[name]
return spec
# Need to do this to get the concrete version of the spec
return self.spec[name]
@property
def extendee_args(self):
"""Spec of the extendee of this package, or None if it is not an extension."""
if not self.extendees:
return None
name = next(iter(self.extendees))
return self.extendees[name][1]
@property
def is_extension(self):
return len(self.extendees) > 0
def extends(self, spec):
return (spec.name in self.extendees and
spec.satisfies(self.extendees[spec.name][0]))
@property
def activated(self):
if not self.is_extension:
raise ValueError("is_extension called on package that is not an extension.")
exts = spack.install_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
def preorder_traversal(self, visited=None, **kwargs):
"""This does a preorder traversal of the package's dependence DAG."""
virtual = kwargs.get("virtual", False)
@ -741,34 +794,27 @@ def do_install(self, **kwargs):
tty.msg("Installing %s" % self.name)
if not ignore_deps:
self.do_install_dependencies()
self.do_install_dependencies(**kwargs)
start_time = time.time()
if not fake_install:
self.do_patch()
# Fork a child process to do the build. This allows each
# package authors to have full control over their environment,
# etc. without offecting other builds that might be executed
# in the same spack call.
try:
pid = os.fork()
except OSError, e:
raise InstallError("Unable to fork build process: %s" % e)
# create the install directory. The install layout
# handles this in case so that it can use whatever
# package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec)
if pid == 0:
def real_work():
try:
tty.msg("Building %s." % self.name)
# create the install directory. The install layout
# handles this in case so that it can use whatever
# package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec)
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
# Set up process's build environment before running install.
self.stage.chdir_to_source()
build_env.setup_package(self)
if fake_install:
self.do_fake_install()
else:
@ -776,10 +822,7 @@ def do_install(self, **kwargs):
self.install(self.spec, self.prefix)
# Ensure that something was actually installed.
if not os.listdir(self.prefix):
raise InstallError(
"Install failed for %s. Nothing was installed!"
% self.name)
self._sanity_check_install()
# On successful install, remove the stage.
if not keep_stage:
@ -790,14 +833,10 @@ def do_install(self, **kwargs):
build_time = self._total_time - self._fetch_time
tty.msg("Successfully installed %s." % self.name,
"Fetch: %.2f sec. Build: %.2f sec. Total: %.2f sec."
% (self._fetch_time, build_time, self._total_time))
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
print_pkg(self.prefix)
# Use os._exit here to avoid raising a SystemExit exception,
# which interferes with unit tests.
os._exit(0)
except:
if not keep_prefix:
# If anything goes wrong, remove the install prefix
@ -807,28 +846,26 @@ def do_install(self, **kwargs):
"Spack will think this package is installed." +
"Manually remove this directory to fix:",
self.prefix)
raise
# Child doesn't raise or return to main spack code.
# Just runs default exception handler and exits.
sys.excepthook(*sys.exc_info())
os._exit(1)
# Parent process just waits for the child to complete. If the
# child exited badly, assume it already printed an appropriate
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
sys.exit(1)
build_env.fork(self, real_work)
# Once everything else is done, run post install hooks
spack.hooks.post_install(self)
def do_install_dependencies(self):
def _sanity_check_install(self):
installed = set(os.listdir(self.prefix))
installed.difference_update(spack.install_layout.hidden_file_paths)
if not installed:
raise InstallError(
"Install failed for %s. Nothing was installed!" % self.name)
def do_install_dependencies(self, **kwargs):
# Pass along paths of dependencies here
for dep in self.spec.dependencies.values():
dep.package.do_install()
dep.package.do_install(**kwargs)
@property
@ -840,6 +877,32 @@ def module(self):
fromlist=[self.__class__.__name__])
def setup_dependent_environment(self, module, spec, dependent_spec):
"""Called before the install() method of dependents.
Default implementation does nothing, but this can be
overridden by an extendable package to set up the install
environment for its extensions. This is useful if there are
some common steps to installing all extensions for a
certain package.
Some examples:
1. Installing python modules generally requires PYTHONPATH to
point to the lib/pythonX.Y/site-packages directory in the
module's install prefix. This could set that variable.
2. Extensions often need to invoke the 'python' interpreter
from the Python installation being extended. This routine can
put a 'python' Executable object in the module scope for the
extension package to simplify extension installs.
3. A lot of Qt extensions need QTDIR set. This can be used to do that.
"""
pass
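# Illustrative sketch (hypothetical override, not part of the base class):
#
#     def setup_dependent_environment(self, module, spec, dependent_spec):
#         # e.g., a Qt package could point its extensions at its prefix:
#         os.environ['QTDIR'] = spec.prefix
#
# Callables placed on `module` (the dependent package's build module)
# become available inside that package's install() method.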
def install(self, spec, prefix):
"""Package implementations override this with their own build configuration."""
raise InstallError("Package %s provides no install method!" % self.name)
@ -859,6 +922,10 @@ def do_uninstall(self, **kwargs):
"The following installed packages depend on it: %s" %
' '.join(formatted_deps))
# Pre-uninstall hook runs first.
spack.hooks.pre_uninstall(self)
# Uninstalling in Spack only requires removing the prefix.
self.remove_prefix()
tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
@ -866,26 +933,127 @@ def do_uninstall(self, **kwargs):
spack.hooks.post_uninstall(self)
def do_clean(self):
if self.stage.expanded_archive_path:
self.stage.chdir_to_source()
self.clean()
def _check_extendable(self):
if not self.extendable:
raise ValueError("Package %s is not extendable!" % self.name)
def clean(self):
"""By default just runs make clean. Override if this isn't good."""
# TODO: should we really call make clean, ro just blow away the directory?
make = build_env.MakeExecutable('make', self.parallel)
make('clean')
def _sanity_check_extension(self):
if not self.is_extension:
raise ActivationError("This package is not an extension.")
extendee_package = self.extendee_spec.package
extendee_package._check_extendable()
if not extendee_package.installed:
raise ActivationError("Can only (de)activate extensions for installed packages.")
if not self.installed:
raise ActivationError("Extensions must first be installed.")
if not self.extendee_spec.name in self.extendees:
raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
def do_clean_work(self):
"""By default just blows away the stage directory and re-stages."""
def do_activate(self, **kwargs):
"""Called on an etension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
"""
self._sanity_check_extension()
force = kwargs.get('force', False)
# TODO: get rid of this normalize - DAG handling.
self.spec.normalize()
spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec)
if not force:
for spec in self.spec.traverse(root=False):
if spec.package.extends(self.extendee_spec):
# TODO: fix this normalize() requirement -- revisit DAG handling.
spec.package.spec.normalize()
if not spec.package.activated:
spec.package.do_activate(**kwargs)
self.extendee_spec.package.activate(self, **self.extendee_args)
spack.install_layout.add_extension(self.extendee_spec, self.spec)
tty.msg("Activated extension %s for %s."
% (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
def activate(self, extension, **kwargs):
"""Symlinks all files from the extension into extendee's install dir.
Package authors can override this method to support other
extension mechanisms. Spack internals (commands, hooks, etc.)
should call do_activate() method so that proper checks are
always executed.
"""
def ignore(filename):
return (filename in spack.install_layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
conflict = tree.find_conflict(self.prefix, ignore=ignore)
if conflict:
raise ExtensionConflictError(conflict)
tree.merge(self.prefix, ignore=ignore)
def do_deactivate(self, **kwargs):
"""Called on the extension to invoke extendee's deactivate() method."""
self._sanity_check_extension()
force = kwargs.get('force', False)
# Allow a force deactivate to happen. This can unlink
# spurious files if something was corrupted.
if not force:
spack.install_layout.check_activated(self.extendee_spec, self.spec)
activated = spack.install_layout.extension_map(self.extendee_spec)
for name, aspec in activated.items():
if aspec != self.spec and self.spec in aspec:
raise ActivationError(
"Cannot deactivate %s beacuse %s is activated and depends on it."
% (self.spec.short_spec, aspec.short_spec))
self.extendee_spec.package.deactivate(self, **self.extendee_args)
# redundant activation check -- makes SURE the spec is not
# still activated even if something was wrong above.
if self.activated:
spack.install_layout.remove_extension(self.extendee_spec, self.spec)
tty.msg("Deactivated extension %s for %s."
% (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
def deactivate(self, extension, **kwargs):
"""Unlinks all files from extension out of this package's install dir.
Package authors can override this method to support other
extension mechanisms. Spack internals (commands, hooks, etc.)
should call do_deactivate() method so that proper checks are
always executed.
"""
def ignore(filename):
return (filename in spack.install_layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
tree.unmerge(self.prefix, ignore=ignore)
def do_restage(self):
"""Reverts expanded/checked out source to a pristine state."""
self.stage.restage()
def do_clean_dist(self):
"""Removes the stage directory where this package was built."""
def do_clean(self):
"""Removes the package's build stage and source tarball."""
if os.path.exists(self.stage.path):
self.stage.destroy()
@ -1002,15 +1170,23 @@ def validate_package_url(url_string):
def print_pkg(message):
"""Outputs a message with a package icon."""
mac_ver = py_platform.mac_ver()[0]
if mac_ver and Version(mac_ver) >= Version('10.7'):
print u"\U0001F4E6" + tty.indent,
else:
from llnl.util.tty.color import cwrite
cwrite('@*g{[+]} ')
from llnl.util.tty.color import cwrite
cwrite('@*g{[+]} ')
print message
def _hms(seconds):
"""Convert time in seconds to hours, minutes, seconds."""
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
parts = []
if h: parts.append("%dh" % h)
if m: parts.append("%dm" % m)
if s: parts.append("%.2fs" % s)
return ' '.join(parts)
class FetchError(spack.error.SpackError):
"""Raised when something goes wrong during fetch."""
def __init__(self, message, long_msg=None):
@ -1057,3 +1233,17 @@ class NoURLError(PackageError):
def __init__(self, cls):
super(NoURLError, self).__init__(
"Package %s has no version with a URL." % cls.__name__)
class ExtensionError(PackageError): pass
class ExtensionConflictError(ExtensionError):
def __init__(self, path):
super(ExtensionConflictError, self).__init__(
"Extension blocked by file: %s" % path)
class ActivationError(ExtensionError):
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)
View File
@ -77,6 +77,8 @@ def get(self, spec, **kwargs):
copy = spec.copy()
self.instances[copy] = package_class(copy)
except Exception, e:
if spack.debug:
sys.excepthook(*sys.exc_info())
raise FailedConstructorError(spec.name, e)
return self.instances[spec]
@ -110,6 +112,24 @@ def providers_for(self, vpkg_spec):
return providers
@_autospec
def extensions_for(self, extendee_spec):
return [p for p in self.all_packages() if p.extends(extendee_spec)]
@_autospec
def installed_extensions_for(self, extendee_spec):
for s in self.installed_package_specs():
try:
if s.package.extends(extendee_spec):
yield s.package
except UnknownPackageError, e:
# Skip packages we know nothing about
continue
# TODO: add some conditional way to do this instead of
# catching exceptions.
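Purely a usage sketch of these two queries (assuming the global spack.db PackageDB instance used elsewhere in this diff; the variable names are made up):
# every known package that extends python, installed or not
all_exts = spack.db.extensions_for('python')
# only the extensions that are actually installed
installed_exts = list(spack.db.installed_extensions_for('python'))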
def dirname_for_package_name(self, pkg_name):
"""Get the directory name for a particular package. This is the
directory that contains its package.py file."""
@ -172,6 +192,7 @@ def all_packages(self):
yield self.get(name)
@memoized
def exists(self, pkg_name):
"""Whether a package with the supplied name exists ."""
return os.path.exists(self.filename_for_package_name(pkg_name))

View File

@ -68,7 +68,7 @@ class Mpileaks(Package):
spack install mpileaks ^mvapich
spack install mpileaks ^mpich
"""
__all__ = [ 'depends_on', 'provides', 'patch', 'version' ]
__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ]
import re
import inspect
@ -107,8 +107,9 @@ def depends_on(*specs):
"""Adds a dependencies local variable in the locals of
the calling class, based on args. """
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
dependencies = caller_locals().setdefault('dependencies', {})
for string in specs:
for spec in spack.spec.parse(string):
if pkg == spec.name:
@ -116,6 +117,34 @@ def depends_on(*specs):
dependencies[spec.name] = spec
def extends(spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
This is for Python and other language modules where the module
needs to be installed into the prefix of the Python installation.
Spack handles this by installing modules into their own prefix,
but allowing ONE module version to be symlinked into a parent
Python install at a time.
keyword arguments can be passed to extends() so that extension
packages can pass parameters to the extendee's extension
mechanism.
"""
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
extendees = clocals.setdefault('extendees', {})
if extendees:
raise RelationError("Packages can extend at most one other package.")
spec = Spec(spec)
if pkg == spec.name:
raise CircularReferenceError('extends', pkg)
dependencies[spec.name] = spec
extendees[spec.name] = (spec, kwargs)
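As a hedged sketch of how a package file would use this relation (concrete examples such as geos and the py-* packages appear later in this diff); PyExample is a made-up name:
class PyExample(Package):
    extends('python', ignore=r'bin/easy_install.*$')   # kwargs are forwarded to the extendee
    depends_on('py-setuptools')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)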
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack

View File

@ -552,6 +552,13 @@ def short_spec(self):
return self.format('$_$@$%@$+$=$#')
@property
def cshort_spec(self):
"""Returns a version of the spec with the dependencies hashed
instead of completely enumerated."""
return self.format('$_$@$%@$+$=$#', color=True)
@property
def prefix(self):
return Prefix(spack.install_layout.path_for_spec(self))

View File

@ -51,7 +51,8 @@
'hg_fetch',
'mirror',
'url_extrapolate',
'cc']
'cc',
'link_tree']
def list_tests():

View File

@ -61,7 +61,7 @@ def tearDown(self):
if self.repo.stage is not None:
self.repo.stage.destroy()
self.pkg.do_clean_dist()
self.pkg.do_clean()
def assert_rev(self, rev):
@ -93,7 +93,7 @@ def try_fetch(self, rev, test_file, args):
untracked_file = 'foobarbaz'
touch(untracked_file)
self.assertTrue(os.path.isfile(untracked_file))
self.pkg.do_clean_work()
self.pkg.do_restage()
self.assertFalse(os.path.isfile(untracked_file))
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))

View File

@ -60,7 +60,7 @@ def tearDown(self):
if self.repo.stage is not None:
self.repo.stage.destroy()
self.pkg.do_clean_dist()
self.pkg.do_clean()
def try_fetch(self, rev, test_file, args):
@ -87,7 +87,7 @@ def try_fetch(self, rev, test_file, args):
untracked = 'foobarbaz'
touch(untracked)
self.assertTrue(os.path.isfile(untracked))
self.pkg.do_clean_work()
self.pkg.do_restage()
self.assertFalse(os.path.isfile(untracked))
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))

View File

@ -0,0 +1,153 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import unittest
import shutil
import tempfile
from contextlib import closing
from llnl.util.filesystem import *
from llnl.util.link_tree import LinkTree
from spack.stage import Stage
class LinkTreeTest(unittest.TestCase):
"""Tests Spack's LinkTree class."""
def setUp(self):
self.stage = Stage('link-tree-test')
with working_dir(self.stage.path):
touchp('source/1')
touchp('source/a/b/2')
touchp('source/a/b/3')
touchp('source/c/4')
touchp('source/c/d/5')
touchp('source/c/d/6')
touchp('source/c/d/e/7')
source_path = os.path.join(self.stage.path, 'source')
self.link_tree = LinkTree(source_path)
def tearDown(self):
if self.stage:
self.stage.destroy()
def check_file_link(self, filename):
self.assertTrue(os.path.isfile(filename))
self.assertTrue(os.path.islink(filename))
def check_dir(self, filename):
self.assertTrue(os.path.isdir(filename))
def test_merge_to_new_directory(self):
with working_dir(self.stage.path):
self.link_tree.merge('dest')
self.check_file_link('dest/1')
self.check_file_link('dest/a/b/2')
self.check_file_link('dest/a/b/3')
self.check_file_link('dest/c/4')
self.check_file_link('dest/c/d/5')
self.check_file_link('dest/c/d/6')
self.check_file_link('dest/c/d/e/7')
self.link_tree.unmerge('dest')
self.assertFalse(os.path.exists('dest'))
def test_merge_to_existing_directory(self):
with working_dir(self.stage.path):
touchp('dest/x')
touchp('dest/a/b/y')
self.link_tree.merge('dest')
self.check_file_link('dest/1')
self.check_file_link('dest/a/b/2')
self.check_file_link('dest/a/b/3')
self.check_file_link('dest/c/4')
self.check_file_link('dest/c/d/5')
self.check_file_link('dest/c/d/6')
self.check_file_link('dest/c/d/e/7')
self.assertTrue(os.path.isfile('dest/x'))
self.assertTrue(os.path.isfile('dest/a/b/y'))
self.link_tree.unmerge('dest')
self.assertTrue(os.path.isfile('dest/x'))
self.assertTrue(os.path.isfile('dest/a/b/y'))
self.assertFalse(os.path.isfile('dest/1'))
self.assertFalse(os.path.isfile('dest/a/b/2'))
self.assertFalse(os.path.isfile('dest/a/b/3'))
self.assertFalse(os.path.isfile('dest/c/4'))
self.assertFalse(os.path.isfile('dest/c/d/5'))
self.assertFalse(os.path.isfile('dest/c/d/6'))
self.assertFalse(os.path.isfile('dest/c/d/e/7'))
def test_merge_with_empty_directories(self):
with working_dir(self.stage.path):
mkdirp('dest/f/g')
mkdirp('dest/a/b/h')
self.link_tree.merge('dest')
self.link_tree.unmerge('dest')
self.assertFalse(os.path.exists('dest/1'))
self.assertFalse(os.path.exists('dest/a/b/2'))
self.assertFalse(os.path.exists('dest/a/b/3'))
self.assertFalse(os.path.exists('dest/c/4'))
self.assertFalse(os.path.exists('dest/c/d/5'))
self.assertFalse(os.path.exists('dest/c/d/6'))
self.assertFalse(os.path.exists('dest/c/d/e/7'))
self.assertTrue(os.path.isdir('dest/a/b/h'))
self.assertTrue(os.path.isdir('dest/f/g'))
def test_ignore(self):
with working_dir(self.stage.path):
touchp('source/.spec')
touchp('dest/.spec')
self.link_tree.merge('dest', ignore=lambda x: x == '.spec')
self.link_tree.unmerge('dest', ignore=lambda x: x == '.spec')
self.assertFalse(os.path.exists('dest/1'))
self.assertFalse(os.path.exists('dest/a'))
self.assertFalse(os.path.exists('dest/c'))
self.assertTrue(os.path.isfile('source/.spec'))
self.assertTrue(os.path.isfile('dest/.spec'))
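For orientation, the API exercised by these tests boils down to the following (paths are placeholders):
tree = LinkTree('/path/to/source')
tree.merge('/path/to/dest', ignore=lambda f: f == '.spec')    # symlink source into dest
tree.unmerge('/path/to/dest', ignore=lambda f: f == '.spec')  # remove only what was linked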

View File

@ -60,7 +60,7 @@ def tearDown(self):
if self.repo.stage is not None:
self.repo.stage.destroy()
self.pkg.do_clean_dist()
self.pkg.do_clean()
def assert_rev(self, rev):
@ -99,7 +99,7 @@ def try_fetch(self, rev, test_file, args):
untracked = 'foobarbaz'
touch(untracked)
self.assertTrue(os.path.isfile(untracked))
self.pkg.do_clean_work()
self.pkg.do_restage()
self.assertFalse(os.path.isfile(untracked))
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))

View File

@ -165,8 +165,8 @@ fi
#
# Set up modules and dotkit search paths in the user environment
#
_sp_share_dir="$(dirname $_sp_source_file)"
_sp_prefix="$(dirname $(dirname $_sp_share_dir))"
_sp_share_dir=$(cd "$(dirname $_sp_source_file)" && pwd)
_sp_prefix=$(cd "$(dirname $(dirname $_sp_share_dir))" && pwd)
# TODO: fix SYS_TYPE to something non-LLNL-specific
_spack_pathadd DK_NODE "$_sp_share_dir/dotkit/$SYS_TYPE"

View File

@ -3,18 +3,32 @@
class Imagemagick(Package):
"""ImageMagick is a image processing library"""
homepage = "http://www.imagemagic.org"
url = "http://www.imagemagick.org/download/ImageMagick-6.8.9-10.tar.gz"
version('6.9.0-0', '2cf094cb86ec518fa5bc669ce2d21613')
version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c')
version('6.8.9-9', 'e63fed3e3550851328352c708f800676')
#-------------------------------------------------------------------------
# ImageMagick does not keep around anything but *-10 versions, so
# this URL may change. If you want the bleeding edge, you can
# uncomment it and see if it works but you may need to try to
# fetch a newer version (-6, -7, -8, -9, etc.) or you can stick
# with the older, stable, archived -10 versions below.
#
# TODO: would be nice if spack had a way to recommend avoiding a
# TODO: bleeding edge version, but not comment it out.
# -------------------------------------------------------------------------
# version('6.9.0-6', 'c1bce7396c22995b8bdb56b7797b4a1b',
# url="http://www.imagemagick.org/download/ImageMagick-6.9.0-6.tar.bz2")
#-------------------------------------------------------------------------
# *-10 versions are archived, so these versions should fetch reliably.
# -------------------------------------------------------------------------
version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c',
url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download")
depends_on('libtool')
depends_on('jpeg')
depends_on('libpng')
depends_on('freetype')
depends_on('fontconfig')
# depends_on('libtiff')
depends_on('libtiff')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

View File

@ -0,0 +1,18 @@
from spack import *
class Mitos(Package):
"""Mitos is a library and a tool for collecting sampled memory
performance data to view with MemAxes"""
homepage = "https://github.com/scalability-llnl/Mitos"
url = "https://github.com/scalability-llnl/Mitos"
version('0.7', git='https://github.com/scalability-llnl/Mitos.git', tag='v0.7')
depends_on('dyninst')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)
make()
make("install")

View File

@ -0,0 +1,33 @@
from spack import *
class R(Package):
"""R is 'GNU S', a freely available language and environment for
statistical computing and graphics which provides a wide variety
of statistical and graphical techniques: linear and
nonlinear modelling, statistical tests, time series analysis,
classification, clustering, etc. Please consult the R project
homepage for further information."""
homepage = "http://www.example.com"
url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"
version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74')
depends_on("readline")
depends_on("ncurses")
depends_on("icu")
depends_on("glib")
depends_on("zlib")
depends_on("libtiff")
depends_on("jpeg")
depends_on("cairo")
depends_on("pango")
depends_on("freetype")
depends_on("tcl")
depends_on("tk")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--enable-R-shlib",
"--enable-BLAS-shlib")
make()
make("install")

View File

@ -0,0 +1,17 @@
from spack import *
class Bison(Package):
"""Bison is a general-purpose parser generator that converts
an annotated context-free grammar into a deterministic LR or
generalized LR (GLR) parser employing LALR(1) parser tables."""
homepage = "http://www.gnu.org/software/bison/"
url = "http://ftp.gnu.org/gnu/bison/bison-3.0.tar.gz"
version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -29,11 +29,10 @@ class Clang(Package):
Objective C and Objective C++ front-end for the LLVM compiler.
"""
homepage = "http://clang.llvm.org"
url = "http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.gz"
list_url = "http://llvm.org/releases/download.html"
depends_on("llvm")
version('3.4.2', '87945973b7c73038871c5f849a818588')
version('3.4.2', '87945973b7c73038871c5f849a818588', url='http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.xz')
def install(self, spec, prefix):
env['CXXFLAGS'] = self.compiler.cxx11_flag

View File

@ -23,3 +23,7 @@ def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
# dbus needs a machine id generated after install
dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen'))
dbus_uuidgen('--ensure')

View File

@ -0,0 +1,14 @@
from spack import *
class Dri2proto(Package):
"""DRI2 Protocol Headers."""
homepage = "http://http://cgit.freedesktop.org/xorg/proto/dri2proto/"
url = "http://xorg.freedesktop.org/releases/individual/proto/dri2proto-2.8.tar.gz"
version('2.8', '19ea18f63d8ae8053c9fa84b60365b77')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -31,8 +31,9 @@ class Dyninst(Package):
url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7',
url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz")
# Doesn't build right with boost@1.55.0
# version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7',
# url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz")
version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a',
url="http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz")
version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac',
@ -40,7 +41,7 @@ class Dyninst(Package):
depends_on("libelf")
depends_on("libdwarf")
depends_on("boost@1.42:1.43")
depends_on("boost@1.42:")
# new version uses cmake
def install(self, spec, prefix):

View File

@ -0,0 +1,15 @@
from spack import *
class Flex(Package):
"""Flex is a tool for generating scanners."""
homepage = "http://flex.sourceforge.net/"
url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz"
version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -0,0 +1,31 @@
from spack import *
class Geos(Package):
"""GEOS (Geometry Engine - Open Source) is a C++ port of the Java
Topology Suite (JTS). As such, it aims to contain the complete
functionality of JTS in C++. This includes all the OpenGIS
Simple Features for SQL spatial predicate functions and spatial
operators, as well as specific JTS enhanced topology functions."""
homepage = "http://trac.osgeo.org/geos/"
url = "http://download.osgeo.org/geos/geos-3.4.2.tar.bz2"
version('3.4.2', 'fc5df2d926eb7e67f988a43a92683bae')
version('3.4.1', '4c930dec44c45c49cd71f3e0931ded7e')
version('3.4.0', 'e41318fc76b5dc764a69d43ac6b18488')
version('3.3.9', '4794c20f07721d5011c93efc6ccb8e4e')
version('3.3.8', '75be476d0831a2d14958fed76ca266de')
version('3.3.7', '95ab996d22672b067d92c7dee2170460')
version('3.3.6', '6fadfb941541875f4976f75fb0bbc800')
version('3.3.5', '2ba61afb7fe2c5ddf642d82d7b16e75b')
version('3.3.4', '1bb9f14d57ef06ffa41cb1d67acb55a1')
version('3.3.3', '8454e653d7ecca475153cc88fd1daa26')
extends('python')
depends_on('swig')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--enable-python")
make()
make("install")

View File

@ -0,0 +1,19 @@
from spack import *
class Gperf(Package):
"""GNU gperf is a perfect hash function generator. For a given
list of strings, it produces a hash function and hash table, in
form of C or C++ code, for looking up a value depending on the
input string. The hash function is perfect, which means that the
hash table has no collisions, and the hash table lookup needs a
single string comparison only."""
homepage = "https://www.gnu.org/software/gperf/"
url = "http://ftp.gnu.org/pub/gnu/gperf/gperf-3.0.4.tar.gz"
version('3.0.4', 'c1f1db32fb6598d6a93e6e88796a8632')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -18,12 +18,14 @@ class Hdf5(Package):
# TODO: currently hard-coded to use OpenMPI
def install(self, spec, prefix):
configure(
"--prefix=%s" % prefix,
"--with-zlib=%s" % spec['zlib'].prefix,
"--enable-parallel",
"CC=%s" % spec['openmpi'].prefix.bin + "/mpicc",
"CXX=%s" % spec['openmpi'].prefix.bin + "/mpic++")
"--enable-shared",
"CC=%s" % spec['mpich'].prefix.bin + "/mpicc",
"CXX=%s" % spec['mpich'].prefix.bin + "/mpic++")
make()
make("install")

View File

@ -0,0 +1,17 @@
from spack import *
class Icu4c(Package):
"""ICU is a mature, widely used set of C/C++ and Java libraries
providing Unicode and Globalization support for software applications."""
homepage = "http://site.icu-project.org/"
url = "http://downloads.sourceforge.net/project/icu/ICU4C/54.1/icu4c-54_1-src.tgz"
version('54_1', 'e844caed8f2ca24c088505b0d6271bc0')
def install(self, spec, prefix):
cd("source")
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -0,0 +1,18 @@
from spack import *
class Libdrm(Package):
"""A userspace library for accessing the DRM, direct
rendering manager, on Linux, BSD and other operating
systems that support the ioctl interface."""
homepage = "http://dri.freedesktop.org/libdrm/" # no real website...
url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz"
version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6')
version('2.4.33', '86e4e3debe7087d5404461e0032231c8')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -53,13 +53,6 @@ class Libdwarf(Package):
parallel = False
def clean(self):
for dir in dwarf_dirs:
with working_dir(dir):
if os.path.exists('Makefile'):
make('clean')
def install(self, spec, prefix):
# dwarf build does not set arguments for ar properly
make.add_default_arg('ARFLAGS=rcs')

View File

@ -0,0 +1,19 @@
from spack import *
class Libgcrypt(Package):
"""Libgcrypt is a general purpose cryptographic library based on
the code from GnuPG. It provides functions for all cryptographic
building blocks: symmetric ciphers, hash algorithms, MACs, public
key algorithms, large integer functions, random numbers and a lot
of supporting functions. """
homepage = "http://www.gnu.org/software/libgcrypt/"
url = "ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.2.tar.bz2"
version('1.6.2', 'b54395a93cb1e57619943c082da09d5f')
depends_on("libgpg-error")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -0,0 +1,17 @@
from spack import *
class LibgpgError(Package):
"""Libgpg-error is a small library that defines common error
values for all GnuPG components. Among these are GPG, GPGSM,
GPGME, GPG-Agent, libgcrypt, Libksba, DirMngr, Pinentry,
SmartCard Daemon and possibly more in the future. """
homepage = "https://www.gnupg.org/related_software/libgpg-error"
url = "ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.18.tar.bz2"
version('1.18', '12312802d2065774b787cbfc22cc04e9')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -5,9 +5,9 @@ class Libpng(Package):
homepage = "http://www.libpng.org/pub/png/libpng.html"
url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz"
version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
version('1.6.15', '829a256f3de9307731d4f52dc071916d')
version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d')
version('1.6.15', '829a256f3de9307731d4f52dc071916d')
version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)

View File

@ -0,0 +1,21 @@
from spack import *
class Libxcb(Package):
"""The X protocol C-language Binding (XCB) is a replacement
for Xlib featuring a small footprint, latency hiding, direct
access to the protocol, improved threading support, and
extensibility."""
homepage = "http://xcb.freedesktop.org/"
url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz"
version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb')
depends_on("python")
depends_on("xcb-proto")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -9,6 +9,9 @@ class Libxml2(Package):
version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
depends_on('zlib')
depends_on('xz')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
"--without-python")

View File

@ -0,0 +1,16 @@
from spack import *
class Libxshmfence(Package):
"""This is a tiny library that exposes a event API on top of Linux
futexes."""
homepage = "http://keithp.com/blogs/dri3_extension/" # not really...
url = "http://xorg.freedesktop.org/archive/individual/lib/libxshmfence-1.2.tar.gz"
version('1.2', 'f0b30c0fc568b22ec524859ee28556f1')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -0,0 +1,24 @@
from spack import *
class Libxslt(Package):
"""Libxslt is the XSLT C library developed for the GNOME
project. XSLT itself is an XML language to define
transformation for XML. Libxslt is based on libxml2 the XML C
library developed for the GNOME project. It also implements
most of the EXSLT set of processor-portable extensions
functions and some of Saxon's evaluate and expressions
extensions."""
homepage = "http://www.xmlsoft.org/XSLT/index.html"
url = "http://xmlsoft.org/sources/libxslt-1.1.28.tar.gz"
version('1.1.28', '9667bf6f9310b957254fdcf6596600b7')
depends_on("libxml2")
depends_on("xz")
depends_on("zlib")
depends_on("libgcrypt")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -32,9 +32,13 @@ class Llvm(Package):
it is the full name of the project.
"""
homepage = "http://llvm.org/"
url = "http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz"
list_url = "http://llvm.org/releases/download.html"
version('3.4.2', 'a20669f75967440de949ac3b1bad439c')
version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz')
version('3.4.2', 'a20669f75967440de949ac3b1bad439c', url='http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz')
version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz')
version('2.9', '793138412d2af2c7c7f54615f8943771', url='http://llvm.org/releases/2.9/llvm-2.9.tgz')
version('2.8', '220d361b4d17051ff4bb21c64abe05ba', url='http://llvm.org/releases/2.8/llvm-2.8.tgz')
def install(self, spec, prefix):
env['CXXFLAGS'] = self.compiler.cxx11_flag

View File

@ -0,0 +1,20 @@
from spack import *
class Memaxes(Package):
"""MemAxes is a visualizer for sampled memory trace data."""
homepage = "https://github.com/scalability-llnl/MemAxes"
version('0.5', 'b0f561d48aa7301e028d074bc4b5751b',
url='https://github.com/scalability-llnl/MemAxes/archive/v0.5.tar.gz')
depends_on("cmake@2.8.9:")
depends_on("qt@5:")
depends_on("vtk")
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)
make()
make("install")

View File

@ -0,0 +1,33 @@
from spack import *
class Mesa(Package):
"""Mesa is an open-source implementation of the OpenGL
specification - a system for rendering interactive 3D graphics."""
homepage = "http://www.mesa3d.org"
url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/8.x/8.0.5/MesaLib-8.0.5.tar.gz"
# url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz"
# version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce')
version('8.0.5', 'cda5d101f43b8784fa60bdeaca4056f2')
# mesa 7.x, 8.x, 9.x
depends_on("libdrm@2.4.33")
depends_on("llvm@3.0")
# patch("llvm-fixes.patch") # using newer llvm
# mesa 10.x
# depends_on("py-mako")
# depends_on("flex")
# depends_on("bison")
# depends_on("dri2proto")
# depends_on("libxcb")
# depends_on("libxshmfence")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")

View File

@ -0,0 +1,28 @@
from spack import *
class Netcdf(Package):
"""NetCDF is a set of software libraries and self-describing, machine-independent
data formats that support the creation, access, and sharing of array-oriented
scientific data."""
homepage = "http://www.unidata.ucar.edu/software/netcdf/"
url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz"
version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
# Dependencies:
# >HDF5
depends_on("hdf5")
def install(self, spec, prefix):
configure(
"--prefix=%s" % prefix,
"--disable-dap", # Disable DAP.
"--disable-shared", # Don't build shared libraries (use static libs).
"CPPFLAGS=-I%s/include" % spec['hdf5'].prefix, # Link HDF5's include dir.
"LDFLAGS=-L%s/lib" % spec['hdf5'].prefix) # Link HDF5's lib dir.
make("install")
# Check the newly installed netcdf package. Currently disabled.
# make("check")

View File

@ -0,0 +1,20 @@
from spack import *
import os
class PyBasemap(Package):
"""The matplotlib basemap toolkit is a library for plotting 2D data on maps in Python."""
homepage = "http://matplotlib.org/basemap/"
url = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.7/basemap-1.0.7.tar.gz"
version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8')
extends('python')
depends_on('py-setuptools')
depends_on('py-numpy')
depends_on('py-matplotlib')
depends_on('py-pil')
depends_on("geos")
def install(self, spec, prefix):
env['GEOS_DIR'] = spec['geos'].prefix
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyBiopython(Package):
"""It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics."""
homepage = "http://biopython.org/wiki/Main_Page"
url = "http://biopython.org/DIST/biopython-1.65.tar.gz"
version('1.65', '143e7861ade85c0a8b5e2bbdd1da1f67')
extends('python')
depends_on('py-mx')
depends_on('py-numpy')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyCython(Package):
"""The Cython compiler for writing C extensions for the Python language."""
homepage = "https://pypi.python.org/pypi/cython"
url = "https://pypi.python.org/packages/source/C/Cython/Cython-0.21.2.tar.gz"
version('0.21.2', 'd21adb870c75680dc857cd05d41046a4')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyDateutil(Package):
"""Extensions to the standard Python datetime module."""
homepage = "https://pypi.python.org/pypi/dateutil"
url = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz"
version('2.4.0', '75714163bb96bedd07685cdb2071b8bc')
extends('python')
depends_on('py-setuptools')
depends_on('py-six')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyEpydoc(Package):
"""Epydoc is a tool for generating API documentation documentation for Python modules, based on their docstrings."""
homepage = "https://pypi.python.org/pypi/epydoc"
url = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz"
version('3.0.1', '36407974bd5da2af00bf90ca27feeb44')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,14 @@
from spack import *
class PyGnuplot(Package):
"""Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program."""
homepage = "http://gnuplot-py.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz"
version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1')
extends('python')
depends_on('py-numpy')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,18 @@
from spack import *
import re
class PyH5py(Package):
"""The h5py package provides both a high- and low-level interface to the HDF5 library from Python."""
homepage = "https://pypi.python.org/pypi/h5py"
url = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz"
version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758')
extends('python', ignore=lambda f: re.match(r'cy*', f))
depends_on('hdf5')
depends_on('py-numpy')
depends_on('py-cython')
def install(self, spec, prefix):
python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix)
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyIpython(Package):
"""IPython provides a rich toolkit to help you make the most out of using Python interactively."""
homepage = "https://pypi.python.org/pypi/ipython"
url = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz"
version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf')
extends('python')
depends_on('py-pygments')
depends_on('py-setuptools')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyLibxml2(Package):
"""A Python wrapper around libxml2."""
homepage = "https://xmlsoft.org/python.html"
url = "ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.21.tar.gz"
version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3')
extends('python')
depends_on('libxml2')
depends_on('libxslt')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,16 @@
from spack import *
class PyMako(Package):
"""A super-fast templating language that borrows the best
ideas from the existing templating languages."""
homepage = "https://pypi.python.org/pypi/mako"
url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz"
version('1.0.1', '9f0aafd177b039ef67b90ea350497a54')
depends_on('py-setuptools')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,46 @@
from spack import *
import os
class PyMatplotlib(Package):
"""Python plotting package."""
homepage = "https://pypi.python.org/pypi/matplotlib"
url = "https://pypi.python.org/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz"
version('1.4.2', '7d22efb6cce475025733c50487bd8898')
extends('python', ignore=r'bin/nosetests.*$')
depends_on('py-pyside')
depends_on('py-ipython')
depends_on('py-pyparsing')
depends_on('py-six')
depends_on('py-dateutil')
depends_on('py-pytz')
depends_on('py-nose')
depends_on('py-numpy')
depends_on('qt')
depends_on('bzip2')
depends_on('tcl')
depends_on('tk')
depends_on('qhull')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
if str(self.version) == '1.4.2':
# hack to fix configuration file
config_file = None
for p,d,f in os.walk(prefix.lib):
for file in f:
if file.find('matplotlibrc') != -1:
config_file = join_path(p, 'matplotlibrc')
print config_file
if config_file is None:
raise InstallError('could not find config file')
filter_file(r'backend : pyside',
'backend : Qt4Agg',
config_file)
filter_file(r'#backend.qt4 : PyQt4',
'backend.qt4 : PySide',
config_file)

View File

@ -0,0 +1,14 @@
from spack import *
class PyMpi4py(Package):
"""This package provides Python bindings for the Message Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings."""
homepage = "https://pypi.python.org/pypi/mpi4py"
url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz"
version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c')
extends('python')
depends_on('py-setuptools')
depends_on('mpi')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyMx(Package):
"""The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types."""
homepage = "http://www.egenix.com/products/python/mxBase/"
url = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz"
version('3.2.8', '9d9d3a25f9dc051a15e97f452413423b')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,16 @@
from spack import *
class PyNose(Package):
"""nose extends the test loading and running features of unittest,
making it easier to write, find and run tests."""
homepage = "https://pypi.python.org/pypi/nose"
url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz"
version('1.3.4', '6ed7169887580ddc9a8e16048d38274d')
extends('python', ignore=r'bin/nosetests.*$')
depends_on('py-setuptools')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,14 @@
from spack import *
class PyNumpy(Package):
"""array processing for numbers, strings, records, and objects."""
homepage = "https://pypi.python.org/pypi/numpy"
url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz"
version('1.9.1', '78842b73560ec378142665e712ae4ad9')
extends('python')
depends_on('py-nose')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyPexpect(Package):
"""Pexpect allows easy control of interactive console applications."""
homepage = "https://pypi.python.org/pypi/pexpect"
url = "https://pypi.python.org/packages/source/p/pexpect/pexpect-3.3.tar.gz"
version('3.3', '0de72541d3f1374b795472fed841dce8')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,14 @@
from spack import *
class PyPil(Package):
"""The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. This library supports many file formats, and provides powerful image processing and graphics capabilities."""
homepage = "http://www.pythonware.com/products/pil/"
url = "http://effbot.org/media/downloads/Imaging-1.1.7.tar.gz"
version('1.1.7', 'fc14a54e1ce02a0225be8854bfba478e')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyPmw(Package):
"""Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts."""
homepage = "https://pypi.python.org/pypi/Pmw"
url = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz"
version('2.0.0', 'c7c3f26c4f5abaa99807edefee578fc0')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyPychecker(Package):
""""""
homepage = "http://pychecker.sourceforge.net/"
url = "http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz"
version('0.8.19', 'c37182863dfb09209d6ba4f38fce9d2b')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,14 @@
from spack import *
class PyPygments(Package):
"""Pygments is a syntax highlighting package written in Python."""
homepage = "https://pypi.python.org/pypi/pygments"
url = "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz"
version('2.0.1', 'e0daf4c14a4fe5b630da765904de4d6c')
extends('python')
depends_on('py-setuptools')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,16 @@
from spack import *
import re
class PyPylint(Package):
"""array processing for numbers, strings, records, and objects."""
homepage = "https://pypi.python.org/pypi/pylint"
url = "https://pypi.python.org/packages/source/p/pylint/pylint-1.4.1.tar.gz"
version('1.4.1', 'df7c679bdcce5019389038847e4de622')
extends('python')
depends_on('py-nose')
depends_on('py-setuptools')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyPyparsing(Package):
"""A Python Parsing Module."""
homepage = "https://pypi.python.org/pypi/pyparsing"
url = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.0.3.tar.gz"
version('2.0.3', '0fe479be09fc2cf005f753d3acc35939')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,24 @@
from spack import *
class PyPyqt(Package):
"""PyQt is a set of Python v2 and v3 bindings for Digia's Qt
application framework and runs on all platforms supported by Qt
including Windows, MacOS/X and Linux."""
homepage = "http://www.riverbankcomputing.com/software/pyqt/intro"
url = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz"
version('4.11.3', '997c3e443165a89a559e0d96b061bf70')
extends('python')
depends_on('py-sip')
# TODO: allow qt5 when conditional deps are supported.
# TODO: Fix version matching so that @4 works like @:4
depends_on('qt@:4')
def install(self, spec, prefix):
python('configure.py',
'--confirm-license',
'--destdir=%s' % site_packages_dir)
make()
make('install')

View File

@ -0,0 +1,45 @@
from spack import *
import os
class PyPyside(Package):
"""array processing for numbers, strings, records, and objects."""
homepage = "https://pypi.python.org/pypi/pyside"
url = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz"
version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d')
# TODO: make build dependency
# depends_on("cmake")
extends('python')
depends_on('py-setuptools')
depends_on('qt@:4')
def patch(self):
"""Undo PySide RPATH handling and add Spack RPATH."""
# Figure out the special RPATH
pypkg = self.spec['python'].package
rpath = self.rpath
rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))
# Add Spack's standard CMake args to the sub-builds.
# They're called BY setup.py so we have to patch it.
filter_file(
r'OPTION_CMAKE,',
r'OPTION_CMAKE, ' + (
'"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
'"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
'setup.py')
# PySide tries to patch ELF files to remove RPATHs
# Disable this and go with the one we set.
filter_file(
r'^\s*rpath_cmd\(pyside_path, srcpath\)',
r'#rpath_cmd(pyside_path, srcpath)',
'pyside_postinstall.py')
def install(self, spec, prefix):
python('setup.py', 'install',
'--prefix=%s' % prefix,
'--jobs=%s' % make_jobs)

View File

@ -0,0 +1,13 @@
from spack import *
class PyPytz(Package):
"""World timezone definitions, modern and historical."""
homepage = "https://pypi.python.org/pypi/pytz"
url = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz"
version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,16 @@
from spack import *
class PyRpy2(Package):
"""rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions."""
homepage = "https://pypi.python.org/pypi/rpy2"
url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz"
version('2.5.4', '115a20ac30883f096da2bdfcab55196d')
extends('python')
depends_on('py-setuptools')
depends_on('R')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,17 @@
from spack import *
class PyScientificpython(Package):
"""ScientificPython is a collection of Python modules for
scientific computing. It contains support for geometry,
mathematical functions, statistics, physical units, IO,
visualization, and parallelization."""
homepage = "https://sourcesup.renater.fr/projects/scientific-py/"
url = "https://sourcesup.renater.fr/frs/download.php/4411/ScientificPython-2.8.1.tar.gz"
version('2.8.1', '73ee0df19c7b58cdf2954261f0763c77')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,13 @@
from spack import *
class PyScikitLearn(Package):
""""""
homepage = "https://pypi.python.org/pypi/scikit-learn"
url = "https://pypi.python.org/packages/source/s/scikit-learn/scikit-learn-0.15.2.tar.gz"
version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

View File

@ -0,0 +1,15 @@
from spack import *
class PyScipy(Package):
"""Scientific Library for Python."""
homepage = "https://pypi.python.org/pypi/scipy"
url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz"
version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a')
extends('python')
depends_on('py-nose')
depends_on('py-numpy')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)

Some files were not shown because too many files have changed in this diff.