commit 87db69478d
Merge branch 'develop' into features/external-packages

Conflicts:
    lib/spack/spack/package.py

diff --git a/.mailmap b/.mailmap
@@ -1,13 +1,20 @@
 Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov>
+Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov>
 Adam Moody <moody20@llnl.gov> Adam T. Moody <moody20@llnl.gov>
 Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com>
 David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra324.llnl.gov>
 David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra648.llnl.gov>
 Kevin Brandstatter <kjbrandstatter@gmail.com> Kevin Brandstatter <kbrandst@hawk.iit.edu>
 Luc Jaulmes <luc.jaulmes@bsc.es> Luc Jaulmes <jaulmes1@llnl.gov>
-Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov
+Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov>
 Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
 Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
 Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
 Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab687.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab690.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@catalyst159.llnl.gov>
 Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
+Massimiliano Culpo <massimiliano.culpo@epfl.ch> Massimiliano Culpo <massimiliano.culpo@googlemail.com>
+Massimiliano Culpo <massimiliano.culpo@epfl.ch> alalazo <massimiliano.culpo@googlemail.com>
+Mark Miller <miller86@llnl.gov> miller86 <miller86@llnl.gov>
@@ -19,7 +19,7 @@ written in pure Python, and specs allow package authors to write a
 single build script for many different builds of the same package.
 
 See the
-[Feature Overview](http://llnl.github.io/spack/features.html)
+[Feature Overview](http://software.llnl.gov/spack/features.html)
 for examples and highlights.
 
 To install spack and install your first package:
@@ -31,7 +31,7 @@ To install spack and install your first package:
 Documentation
 ----------------
 
-[**Full documentation**](http://llnl.github.io/spack) for Spack is
+[**Full documentation**](http://software.llnl.gov/spack) for Spack is
 the first place to look.
 
 See also:

diff --git a/bin/sbang b/bin/sbang
new file mode 100755
@@ -0,0 +1,84 @@
+#!/bin/bash
+#
+# `sbang`: Run scripts with long shebang lines.
+#
+# Many operating systems limit the length of shebang lines, making it
+# hard to use interpreters that are deep in the directory hierarchy.
+# `sbang` can run such scripts, either as a shebang interpreter, or
+# directly on the command line.
+#
+# Usage
+# -----------------------------
+# Suppose you have a script, long-shebang.sh, like this:
+#
+#     1    #!/very/long/path/to/some/interpreter
+#     2
+#     3    echo "success!"
+#
+# Invoking this script will result in an error on some OS's.  On
+# Linux, you get this:
+#
+#     $ ./long-shebang.sh
+#     -bash: ./long: /very/long/path/to/some/interp: bad interpreter:
+#            No such file or directory
+#
+# On Mac OS X, the system simply assumes the interpreter is the shell
+# and tries to run with it, which is likely not what you want.
+#
+#
+# `sbang` on the command line
+# -----------------------------
+# You can use `sbang` in two ways.  The first is to use it directly,
+# from the command line, like this:
+#
+#     $ sbang ./long-shebang.sh
+#     success!
+#
+#
+# `sbang` as the interpreter
+# -----------------------------
+# You can also use `sbang` *as* the interpreter for your script.  Put
+# `#!/bin/bash /path/to/sbang` on line 1, and move the original
+# shebang to line 2 of the script:
+#
+#     1    #!/bin/bash /path/to/sbang
+#     2    #!/long/path/to/real/interpreter with arguments
+#     3
+#     4    echo "success!"
+#
+#     $ ./long-shebang.sh
+#     success!
+#
+# On Linux, you could shorten line 1 to `#!/path/to/sbang`, but other
+# operating systems like Mac OS X require the interpreter to be a
+# binary, so it's best to use `sbang` as a `bash` argument.
+# Obviously, for this to work, `sbang` needs to have a short enough
+# path that *it* will run without hitting OS limits.
+#
+#
+# How it works
+# -----------------------------
+# `sbang` is a very simple bash script. It looks at the first two
+# lines of a script argument and runs the last line starting with
+# `#!`, with the script as an argument.  It also forwards arguments.
+#
+
+# First argument is the script we want to actually run.
+script="$1"
+
+# Search the first two lines of script for interpreters.
+lines=0
+while read line && ((lines < 2)) ; do
+    if [[ "$line" = '#!'* ]]; then
+        interpreter="${line#\#!}"
+    fi
+    lines=$((lines+1))
+done < "$script"
+
+# Invoke any interpreter found, or raise an error if none was found.
+if [ -n "$interpreter" ]; then
+    exec $interpreter "$@"
+else
+    echo "error: sbang found no interpreter in $script"
+    exit 1
+fi
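
Note on the script above: `sbang` inspects only the first two lines of its target and keeps the *last* `#!` line it sees, so a script can carry `#!/bin/bash /path/to/sbang` on line 1 and the real interpreter on line 2. For readers who find the bash parameter expansions dense, a rough Python model of the same algorithm (an illustration only, not part of this commit):

    # Rough Python model of sbang's algorithm (illustration only).
    import os
    import sys

    def sbang(script, *args):
        interpreter = None
        with open(script) as f:
            for _ in range(2):                      # sbang reads the first two lines
                line = f.readline()
                if line.startswith('#!'):
                    interpreter = line[2:].strip()  # the last #! line wins
        if interpreter is None:
            sys.exit("error: sbang found no interpreter in %s" % script)
        cmd = interpreter.split() + [script] + list(args)
        os.execvp(cmd[0], cmd)                      # replace the process, like bash `exec`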

@@ -62,7 +62,7 @@ for pyc_file in orphaned_pyc_files:
     try:
         os.remove(pyc_file)
     except OSError as e:
-        print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc
+        print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
 
 # If there is no working directory, use the spack prefix.
 try:

@@ -903,7 +903,7 @@ Or, similarly with modules, you could type:
     $ spack load mpich %gcc@4.4.7
 
 These commands will add appropriate directories to your ``PATH``,
-``MANPATH``, and ``LD_LIBRARY_PATH``. When you no longer want to use
+``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH``. When you no longer want to use
 a package, you can type unload or unuse similarly:
 
 .. code-block:: sh

@@ -73,19 +73,32 @@ with a high level view of Spack's directory structure::
    spack/                  <- installation root
       bin/
          spack             <- main spack executable
 
+      etc/
+         spack/            <- Spack config files.
+                              Can be overridden by files in ~/.spack.
+
       var/
          spack/            <- build & stage directories
+            repos/             <- contains package repositories
+               builtin/        <- pkg repository that comes with Spack
+                  repo.yaml    <- descriptor for the builtin repository
+                  packages/    <- directories under here contain packages
+
       opt/
          spack/            <- packages are installed here
 
       lib/
          spack/
             docs/          <- source for this documentation
             env/           <- compiler wrappers for build environment
 
+            external/      <- external libs included in Spack distro
+            llnl/          <- some general-use libraries
+
             spack/         <- spack module; contains Python code
                cmd/        <- each file in here is a spack subcommand
                compilers/  <- compiler description files
-               packages/   <- each file in here is a spack package
                test/       <- unit test modules
                util/       <- common code

@@ -103,7 +103,7 @@ creates a simple python file:
 It doesn't take much python coding to get from there to a working
 package:
 
-.. literalinclude:: ../../../var/spack/packages/libelf/package.py
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/libelf/package.py
    :lines: 25-
 
 Spack also provides wrapper functions around common commands like

@@ -22,7 +22,7 @@ go:
     $ spack install libelf
 
 For a richer experience, use Spack's `shell support
-<http://llnl.github.io/spack/basic_usage.html#environment-modules>`_:
+<http://software.llnl.gov/spack/basic_usage.html#environment-modules>`_:
 
 .. code-block:: sh

@@ -84,7 +84,7 @@ always choose to download just one tarball initially, and run
 
 If it fails entirely, you can get minimal boilerplate by using
 :ref:`spack-edit-f`, or you can manually create a directory and
-``package.py`` file for the package in ``var/spack/packages``.
+``package.py`` file for the package in ``var/spack/repos/builtin/packages``.
 
 .. note::

@@ -203,7 +203,7 @@ edit`` command:
 So, if you used ``spack create`` to create a package, then saved and
 closed the resulting file, you can get back to it with ``spack edit``.
 The ``cmake`` package actually lives in
-``$SPACK_ROOT/var/spack/packages/cmake/package.py``, but this provides
+``$SPACK_ROOT/var/spack/repos/builtin/packages/cmake/package.py``, but this provides
 a much simpler shortcut and saves you the trouble of typing the full
 path.

@@ -269,18 +269,18 @@ live in Spack's directory structure. In general, `spack-create`_ and
 `spack-edit`_ handle creating package files for you, so you can skip
 most of the details here.
 
-``var/spack/packages``
+``var/spack/repos/builtin/packages``
 ~~~~~~~~~~~~~~~~~~~~~~~
 
 A Spack installation directory is structured like a standard UNIX
 install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
 etc.).  Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
-Packages themselves live in ``$SPACK_ROOT/var/spack/packages``.
+Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
 
 If you ``cd`` to that directory, you will see directories for each
 package:
 
-.. command-output:: cd $SPACK_ROOT/var/spack/packages; ls -CF
+.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages; ls -CF
    :shell:
    :ellipsis: 10

@@ -288,7 +288,7 @@ Each directory contains a file called ``package.py``, which is where
 all the python code for the package goes.  For example, the ``libelf``
 package lives in::
 
-   $SPACK_ROOT/var/spack/packages/libelf/package.py
+   $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
 
 Alongside the ``package.py`` file, a package may contain extra
 directories or files (like patches) that it needs to build.

@@ -301,7 +301,7 @@ Packages are named after the directory containing ``package.py``. So,
 ``libelf``'s ``package.py`` lives in a directory called ``libelf``.
 The ``package.py`` file defines a class called ``Libelf``, which
 extends Spack's ``Package`` class.  for example, here is
-``$SPACK_ROOT/var/spack/packages/libelf/package.py``:
+``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
 
 .. code-block:: python
    :linenos:

@@ -328,7 +328,7 @@ these:
     $ spack install libelf@0.8.13
 
 Spack sees the package name in the spec and looks for
-``libelf/package.py`` in ``var/spack/packages``. Likewise, if you say
+``libelf/package.py`` in ``var/spack/repos/builtin/packages``. Likewise, if you say
 ``spack install py-numpy``, then Spack looks for
 ``py-numpy/package.py``.

@@ -401,6 +401,35 @@ construct the new one for ``8.2.1``.
 When you supply a custom URL for a version, Spack uses that URL
 *verbatim* and does not perform extrapolation.
 
+Skipping the expand step
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack normally expands archives automatically after downloading
+them. If you want to skip this step (e.g., for self-extracting
+executables and other custom archive types), you can add
+``expand=False`` to a ``version`` directive.
+
+.. code-block:: python
+
+   version('8.2.1', '4136d7b4c04df68b686570afa26988ac',
+           url='http://example.com/foo-8.2.1-special-version.tar.gz', expand=False)
+
+When ``expand`` is set to ``False``, Spack sets the current working
+directory to the directory containing the downloaded archive before it
+calls your ``install`` method. Within ``install``, the path to the
+downloaded archive is available as ``self.stage.archive_file``.
+
+Here is an example snippet for packages distributed as self-extracting
+archives. The example sets permissions on the downloaded file to make
+it executable, then runs it with some arguments.
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       set_executable(self.stage.archive_file)
+       installer = Executable(self.stage.archive_file)
+       installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.')
+
 Checksums
 ~~~~~~~~~~~~~~~~~

@@ -703,7 +732,7 @@ supply is a filename, then the patch needs to live within the spack
 source tree.  For example, the patch above lives in a directory
 structure like this::
 
-   $SPACK_ROOT/var/spack/packages/
+   $SPACK_ROOT/var/spack/repos/builtin/packages/
       mvapich2/
          package.py
          ad_lustre_rwcontig_open_source.patch

@@ -1597,7 +1626,7 @@ The last element of a package is its ``install()`` method. This is
 where the real work of installation happens, and it's the main part of
 the package you'll need to customize for each piece of software.
 
-.. literalinclude:: ../../../var/spack/packages/libelf/package.py
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/libelf/package.py
    :start-after: 0.8.12
    :linenos:

@@ -1775,15 +1804,15 @@ Compile-time library search paths
 * ``-L$dep_prefix/lib``
 * ``-L$dep_prefix/lib64``
 
 Runtime library search paths (RPATHs)
-* ``-Wl,-rpath=$dep_prefix/lib``
-* ``-Wl,-rpath=$dep_prefix/lib64``
+* ``-Wl,-rpath,$dep_prefix/lib``
+* ``-Wl,-rpath,$dep_prefix/lib64``
 
 Include search paths
 * ``-I$dep_prefix/include``
 
 An example of this would be the ``libdwarf`` build, which has one
 dependency: ``libelf``.  Every call to ``cc`` in the ``libdwarf``
 build will have ``-I$LIBELF_PREFIX/include``,
-``-L$LIBELF_PREFIX/lib``, and ``-Wl,-rpath=$LIBELF_PREFIX/lib``
+``-L$LIBELF_PREFIX/lib``, and ``-Wl,-rpath,$LIBELF_PREFIX/lib``
 inserted on the command line.  This is done transparently to the
 project's build system, which will just think it's using a system
 where ``libelf`` is readily available.  Because of this, you **do

@@ -2172,6 +2201,15 @@ Filtering functions
 
 Examples:
 
+#. Filtering a Makefile to force it to use Spack's compiler wrappers:
+
+   .. code-block:: python
+
+      filter_file(r'^CC\s*=.*', spack_cc, 'Makefile')
+      filter_file(r'^CXX\s*=.*', spack_cxx, 'Makefile')
+      filter_file(r'^F77\s*=.*', spack_f77, 'Makefile')
+      filter_file(r'^FC\s*=.*', spack_fc, 'Makefile')
+
 #. Replacing ``#!/usr/bin/perl`` with ``#!/usr/bin/env perl`` in ``bib2xhtml``:
 
    .. code-block:: python

diff --git a/lib/spack/env/cc b/lib/spack/env/cc
@@ -90,15 +90,15 @@ case "$command" in
         command="$SPACK_CC"
         language="C"
         ;;
-    c++|CC|g++|clang++|icpc|pgCC|xlc++)
+    c++|CC|g++|clang++|icpc|pgc++|xlc++)
         command="$SPACK_CXX"
         language="C++"
         ;;
-    f90|fc|f95|gfortran|ifort|pgf90|xlf90|nagfor)
+    f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
         command="$SPACK_FC"
         language="Fortran 90"
         ;;
-    f77|gfortran|ifort|pgf77|xlf|nagfor)
+    f77|gfortran|ifort|pgfortran|xlf|nagfor)
         command="$SPACK_F77"
         language="Fortran 77"
         ;;
@@ -130,7 +130,7 @@ if [ -z "$mode" ]; then
     done
 fi
 
-# Dump the version and exist if we're in testing mode.
+# Dump the version and exit if we're in testing mode.
 if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
     echo "$mode"
     exit
@@ -175,32 +175,44 @@ while [ -n "$1" ]; do
             ;;
         -Wl,*)
             arg="${1#-Wl,}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            if [[ "$arg" = -rpath=* ]]; then
-                rpaths+=("${arg#-rpath=}")
-            elif [[ "$arg" = -rpath ]]; then
+            # TODO: Handle multiple -Wl, continuations of -Wl,-rpath
+            if [[ $arg == -rpath=* ]]; then
+                arg="${arg#-rpath=}"
+                for rpath in ${arg//,/ }; do
+                    rpaths+=("$rpath")
+                done
+            elif [[ $arg == -rpath,* ]]; then
+                arg="${arg#-rpath,}"
+                for rpath in ${arg//,/ }; do
+                    rpaths+=("$rpath")
+                done
+            elif [[ $arg == -rpath ]]; then
                 shift; arg="$1"
-                if [[ "$arg" != -Wl,* ]]; then
+                if [[ $arg != '-Wl,'* ]]; then
                     die "-Wl,-rpath was not followed by -Wl,*"
                 fi
-                rpaths+=("${arg#-Wl,}")
+                arg="${arg#-Wl,}"
+                for rpath in ${arg//,/ }; do
+                    rpaths+=("$rpath")
+                done
             else
                 other_args+=("-Wl,$arg")
             fi
             ;;
-        -Xlinker,*)
-            arg="${1#-Xlinker,}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            if [[ "$arg" = -rpath=* ]]; then
+        -Xlinker)
+            shift; arg="$1";
+            if [[ $arg = -rpath=* ]]; then
                 rpaths+=("${arg#-rpath=}")
-            elif [[ "$arg" = -rpath ]]; then
+            elif [[ $arg = -rpath ]]; then
                 shift; arg="$1"
-                if [[ "$arg" != -Xlinker,* ]]; then
-                    die "-Xlinker,-rpath was not followed by -Xlinker,*"
+                if [[ $arg != -Xlinker ]]; then
+                    die "-Xlinker -rpath was not followed by -Xlinker <arg>"
                 fi
-                rpaths+=("${arg#-Xlinker,}")
+                shift; arg="$1"
+                rpaths+=("$arg")
             else
-                other_args+=("-Xlinker,$arg")
+                other_args+=("-Xlinker")
+                other_args+=("$arg")
             fi
             ;;
         *)
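
The rewritten `-Wl,` case accepts both `-Wl,-rpath=DIR` and the comma-joined `-Wl,-rpath,DIR1,DIR2` form; `${arg//,/ }` splits the remainder on commas so each directory lands in `rpaths`. A small Python model of the accepted forms (illustration only; the function name is hypothetical):

    def rpaths_from_wl(arg):
        # Model of the new parsing: arg is the text after '-Wl,'.
        if arg.startswith('-rpath='):
            rest = arg[len('-rpath='):]
        elif arg.startswith('-rpath,'):
            rest = arg[len('-rpath,'):]
        else:
            return []
        return [p for p in rest.split(',') if p]

    assert rpaths_from_wl('-rpath=/opt/lib') == ['/opt/lib']
    assert rpaths_from_wl('-rpath,/a,/b') == ['/a', '/b']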

diff --git a/lib/spack/env/pgi/case-insensitive/pgCC b/lib/spack/env/pgi/case-insensitive/pgCC
deleted file
@@ -1 +0,0 @@
-../../cc

@@ -25,7 +25,9 @@
 __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
            'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
            'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
-           'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink']
+           'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
+           'set_executable', 'copy_mode', 'unset_executable_mode',
+           'remove_dead_links', 'remove_linked_tree']
 
 import os
 import sys
@@ -152,15 +154,28 @@ def set_install_permissions(path):
 def copy_mode(src, dest):
     src_mode = os.stat(src).st_mode
     dest_mode = os.stat(dest).st_mode
-    if src_mode | stat.S_IXUSR: dest_mode |= stat.S_IXUSR
-    if src_mode | stat.S_IXGRP: dest_mode |= stat.S_IXGRP
-    if src_mode | stat.S_IXOTH: dest_mode |= stat.S_IXOTH
+    if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR
+    if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP
+    if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH
     os.chmod(dest, dest_mode)
 
 
+def unset_executable_mode(path):
+    mode = os.stat(path).st_mode
+    mode &= ~stat.S_IXUSR
+    mode &= ~stat.S_IXGRP
+    mode &= ~stat.S_IXOTH
+    os.chmod(path, mode)
+
+
 def install(src, dest):
     """Manually install a file to a particular location."""
     tty.debug("Installing %s to %s" % (src, dest))
+
+    # Expand dest to its eventual full path if it is a directory.
+    if os.path.isdir(dest):
+        dest = join_path(dest, os.path.basename(src))
+
     shutil.copy(src, dest)
     set_install_permissions(dest)
     copy_mode(src, dest)
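
The `copy_mode` change is a genuine bug fix, not a cleanup: `src_mode | stat.S_IXUSR` is always non-zero, so the old tests marked every installed file executable. Testing whether a permission bit is set requires bitwise AND, e.g.:

    import stat
    src_mode = 0o644                      # rw-r--r--: not executable
    print(bool(src_mode | stat.S_IXUSR))  # True  -- old test always passed
    print(bool(src_mode & stat.S_IXUSR))  # False -- new test checks the bit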
@@ -235,7 +250,7 @@ def touchp(path):
 def force_symlink(src, dest):
     try:
         os.symlink(src, dest)
-    except OSError, e:
+    except OSError as e:
         os.remove(dest)
         os.symlink(src, dest)
@@ -339,3 +354,41 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
 
     if order == 'post':
         yield (source_path, dest_path)
+
+
+def set_executable(path):
+    st = os.stat(path)
+    os.chmod(path, st.st_mode | stat.S_IEXEC)
+
+
+def remove_dead_links(root):
+    """
+    Removes any dead link that is present in root
+
+    Args:
+        root: path where to search for dead links
+
+    """
+    for file in os.listdir(root):
+        path = join_path(root, file)
+        if os.path.islink(path):
+            real_path = os.path.realpath(path)
+            if not os.path.exists(real_path):
+                os.unlink(path)
+
+
+def remove_linked_tree(path):
+    """
+    Removes a directory and its contents.  If the directory is a
+    symlink, follows the link and removes the real directory before
+    removing the link.
+
+    Args:
+        path: directory to be removed
+
+    """
+    if os.path.exists(path):
+        if os.path.islink(path):
+            shutil.rmtree(os.path.realpath(path), True)
+            os.unlink(path)
+        else:
+            shutil.rmtree(path, True)
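
A short sketch of how the new helpers are meant to be used (hypothetical paths; `remove_dead_links` unlinks only symlinks whose targets no longer exist, and `remove_linked_tree` follows a symlinked directory before deleting it):

    from llnl.util.filesystem import remove_dead_links, remove_linked_tree

    remove_dead_links('/opt/spack/var/spack/stage')       # prune broken symlinks
    remove_linked_tree('/opt/spack/var/spack/stage/foo')  # remove dir or symlinked dir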

@@ -177,8 +177,6 @@ def set_module_variables_for_package(pkg, m):
     """Populate the module scope of install() with some useful functions.
        This makes things easier for package writers.
     """
-    m = pkg.module
-
     # number of jobs spack will to build with.
     jobs = multiprocessing.cpu_count()
     if not pkg.parallel:
@@ -214,6 +212,13 @@ def set_module_variables_for_package(pkg, m):
     m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
     m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
 
+    # Put spack compiler paths in module scope.
+    link_dir = spack.build_env_path
+    m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
+    m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
+    m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
+    m.spack_f90 = join_path(link_dir, pkg.compiler.link_paths['fc'])
+
     # Emulate some shell commands for convenience
     m.pwd = os.getcwd
     m.cd = os.chdir
@@ -237,9 +242,9 @@ def set_module_variables_for_package(pkg, m):
 def get_rpaths(pkg):
     """Get a list of all the rpaths for a package."""
     rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
-    rpaths.extend(d.prefix.lib for d in pkg.spec.traverse(root=False)
+    rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
                   if os.path.isdir(d.prefix.lib))
-    rpaths.extend(d.prefix.lib64 for d in pkg.spec.traverse(root=False)
+    rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
                   if os.path.isdir(d.prefix.lib64))
     return rpaths
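
Note the semantic shift in `get_rpaths`: `spec.traverse(root=False)` walks the entire dependency DAG, while `spec.dependencies.values()` yields only direct dependencies, so a package's RPATH now covers just what it links against directly. A toy illustration of the difference (hypothetical three-package chain):

    # Hypothetical chain: app -> libdwarf -> libelf
    deps = {'app': ['libdwarf'], 'libdwarf': ['libelf'], 'libelf': []}

    def traverse(pkg):                 # old behavior: whole DAG below pkg
        for d in deps[pkg]:
            yield d
            for dd in traverse(d):
                yield dd

    print(list(traverse('app')))       # ['libdwarf', 'libelf']
    print(deps['app'])                 # ['libdwarf'] -- new: direct deps only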

@@ -22,23 +22,18 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
-import re
 import argparse
 import hashlib
-from pprint import pprint
-from subprocess import CalledProcessError
 
 import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
 
 import spack
 import spack.cmd
 import spack.util.crypto
 from spack.stage import Stage, FailedDownloadError
 from spack.version import *
 
-description ="Checksum available versions of a package."
+description = "Checksum available versions of a package."
 
 
 def setup_parser(subparser):
     subparser.add_argument(
@@ -58,25 +53,23 @@ def get_checksums(versions, urls, **kwargs):
 
     tty.msg("Downloading...")
     hashes = []
-    for i, (url, version) in enumerate(zip(urls, versions)):
-        stage = Stage(url)
+    i = 0
+    for url, version in zip(urls, versions):
         try:
-            stage.fetch()
-            if i == 0 and first_stage_function:
-                first_stage_function(stage)
+            with Stage(url, keep=keep_stage) as stage:
+                stage.fetch()
+                if i == 0 and first_stage_function:
+                    first_stage_function(stage)
 
-            hashes.append(
-                spack.util.crypto.checksum(hashlib.md5, stage.archive_file))
-        except FailedDownloadError, e:
+                hashes.append((version,
+                               spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
+            i += 1
+        except FailedDownloadError as e:
             tty.msg("Failed to fetch %s" % url)
-            continue
-        finally:
-            if not keep_stage:
-                stage.destroy()
+        except Exception as e:
+            tty.msg('Something failed on %s, skipping.\n    (%s)' % (url, e))
 
-    return zip(versions, hashes)
+    return hashes
 
 
 def checksum(parser, args):
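
The `get_checksums` rewrite leans on `Stage` being a context manager: cleanup that the old code did in `try/finally` with `stage.destroy()` now happens in `__exit__`, even when `fetch()` raises, and `keep=keep_stage` opts out of it. The pattern in miniature (hypothetical stand-in class, for illustration):

    class DemoStage(object):
        # Stand-in for the context-manager protocol Stage now supports.
        def __init__(self, url, keep=False):
            self.url, self.keep = url, keep
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_value, traceback):
            if not self.keep:
                print("cleaning up stage for %s" % self.url)

    with DemoStage("http://example.com/foo.tar.gz") as stage:
        pass  # fetch/checksum work goes here; cleanup runs even on error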

@@ -95,13 +88,13 @@ def checksum(parser, args):
     else:
         versions = pkg.fetch_remote_versions()
         if not versions:
-            tty.die("Could not fetch any versions for %s." % pkg.name)
+            tty.die("Could not fetch any versions for %s" % pkg.name)
 
     sorted_versions = sorted(versions, reverse=True)
 
-    tty.msg("Found %s versions of %s." % (len(versions), pkg.name),
+    tty.msg("Found %s versions of %s" % (len(versions), pkg.name),
             *spack.cmd.elide_list(
                 ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
     print
     archives_to_fetch = tty.get_number(
         "How many would you like to checksum?", default=5, abort='q')
@@ -116,7 +109,7 @@ def checksum(parser, args):
         keep_stage=args.keep_stage)
 
     if not version_hashes:
-        tty.die("Could not fetch any versions for %s." % pkg.name)
+        tty.die("Could not fetch any versions for %s" % pkg.name)
 
     version_lines = ["    version('%s', '%s')" % (v, h) for v, h in version_hashes]
     tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

@@ -96,7 +96,7 @@ def compiler_remove(args):
     compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
 
     if not compilers:
-        tty.die("No compilers match spec %s." % cspec)
+        tty.die("No compilers match spec %s" % cspec)
     elif not args.all and len(compilers) > 1:
         tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
         colify(reversed(sorted([c.spec for c in compilers])), indent=4)
@@ -105,7 +105,7 @@ def compiler_remove(args):
 
     for compiler in compilers:
         spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
-        tty.msg("Removed compiler %s." % compiler.spec)
+        tty.msg("Removed compiler %s" % compiler.spec)
 
 
 def compiler_info(args):
@@ -114,7 +114,7 @@ def compiler_info(args):
     compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
 
     if not compilers:
-        tty.error("No compilers match spec %s." % cspec)
+        tty.error("No compilers match spec %s" % cspec)
     else:
         for c in compilers:
             print str(c.spec) + ":"

@@ -156,7 +156,7 @@ def guess_name_and_version(url, args):
     # Try to deduce name and version of the new package from the URL
     version = spack.url.parse_version(url)
     if not version:
-        tty.die("Couldn't guess a version string from %s." % url)
+        tty.die("Couldn't guess a version string from %s" % url)
 
     # Try to guess a name. If it doesn't work, allow the user to override.
     if args.alternate_name:
@@ -189,7 +189,7 @@ def find_repository(spec, args):
     try:
         repo = Repo(repo_path)
         if spec.namespace and spec.namespace != repo.namespace:
-            tty.die("Can't create package with namespace %s in repo with namespace %s."
+            tty.die("Can't create package with namespace %s in repo with namespace %s"
                     % (spec.namespace, repo.namespace))
     except RepoError as e:
         tty.die(str(e))
@@ -222,7 +222,7 @@ def fetch_tarballs(url, name, args):
     archives_to_fetch = 1
     if not versions:
         # If the fetch failed for some reason, revert to what the user provided
-        versions = { version : url }
+        versions = { "version" : url }
     elif len(versions) > 1:
         tty.msg("Found %s versions of %s:" % (len(versions), name),
                 *spack.cmd.elide_list(
@@ -252,7 +252,7 @@ def create(parser, args):
     name = spec.name  # factors out namespace, if any
     repo = find_repository(spec, args)
 
-    tty.msg("This looks like a URL for %s version %s." % (name, version))
+    tty.msg("This looks like a URL for %s version %s" % (name, version))
     tty.msg("Creating template for package %s" % name)
 
     # Fetch tarballs (prompting user if necessary)
@@ -266,7 +266,7 @@ def create(parser, args):
         keep_stage=args.keep_stage)
 
     if not ver_hash_tuples:
-        tty.die("Could not fetch any tarballs for %s." % name)
+        tty.die("Could not fetch any tarballs for %s" % name)
 
     # Prepend 'py-' to python package names, by convention.
     if guesser.build_system == 'python':
@@ -291,4 +291,4 @@ def create(parser, args):
 
     # If everything checks out, go ahead and edit.
     spack.editor(pkg_path)
-    tty.msg("Created package %s." % pkg_path)
+    tty.msg("Created package %s" % pkg_path)

@@ -45,6 +45,9 @@ def setup_parser(subparser):
     subparser.add_argument(
         '--skip-patch', action='store_true',
         help="Skip patching for the DIY build.")
+    subparser.add_argument(
+        '-q', '--quiet', action='store_true', dest='quiet',
+        help="Do not display verbose build output while installing.")
     subparser.add_argument(
         'spec', nargs=argparse.REMAINDER,
         help="specs to use for install. Must contain package AND verison.")
@@ -92,4 +95,5 @@ def diy(self, args):
     package.do_install(
         keep_prefix=args.keep_prefix,
         ignore_deps=args.ignore_deps,
+        verbose=not args.quiet,
         keep_stage=True)   # don't remove source dir for DIY.

@@ -22,32 +22,51 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
-import hashlib
 import argparse
+import hashlib
+import os
 
 import llnl.util.tty as tty
-from llnl.util.filesystem import *
 import spack.util.crypto
+from spack.stage import Stage, FailedDownloadError
 
-description = "Calculate md5 checksums for files."
+description = "Calculate md5 checksums for files/urls."
 
 
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparser.add_argument('files', nargs=argparse.REMAINDER,
                            help="Files to checksum.")
 
 
+def compute_md5_checksum(url):
+    if not os.path.isfile(url):
+        with Stage(url) as stage:
+            stage.fetch()
+            value = spack.util.crypto.checksum(hashlib.md5, stage.archive_file)
+    else:
+        value = spack.util.crypto.checksum(hashlib.md5, url)
+    return value
+
+
 def md5(parser, args):
     if not args.files:
         setup_parser.parser.print_help()
         return 1
 
-    for f in args.files:
-        if not os.path.isfile(f):
-            tty.die("Not a file: %s" % f)
-        if not can_access(f):
-            tty.die("Cannot read file: %s" % f)
+    results = []
+    for url in args.files:
+        try:
+            checksum = compute_md5_checksum(url)
+            results.append((checksum, url))
+        except FailedDownloadError as e:
+            tty.warn("Failed to fetch %s" % url)
+            tty.warn("%s" % e)
+        except IOError as e:
+            tty.warn("Error when reading %s" % url)
+            tty.warn("%s" % e)
 
-        checksum = spack.util.crypto.checksum(hashlib.md5, f)
-        print "%s %s" % (checksum, f)
+    # Dump the MD5s at last without interleaving them with downloads
+    tty.msg("%d MD5 checksums:" % len(results))
+    for checksum, url in results:
+        print "%s %s" % (checksum, url)

@@ -53,11 +53,13 @@ def setup_parser(subparser):
     create_parser.add_argument('-d', '--directory', default=None,
                                help="Directory in which to create mirror.")
     create_parser.add_argument(
-        'specs', nargs=argparse.REMAINDER, help="Specs of packages to put in mirror")
+        'specs', nargs=argparse.REMAINDER,
+        help="Specs of packages to put in mirror")
     create_parser.add_argument(
         '-f', '--file', help="File with specs of packages to put in mirror.")
     create_parser.add_argument(
-        '-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
+        '-D', '--dependencies', action='store_true',
+        help="Also fetch all dependencies")
     create_parser.add_argument(
         '-o', '--one-version-per-spec', action='store_const', const=1, default=0,
         help="Only fetch one 'preferred' version per spec, not all known versions.")
|
|||||||
help="Configuration scope to modify.")
|
help="Configuration scope to modify.")
|
||||||
|
|
||||||
# Remove
|
# Remove
|
||||||
remove_parser = sp.add_parser('remove', aliases=['rm'], help=mirror_remove.__doc__)
|
remove_parser = sp.add_parser('remove', aliases=['rm'],
|
||||||
|
help=mirror_remove.__doc__)
|
||||||
remove_parser.add_argument('name')
|
remove_parser.add_argument('name')
|
||||||
remove_parser.add_argument(
|
remove_parser.add_argument(
|
||||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||||
@ -123,7 +126,7 @@ def mirror_remove(args):
|
|||||||
|
|
||||||
old_value = mirrors.pop(name)
|
old_value = mirrors.pop(name)
|
||||||
spack.config.update_config('mirrors', mirrors, scope=args.scope)
|
spack.config.update_config('mirrors', mirrors, scope=args.scope)
|
||||||
tty.msg("Removed mirror %s with url %s." % (name, old_value))
|
tty.msg("Removed mirror %s with url %s" % (name, old_value))
|
||||||
|
|
||||||
|
|
||||||
def mirror_list(args):
|
def mirror_list(args):
|
||||||
@ -141,15 +144,17 @@ def mirror_list(args):
|
|||||||
|
|
||||||
|
|
||||||
def _read_specs_from_file(filename):
|
def _read_specs_from_file(filename):
|
||||||
|
specs = []
|
||||||
with open(filename, "r") as stream:
|
with open(filename, "r") as stream:
|
||||||
for i, string in enumerate(stream):
|
for i, string in enumerate(stream):
|
||||||
try:
|
try:
|
||||||
s = Spec(string)
|
s = Spec(string)
|
||||||
s.package
|
s.package
|
||||||
args.specs.append(s)
|
specs.append(s)
|
||||||
except SpackError, e:
|
except SpackError, e:
|
||||||
tty.die("Parse error in %s, line %d:" % (args.file, i+1),
|
tty.die("Parse error in %s, line %d:" % (args.file, i+1),
|
||||||
">>> " + string, str(e))
|
">>> " + string, str(e))
|
||||||
|
return specs
|
||||||
|
|
||||||
|
|
||||||
def mirror_create(args):
|
def mirror_create(args):
|
||||||
@ -169,6 +174,7 @@ def mirror_create(args):
|
|||||||
specs = [Spec(n) for n in spack.repo.all_package_names()]
|
specs = [Spec(n) for n in spack.repo.all_package_names()]
|
||||||
specs.sort(key=lambda s: s.format("$_$@").lower())
|
specs.sort(key=lambda s: s.format("$_$@").lower())
|
||||||
|
|
||||||
|
# If the user asked for dependencies, traverse spec DAG get them.
|
||||||
if args.dependencies:
|
if args.dependencies:
|
||||||
new_specs = set()
|
new_specs = set()
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
@ -197,7 +203,7 @@ def mirror_create(args):
|
|||||||
|
|
||||||
verb = "updated" if existed else "created"
|
verb = "updated" if existed else "created"
|
||||||
tty.msg(
|
tty.msg(
|
||||||
"Successfully %s mirror in %s." % (verb, directory),
|
"Successfully %s mirror in %s" % (verb, directory),
|
||||||
"Archive stats:",
|
"Archive stats:",
|
||||||
" %-4d already present" % p,
|
" %-4d already present" % p,
|
||||||
" %-4d added" % m,
|
" %-4d added" % m,
|
||||||
|
@ -58,7 +58,7 @@ def module_find(mtype, spec_array):
|
|||||||
should type to use that package's module.
|
should type to use that package's module.
|
||||||
"""
|
"""
|
||||||
if mtype not in module_types:
|
if mtype not in module_types:
|
||||||
tty.die("Invalid module type: '%s'. Options are %s." % (mtype, comma_or(module_types)))
|
tty.die("Invalid module type: '%s'. Options are %s" % (mtype, comma_or(module_types)))
|
||||||
|
|
||||||
specs = spack.cmd.parse_specs(spec_array)
|
specs = spack.cmd.parse_specs(spec_array)
|
||||||
if len(specs) > 1:
|
if len(specs) > 1:
|
||||||
@ -78,7 +78,7 @@ def module_find(mtype, spec_array):
|
|||||||
mt = module_types[mtype]
|
mt = module_types[mtype]
|
||||||
mod = mt(specs[0])
|
mod = mt(specs[0])
|
||||||
if not os.path.isfile(mod.file_name):
|
if not os.path.isfile(mod.file_name):
|
||||||
tty.die("No %s module is installed for %s." % (mtype, spec))
|
tty.die("No %s module is installed for %s" % (mtype, spec))
|
||||||
|
|
||||||
print mod.use_name
|
print mod.use_name
|
||||||
|
|
||||||
@ -94,7 +94,7 @@ def module_refresh():
|
|||||||
shutil.rmtree(cls.path, ignore_errors=False)
|
shutil.rmtree(cls.path, ignore_errors=False)
|
||||||
mkdirp(cls.path)
|
mkdirp(cls.path)
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
tty.debug(" Writing file for %s." % spec)
|
tty.debug(" Writing file for %s" % spec)
|
||||||
cls(spec).write()
|
cls(spec).write()
|
||||||
|
|
||||||
|
|
||||||
|
@ -24,6 +24,7 @@
|
|||||||
##############################################################################
|
##############################################################################
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
|
import llnl.util.tty as tty
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack
|
import spack
|
||||||
|
|
||||||
|
@ -6,7 +6,7 @@
|
|||||||
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||||
# LLNL-CODE-647188
|
# LLNL-CODE-647188
|
||||||
#
|
#
|
||||||
# For details, see https://llnl.github.io/spack
|
# For details, see https://software.llnl.gov/spack
|
||||||
# Please also see the LICENSE file for our notice and the LGPL.
|
# Please also see the LICENSE file for our notice and the LGPL.
|
||||||
#
|
#
|
||||||
# This program is free software; you can redistribute it and/or modify
|
# This program is free software; you can redistribute it and/or modify
|
||||||
@ -74,51 +74,7 @@ def setup_parser(subparser):
|
|||||||
|
|
||||||
def repo_create(args):
|
def repo_create(args):
|
||||||
"""Create a new package repository."""
|
"""Create a new package repository."""
|
||||||
root = canonicalize_path(args.directory)
|
full_path, namespace = create_repo(args.directory, args.namespace)
|
||||||
namespace = args.namespace
|
|
||||||
|
|
||||||
if not args.namespace:
|
|
||||||
namespace = os.path.basename(root)
|
|
||||||
|
|
||||||
if not re.match(r'\w[\.\w-]*', namespace):
|
|
||||||
tty.die("'%s' is not a valid namespace." % namespace)
|
|
||||||
|
|
||||||
existed = False
|
|
||||||
if os.path.exists(root):
|
|
||||||
if os.path.isfile(root):
|
|
||||||
tty.die('File %s already exists and is not a directory' % root)
|
|
||||||
elif os.path.isdir(root):
|
|
||||||
if not os.access(root, os.R_OK | os.W_OK):
|
|
||||||
tty.die('Cannot create new repo in %s: cannot access directory.' % root)
|
|
||||||
if os.listdir(root):
|
|
||||||
tty.die('Cannot create new repo in %s: directory is not empty.' % root)
|
|
||||||
existed = True
|
|
||||||
|
|
||||||
full_path = os.path.realpath(root)
|
|
||||||
parent = os.path.dirname(full_path)
|
|
||||||
if not os.access(parent, os.R_OK | os.W_OK):
|
|
||||||
tty.die("Cannot create repository in %s: can't access parent!" % root)
|
|
||||||
|
|
||||||
try:
|
|
||||||
config_path = os.path.join(root, repo_config_name)
|
|
||||||
packages_path = os.path.join(root, packages_dir_name)
|
|
||||||
|
|
||||||
mkdirp(packages_path)
|
|
||||||
with open(config_path, 'w') as config:
|
|
||||||
config.write("repo:\n")
|
|
||||||
config.write(" namespace: '%s'\n" % namespace)
|
|
||||||
|
|
||||||
except (IOError, OSError) as e:
|
|
||||||
tty.die('Failed to create new repository in %s.' % root,
|
|
||||||
"Caused by %s: %s" % (type(e), e))
|
|
||||||
|
|
||||||
# try to clean up.
|
|
||||||
if existed:
|
|
||||||
shutil.rmtree(config_path, ignore_errors=True)
|
|
||||||
shutil.rmtree(packages_path, ignore_errors=True)
|
|
||||||
else:
|
|
||||||
shutil.rmtree(root, ignore_errors=True)
|
|
||||||
|
|
||||||
tty.msg("Created repo with namespace '%s'." % namespace)
|
tty.msg("Created repo with namespace '%s'." % namespace)
|
||||||
tty.msg("To register it with spack, run this command:",
|
tty.msg("To register it with spack, run this command:",
|
||||||
'spack repo add %s' % full_path)
|
'spack repo add %s' % full_path)
|
||||||
@ -133,11 +89,11 @@ def repo_add(args):
|
|||||||
|
|
||||||
# check if the path exists
|
# check if the path exists
|
||||||
if not os.path.exists(canon_path):
|
if not os.path.exists(canon_path):
|
||||||
tty.die("No such file or directory: '%s'." % path)
|
tty.die("No such file or directory: %s" % path)
|
||||||
|
|
||||||
# Make sure the path is a directory.
|
# Make sure the path is a directory.
|
||||||
if not os.path.isdir(canon_path):
|
if not os.path.isdir(canon_path):
|
||||||
tty.die("Not a Spack repository: '%s'." % path)
|
tty.die("Not a Spack repository: %s" % path)
|
||||||
|
|
||||||
# Make sure it's actually a spack repository by constructing it.
|
# Make sure it's actually a spack repository by constructing it.
|
||||||
repo = Repo(canon_path)
|
repo = Repo(canon_path)
|
||||||
@ -147,7 +103,7 @@ def repo_add(args):
|
|||||||
if not repos: repos = []
|
if not repos: repos = []
|
||||||
|
|
||||||
if repo.root in repos or path in repos:
|
if repo.root in repos or path in repos:
|
||||||
tty.die("Repository is already registered with Spack: '%s'" % path)
|
tty.die("Repository is already registered with Spack: %s" % path)
|
||||||
|
|
||||||
repos.insert(0, canon_path)
|
repos.insert(0, canon_path)
|
||||||
spack.config.update_config('repos', repos, args.scope)
|
spack.config.update_config('repos', repos, args.scope)
|
||||||
@ -166,7 +122,7 @@ def repo_remove(args):
|
|||||||
if canon_path == repo_canon_path:
|
if canon_path == repo_canon_path:
|
||||||
repos.remove(repo_path)
|
repos.remove(repo_path)
|
||||||
spack.config.update_config('repos', repos, args.scope)
|
spack.config.update_config('repos', repos, args.scope)
|
||||||
tty.msg("Removed repository '%s'." % repo_path)
|
tty.msg("Removed repository %s" % repo_path)
|
||||||
return
|
return
|
||||||
|
|
||||||
# If it is a namespace, remove corresponding repo
|
# If it is a namespace, remove corresponding repo
|
||||||
@ -176,13 +132,13 @@ def repo_remove(args):
|
|||||||
if repo.namespace == path_or_namespace:
|
if repo.namespace == path_or_namespace:
|
||||||
repos.remove(path)
|
repos.remove(path)
|
||||||
spack.config.update_config('repos', repos, args.scope)
|
spack.config.update_config('repos', repos, args.scope)
|
||||||
tty.msg("Removed repository '%s' with namespace %s."
|
tty.msg("Removed repository %s with namespace '%s'."
|
||||||
% (repo.root, repo.namespace))
|
% (repo.root, repo.namespace))
|
||||||
return
|
return
|
||||||
except RepoError as e:
|
except RepoError as e:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
tty.die("No repository with path or namespace: '%s'"
|
tty.die("No repository with path or namespace: %s"
|
||||||
% path_or_namespace)
|
% path_or_namespace)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -37,7 +37,7 @@
 from spack.fetch_strategy import FetchError
 import spack.cmd

-description = "Treat package installations as unit tests and output formatted test results"
+description = "Run package installation as a unit test, output formatted results."

 def setup_parser(subparser):
     subparser.add_argument(
@@ -256,12 +256,12 @@ def find(cls, *path):

     def __repr__(self):
-        """Return a string represntation of the compiler toolchain."""
+        """Return a string representation of the compiler toolchain."""
         return self.__str__()

     def __str__(self):
-        """Return a string represntation of the compiler toolchain."""
+        """Return a string representation of the compiler toolchain."""
         return "%s(%s)" % (
             self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
@@ -74,28 +74,36 @@ def _to_dict(compiler):
 def get_compiler_config(arch=None, scope=None):
     """Return the compiler configuration for the specified architecture.
     """
-    # If any configuration file has compilers, just stick with the
-    # ones already configured.
-    config = spack.config.get_config('compilers', scope=scope)
-
+    # Check whether we're on a front-end (native) architecture.
     my_arch = spack.architecture.sys_type()
     if arch is None:
         arch = my_arch

-    if arch in config:
-        return config[arch]
-
-    # Only for the current arch in *highest* scope: automatically try to
-    # find compilers if none are configured yet.
-    if arch == my_arch and scope == 'user':
+    def init_compiler_config():
+        """Compiler search used when Spack has no compilers."""
         config[arch] = {}
         compilers = find_compilers(*get_path('PATH'))
         for compiler in compilers:
             config[arch].update(_to_dict(compiler))
         spack.config.update_config('compilers', config, scope=scope)
-        return config[arch]

-    return {}
+    config = spack.config.get_config('compilers', scope=scope)
+
+    # Update the configuration if there are currently no compilers
+    # configured.  Avoid updating automatically if there ARE site
+    # compilers configured but no user ones.
+    if arch == my_arch and arch not in config:
+        if scope is None:
+            # We know no compilers were configured in any scope.
+            init_compiler_config()
+        elif scope == 'user':
+            # Check the site config and update the user config if
+            # nothing is configured at the site level.
+            site_config = spack.config.get_config('compilers', scope='site')
+            if not site_config:
+                init_compiler_config()
+
+    return config[arch] if arch in config else {}


 def add_compilers_to_config(compilers, arch=None, scope=None):
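Note: the refactor above changes when compiler auto-detection runs. A minimal standalone sketch of the new decision, with plain dicts standing in for Spack's config scopes (names here are illustrative, not Spack's API):

    def should_bootstrap(scope, site_config, merged_config, arch, my_arch):
        """True when compilers should be auto-detected for this call."""
        if arch != my_arch or arch in merged_config:
            return False        # never probe for another architecture
        if scope is None:
            return True         # no compilers configured in any scope
        if scope == 'user' and not site_config:
            return True         # user scope requested, site scope empty
        return False

    # No compilers anywhere: detect.
    assert should_bootstrap(None, {}, {}, 'x86_64', 'x86_64')
    # Site scope already has compilers: leave the user scope alone.
    assert not should_bootstrap('user', {'gcc@4.9': {}}, {}, 'x86_64', 'x86_64')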
@@ -22,7 +22,10 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+import re
+
+import spack.compiler as cpr
 from spack.compiler import *
+from spack.util.executable import *

 class Clang(Compiler):
     # Subclasses use possible names of C compiler
@@ -47,11 +50,34 @@ class Clang(Compiler):
     @classmethod
     def default_version(self, comp):
         """The '--version' option works for clang compilers.
-           Output looks like this::
+           On most platforms, output looks like this::

                clang version 3.1 (trunk 149096)
                Target: x86_64-unknown-linux-gnu
                Thread model: posix

+           On Mac OS X, it looks like this:
+
+               Apple LLVM version 7.0.2 (clang-700.1.81)
+               Target: x86_64-apple-darwin15.2.0
+               Thread model: posix
+
         """
-        return get_compiler_version(
-            comp, '--version', r'(?:clang version|based on LLVM) ([^ )]+)')
+        if comp not in cpr._version_cache:
+            compiler = Executable(comp)
+            output = compiler('--version', output=str, error=str)
+
+            ver = 'unknown'
+            match = re.search(r'^Apple LLVM version ([^ )]+)', output)
+            if match:
+                # Apple's LLVM compiler has its own versions, so suffix them.
+                ver = match.group(1) + '-apple'
+            else:
+                # Normal clang compiler versions are left as-is
+                match = re.search(r'^clang version ([^ )]+)', output)
+                if match:
+                    ver = match.group(1)
+
+            cpr._version_cache[comp] = ver
+
+        return cpr._version_cache[comp]
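The two regular expressions above can be checked in isolation; this sketch replays the parsing (cache omitted) against the sample outputs quoted in the docstring:

    import re

    def parse_clang_version(output):
        """Mirror of the parsing logic added above."""
        match = re.search(r'^Apple LLVM version ([^ )]+)', output)
        if match:
            return match.group(1) + '-apple'   # suffix Apple's own versioning
        match = re.search(r'^clang version ([^ )]+)', output)
        return match.group(1) if match else 'unknown'

    assert parse_clang_version('clang version 3.1 (trunk 149096)\n') == '3.1'
    assert parse_clang_version('Apple LLVM version 7.0.2 (clang-700.1.81)\n') == '7.0.2-apple'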
@@ -16,7 +16,7 @@ class Nag(Compiler):
     # Named wrapper links within spack.build_env_path
     link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
                    'cc'  : 'cc',
-                   'cxx' : 'cxx',
+                   'cxx' : 'c++',
                    'f77' : 'nag/nagfor',
                    'fc'  : 'nag/nagfor' }
@@ -29,28 +29,28 @@ class Pgi(Compiler):
     cc_names = ['pgcc']

     # Subclasses use possible names of C++ compiler
-    cxx_names = ['pgCC']
+    cxx_names = ['pgc++', 'pgCC']

     # Subclasses use possible names of Fortran 77 compiler
-    f77_names = ['pgf77']
+    f77_names = ['pgfortran', 'pgf77']

     # Subclasses use possible names of Fortran 90 compiler
-    fc_names = ['pgf95', 'pgf90']
+    fc_names = ['pgfortran', 'pgf95', 'pgf90']

     # Named wrapper links within spack.build_env_path
     link_paths = { 'cc'  : 'pgi/pgcc',
-                   'cxx' : 'pgi/case-insensitive/pgCC',
-                   'f77' : 'pgi/pgf77',
-                   'fc'  : 'pgi/pgf90' }
+                   'cxx' : 'pgi/pgc++',
+                   'f77' : 'pgi/pgfortran',
+                   'fc'  : 'pgi/pgfortran' }

     @classmethod
     def default_version(cls, comp):
         """The '-V' option works for all the PGI compilers.
            Output looks like this::

-               pgf95 10.2-0 64-bit target on x86-64 Linux -tp nehalem-64
-               Copyright 1989-2000, The Portland Group, Inc.  All Rights Reserved.
-               Copyright 2000-2010, STMicroelectronics, Inc.  All Rights Reserved.
+               pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge
+               The Portland Group - PGI Compilers and Tools
+               Copyright (c) 2015, NVIDIA CORPORATION.  All rights reserved.
         """
         return get_compiler_version(
             comp, '-V', r'pg[^ ]* ([^ ]+) \d\d\d?-bit target')
@@ -239,7 +239,7 @@
 def validate_section_name(section):
     """Raise a ValueError if the section is not a valid section."""
     if section not in section_schemas:
-        raise ValueError("Invalid config section: '%s'.  Options are %s."
+        raise ValueError("Invalid config section: '%s'.  Options are %s"
                          % (section, section_schemas))

@@ -369,7 +369,7 @@ def validate_scope(scope):
         return config_scopes[scope]
     else:
-        raise ValueError("Invalid config scope: '%s'.  Must be one of %s."
+        raise ValueError("Invalid config scope: '%s'.  Must be one of %s"
                          % (scope, config_scopes.keys()))

@@ -384,7 +384,7 @@ def _read_config_file(filename, schema):
             "Invalid configuration.  %s exists but is not a file." % filename)

     elif not os.access(filename, os.R_OK):
-        raise ConfigFileError("Config file is not readable: %s." % filename)
+        raise ConfigFileError("Config file is not readable: %s" % filename)

     try:
         tty.debug("Reading config file %s" % filename)
@@ -330,7 +330,7 @@ def _check_ref_counts(self):
             found = rec.ref_count
             if not expected == found:
                 raise AssertionError(
-                    "Invalid ref_count: %s: %d (expected %d), in DB %s."
+                    "Invalid ref_count: %s: %d (expected %d), in DB %s"
                     % (key, found, expected, self._index_path))
@@ -125,7 +125,7 @@ def __init__(self, dicts=None):
             dicts = (dicts,)
         elif type(dicts) not in (list, tuple):
             raise TypeError(
-                "dicts arg must be list, tuple, or string.  Found %s."
+                "dicts arg must be list, tuple, or string.  Found %s"
                 % type(dicts))

         self.dicts = dicts
@@ -174,7 +174,11 @@ def version(pkg, ver, checksum=None, **kwargs):


 def _depends_on(pkg, spec, when=None):
-    if when is None:
+    # If when is False do nothing
+    if when is False:
+        return
+    # If when is None or True make sure the condition is always satisfied
+    if when is None or when is True:
         when = pkg.name
     when_spec = parse_anonymous_spec(when, pkg.name)
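A standalone mimic of the new boolean handling in _depends_on, with the spec-parsing step stubbed out (the tuple return is illustrative only, not Spack's data model):

    def _depends_on_sketch(pkg_name, spec, when=None):
        """Stand-in for the dispatch above."""
        if when is False:
            return None                  # dependency disabled outright
        if when is None or when is True:
            when = pkg_name              # condition always satisfied
        return (spec, when)              # stands in for parse_anonymous_spec

    assert _depends_on_sketch('hdf5', 'mpi') == ('mpi', 'hdf5')
    assert _depends_on_sketch('hdf5', 'mpi', when=True) == ('mpi', 'hdf5')
    assert _depends_on_sketch('hdf5', 'mpi', when=False) is None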
@@ -296,8 +300,8 @@ def resource(pkg, **kwargs):
         raise RuntimeError(message)
     when_spec = parse_anonymous_spec(when, pkg.name)
     resources = pkg.resources.setdefault(when_spec, [])
-    fetcher = from_kwargs(**kwargs)
     name = kwargs.get('name')
+    fetcher = from_kwargs(**kwargs)
     resources.append(Resource(name, fetcher, destination, placement))

@@ -313,5 +317,5 @@ class CircularReferenceError(DirectiveError):
     def __init__(self, directive, package):
         super(CircularReferenceError, self).__init__(
             directive,
-            "Package '%s' cannot pass itself to %s." % (package, directive))
+            "Package '%s' cannot pass itself to %s" % (package, directive))
         self.package = package
@@ -85,6 +85,16 @@ def create_install_directory(self, spec):
         raise NotImplementedError()


+    def check_installed(self, spec):
+        """Checks whether a spec is installed.
+
+        Return the spec's prefix, if it is installed, None otherwise.
+
+        Raise an exception if the install is inconsistent or corrupt.
+        """
+        raise NotImplementedError()
+
+
     def extension_map(self, spec):
         """Get a dict of currently installed extension packages for a spec.
@@ -173,7 +183,9 @@ def __init__(self, root, **kwargs):

         self.spec_file_name = 'spec.yaml'
         self.extension_file_name = 'extensions.yaml'
-        self.build_log_name = 'build.out'  # TODO: use config file.
+        self.build_log_name = 'build.out'   # build log.
+        self.build_env_name = 'build.env'   # build environment
+        self.packages_dir = 'repos'         # archive of package.py files

         # Cache of already written/read extension maps.
         self._extension_maps = {}
@@ -239,29 +251,49 @@ def build_log_path(self, spec):
                          self.build_log_name)


+    def build_env_path(self, spec):
+        return join_path(self.path_for_spec(spec), self.metadata_dir,
+                         self.build_env_name)
+
+
+    def build_packages_path(self, spec):
+        return join_path(self.path_for_spec(spec), self.metadata_dir,
+                         self.packages_dir)
+
+
     def create_install_directory(self, spec):
         _check_concrete(spec)
+
+        prefix = self.check_installed(spec)
+        if prefix:
+            raise InstallDirectoryAlreadyExistsError(prefix)
+
+        mkdirp(self.metadata_path(spec))
+        self.write_spec(spec, self.spec_file_path(spec))
+
+
+    def check_installed(self, spec):
+        _check_concrete(spec)
         path = self.path_for_spec(spec)
         spec_file_path = self.spec_file_path(spec)

-        if os.path.isdir(path):
-            if not os.path.isfile(spec_file_path):
-                raise InconsistentInstallDirectoryError(
-                    'No spec file found at path %s' % spec_file_path)
+        if not os.path.isdir(path):
+            return None

-            installed_spec = self.read_spec(spec_file_path)
-            if installed_spec == self.spec:
-                raise InstallDirectoryAlreadyExistsError(path)
+        if not os.path.isfile(spec_file_path):
+            raise InconsistentInstallDirectoryError(
+                'Inconsistent state: install prefix exists but contains no spec.yaml:',
+                "  " + path)

-            if spec.dag_hash() == installed_spec.dag_hash():
-                raise SpecHashCollisionError(installed_hash, spec_hash)
-            else:
-                raise InconsistentInstallDirectoryError(
-                    'Spec file in %s does not match hash!' % spec_file_path)
+        installed_spec = self.read_spec(spec_file_path)
+        if installed_spec == spec:
+            return path

-        mkdirp(self.metadata_path(spec))
-        self.write_spec(spec, spec_file_path)
+        if spec.dag_hash() == installed_spec.dag_hash():
+            raise SpecHashCollisionError(installed_hash, spec_hash)
+        else:
+            raise InconsistentInstallDirectoryError(
+                'Spec file in %s does not match hash!' % spec_file_path)


     def all_specs(self):
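A toy illustration of the contract check_installed() now defines and how create_install_directory() builds on it; FakeLayout is an invented stand-in, not Spack's DirectoryLayout:

    class FakeLayout(object):
        def __init__(self):
            self.installed = {}          # spec string -> prefix

        def check_installed(self, spec):
            return self.installed.get(spec)   # None when absent

        def create_install_directory(self, spec):
            prefix = self.check_installed(spec)
            if prefix:
                raise RuntimeError('install dir already exists: ' + prefix)
            self.installed[spec] = '/opt/spack/' + spec
            return self.installed[spec]

    layout = FakeLayout()
    print(layout.check_installed('zlib@1.2.8'))          # None: not installed
    print(layout.create_install_directory('zlib@1.2.8')) # new prefix
    print(layout.check_installed('zlib@1.2.8'))          # same prefix again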
@@ -331,7 +363,7 @@ def _extension_map(self, spec):

             if not dag_hash in by_hash:
                 raise InvalidExtensionSpecError(
-                    "Spec %s not found in %s." % (dag_hash, prefix))
+                    "Spec %s not found in %s" % (dag_hash, prefix))

             ext_spec = by_hash[dag_hash]
             if not prefix == ext_spec.prefix:
@@ -395,8 +427,8 @@ def remove_extension(self, spec, ext_spec):

 class DirectoryLayoutError(SpackError):
     """Superclass for directory layout errors."""
-    def __init__(self, message):
-        super(DirectoryLayoutError, self).__init__(message)
+    def __init__(self, message, long_msg=None):
+        super(DirectoryLayoutError, self).__init__(message, long_msg)


 class SpecHashCollisionError(DirectoryLayoutError):
@@ -418,8 +450,8 @@ def __init__(self, installed_spec, prefix, error):

 class InconsistentInstallDirectoryError(DirectoryLayoutError):
     """Raised when a package seems to be installed to the wrong place."""
-    def __init__(self, message):
-        super(InconsistentInstallDirectoryError, self).__init__(message)
+    def __init__(self, message, long_msg=None):
+        super(InconsistentInstallDirectoryError, self).__init__(message, long_msg)


 class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
@@ -446,7 +478,7 @@ class ExtensionConflictError(DirectoryLayoutError):
     """Raised when an extension is added to a package that already has it."""
     def __init__(self, spec, ext_spec, conflict):
         super(ExtensionConflictError, self).__init__(
-            "%s cannot be installed in %s because it conflicts with %s." % (
+            "%s cannot be installed in %s because it conflicts with %s" % (
                 ext_spec.short_spec, spec.short_spec, conflict.short_spec))
|
@ -44,6 +44,7 @@
|
|||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
|
import copy
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import *
|
from llnl.util.filesystem import *
|
||||||
@@ -55,23 +56,28 @@
 from spack.version import Version, ver
 from spack.util.compression import decompressor_for, extension

+import spack.util.pattern as pattern
+
 """List of all fetch strategies, created by FetchStrategy metaclass."""
 all_strategies = []


 def _needs_stage(fun):
     """Many methods on fetch strategies require a stage to be set
        using set_stage().  This decorator adds a check for self.stage."""
+
     @wraps(fun)
     def wrapper(self, *args, **kwargs):
         if not self.stage:
             raise NoStageError(fun)
         return fun(self, *args, **kwargs)
+
     return wrapper


 class FetchStrategy(object):
     """Superclass of all fetch strategies."""
     enabled = False  # Non-abstract subclasses should be enabled.
     required_attributes = None  # Attributes required in version() args.

     class __metaclass__(type):
@@ -80,28 +86,28 @@ def __init__(cls, name, bases, dict):
             type.__init__(cls, name, bases, dict)
             if cls.enabled: all_strategies.append(cls)

     def __init__(self):
         # The stage is initialized late, so that fetch strategies can be constructed
         # at package construction time.  This is where things will be fetched.
         self.stage = None

     def set_stage(self, stage):
         """This is called by Stage before any of the fetching
            methods are called on the stage."""
         self.stage = stage

     # Subclasses need to implement these methods
     def fetch(self): pass      # Return True on success, False on fail.
     def check(self): pass      # Do checksum.
     def expand(self): pass     # Expand archive.
     def reset(self): pass      # Revert to freshly downloaded state.
     def archive(self, destination): pass  # Used to create tarball for mirror.

     def __str__(self):  # Should be human readable URL.
         return "FetchStrategy.__str___"

     # This method is used to match fetch strategies to version()
@@ -111,6 +117,15 @@ def matches(cls, args):
         return any(k in args for k in cls.required_attributes)


+@pattern.composite(interface=FetchStrategy)
+class FetchStrategyComposite(object):
+    """
+    Composite for a FetchStrategy object.  Implements the GoF composite pattern.
+    """
+    matches = FetchStrategy.matches
+    set_stage = FetchStrategy.set_stage
+
+
 class URLFetchStrategy(FetchStrategy):
     """FetchStrategy that pulls source code from a URL for an archive,
        checks the archive against a checksum, and decompresses the archive.
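The @pattern.composite decorator is Spack-internal; as a rough generic sketch of the GoF composite it implements, each call on the group fans out to every child strategy (PrintingFetcher is invented for the demo, not a Spack class):

    class FetcherComposite(list):
        """Generic composite: forwards calls to all children."""
        def fetch(self):
            for child in self:
                child.fetch()

        def set_stage(self, stage):
            for child in self:
                child.set_stage(stage)

    class PrintingFetcher(object):
        def __init__(self, url):
            self.url = url
        def fetch(self):
            print('fetching ' + self.url)
        def set_stage(self, stage):
            self.stage = stage

    group = FetcherComposite([PrintingFetcher('http://a'), PrintingFetcher('http://b')])
    group.fetch()    # fans out to both children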
@@ -129,6 +144,8 @@ def __init__(self, url=None, digest=None, **kwargs):
         self.digest = kwargs.get('md5', None)
         if not self.digest: self.digest = digest

+        self.expand_archive = kwargs.get('expand', True)
+
         if not self.url:
             raise ValueError("URLFetchStrategy requires a url for fetching.")
@@ -137,20 +154,20 @@ def fetch(self):
         self.stage.chdir()

         if self.archive_file:
-            tty.msg("Already downloaded %s." % self.archive_file)
+            tty.msg("Already downloaded %s" % self.archive_file)
             return

         tty.msg("Trying to fetch from %s" % self.url)

         curl_args = ['-O',  # save file to disk
                      '-f',  # fail on >400 errors
                      '-D', '-',  # print out HTML headers
-                     '-L', self.url,]
+                     '-L', self.url, ]

         if sys.stdout.isatty():
             curl_args.append('-#')  # status bar when using a tty
         else:
             curl_args.append('-sS')  # just errors when not.

         # Run curl but grab the mime type from the http headers
         headers = spack.curl(
@@ -164,24 +181,23 @@ def fetch(self):
             if spack.curl.returncode == 22:
                 # This is a 404.  Curl will print the error.
                 raise FailedDownloadError(
                     self.url, "URL %s was not found!" % self.url)

             elif spack.curl.returncode == 60:
                 # This is a certificate error.  Suggest spack -k
                 raise FailedDownloadError(
                     self.url,
                     "Curl was unable to fetch due to invalid certificate. "
                     "This is either an attack, or your cluster's SSL configuration "
                     "is bad.  If you believe your SSL configuration is bad, you "
                     "can try running spack -k, which will not check SSL certificates."
                     "Use this at your own risk.")

             else:
                 # This is some other curl error.  Curl will print the
                 # error, but print a spack message too
                 raise FailedDownloadError(
                     self.url, "Curl failed with error %d" % spack.curl.returncode)

         # Check if we somehow got an HTML file rather than the archive we
         # asked for.  We only look at the last content type, to handle
@@ -196,7 +212,6 @@ def fetch(self):
         if not self.archive_file:
             raise FailedDownloadError(self.url)

-
     @property
     def archive_file(self):
         """Path to the source archive within this stage directory."""
@@ -204,12 +219,16 @@ def archive_file(self):

     @_needs_stage
     def expand(self):
+        if not self.expand_archive:
+            tty.msg("Skipping expand step for %s" % self.archive_file)
+            return
+
         tty.msg("Staging archive: %s" % self.archive_file)

         self.stage.chdir()
         if not self.archive_file:
             raise NoArchiveFileError("URLFetchStrategy couldn't find archive file",
                                      "Failed on expand() for URL %s" % self.url)

         decompress = decompressor_for(self.archive_file)
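With the new expand flag, a package can ask for its download to be staged as-is, e.g. a single jar. A hypothetical package file illustrating this (only meaningful inside a Spack repo; the URL and checksum below are made up):

    from spack import *

    class Antlr(Package):
        """Hypothetical package that keeps its download unexpanded."""
        homepage = "http://www.antlr.org"
        # expand=False should reach URLFetchStrategy.expand_archive
        version('4.5.1', '00000000000000000000000000000000', expand=False,
                url="http://www.antlr.org/download/antlr-4.5.1-complete.jar")

        def install(self, spec, prefix):
            mkdirp(prefix.bin)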
@@ -241,7 +260,6 @@ def expand(self):
         # Set the wd back to the stage when done.
         self.stage.chdir()

-
     def archive(self, destination):
         """Just moves this archive to the destination."""
         if not self.archive_file:
@@ -252,7 +270,6 @@ def archive(self, destination):

         shutil.move(self.archive_file, destination)

-
     @_needs_stage
     def check(self):
         """Check the downloaded archive against a checksum digest.
@@ -263,9 +280,8 @@ def check(self):
         checker = crypto.Checker(self.digest)
         if not checker.check(self.archive_file):
             raise ChecksumError(
-                "%s checksum failed for %s." % (checker.hash_name, self.archive_file),
-                "Expected %s but got %s." % (self.digest, checker.sum))
+                "%s checksum failed for %s" % (checker.hash_name, self.archive_file),
+                "Expected %s but got %s" % (self.digest, checker.sum))

     @_needs_stage
     def reset(self):
@@ -277,12 +293,10 @@ def reset(self):
         shutil.rmtree(self.stage.source_path, ignore_errors=True)
         self.expand()

-
     def __repr__(self):
         url = self.url if self.url else "no url"
         return "URLFetchStrategy<%s>" % url

-
     def __str__(self):
         if self.url:
             return self.url
@@ -298,33 +312,30 @@ def __init__(self, name, *rev_types, **kwargs):
         # Set a URL based on the type of fetch strategy.
         self.url = kwargs.get(name, None)
         if not self.url: raise ValueError(
             "%s requires %s argument." % (self.__class__, name))

         # Ensure that there's only one of the rev_types
         if sum(k in kwargs for k in rev_types) > 1:
             raise FetchStrategyError(
-                "Supply only one of %s to fetch with %s." % (
+                "Supply only one of %s to fetch with %s" % (
                     comma_or(rev_types), name))

         # Set attributes for each rev type.
         for rt in rev_types:
             setattr(self, rt, kwargs.get(rt, None))

     @_needs_stage
     def check(self):
-        tty.msg("No checksum needed when fetching with %s." % self.name)
+        tty.msg("No checksum needed when fetching with %s" % self.name)

     @_needs_stage
     def expand(self):
         tty.debug("Source fetched with %s is already expanded." % self.name)

     @_needs_stage
     def archive(self, destination, **kwargs):
-        assert(extension(destination) == 'tar.gz')
-        assert(self.stage.source_path.startswith(self.stage.path))
+        assert (extension(destination) == 'tar.gz')
+        assert (self.stage.source_path.startswith(self.stage.path))

         tar = which('tar', required=True)
@@ -338,16 +349,13 @@ def archive(self, destination, **kwargs):
         self.stage.chdir()
         tar('-czf', destination, os.path.basename(self.stage.source_path))

     def __str__(self):
         return "VCS: %s" % self.url

     def __repr__(self):
         return "%s<%s>" % (self.__class__, self.url)


 class GitFetchStrategy(VCSFetchStrategy):
     """Fetch strategy that gets source code from a git repository.
        Use like this in a package:
@@ -368,30 +376,31 @@ class GitFetchStrategy(VCSFetchStrategy):
     required_attributes = ('git',)

     def __init__(self, **kwargs):
-        super(GitFetchStrategy, self).__init__(
-            'git', 'tag', 'branch', 'commit', **kwargs)
-        self._git = None
+        # Discards the keywords in kwargs that may conflict with the next call to __init__
+        forwarded_args = copy.copy(kwargs)
+        forwarded_args.pop('name', None)
+
+        super(GitFetchStrategy, self).__init__(
+            'git', 'tag', 'branch', 'commit', **forwarded_args)
+        self._git = None

     @property
     def git_version(self):
         vstring = self.git('--version', output=str).lstrip('git version ')
         return Version(vstring)

     @property
     def git(self):
         if not self._git:
             self._git = which('git', required=True)
         return self._git

     @_needs_stage
     def fetch(self):
         self.stage.chdir()

         if self.stage.source_path:
-            tty.msg("Already fetched %s." % self.stage.source_path)
+            tty.msg("Already fetched %s" % self.stage.source_path)
             return

         args = []
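The copy-and-pop pattern above (repeated for the svn and hg strategies below) exists because resource() passes a name keyword that would collide with the positional name of VCSFetchStrategy.__init__. A minimal standalone sketch of the same idea, with invented function names:

    import copy

    def parent_init(name, **kwargs):
        return (name, sorted(kwargs))

    def child_init(**kwargs):
        forwarded_args = copy.copy(kwargs)
        forwarded_args.pop('name', None)      # discard the conflicting key
        return parent_init('git', **forwarded_args)

    print(child_init(name='my-resource', git='http://example.com/repo.git'))
    # -> ('git', ['git'])   # 'name' never reaches the positional parameter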
@@ -418,7 +427,7 @@ def fetch(self):
         if self.branch:
             args.extend(['--branch', self.branch])
         elif self.tag and self.git_version >= ver('1.8.5.2'):
             args.extend(['--branch', self.tag])

         # Try to be efficient if we're using a new enough git.
         # This checks out only one branch's history
@@ -429,7 +438,7 @@ def fetch(self):
         # Yet more efficiency, only download a 1-commit deep tree
         if self.git_version >= ver('1.7.1'):
             try:
-                self.git(*(args + ['--depth','1', self.url]))
+                self.git(*(args + ['--depth', '1', self.url]))
                 cloned = True
             except spack.error.SpackError:
                 # This will fail with the dumb HTTP transport
@@ -452,18 +461,15 @@ def fetch(self):
             self.git('pull', '--tags', ignore_errors=1)
             self.git('checkout', self.tag)

     def archive(self, destination):
         super(GitFetchStrategy, self).archive(destination, exclude='.git')

     @_needs_stage
     def reset(self):
         self.stage.chdir_to_source()
         self.git('checkout', '.')
         self.git('clean', '-f')

     def __str__(self):
         return "[git] %s" % self.url
@@ -483,26 +489,28 @@ class SvnFetchStrategy(VCSFetchStrategy):
     required_attributes = ['svn']

     def __init__(self, **kwargs):
+        # Discards the keywords in kwargs that may conflict with the next call to __init__
+        forwarded_args = copy.copy(kwargs)
+        forwarded_args.pop('name', None)
+
         super(SvnFetchStrategy, self).__init__(
-            'svn', 'revision', **kwargs)
+            'svn', 'revision', **forwarded_args)
         self._svn = None
         if self.revision is not None:
             self.revision = str(self.revision)

     @property
     def svn(self):
         if not self._svn:
             self._svn = which('svn', required=True)
         return self._svn

     @_needs_stage
     def fetch(self):
         self.stage.chdir()

         if self.stage.source_path:
-            tty.msg("Already fetched %s." % self.stage.source_path)
+            tty.msg("Already fetched %s" % self.stage.source_path)
             return

         tty.msg("Trying to check out svn repository: %s" % self.url)
@@ -515,7 +523,6 @@ def fetch(self):
         self.svn(*args)
         self.stage.chdir_to_source()

-
     def _remove_untracked_files(self):
         """Removes untracked files in an svn repository."""
         status = self.svn('status', '--no-ignore', output=str)
@@ -529,23 +536,19 @@ def _remove_untracked_files(self):
             elif os.path.isdir(path):
                 shutil.rmtree(path, ignore_errors=True)

     def archive(self, destination):
         super(SvnFetchStrategy, self).archive(destination, exclude='.svn')

     @_needs_stage
     def reset(self):
         self.stage.chdir_to_source()
         self._remove_untracked_files()
         self.svn('revert', '.', '-R')

     def __str__(self):
         return "[svn] %s" % self.url


 class HgFetchStrategy(VCSFetchStrategy):
     """Fetch strategy that gets source code from a Mercurial repository.
        Use like this in a package:
@@ -567,10 +570,13 @@ class HgFetchStrategy(VCSFetchStrategy):
     required_attributes = ['hg']

     def __init__(self, **kwargs):
-        super(HgFetchStrategy, self).__init__(
-            'hg', 'revision', **kwargs)
-        self._hg = None
+        # Discards the keywords in kwargs that may conflict with the next call to __init__
+        forwarded_args = copy.copy(kwargs)
+        forwarded_args.pop('name', None)
+
+        super(HgFetchStrategy, self).__init__(
+            'hg', 'revision', **forwarded_args)
+        self._hg = None

     @property
     def hg(self):
@@ -583,7 +589,7 @@ def fetch(self):
         self.stage.chdir()

         if self.stage.source_path:
-            tty.msg("Already fetched %s." % self.stage.source_path)
+            tty.msg("Already fetched %s" % self.stage.source_path)
             return

         args = []
@@ -597,11 +603,9 @@ def fetch(self):
         self.hg(*args)

     def archive(self, destination):
         super(HgFetchStrategy, self).archive(destination, exclude='.hg')

     @_needs_stage
     def reset(self):
         self.stage.chdir()
@@ -619,7 +623,6 @@ def reset(self):
         shutil.move(scrubbed, source_path)
         self.stage.chdir_to_source()

     def __str__(self):
         return "[hg] %s" % self.url
@@ -693,9 +696,10 @@ def __init__(self, msg, long_msg=None):

 class FailedDownloadError(FetchError):
     """Raised when a download fails."""
+
     def __init__(self, url, msg=""):
         super(FailedDownloadError, self).__init__(
             "Failed to fetch file from URL: %s" % url, msg)
         self.url = url
@@ -718,12 +722,14 @@ def __init__(self, pkg, version):

 class ChecksumError(FetchError):
     """Raised when archive fails to checksum."""
+
     def __init__(self, message, long_msg=None):
         super(ChecksumError, self).__init__(message, long_msg)


 class NoStageError(FetchError):
     """Raised when fetch operations are called before set_stage()."""
+
     def __init__(self, method):
         super(NoStageError, self).__init__(
             "Must call FetchStrategy.set_stage() before calling %s" % method.__name__)

new file: lib/spack/spack/hooks/sbang.py (77 lines)
@@ -0,0 +1,77 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+
+from llnl.util.filesystem import *
+import llnl.util.tty as tty
+
+import spack
+import spack.modules
+
+# Character limit for shebang line.  Using Linux's 127 characters
+# here, as it is the shortest I could find on a modern OS.
+shebang_limit = 127
+
+def shebang_too_long(path):
+    """Detects whether a file has a shebang line that is too long."""
+    with open(path, 'r') as script:
+        bytes = script.read(2)
+        if bytes != '#!':
+            return False
+
+        line = bytes + script.readline()
+        return len(line) > shebang_limit
+
+
+def filter_shebang(path):
+    """Adds a second shebang line, using sbang, at the beginning of a file."""
+    backup = path + ".shebang.bak"
+    os.rename(path, backup)
+
+    with open(backup, 'r') as bak_file:
+        original = bak_file.read()
+
+    with open(path, 'w') as new_file:
+        new_file.write('#!/bin/bash %s/bin/sbang\n' % spack.spack_root)
+        new_file.write(original)
+
+    copy_mode(backup, path)
+    unset_executable_mode(backup)
+
+    tty.warn("Patched overly long shebang in %s" % path)
+
+
+def post_install(pkg):
+    """This hook edits scripts so that they call /bin/bash
+       $spack_prefix/bin/sbang instead of something longer than the
+       shebang limit."""
+    if not os.path.isdir(pkg.prefix.bin):
+        return
+
+    for file in os.listdir(pkg.prefix.bin):
+        path = os.path.join(pkg.prefix.bin, file)
+        if shebang_too_long(path):
+            filter_shebang(path)
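A quick standalone check of the shebang_too_long() logic above, using a temp file with a deliberately long interpreter path (the limit constant is copied from the hook):

    import os, tempfile

    shebang_limit = 127

    def shebang_too_long(path):
        with open(path, 'r') as script:
            bytes = script.read(2)
            if bytes != '#!':
                return False
            line = bytes + script.readline()
            return len(line) > shebang_limit

    fd, path = tempfile.mkstemp()
    os.write(fd, ('#!' + '/very' * 40 + '/bin/python\n').encode())  # ~215 chars
    os.close(fd)
    print(shebang_too_long(path))   # True: over the 127-character limit
    os.remove(path)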
@@ -45,25 +45,31 @@
 from spack.util.compression import extension, allowed_archive


-def mirror_archive_filename(spec):
+def mirror_archive_filename(spec, fetcher):
     """Get the name of the spec's archive in the mirror."""
     if not spec.version.concrete:
         raise ValueError("mirror.path requires spec with concrete version.")

-    fetcher = spec.package.fetcher
     if isinstance(fetcher, fs.URLFetchStrategy):
-        # If we fetch this version with a URLFetchStrategy, use URL's archive type
-        ext = url.downloaded_file_extension(fetcher.url)
+        if fetcher.expand_archive:
+            # If we fetch this version with a URLFetchStrategy, use URL's archive type
+            ext = url.downloaded_file_extension(fetcher.url)
+        else:
+            # If the archive shouldn't be expanded, don't check for its extension.
+            ext = None
     else:
         # Otherwise we'll make a .tar.gz ourselves
         ext = 'tar.gz'

-    return "%s-%s.%s" % (spec.package.name, spec.version, ext)
+    filename = "%s-%s" % (spec.package.name, spec.version)
+    if ext:
+        filename += ".%s" % ext
+    return filename


-def mirror_archive_path(spec):
+def mirror_archive_path(spec, fetcher):
     """Get the relative path to the spec's archive within a mirror."""
-    return join_path(spec.name, mirror_archive_filename(spec))
+    return join_path(spec.name, mirror_archive_filename(spec, fetcher))


 def get_matching_versions(specs, **kwargs):
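What the new naming amounts to, as a standalone sketch (package names and versions invented; the tail of mirror_archive_filename is reproduced as a plain function):

    def archive_filename(name, version, ext):
        filename = "%s-%s" % (name, version)
        if ext:
            filename += ".%s" % ext
        return filename

    print(archive_filename('openssl', '1.0.2e', 'tar.gz'))  # openssl-1.0.2e.tar.gz
    print(archive_filename('antlr', '4.5.1', None))         # antlr-4.5.1 (expand=False case)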
@@ -74,10 +80,11 @@ def get_matching_versions(specs, **kwargs):

         # Skip any package that has no known versions.
         if not pkg.versions:
-            tty.msg("No safe (checksummed) versions for package %s." % pkg.name)
+            tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
             continue

         num_versions = kwargs.get('num_versions', 0)
+        matching_spec = []
         for i, v in enumerate(reversed(sorted(pkg.versions))):
             # Generate no more than num_versions versions for each spec.
             if num_versions and i >= num_versions:
@@ -88,7 +95,11 @@ def get_matching_versions(specs, **kwargs):
             s = Spec(pkg.name)
             s.versions = VersionList([v])
             s.variants = spec.variants.copy()
-            matching.append(s)
+            matching_spec.append(s)
+
+        if not matching_spec:
+            tty.warn("No known version matches spec: %s" % spec)
+        matching.extend(matching_spec)

     return matching
|
|||||||
return basename
|
return basename
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def create(path, specs, **kwargs):
|
def create(path, specs, **kwargs):
|
||||||
"""Create a directory to be used as a spack mirror, and fill it with
|
"""Create a directory to be used as a spack mirror, and fill it with
|
||||||
package archives.
|
package archives.
|
||||||
@ -147,81 +157,72 @@ def create(path, specs, **kwargs):
|
|||||||
# Get the absolute path of the root before we start jumping around.
|
# Get the absolute path of the root before we start jumping around.
|
||||||
mirror_root = os.path.abspath(path)
|
mirror_root = os.path.abspath(path)
|
||||||
if not os.path.isdir(mirror_root):
|
if not os.path.isdir(mirror_root):
|
||||||
mkdirp(mirror_root)
|
try:
|
||||||
|
mkdirp(mirror_root)
|
||||||
|
except OSError as e:
|
||||||
|
raise MirrorError(
|
||||||
|
"Cannot create directory '%s':" % mirror_root, str(e))
|
||||||
|
|
||||||
# Things to keep track of while parsing specs.
|
# Things to keep track of while parsing specs.
|
||||||
present = []
|
categories = {
|
||||||
mirrored = []
|
'present': [],
|
||||||
error = []
|
'mirrored': [],
|
||||||
|
'error': []
|
||||||
|
}
|
||||||
|
|
||||||
# Iterate through packages and download all the safe tarballs for each of them
|
# Iterate through packages and download all the safe tarballs for each of them
|
||||||
everything_already_exists = True
|
|
||||||
for spec in version_specs:
|
for spec in version_specs:
|
||||||
pkg = spec.package
|
add_single_spec(spec, mirror_root, categories, **kwargs)
|
||||||
|
|
||||||
stage = None
|
return categories['present'], categories['mirrored'], categories['error']
|
||||||
try:
|
|
||||||
# create a subdirectory for the current package@version
|
|
||||||
archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec)))
|
|
||||||
subdir = os.path.dirname(archive_path)
|
|
||||||
mkdirp(subdir)
|
|
||||||
|
|
||||||
if os.path.exists(archive_path):
|
|
||||||
tty.msg("Already added %s" % spec.format("$_$@"))
|
|
||||||
else:
|
|
||||||
everything_already_exists = False
|
|
||||||
# Set up a stage and a fetcher for the download
|
|
||||||
unique_fetch_name = spec.format("$_$@")
|
|
||||||
fetcher = fs.for_package_version(pkg, pkg.version)
|
|
||||||
stage = Stage(fetcher, name=unique_fetch_name)
|
|
||||||
fetcher.set_stage(stage)
|
|
||||||
|
|
||||||
# Do the fetch and checksum if necessary
|
def add_single_spec(spec, mirror_root, categories, **kwargs):
|
||||||
fetcher.fetch()
|
tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("$_$@")))
|
||||||
if not kwargs.get('no_checksum', False):
|
spec_exists_in_mirror = True
|
||||||
fetcher.check()
|
try:
|
||||||
tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version))
|
with spec.package.stage:
|
||||||
|
# fetcher = stage.fetcher
|
||||||
|
# fetcher.fetch()
|
||||||
|
# ...
|
||||||
|
# fetcher.archive(archive_path)
|
||||||
|
for ii, stage in enumerate(spec.package.stage):
|
||||||
|
fetcher = stage.fetcher
|
||||||
|
if ii == 0:
|
||||||
|
# create a subdirectory for the current package@version
|
||||||
|
archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher)))
|
||||||
|
name = spec.format("$_$@")
|
||||||
|
else:
|
||||||
|
resource = stage.resource
|
||||||
|
archive_path = join_path(subdir, suggest_archive_basename(resource))
|
||||||
|
name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@"))
|
||||||
|
subdir = os.path.dirname(archive_path)
|
||||||
|
mkdirp(subdir)
|
||||||
|
|
||||||
# Fetchers have to know how to archive their files. Use
|
if os.path.exists(archive_path):
|
||||||
# that to move/copy/create an archive in the mirror.
|
tty.msg("{name} : already added".format(name=name))
|
||||||
fetcher.archive(archive_path)
|
else:
|
||||||
tty.msg("Added %s." % spec.format("$_$@"))
|
spec_exists_in_mirror = False
|
||||||
|
fetcher.fetch()
|
||||||
|
if not kwargs.get('no_checksum', False):
|
||||||
|
fetcher.check()
|
||||||
|
tty.msg("{name} : checksum passed".format(name=name))
|
||||||
|
|
||||||
# Fetch resources if they are associated with the spec
|
# Fetchers have to know how to archive their files. Use
|
||||||
resources = pkg._get_resources()
|
# that to move/copy/create an archive in the mirror.
|
||||||
for resource in resources:
|
fetcher.archive(archive_path)
|
||||||
resource_archive_path = join_path(subdir, suggest_archive_basename(resource))
|
tty.msg("{name} : added".format(name=name))
|
||||||
if os.path.exists(resource_archive_path):
|
|
||||||
tty.msg("Already added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version))
|
|
||||||
continue
|
|
||||||
everything_already_exists = False
|
|
||||||
resource_stage_folder = pkg._resource_stage(resource)
|
|
||||||
resource_stage = Stage(resource.fetcher, name=resource_stage_folder)
|
|
||||||
resource.fetcher.set_stage(resource_stage)
|
|
||||||
resource.fetcher.fetch()
|
|
||||||
if not kwargs.get('no_checksum', False):
|
|
||||||
resource.fetcher.check()
|
|
||||||
tty.msg("Checksum passed for the resource %s (%s@%s)" % (resource.name, pkg.name, pkg.version))
|
|
||||||
resource.fetcher.archive(resource_archive_path)
|
|
||||||
tty.msg("Added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version))
|
|
||||||
|
|
||||||
if everything_already_exists:
|
if spec_exists_in_mirror:
|
||||||
present.append(spec)
|
categories['present'].append(spec)
|
||||||
else:
|
else:
|
||||||
mirrored.append(spec)
|
categories['mirrored'].append(spec)
|
||||||
|
except Exception as e:
|
||||||
except Exception, e:
|
if spack.debug:
|
||||||
if spack.debug:
|
sys.excepthook(*sys.exc_info())
|
||||||
sys.excepthook(*sys.exc_info())
|
else:
|
||||||
else:
|
tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message)
|
||||||
tty.warn("Error while fetching %s." % spec.format('$_$@'), e.message)
|
categories['error'].append(spec)
|
||||||
error.append(spec)
|
|
||||||
|
|
||||||
finally:
|
|
||||||
if stage:
|
|
||||||
stage.destroy()
|
|
||||||
|
|
||||||
return (present, mirrored, error)
|
|
||||||
|
|
||||||
|
|
||||||
class MirrorError(spack.error.SpackError):
|
class MirrorError(spack.error.SpackError):
|
||||||
|
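Taken together, this hunk replaces three parallel lists with a single `categories` dict shared between `create()` and the new per-spec helper, so the per-spec logic can move out of the loop without threading three lists through it. A minimal sketch of a call site, assuming `specs` is a list of concrete specs (the mirror path here is invented):

```python
import spack.mirror

# Hypothetical call site; create() still returns the three lists, now
# unpacked from the shared categories dict.
present, mirrored, error = spack.mirror.create('/tmp/my-mirror', specs)

print("%d already present, %d mirrored, %d errors"
      % (len(present), len(mirrored), len(error)))
```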
lib/spack/spack/modules.py

@@ -33,6 +33,7 @@
   * /bin directories to be appended to PATH
   * /lib* directories for LD_LIBRARY_PATH
+  * /include directories for CPATH
   * /man* and /share/man* directories for MANPATH
   * the package prefix for CMAKE_PREFIX_PATH

@@ -121,6 +122,7 @@ def add_path(path_name, directory):
                 ('LIBRARY_PATH', self.spec.prefix.lib64),
                 ('LD_LIBRARY_PATH', self.spec.prefix.lib),
                 ('LD_LIBRARY_PATH', self.spec.prefix.lib64),
+                ('CPATH', self.spec.prefix.include),
                 ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib, 'pkgconfig')),
                 ('PKG_CONFIG_PATH', join_path(self.spec.prefix.lib64, 'pkgconfig'))]:
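For context, each (variable, directory) pair above is fed to an `add_path` helper that skips directories missing from the install prefix. A minimal sketch of that pattern, using a plain dict instead of Spack's real module-writing machinery (the prefix is invented):

```python
import os

def add_path(env, path_name, directory):
    # Prepend 'directory' to a path-like variable, but only if it exists.
    if os.path.isdir(directory):
        existing = env.get(path_name, '')
        env[path_name] = directory + (os.pathsep + existing if existing else '')

env = {}
prefix = '/opt/spack/example-1.0'  # invented install prefix
for var, directory in [('PATH', os.path.join(prefix, 'bin')),
                       ('CPATH', os.path.join(prefix, 'include'))]:
    add_path(env, var, directory)
```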
@@ -194,12 +196,14 @@ class Dotkit(EnvModule):
     @property
     def file_name(self):
         return join_path(Dotkit.path, self.spec.architecture,
-                         self.spec.format('$_$@$%@$+$#.dk'))
+                         '%s.dk' % self.use_name)

     @property
     def use_name(self):
-        return self.spec.format('$_$@$%@$+$#')
+        return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
+                                   self.spec.compiler.name,
+                                   self.spec.compiler.version,
+                                   self.spec.dag_hash())

     def _write(self, dk_file):
         # Category
@@ -235,7 +239,10 @@ def file_name(self):

     @property
     def use_name(self):
-        return self.spec.format('$_$@$%@$+$#')
+        return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
+                                   self.spec.compiler.name,
+                                   self.spec.compiler.version,
+                                   self.spec.dag_hash())


     def _write(self, m_file):
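The new `use_name` drops the format string in favor of explicit fields plus the DAG hash, so two builds that agree on name, version, and compiler but differ deeper in the dependency DAG get distinct module names. Roughly, with invented field values:

```python
# Illustrative only: every value below is made up.
name, version = 'libelf', '0.8.13'
compiler_name, compiler_version = 'gcc', '4.9.2'
dag_hash = 'abc1234'  # hash of the whole dependency DAG

use_name = "%s-%s-%s-%s-%s" % (name, version, compiler_name,
                               compiler_version, dag_hash)
# -> 'libelf-0.8.13-gcc-4.9.2-abc1234'
```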
lib/spack/spack/multimethod.py

@@ -138,7 +138,7 @@ class when(object):
     methods like install() that depend on the package's spec.
     For example:

-    .. code-block::
+    .. code-block:: python

        class SomePackage(Package):
            ...
@@ -163,26 +163,28 @@ def install(self, prefix):
     if you only have part of the install that is platform specific, you
     could do this:

-    class SomePackage(Package):
-        ...
-        # virtual dependence on MPI.
-        # could resolve to mpich, mpich2, OpenMPI
-        depends_on('mpi')
-
-        def setup(self):
-            # do nothing in the default case
-            pass
-
-        @when('^openmpi')
-        def setup(self):
-            # do something special when this is built with OpenMPI for
-            # its MPI implementations.
-
+    .. code-block:: python
+
+       class SomePackage(Package):
+           ...
+           # virtual dependence on MPI.
+           # could resolve to mpich, mpich2, OpenMPI
+           depends_on('mpi')
+
+           def setup(self):
+               # do nothing in the default case
+               pass
+
+           @when('^openmpi')
+           def setup(self):
+               # do something special when this is built with OpenMPI for
+               # its MPI implementations.
+

        def install(self, prefix):
            # Do common install stuff
            self.setup()
            # Do more common install stuff

     There must be one (and only one) @when clause that matches the
     package's spec.  If there is more than one, or if none match,
@@ -193,10 +195,11 @@ def install(self, prefix):
     platform-specific versions.  There's not much we can do to get
     around this because of the way decorators work.
     """
-class when(object):
     def __init__(self, spec):
         pkg = get_calling_module_name()
-        self.spec = parse_anonymous_spec(spec, pkg)
+        if spec is True:
+            spec = pkg
+        self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None

     def __call__(self, method):
         # Get the first definition of the method in the calling scope
@@ -207,7 +210,9 @@ def __call__(self, method):
         if not type(original_method) == SpecMultiMethod:
             original_method = SpecMultiMethod(original_method)

-        original_method.register(self.spec, method)
+        if self.spec is not None:
+            original_method.register(self.spec, method)

         return original_method
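The `__init__` change is what lets `@when` accept plain booleans: `@when(True)` collapses to a condition that always matches the package, and `@when(False)` registers nothing at all, so a predicate can be evaluated once at class-definition time. A hedged sketch of the idiom this enables (package and predicate invented):

```python
import sys

class SomePackage(Package):        # illustrative package
    def setup(self):
        pass                       # default behavior

    # Evaluated once when the class body runs; a False predicate means
    # the decorated method is simply never registered.
    @when(sys.platform.startswith('linux'))
    def setup(self):
        pass                       # Linux-specific variant
```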
lib/spack/spack/package.py

@@ -58,14 +58,16 @@
 import spack.mirror
 import spack.hooks
 import spack.directives
+import spack.repository
 import spack.build_environment
 import spack.url
 import spack.util.web
 import spack.fetch_strategy as fs
 from spack.version import *
-from spack.stage import Stage
+from spack.stage import Stage, ResourceStage, StageComposite
 from spack.util.compression import allowed_archive, extension
 from spack.util.executable import ProcessError
+from spack.util.environment import dump_environment

 """Allowed URL schemes for spack packages."""
 _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -433,23 +435,51 @@ def url_for_version(self, version):
         return spack.url.substitute_version(self.nearest_url(version),
                                             self.url_version(version))

+    def _make_resource_stage(self, root_stage, fetcher, resource):
+        resource_stage_folder = self._resource_stage(resource)
+        resource_mirror = join_path(self.name, os.path.basename(fetcher.url))
+        stage = ResourceStage(resource.fetcher, root=root_stage, resource=resource,
+                              name=resource_stage_folder, mirror_path=resource_mirror)
+        return stage
+
+    def _make_root_stage(self, fetcher):
+        # Construct a mirror path (TODO: get this out of package.py)
+        mp = spack.mirror.mirror_archive_path(self.spec, fetcher)
+        # Construct a path where the stage should build..
+        s = self.spec
+        stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash())
+        # Build the composite stage
+        stage = Stage(fetcher, mirror_path=mp, name=stage_name)
+        return stage
+
+    def _make_stage(self):
+        # Construct a composite stage on top of the composite FetchStrategy
+        composite_fetcher = self.fetcher
+        composite_stage = StageComposite()
+        resources = self._get_needed_resources()
+        for ii, fetcher in enumerate(composite_fetcher):
+            if ii == 0:
+                # Construct root stage first
+                stage = self._make_root_stage(fetcher)
+            else:
+                # Construct resource stage
+                resource = resources[ii - 1]  # ii == 0 is root!
+                stage = self._make_resource_stage(composite_stage[0], fetcher, resource)
+            # Append the item to the composite
+            composite_stage.append(stage)
+
+        # Create stage on first access.  Needed because fetch, stage,
+        # patch, and install can be called independently of each
+        # other, so `with self.stage:` in do_install isn't sufficient.
+        composite_stage.create()
+        return composite_stage
+
     @property
     def stage(self):
         if not self.spec.concrete:
             raise ValueError("Can only get a stage for a concrete package.")

         if self._stage is None:
-            # Construct a mirror path (TODO: get this out of package.py)
-            mp = spack.mirror.mirror_archive_path(self.spec)
-
-            # Construct a path where the stage should build..
-            s = self.spec
-            stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash())
-
-            # Build the stage
-            self._stage = Stage(self.fetcher, mirror_path=mp, name=stage_name)
-
+            self._stage = self._make_stage()
         return self._stage
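Note the index alignment the loop relies on: element 0 of the composite fetcher is always the root package, and resource `i` pairs with fetcher `i + 1`. A toy sketch of that invariant with invented stand-ins:

```python
# Invented stand-ins; real objects come from fs.FetchStrategyComposite
# and _get_needed_resources().
fetchers = ['root-fetcher', 'resource-a-fetcher', 'resource-b-fetcher']
resources = ['resource-a', 'resource-b']

stages = []
for ii, fetcher in enumerate(fetchers):
    owner = 'root' if ii == 0 else resources[ii - 1]  # ii == 0 is root!
    stages.append((owner, fetcher))

assert stages[0] == ('root', 'root-fetcher')
assert stages[1] == ('resource-a', 'resource-a-fetcher')
```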
@@ -459,17 +489,27 @@ def stage(self, stage):
         self._stage = stage

+    def _make_fetcher(self):
+        # Construct a composite fetcher that always contains at least
+        # one element (the root package).  In case there are resources
+        # associated with the package, append their fetcher to the
+        # composite.
+        root_fetcher = fs.for_package_version(self, self.version)
+        fetcher = fs.FetchStrategyComposite()  # Composite fetcher
+        fetcher.append(root_fetcher)  # Root fetcher is always present
+        resources = self._get_needed_resources()
+        for resource in resources:
+            fetcher.append(resource.fetcher)
+        return fetcher
+
     @property
     def fetcher(self):
         if not self.spec.versions.concrete:
-            raise ValueError(
-                "Can only get a fetcher for a package with concrete versions.")
+            raise ValueError("Can only get a fetcher for a package with concrete versions.")

         if not self._fetcher:
-            self._fetcher = fs.for_package_version(self, self.version)
+            self._fetcher = self._make_fetcher()
         return self._fetcher

     @fetcher.setter
     def fetcher(self, f):
         self._fetcher = f
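Fetcher and stage now mirror each other as composites: containers that can be iterated and that forward bulk operations to their children. This is a sketch of the general shape, not Spack's actual `FetchStrategyComposite` implementation:

```python
class SimpleComposite(object):
    """Toy composite: holds children and broadcasts a method call."""
    def __init__(self):
        self._items = []

    def append(self, item):
        self._items.append(item)

    def __iter__(self):
        return iter(self._items)

    def fetch(self):
        # Broadcast to every child, root first.
        for item in self._items:
            item.fetch()
```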
@@ -631,8 +671,8 @@ def remove_prefix(self):
         spack.install_layout.remove_install_directory(self.spec)


-    def do_fetch(self):
-        """Creates a stage directory and downloads the taball for this package.
+    def do_fetch(self, mirror_only=False):
+        """Creates a stage directory and downloads the tarball for this package.
         Working directory will be set to the stage directory.
         """
         if not self.spec.concrete:
@@ -654,79 +694,24 @@ def do_fetch(self):

             if not ignore_checksum:
                 raise FetchError(
-                    "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg)
+                    "Will not fetch %s" % self.spec.format('$_$@'), checksum_msg)

-        self.stage.fetch()
-
-        ##########
-        # Fetch resources
-        resources = self._get_resources()
-        for resource in resources:
-            resource_stage_folder = self._resource_stage(resource)
-            # FIXME : works only for URLFetchStrategy
-            resource_mirror = join_path(self.name, os.path.basename(resource.fetcher.url))
-            resource_stage = Stage(resource.fetcher, name=resource_stage_folder, mirror_path=resource_mirror)
-            resource.fetcher.set_stage(resource_stage)
-            # Delegate to stage object to trigger mirror logic
-            resource_stage.fetch()
-            resource_stage.check()
-        ##########
+        self.stage.fetch(mirror_only)

         self._fetch_time = time.time() - start_time

         if spack.do_checksum and self.version in self.versions:
             self.stage.check()

-    def do_stage(self):
+    def do_stage(self, mirror_only=False):
         """Unpacks the fetched tarball, then changes into the expanded tarball
         directory."""
         if not self.spec.concrete:
             raise ValueError("Can only stage concrete packages.")

-        def _expand_archive(stage, name=self.name):
-            archive_dir = stage.source_path
-            if not archive_dir:
-                stage.expand_archive()
-                tty.msg("Created stage in %s." % stage.path)
-            else:
-                tty.msg("Already staged %s in %s." % (name, stage.path))
-
-        self.do_fetch()
-        _expand_archive(self.stage)
-
-        ##########
-        # Stage resources in appropriate path
-        resources = self._get_resources()
-        # TODO: this is to allow nested resources, a better solution would be
-        # good
-        for resource in sorted(resources, key=lambda res: len(res.destination)):
-            stage = resource.fetcher.stage
-            _expand_archive(stage, resource.name)
-            # Turn placement into a dict with relative paths
-            placement = os.path.basename(stage.source_path) if resource.placement is None else resource.placement
-            if not isinstance(placement, dict):
-                placement = {'': placement}
-            # Make the paths in the dictionary absolute and link
-            for key, value in placement.iteritems():
-                target_path = join_path(self.stage.source_path, resource.destination)
-                link_path = join_path(target_path, value)
-                source_path = join_path(stage.source_path, key)
-
-                try:
-                    os.makedirs(target_path)
-                except OSError as err:
-                    if err.errno == errno.EEXIST and os.path.isdir(target_path):
-                        pass
-                    else: raise
-
-                # NOTE: a reasonable fix for the TODO above might be to have
-                # these expand in place, but expand_archive does not offer
-                # this
-
-                if not os.path.exists(link_path):
-                    shutil.move(source_path, link_path)
-        ##########
+        self.do_fetch(mirror_only)
+        self.stage.expand_archive()
         self.stage.chdir_to_source()
|
|||||||
|
|
||||||
# If there are no patches, note it.
|
# If there are no patches, note it.
|
||||||
if not self.patches and not has_patch_fun:
|
if not self.patches and not has_patch_fun:
|
||||||
tty.msg("No patches needed for %s." % self.name)
|
tty.msg("No patches needed for %s" % self.name)
|
||||||
return
|
return
|
||||||
|
|
||||||
# Construct paths to special files in the archive dir used to
|
# Construct paths to special files in the archive dir used to
|
||||||
@ -757,7 +742,7 @@ def do_patch(self):
|
|||||||
# If we encounter an archive that failed to patch, restage it
|
# If we encounter an archive that failed to patch, restage it
|
||||||
# so that we can apply all the patches again.
|
# so that we can apply all the patches again.
|
||||||
if os.path.isfile(bad_file):
|
if os.path.isfile(bad_file):
|
||||||
tty.msg("Patching failed last time. Restaging.")
|
tty.msg("Patching failed last time. Restaging.")
|
||||||
self.stage.restage()
|
self.stage.restage()
|
||||||
|
|
||||||
self.stage.chdir_to_source()
|
self.stage.chdir_to_source()
|
||||||
@ -767,7 +752,7 @@ def do_patch(self):
|
|||||||
tty.msg("Already patched %s" % self.name)
|
tty.msg("Already patched %s" % self.name)
|
||||||
return
|
return
|
||||||
elif os.path.isfile(no_patches_file):
|
elif os.path.isfile(no_patches_file):
|
||||||
tty.msg("No patches needed for %s." % self.name)
|
tty.msg("No patches needed for %s" % self.name)
|
||||||
return
|
return
|
||||||
|
|
||||||
# Apply all the patches for specs that match this one
|
# Apply all the patches for specs that match this one
|
||||||
@ -788,10 +773,10 @@ def do_patch(self):
|
|||||||
if has_patch_fun:
|
if has_patch_fun:
|
||||||
try:
|
try:
|
||||||
self.patch()
|
self.patch()
|
||||||
tty.msg("Ran patch() for %s." % self.name)
|
tty.msg("Ran patch() for %s" % self.name)
|
||||||
patched = True
|
patched = True
|
||||||
except:
|
except:
|
||||||
tty.msg("patch() function failed for %s." % self.name)
|
tty.msg("patch() function failed for %s" % self.name)
|
||||||
touch(bad_file)
|
touch(bad_file)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
@@ -822,12 +807,15 @@ def do_fake_install(self):
         mkdirp(self.prefix.man1)


-    def _get_resources(self):
+    def _get_needed_resources(self):
         resources = []
         # Select the resources that are needed for this build
         for when_spec, resource_list in self.resources.items():
             if when_spec in self.spec:
                 resources.extend(resource_list)
+        # Sorts the resources by the length of the string representing their destination. Since any nested resource
+        # must contain another resource's name in its path, it seems that should work
+        resources = sorted(resources, key=lambda res: len(res.destination))
         return resources

     def _resource_stage(self, resource):
@@ -835,13 +823,9 @@ def _resource_stage(self, resource):
         resource_stage_folder = '-'.join(pieces)
         return resource_stage_folder

-    def _build_logger(self, log_path):
-        """Create a context manager to log build output."""
-
-
     def do_install(self,
-                   keep_prefix=False, keep_stage=False, ignore_deps=False,
+                   keep_prefix=False, keep_stage=None, ignore_deps=False,
                    skip_patch=False, verbose=False, make_jobs=None, fake=False):
         """Called by commands to install a package and its dependencies.
@@ -850,7 +834,8 @@ def do_install(self,

         Args:
         keep_prefix -- Keep install prefix on failure. By default, destroys it.
-        keep_stage  -- Keep stage on successful build. By default, destroys it.
+        keep_stage  -- Set to True or False to always keep or always delete stage.
+                       By default, stage is destroyed only if there are no exceptions.
         ignore_deps -- Do not install dependencies before installing this package.
         fake        -- Don't really build -- install fake stub files instead.
         skip_patch  -- Skip patch stage of build if True.
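Changing the `keep_stage` default from `False` to `None` makes it a tri-state that is ultimately handed to `Stage.keep` (see the stage.py hunks further down). The semantics the new docstring describes, from a caller's point of view:

```python
pkg.do_install()                  # keep_stage=None: destroy stage only on success
pkg.do_install(keep_stage=True)   # always keep the stage directory
pkg.do_install(keep_stage=False)  # always destroy it, even after a failed build
```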
@@ -860,103 +845,103 @@ def do_install(self,
         if not self.spec.concrete:
             raise ValueError("Can only install concrete packages.")

+        # No installation needed if package is external
         if self.spec.external:
-            tty.msg("%s is externally installed in %s." % (self.name, self.spec.external))
+            tty.msg("%s is externally installed in %s" % (self.name, self.spec.external))
             return

-        if os.path.exists(self.prefix):
-            tty.msg("%s is already installed in %s." % (self.name, self.prefix))
+        # Ensure package is not already installed
+        if spack.install_layout.check_installed(self.spec):
+            tty.msg("%s is already installed in %s" % (self.name, self.prefix))
             return

         tty.msg("Installing %s" % self.name)

+        # First, install dependencies recursively.
         if not ignore_deps:
             self.do_install_dependencies(
                 keep_prefix=keep_prefix, keep_stage=keep_stage, ignore_deps=ignore_deps,
-                fake=fake, skip_patch=skip_patch, verbose=verbose,
-                make_jobs=make_jobs)
-
-        start_time = time.time()
-        if not fake:
-            if not skip_patch:
-                self.do_patch()
-            else:
-                self.do_stage()
-
-        # create the install directory.  The install layout
-        # handles this in case so that it can use whatever
-        # package naming scheme it likes.
-        spack.install_layout.create_install_directory(self.spec)
-
-        def cleanup():
-            if not keep_prefix:
-                # If anything goes wrong, remove the install prefix
-                self.remove_prefix()
-            else:
-                tty.warn("Keeping install prefix in place despite error.",
-                         "Spack will think this package is installed." +
-                         "Manually remove this directory to fix:",
-                         self.prefix, wrap=True)
-
-        def real_work():
-            try:
-                tty.msg("Building %s." % self.name)
-
-                # Run the pre-install hook in the child process after
-                # the directory is created.
-                spack.hooks.pre_install(self)
-
-                # Set up process's build environment before running install.
-                if fake:
-                    self.do_fake_install()
-                else:
-                    # Do the real install in the source directory.
-                    self.stage.chdir_to_source()
-
-                    # This redirects I/O to a build log (and optionally to the terminal)
-                    log_path = join_path(os.getcwd(), 'spack-build.out')
-                    log_file = open(log_path, 'w')
-                    with log_output(log_file, verbose, sys.stdout.isatty(), True):
-                        self.install(self.spec, self.prefix)
-
-                # Ensure that something was actually installed.
-                self._sanity_check_install()
-
-                # Move build log into install directory on success
-                if not fake:
-                    log_install_path = spack.install_layout.build_log_path(self.spec)
-                    install(log_path, log_install_path)
-
-                # On successful install, remove the stage.
-                if not keep_stage:
-                    self.stage.destroy()
-
-                # Stop timer.
-                self._total_time = time.time() - start_time
-                build_time = self._total_time - self._fetch_time
-
-                tty.msg("Successfully installed %s." % self.name,
-                        "Fetch: %s.  Build: %s.  Total: %s."
-                        % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
-                print_pkg(self.prefix)
-
-            except ProcessError, e:
-                # Annotate with location of build log.
-                e.build_log = log_path
-                cleanup()
-                raise e
-
-            except:
-                # other exceptions just clean up and raise.
-                cleanup()
-                raise
+                fake=fake, skip_patch=skip_patch, verbose=verbose, make_jobs=make_jobs)

         # Set parallelism before starting build.
         self.make_jobs = make_jobs

-        # Do the build.
-        spack.build_environment.fork(self, real_work)
+        # Then install the package itself.
+        def build_process():
+            """Forked for each build. Has its own process and python
+               module space set up by build_environment.fork()."""
+            start_time = time.time()
+            if not fake:
+                if not skip_patch:
+                    self.do_patch()
+                else:
+                    self.do_stage()
+
+            tty.msg("Building %s" % self.name)
+
+            self.stage.keep = keep_stage
+            with self.stage:
+                # Run the pre-install hook in the child process after
+                # the directory is created.
+                spack.hooks.pre_install(self)
+
+                if fake:
+                    self.do_fake_install()
+                else:
+                    # Do the real install in the source directory.
+                    self.stage.chdir_to_source()
+
+                    # Save the build environment in a file before building.
+                    env_path = join_path(os.getcwd(), 'spack-build.env')
+
+                    try:
+                        # Redirect I/O to a build log (and optionally to the terminal)
+                        log_path = join_path(os.getcwd(), 'spack-build.out')
+                        log_file = open(log_path, 'w')
+                        with log_output(log_file, verbose, sys.stdout.isatty(), True):
+                            dump_environment(env_path)
+                            self.install(self.spec, self.prefix)
+
+                    except ProcessError as e:
+                        # Annotate ProcessErrors with the location of the build log.
+                        e.build_log = log_path
+                        raise e
+
+                    # Ensure that something was actually installed.
+                    self._sanity_check_install()
+
+                    # Copy provenance into the install directory on success
+                    log_install_path = spack.install_layout.build_log_path(self.spec)
+                    env_install_path = spack.install_layout.build_env_path(self.spec)
+                    packages_dir = spack.install_layout.build_packages_path(self.spec)
+
+                    install(log_path, log_install_path)
+                    install(env_path, env_install_path)
+                    dump_packages(self.spec, packages_dir)
+
+            # Stop timer.
+            self._total_time = time.time() - start_time
+            build_time = self._total_time - self._fetch_time
+
+            tty.msg("Successfully installed %s" % self.name,
+                    "Fetch: %s.  Build: %s.  Total: %s."
+                    % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
+            print_pkg(self.prefix)
+
+        try:
+            # Create the install prefix and fork the build process.
+            spack.install_layout.create_install_directory(self.spec)
+            spack.build_environment.fork(self, build_process)
+        except:
+            # remove the install prefix if anything went wrong during install.
+            if not keep_prefix:
+                self.remove_prefix()
+            else:
+                tty.warn("Keeping install prefix in place despite error.",
+                         "Spack will think this package is installed. " +
+                         "Manually remove this directory to fix:",
+                         self.prefix, wrap=True)
+            raise

         # note: PARENT of the build process adds the new package to
         # the database, so that we don't need to re-read from file.
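The net effect of the rewrite: the parent process creates the prefix, forks `build_process`, and owns failure cleanup, while the child only decides the fate of its stage through `stage.keep`. A minimal sketch of that parent-side division of labor, with stand-in callables:

```python
def install_with_cleanup(create_prefix, fork_build, remove_prefix,
                         keep_prefix=False):
    """Parent-side skeleton: the forked child never removes the prefix."""
    try:
        create_prefix()      # spack.install_layout.create_install_directory
        fork_build()         # spack.build_environment.fork(self, build_process)
    except Exception:
        if not keep_prefix:
            remove_prefix()  # undo the half-finished install
        raise
```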
@@ -1043,7 +1028,7 @@ def do_uninstall(self, force=False):
         # Uninstalling in Spack only requires removing the prefix.
         self.remove_prefix()
         spack.installed_db.remove(self.spec)
-        tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
+        tty.msg("Successfully uninstalled %s" % self.spec.short_spec)

         # Once everything else is done, run post install hooks
         spack.hooks.post_uninstall(self)
@@ -1090,7 +1075,7 @@ def do_activate(self, force=False):
             self.extendee_spec.package.activate(self, **self.extendee_args)

         spack.install_layout.add_extension(self.extendee_spec, self.spec)
-        tty.msg("Activated extension %s for %s."
+        tty.msg("Activated extension %s for %s"
                 % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))

@@ -1142,7 +1127,7 @@ def do_deactivate(self, **kwargs):
         if self.activated:
             spack.install_layout.remove_extension(self.extendee_spec, self.spec)

-        tty.msg("Deactivated extension %s for %s."
+        tty.msg("Deactivated extension %s for %s"
                 % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))

@@ -1170,8 +1155,7 @@ def do_restage(self):

     def do_clean(self):
         """Removes the package's build stage and source tarball."""
-        if os.path.exists(self.stage.path):
-            self.stage.destroy()
+        self.stage.destroy()


     def format_doc(self, **kwargs):
@@ -1210,7 +1194,7 @@ def fetch_remote_versions(self):
         try:
             return spack.util.web.find_versions_of_archive(
                 *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
-        except spack.error.NoNetworkConnectionError, e:
+        except spack.error.NoNetworkConnectionError as e:
             tty.die("Package.fetch_versions couldn't connect to:",
                     e.url, e.message)
@@ -1228,8 +1212,8 @@ def rpath(self):

     @property
     def rpath_args(self):
-        """Get the rpath args as a string, with -Wl,-rpath= for each element."""
-        return " ".join("-Wl,-rpath=%s" % p for p in self.rpath)
+        """Get the rpath args as a string, with -Wl,-rpath, for each element."""
+        return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)


 def validate_package_url(url_string):
@@ -1242,6 +1226,52 @@ def validate_package_url(url_string):
         tty.die("Invalid file type in URL: '%s'" % url_string)


+def dump_packages(spec, path):
+    """Dump all package information for a spec and its dependencies.
+
+       This creates a package repository within path for every
+       namespace in the spec DAG, and fills the repos with package
+       files and patch files for every node in the DAG.
+    """
+    mkdirp(path)
+
+    # Copy in package.py files from any dependencies.
+    # Note that we copy them in as they are in the *install* directory
+    # NOT as they are in the repository, because we want a snapshot of
+    # how *this* particular build was done.
+    for node in spec.traverse():
+        if node is not spec:
+            # Locate the dependency package in the install tree and find
+            # its provenance information.
+            source = spack.install_layout.build_packages_path(node)
+            source_repo_root = join_path(source, node.namespace)
+
+            # There's no provenance installed for the source package.  Skip it.
+            # User can always get something current from the builtin repo.
+            if not os.path.isdir(source_repo_root):
+                continue
+
+            # Create a source repo and get the pkg directory out of it.
+            try:
+                source_repo = spack.repository.Repo(source_repo_root)
+                source_pkg_dir = source_repo.dirname_for_package_name(node.name)
+            except RepoError as e:
+                tty.warn("Warning: Couldn't copy in provenance for %s" % node.name)
+
+        # Create a destination repository
+        dest_repo_root = join_path(path, node.namespace)
+        if not os.path.exists(dest_repo_root):
+            spack.repository.create_repo(dest_repo_root)
+        repo = spack.repository.Repo(dest_repo_root)
+
+        # Get the location of the package in the dest repo.
+        dest_pkg_dir = repo.dirname_for_package_name(node.name)
+        if node is not spec:
+            install_tree(source_pkg_dir, dest_pkg_dir)
+        else:
+            spack.repo.dump_provenance(node, dest_pkg_dir)
+
+
 def print_pkg(message):
     """Outputs a message with a package icon."""
     from llnl.util.tty.color import cwrite
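The rpath change is small but deliberate: `-Wl,-rpath=` is a GNU-ld spelling, while the comma form is split by the compiler driver and is accepted by a wider range of linkers. For an invented rpath list, the property now yields:

```python
# Invented rpath list; the real one comes from the package's dependencies.
rpath = ['/opt/pkg/lib', '/opt/pkg/lib64']
args = " ".join("-Wl,-rpath,%s" % p for p in rpath)
# -> '-Wl,-rpath,/opt/pkg/lib -Wl,-rpath,/opt/pkg/lib64'
```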
@@ -1292,7 +1322,7 @@ class PackageVersionError(PackageError):
     """Raised when a version URL cannot automatically be determined."""
     def __init__(self, version):
         super(PackageVersionError, self).__init__(
-            "Cannot determine a URL automatically for version %s." % version,
+            "Cannot determine a URL automatically for version %s" % version,
             "Please provide a url for this version in the package.py file.")


lib/spack/spack/repository.py

@@ -6,7 +6,7 @@
 # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
 # LLNL-CODE-647188
 #
-# For details, see https://llnl.github.io/spack
+# For details, see https://software.llnl.gov/spack
 # Please also see the LICENSE file for our notice and the LGPL.
 #
 # This program is free software; you can redistribute it and/or modify
@@ -33,7 +33,7 @@
 from external import yaml

 import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import *

 import spack.error
 import spack.config
@@ -156,7 +156,7 @@ def _add(self, repo):

         if repo.namespace in self.by_namespace:
             raise DuplicateRepoError(
-                "Package repos '%s' and '%s' both provide namespace %s."
+                "Package repos '%s' and '%s' both provide namespace %s"
                 % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace))

         # Add repo to the pkg indexes
@@ -233,6 +233,11 @@ def providers_for(self, vpkg_spec):
         return providers


+    @_autospec
+    def extensions_for(self, extendee_spec):
+        return [p for p in self.all_packages() if p.extends(extendee_spec)]
+
+
     def find_module(self, fullname, path=None):
         """Implements precedence for overlaid namespaces.

@@ -295,8 +300,11 @@ def repo_for_pkg(self, spec):
         for repo in self.repos:
             if spec.name in repo:
                 return repo
-        else:
-            raise UnknownPackageError(spec.name)
+
+        # If the package isn't in any repo, return the one with
+        # highest precedence.  This is for commands like `spack edit`
+        # that can operate on packages that don't exist yet.
+        return self.first_repo()


     @_autospec
@@ -308,6 +316,16 @@ def get(self, spec, new=False):
         return self.repo_for_pkg(spec).get(spec)


+    @_autospec
+    def dump_provenance(self, spec, path):
+        """Dump provenance information for a spec to a particular path.
+
+           This dumps the package file and any associated patch files.
+           Raises UnknownPackageError if not found.
+        """
+        return self.repo_for_pkg(spec).dump_provenance(spec, path)
+
+
     def dirname_for_package_name(self, pkg_name):
         return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)

@@ -527,7 +545,7 @@ def get(self, spec, new=False):
             raise UnknownPackageError(spec.name)

         if spec.namespace and spec.namespace != self.namespace:
-            raise UnknownPackageError("Repository %s does not contain package %s."
+            raise UnknownPackageError("Repository %s does not contain package %s"
                                       % (self.namespace, spec.fullname))

         key = hash(spec)
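`repo_for_pkg` now trades an exception for a best guess: an unknown name maps to the highest-precedence repo, which is what lets `spack edit` pick where a not-yet-written package file should live. A toy model of the lookup order (stand-in classes, not Spack's):

```python
class TinyRepoPath(object):
    """Toy model of the lookup order; repos earlier in the list win."""
    def __init__(self, repos):
        self.repos = repos          # objects supporting 'name in repo'

    def first_repo(self):
        return self.repos[0] if self.repos else None

    def repo_for_pkg(self, name):
        for repo in self.repos:
            if name in repo:
                return repo
        # Unknown name: fall back to the highest-precedence repo.
        return self.first_repo()
```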
@@ -544,6 +562,35 @@ def get(self, spec, new=False):
         return self._instances[key]


+    @_autospec
+    def dump_provenance(self, spec, path):
+        """Dump provenance information for a spec to a particular path.
+
+           This dumps the package file and any associated patch files.
+           Raises UnknownPackageError if not found.
+        """
+        # Some preliminary checks.
+        if spec.virtual:
+            raise UnknownPackageError(spec.name)
+
+        if spec.namespace and spec.namespace != self.namespace:
+            raise UnknownPackageError("Repository %s does not contain package %s."
+                                      % (self.namespace, spec.fullname))
+
+        # Install any patch files needed by packages.
+        mkdirp(path)
+        for spec, patches in spec.package.patches.items():
+            for patch in patches:
+                if patch.path:
+                    if os.path.exists(patch.path):
+                        install(patch.path, path)
+                    else:
+                        tty.warn("Patch file did not exist: %s" % patch.path)
+
+        # Install the package.py file itself.
+        install(self.filename_for_package_name(spec), path)
+
+
     def purge(self):
         """Clear entire package instance cache."""
         self._instances.clear()
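Together with `dump_packages` in package.py above, this gives every install a self-contained snapshot: the root spec's `package.py` and patch files are copied from the live repo, while dependencies come from their own install-time snapshots. A hedged usage sketch (spec and destination invented):

```python
import spack
import spack.spec

spec = spack.spec.Spec('libelf@0.8.13')   # invented spec
spec.concretize()
spack.repo.dump_provenance(spec, '/tmp/provenance')
# /tmp/provenance now holds package.py plus any patch files the spec applies.
```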
@@ -697,6 +744,58 @@ def __contains__(self, pkg_name):
         return self.exists(pkg_name)


+def create_repo(root, namespace=None):
+    """Create a new repository in root with the specified namespace.
+
+       If the namespace is not provided, use basename of root.
+       Return the canonicalized path and the namespace of the created repository.
+    """
+    root = canonicalize_path(root)
+    if not namespace:
+        namespace = os.path.basename(root)
+
+    if not re.match(r'\w[\.\w-]*', namespace):
+        raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
+
+    existed = False
+    if os.path.exists(root):
+        if os.path.isfile(root):
+            raise BadRepoError('File %s already exists and is not a directory' % root)
+        elif os.path.isdir(root):
+            if not os.access(root, os.R_OK | os.W_OK):
+                raise BadRepoError('Cannot create new repo in %s: cannot access directory.' % root)
+            if os.listdir(root):
+                raise BadRepoError('Cannot create new repo in %s: directory is not empty.' % root)
+        existed = True
+
+    full_path = os.path.realpath(root)
+    parent = os.path.dirname(full_path)
+    if not os.access(parent, os.R_OK | os.W_OK):
+        raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
+
+    try:
+        config_path = os.path.join(root, repo_config_name)
+        packages_path = os.path.join(root, packages_dir_name)
+
+        mkdirp(packages_path)
+        with open(config_path, 'w') as config:
+            config.write("repo:\n")
+            config.write("  namespace: '%s'\n" % namespace)
+
+    except (IOError, OSError) as e:
+        raise BadRepoError('Failed to create new repository in %s.' % root,
+                           "Caused by %s: %s" % (type(e), e))
+
+        # try to clean up.
+        if existed:
+            shutil.rmtree(config_path, ignore_errors=True)
+            shutil.rmtree(packages_path, ignore_errors=True)
+        else:
+            shutil.rmtree(root, ignore_errors=True)
+
+    return full_path, namespace
+
+
 class RepoError(spack.error.SpackError):
     """Superclass for repository-related errors."""

@@ -705,6 +804,10 @@ class NoRepoConfiguredError(RepoError):
     """Raised when there are no repositories configured."""


+class InvalidNamespaceError(RepoError):
+    """Raised when an invalid namespace is encountered."""
+
+
 class BadRepoError(RepoError):
     """Raised when repo layout is invalid."""

@@ -722,7 +825,7 @@ class UnknownPackageError(PackageLoadError):
     def __init__(self, name, repo=None):
         msg = None
         if repo:
-            msg = "Package %s not found in repository %s." % (name, repo)
+            msg = "Package %s not found in repository %s" % (name, repo)
         else:
             msg = "Package %s not found." % name
         super(UnknownPackageError, self).__init__(msg)
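A quick sketch of what `create_repo` leaves on disk, assuming the module-level constants `repo_config_name` and `packages_dir_name` carry their usual values of `repo.yaml` and `packages` (they are not shown in this diff):

```python
full_path, namespace = create_repo('/tmp/myrepo')  # namespace defaults to 'myrepo'
# Expected layout afterwards:
#   /tmp/myrepo/repo.yaml   -> "repo:\n  namespace: 'myrepo'\n"
#   /tmp/myrepo/packages/   -> empty directory awaiting package files
```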
lib/spack/spack/stage.py

@@ -6,7 +6,7 @@
 # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
 # LLNL-CODE-647188
 #
-# For details, see https://llnl.github.io/spack
+# For details, see https://software.llnl.gov/spack
 # Please also see the LICENSE file for our notice and the LGPL.
 #
 # This program is free software; you can redistribute it and/or modify
@@ -23,7 +23,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-import re
+import errno
 import shutil
 import tempfile
 import sys
@@ -32,46 +32,64 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import *

+import spack.util.pattern as pattern
+
 import spack
 import spack.config
 import spack.fetch_strategy as fs
 import spack.error


 STAGE_PREFIX = 'spack-stage-'
 class Stage(object):
-    """A Stage object manages a directory where some source code is
-       downloaded and built before being installed.  It handles
-       fetching the source code, either as an archive to be expanded
-       or by checking it out of a repository.  A stage's lifecycle
-       looks like this:
-
-       Stage()
-         Constructor creates the stage directory.
-       fetch()
-         Fetch a source archive into the stage.
-       expand_archive()
-         Expand the source archive.
-       <install>
-         Build and install the archive.  This is handled by the Package class.
-       destroy()
-         Remove the stage once the package has been installed.
-
-       If spack.use_tmp_stage is True, spack will attempt to create stages
-       in a tmp directory.  Otherwise, stages are created directly in
-       spack.stage_path.
-
-       There are two kinds of stages: named and unnamed.  Named stages can
-       persist between runs of spack, e.g. if you fetched a tarball but
-       didn't finish building it, you won't have to fetch it again.
-
-       Unnamed stages are created using standard mkdtemp mechanisms or
-       similar, and are intended to persist for only one run of spack.
-    """
+    """Manages a temporary stage directory for building.
+
+       A Stage object is a context manager that handles a directory where
+       some source code is downloaded and built before being installed.
+       It handles fetching the source code, either as an archive to be
+       expanded or by checking it out of a repository.  A stage's
+       lifecycle looks like this:
+
+       ```
+       with Stage() as stage:      # Context manager creates and destroys the stage directory
+           stage.fetch()           # Fetch a source archive into the stage.
+           stage.expand_archive()  # Expand the source archive.
+           <install>               # Build and install the archive. (handled by user of Stage)
+       ```
+
+       When used as a context manager, the stage is automatically
+       destroyed if no exception is raised by the context.  If an
+       exception is raised, the stage is left in the filesystem and NOT
+       destroyed, for potential reuse later.
+
+       You can also use the stage's create/destroy functions manually,
+       like this:
+
+       ```
+       stage = Stage()
+       try:
+           stage.create()          # Explicitly create the stage directory.
+           stage.fetch()           # Fetch a source archive into the stage.
+           stage.expand_archive()  # Expand the source archive.
+           <install>               # Build and install the archive. (handled by user of Stage)
+       finally:
+           stage.destroy()         # Explicitly destroy the stage directory.
+       ```
+
+       If spack.use_tmp_stage is True, spack will attempt to create
+       stages in a tmp directory.  Otherwise, stages are created directly
+       in spack.stage_path.
+
+       There are two kinds of stages: named and unnamed.  Named stages
+       can persist between runs of spack, e.g. if you fetched a tarball
+       but didn't finish building it, you won't have to fetch it again.
+
+       Unnamed stages are created using standard mkdtemp mechanisms or
+       similar, and are intended to persist for only one run of spack.
+    """
-    def __init__(self, url_or_fetch_strategy, **kwargs):
+    def __init__(self, url_or_fetch_strategy, name=None, mirror_path=None, keep=None):
         """Create a stage object.
            Parameters:
              url_or_fetch_strategy
@@ -83,6 +101,18 @@ def __init__(self, url_or_fetch_strategy, **kwargs):
                  and will persist between runs (or if you construct another
                  stage object later).  If name is not provided, then this
                  stage will be given a unique name automatically.
+
+             mirror_path
+                 If provided, Stage will search Spack's mirrors for
+                 this archive at the mirror_path, before using the
+                 default fetch strategy.
+
+             keep
+                 By default, when used as a context manager, the Stage
+                 is cleaned up when everything goes well, and it is
+                 kept intact when an exception is raised.  You can
+                 override this behavior by setting keep to True
+                 (always keep) or False (always delete).
         """
         # TODO: fetch/stage coupling needs to be reworked -- the logic
         # TODO: here is convoluted and not modular enough.
@@ -94,25 +124,56 @@ def __init__(self, url_or_fetch_strategy, **kwargs):
             raise ValueError("Can't construct Stage without url or fetch strategy")
         self.fetcher.set_stage(self)
         self.default_fetcher = self.fetcher  # self.fetcher can change with mirrors.
         self.skip_checksum_for_mirror = True  # used for mirrored archives of repositories.

-        self.name = kwargs.get('name')
-        self.mirror_path = kwargs.get('mirror_path')
+        # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name
+        # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root
+        self.name = name
+        if name is None:
+            self.name = STAGE_PREFIX + next(tempfile._get_candidate_names())
+        self.mirror_path = mirror_path
         self.tmp_root = find_tmp_root()

-        self.path = None
-        self._setup()
+        # Try to construct here a temporary name for the stage directory
+        # If this is a named stage, then construct a named path.
+        self.path = join_path(spack.stage_path, self.name)
+
+        # Flag to decide whether to delete the stage folder on exit or not
+        self.keep = keep


-    def _cleanup_dead_links(self):
-        """Remove any dead links in the stage directory."""
-        for file in os.listdir(spack.stage_path):
-            path = join_path(spack.stage_path, file)
-            if os.path.islink(path):
-                real_path = os.path.realpath(path)
-                if not os.path.exists(path):
-                    os.unlink(path)
+    def __enter__(self):
+        """
+        Entering a stage context will create the stage directory
+
+        Returns:
+            self
+        """
+        self.create()
+        return self
+
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """
+        Exiting from a stage context will delete the stage directory unless:
+        - it was explicitly requested not to do so
+        - an exception has been raised
+
+        Args:
+            exc_type: exception type
+            exc_val: exception value
+            exc_tb: exception traceback
+
+        Returns:
+            Boolean
+        """
+        if self.keep is None:
+            # Default: delete when there are no exceptions.
+            if exc_type is None: self.destroy()
+
+        elif not self.keep:
+            # Overridden. Either always keep or always delete.
+            self.destroy()
def _need_to_create_path(self):
|
def _need_to_create_path(self):
|
||||||
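The keep flag and the new context-manager protocol are easiest to read together: entering the stage creates its directory, and leaving it destroys the directory unless keep, or a raised exception, says otherwise. A minimal usage sketch under that reading (the URL is made up, and the exact keyword names are assumed from the diff above):

    from spack.stage import Stage

    # Created on entry; destroyed on a clean exit, but kept for
    # post-mortem inspection if the body raises.
    with Stage('http://example.com/foo-1.0.tar.gz') as stage:
        stage.fetch()
        stage.expand_archive()

    # keep=True forces the stage to survive even a successful run.
    with Stage('http://example.com/foo-1.0.tar.gz', keep=True) as stage:
        stage.fetch()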
@@ -132,7 +193,7 @@ def _need_to_create_path(self):
         # Path looks ok, but need to check the target of the link.
         if os.path.islink(self.path):
             real_path = os.path.realpath(self.path)
             real_tmp = os.path.realpath(self.tmp_root)

             if spack.use_tmp_stage:
                 # If we're using a tmp dir, it's a link, and it points at the right spot,
@@ -151,56 +212,6 @@ def _need_to_create_path(self):

         return False

-    def _setup(self):
-        """Creates the stage directory.
-           If spack.use_tmp_stage is False, the stage directory is created
-           directly under spack.stage_path.
-
-           If spack.use_tmp_stage is True, this will attempt to create a
-           stage in a temporary directory and link it into spack.stage_path.
-           Spack will use the first writable location in spack.tmp_dirs to
-           create a stage. If there is no valid location in tmp_dirs, fall
-           back to making the stage inside spack.stage_path.
-        """
-        # Create the top-level stage directory
-        mkdirp(spack.stage_path)
-        self._cleanup_dead_links()
-
-        # If this is a named stage, then construct a named path.
-        if self.name is not None:
-            self.path = join_path(spack.stage_path, self.name)
-
-        # If this is a temporary stage, then make the temp directory
-        tmp_dir = None
-        if self.tmp_root:
-            if self.name is None:
-                # Unnamed tmp root.  Link the path in.
-                tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
-                self.name = os.path.basename(tmp_dir)
-                self.path = join_path(spack.stage_path, self.name)
-                if self._need_to_create_path():
-                    os.symlink(tmp_dir, self.path)
-
-            else:
-                if self._need_to_create_path():
-                    tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
-                    os.symlink(tmp_dir, self.path)
-
-        # If we're not using a tmp dir, create the stage directly in the
-        # stage dir, rather than linking to it.
-        else:
-            if self.name is None:
-                self.path = tempfile.mkdtemp('', STAGE_PREFIX, spack.stage_path)
-                self.name = os.path.basename(self.path)
-            else:
-                if self._need_to_create_path():
-                    mkdirp(self.path)
-
-        # Make sure we can actually do something with the stage we made.
-        ensure_access(self.path)
-
     @property
     def archive_file(self):
         """Path to the source archive within this stage directory."""
@@ -217,35 +228,43 @@ def archive_file(self):
         else:
             return None

     @property
     def source_path(self):
-        """Returns the path to the expanded/checked out source code
-           within this fetch strategy's path.
-
-           This assumes nothing else is going to be put in the
-           FetchStrategy's path.  It searches for the first
-           subdirectory of the path it can find, then returns that.
+        """Returns the path to the expanded/checked out source code.
+
+        To find the source code, this method searches for the first
+        subdirectory of the stage that it can find, and returns it.
+        This assumes nothing besides the archive file will be in the
+        stage path, but it has the advantage that we don't need to
+        know the name of the archive or its contents.
+
+        If the fetch strategy is not supposed to expand the downloaded
+        file, it will just return the stage path. If the archive needs
+        to be expanded, it will return None when no archive is found.
         """
+        if isinstance(self.fetcher, fs.URLFetchStrategy):
+            if not self.fetcher.expand_archive:
+                return self.path
+
         for p in [os.path.join(self.path, f) for f in os.listdir(self.path)]:
             if os.path.isdir(p):
                 return p
         return None

     def chdir(self):
         """Changes directory to the stage path.  Or dies if it is not set up."""
         if os.path.isdir(self.path):
             os.chdir(self.path)
         else:
-            tty.die("Setup failed: no such directory: " + self.path)
+            raise ChdirError("Setup failed: no such directory: " + self.path)

-    def fetch(self):
+    def fetch(self, mirror_only=False):
         """Downloads an archive or checks out code from a repository."""
         self.chdir()

-        fetchers = [self.default_fetcher]
+        fetchers = []
+        if not mirror_only:
+            fetchers.append(self.default_fetcher)

         # TODO: move mirror logic out of here and clean it up!
         # TODO: Or @alalazo may have some ideas about how to use a
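The source_path property above amounts to "first subdirectory wins". A standalone sketch of the same search, with a made-up stage path, illustrates the idea:

    import os

    def first_subdir(stage_path):
        # Mirrors the search in source_path: return the first directory
        # entry found under the stage, or None if nothing is expanded yet.
        for entry in os.listdir(stage_path):
            candidate = os.path.join(stage_path, entry)
            if os.path.isdir(candidate):
                return candidate
        return None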
@@ -253,7 +272,13 @@ def fetch(self):
         self.skip_checksum_for_mirror = True
         if self.mirror_path:
             mirrors = spack.config.get_config('mirrors')

-            urls = [urljoin(u, self.mirror_path) for name, u in mirrors.items()]
+            # Join URLs of mirror roots with mirror paths. urljoin() will
+            # strip everything past the final '/' in the root, so we add
+            # a '/' if it is not already present.
+            mirror_roots = [root if root.endswith('/') else root + '/'
+                            for root in mirrors.values()]
+            urls = [urljoin(root, self.mirror_path) for root in mirror_roots]

             # If this archive is normally fetched from a tarball URL,
             # then use the same digest.  `spack mirror` ensures that
|
|||||||
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
|
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
|
||||||
digest = self.default_fetcher.digest
|
digest = self.default_fetcher.digest
|
||||||
|
|
||||||
# Have to skip the checkesum for things archived from
|
# Have to skip the checksum for things archived from
|
||||||
# repositories. How can this be made safer?
|
# repositories. How can this be made safer?
|
||||||
self.skip_checksum_for_mirror = not bool(digest)
|
self.skip_checksum_for_mirror = not bool(digest)
|
||||||
|
|
||||||
|
# Add URL strategies for all the mirrors with the digest
|
||||||
for url in urls:
|
for url in urls:
|
||||||
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
|
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
|
||||||
|
|
||||||
@ -275,7 +301,7 @@ def fetch(self):
|
|||||||
self.fetcher = fetcher
|
self.fetcher = fetcher
|
||||||
self.fetcher.fetch()
|
self.fetcher.fetch()
|
||||||
break
|
break
|
||||||
except spack.error.SpackError, e:
|
except spack.error.SpackError as e:
|
||||||
tty.msg("Fetching from %s failed." % fetcher)
|
tty.msg("Fetching from %s failed." % fetcher)
|
||||||
tty.debug(e)
|
tty.debug(e)
|
||||||
continue
|
continue
|
||||||
@ -284,7 +310,6 @@ def fetch(self):
|
|||||||
self.fetcher = self.default_fetcher
|
self.fetcher = self.default_fetcher
|
||||||
raise fs.FetchError(errMessage, None)
|
raise fs.FetchError(errMessage, None)
|
||||||
|
|
||||||
|
|
||||||
def check(self):
|
def check(self):
|
||||||
"""Check the downloaded archive against a checksum digest.
|
"""Check the downloaded archive against a checksum digest.
|
||||||
No-op if this stage checks code out of a repository."""
|
No-op if this stage checks code out of a repository."""
|
||||||
@@ -298,14 +323,17 @@ def check(self):
         else:
             self.fetcher.check()

     def expand_archive(self):
         """Changes to the stage directory and attempts to expand the
            downloaded archive.  Fail if the stage is not set up or if the
            archive is not yet downloaded.
         """
-        self.fetcher.expand()
+        archive_dir = self.source_path
+        if not archive_dir:
+            self.fetcher.expand()
+            tty.msg("Created stage in %s" % self.path)
+        else:
+            tty.msg("Already staged %s in %s" % (self.name, self.path))

     def chdir_to_source(self):
         """Changes directory to the expanded archive directory.

@@ -319,16 +347,41 @@ def chdir_to_source(self):
         if not os.listdir(path):
             tty.die("Archive was empty for %s" % self.name)

     def restage(self):
         """Removes the expanded archive path if it exists, then re-expands
            the archive.
         """
         self.fetcher.reset()

+    def create(self):
+        """
+        Creates the stage directory.
+
+        If self.tmp_root evaluates to False, the stage directory is
+        created directly under spack.stage_path, otherwise this will
+        attempt to create a stage in a temporary directory and link it
+        into spack.stage_path.
+
+        Spack will use the first writable location in spack.tmp_dirs
+        to create a stage. If there is no valid location in tmp_dirs,
+        fall back to making the stage inside spack.stage_path.
+        """
+        # Create the top-level stage directory.
+        mkdirp(spack.stage_path)
+        remove_dead_links(spack.stage_path)
+        # If a tmp_root exists, create the stage there and link it into
+        # the stage area; otherwise create the directory in self.path.
+        if self._need_to_create_path():
+            if self.tmp_root:
+                tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
+                os.symlink(tmp_dir, self.path)
+            else:
+                mkdirp(self.path)
+        # Make sure we can actually do something with the stage we made.
+        ensure_access(self.path)

     def destroy(self):
-        """Remove this stage directory."""
+        """Removes this stage directory."""
         remove_linked_tree(self.path)

         # Make sure we don't end up in a removed directory
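The create() logic boils down to "real directory in tmp_root, symlink in stage_path" when a temporary root is available. A condensed sketch of the same idea, with hypothetical paths standing in for spack.stage_path and the tmp root:

    import os
    import tempfile

    stage_path = '/var/spack/stage'     # hypothetical spack.stage_path
    tmp_root = '/tmp/spack-stage'       # hypothetical writable tmp root
    link = os.path.join(stage_path, 'spack-stage-foo-1.0')

    if tmp_root:
        # Build in tmp, publish via symlink so every stage is still
        # visible under one top-level directory.
        real_dir = tempfile.mkdtemp('', 'spack-stage-', tmp_root)
        os.symlink(real_dir, link)
    else:
        os.makedirs(link)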
@@ -338,8 +391,82 @@ def destroy(self):
             os.chdir(os.path.dirname(self.path))


+class ResourceStage(Stage):
+    def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
+        super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs)
+        self.root_stage = root
+        self.resource = resource
+
+    def expand_archive(self):
+        super(ResourceStage, self).expand_archive()
+        root_stage = self.root_stage
+        resource = self.resource
+        placement = os.path.basename(self.source_path) if resource.placement is None else resource.placement
+        if not isinstance(placement, dict):
+            placement = {'': placement}
+        # Make the paths in the dictionary absolute and move things into place.
+        for key, value in placement.iteritems():
+            target_path = join_path(root_stage.source_path, resource.destination)
+            destination_path = join_path(target_path, value)
+            source_path = join_path(self.source_path, key)
+
+            try:
+                os.makedirs(target_path)
+            except OSError as err:
+                if err.errno == errno.EEXIST and os.path.isdir(target_path):
+                    pass
+                else:
+                    raise
+
+            if not os.path.exists(destination_path):
+                # Move the expanded resource into its destination.
+                tty.info('Moving resource stage\n\tsource : {stage}\n\tdestination : {destination}'.format(
+                    stage=source_path, destination=destination_path
+                ))
+                shutil.move(source_path, destination_path)
+
+
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy'])
+class StageComposite:
+    """
+    Composite for Stage type objects. The first item in this composite is
+    considered to be the root package, and operations that return a value
+    are forwarded to it.
+    """
+    #
+    # __enter__ and __exit__ delegate to all stages in the composite.
+    #
+    def __enter__(self):
+        for item in self:
+            item.__enter__()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        for item in reversed(self):
+            item.keep = getattr(self, 'keep', None)
+            item.__exit__(exc_type, exc_val, exc_tb)
+
+    #
+    # The functions below act only on the *first* stage in the composite.
+    #
+    @property
+    def source_path(self):
+        return self[0].source_path
+
+    @property
+    def path(self):
+        return self[0].path
+
+    def chdir_to_source(self):
+        return self[0].chdir_to_source()
+
+    @property
+    def archive_file(self):
+        return self[0].archive_file
+
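StageComposite relies on the composite decorator to forward the listed methods to every child stage, so a package with extra resources can treat all of its stages as one object. A hedged sketch of how it might be used (the stage variables are illustrative, not from this diff):

    composite = StageComposite()
    composite.append(root_stage)       # stage of the package itself
    composite.append(resource_stage)   # stage of an extra resource

    with composite:                    # __enter__ creates every stage
        composite.fetch()              # forwarded to each child by the decorator
        composite.expand_archive()
        build_dir = composite.source_path  # properties come from the first (root) stage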
 class DIYStage(object):
     """Simple class that allows any directory to be a spack stage."""

     def __init__(self, path):
         self.archive_file = None
         self.path = path

@@ -349,12 +476,16 @@ def chdir(self):
         if os.path.isdir(self.path):
             os.chdir(self.path)
         else:
-            tty.die("Setup failed: no such directory: " + self.path)
+            raise ChdirError("Setup failed: no such directory: " + self.path)

+    # DIY stages do nothing as context managers.
+    def __enter__(self): pass
+    def __exit__(self, exc_type, exc_val, exc_tb): pass

     def chdir_to_source(self):
         self.chdir()

-    def fetch(self):
+    def fetch(self, mirror_only):
         tty.msg("No need to fetch for DIY.")

     def check(self):
@@ -383,19 +514,6 @@ def ensure_access(file=spack.stage_path):
         tty.die("Insufficient permissions for %s" % file)


-def remove_linked_tree(path):
-    """Removes a directory and its contents.  If the directory is a symlink,
-       follows the link and removes the real directory before removing the
-       link.
-    """
-    if os.path.exists(path):
-        if os.path.islink(path):
-            shutil.rmtree(os.path.realpath(path), True)
-            os.unlink(path)
-        else:
-            shutil.rmtree(path, True)
-
-
 def purge():
     """Remove all build directories in the top-level stage path."""
     if os.path.isdir(spack.stage_path):
@@ -424,19 +542,15 @@ def find_tmp_root():


 class StageError(spack.error.SpackError):
-    def __init__(self, message, long_message=None):
-        super(self, StageError).__init__(message, long_message)
+    """Superclass for all errors encountered during staging."""


 class RestageError(StageError):
-    def __init__(self, message, long_msg=None):
-        super(RestageError, self).__init__(message, long_msg)
+    """Error encountered during restaging."""


 class ChdirError(StageError):
-    def __init__(self, message, long_msg=None):
-        super(ChdirError, self).__init__(message, long_msg)
+    """Raised when Spack can't change directories."""


 # Keep this in namespace for convenience
 FailedDownloadError = fs.FailedDownloadError
@@ -48,6 +48,7 @@
     'package_sanity',
     'config',
     'directory_layout',
+    'pattern',
     'python_version',
     'git_fetch',
     'svn_fetch',
@@ -39,11 +39,11 @@
     'arg1',
     '-Wl,--start-group',
     'arg2',
-    '-Wl,-rpath=/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath',
+    '-Wl,-rpath,/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath',
     '-llib1', '-llib2',
     'arg4',
     '-Wl,--end-group',
-    '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath=/fourth/rpath',
+    '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath',
     '-llib3', '-llib4',
     'arg5', 'arg6']

@@ -95,13 +95,13 @@ def test_cpp_mode(self):
     def test_ccld_mode(self):
         self.check_cc('dump-mode', [], "ccld")
         self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld")
-        self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath=foo'], "ccld")
-        self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ccld")
+        self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath,foo'], "ccld")
+        self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ccld")

     def test_ld_mode(self):
         self.check_ld('dump-mode', [], "ld")
-        self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ld")
+        self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld")

     def test_includes(self):
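The fixture change above reflects how `-Wl,` options are unpacked: the compiler driver splits on commas and hands the pieces to the linker, so `-Wl,-rpath,/first/rpath` becomes the two arguments `-rpath /first/rpath`, while the `-rpath=` form stays a single token that only some linkers accept. A tiny sketch of the comma splitting a compiler wrapper has to undo (not Spack's actual parser):

    def linker_args(compiler_arg):
        # '-Wl,-rpath,/first/rpath' -> ['-rpath', '/first/rpath']
        assert compiler_arg.startswith('-Wl,')
        return compiler_arg.split(',')[1:]

    print(linker_args('-Wl,-rpath,/first/rpath'))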
@@ -22,8 +22,6 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import unittest
-
 import spack
 from spack.spec import Spec, CompilerSpec
 from spack.test.mock_packages_test import *
@@ -22,13 +22,13 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import unittest
-import shutil
 import os
+import shutil
 from tempfile import mkdtemp
-from ordereddict_backport import OrderedDict
 import spack
 import spack.config
+from ordereddict_backport import OrderedDict
 from spack.test.mock_packages_test import *

 # Some sample compiler config data
@@ -23,20 +23,15 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-import unittest
 import shutil
 import tempfile
+import unittest

 from llnl.util.filesystem import *

 from spack.cmd.create import ConfigureGuesser
 from spack.stage import Stage

-from spack.fetch_strategy import URLFetchStrategy
-from spack.directory_layout import YamlDirectoryLayout
-from spack.util.executable import which
 from spack.test.mock_packages_test import *
-from spack.test.mock_repo import MockArchive
+from spack.util.executable import which


 class InstallTest(unittest.TestCase):

@@ -52,8 +47,6 @@ def setUp(self):

     def tearDown(self):
         shutil.rmtree(self.tmpdir, ignore_errors=True)
-        if self.stage:
-            self.stage.destroy()
         os.chdir(self.orig_dir)

@@ -64,12 +57,12 @@ def check_archive(self, filename, system):

         url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
         print url
-        self.stage = Stage(url)
-        self.stage.fetch()
-
-        guesser = ConfigureGuesser()
-        guesser(self.stage)
-        self.assertEqual(system, guesser.build_system)
+        with Stage(url) as stage:
+            stage.fetch()
+
+            guesser = ConfigureGuesser()
+            guesser(stage)
+            self.assertEqual(system, guesser.build_system)

     def test_python(self):
@@ -26,19 +26,18 @@
 These tests check the database is functioning properly,
 both in memory and in its file
 """
-import tempfile
-import shutil
 import multiprocessing
+import shutil
+import tempfile

-from llnl.util.lock import *
-from llnl.util.filesystem import join_path

 import spack
+from llnl.util.filesystem import join_path
+from llnl.util.lock import *
+from llnl.util.tty.colify import colify
 from spack.database import Database
 from spack.directory_layout import YamlDirectoryLayout
 from spack.test.mock_packages_test import *

-from llnl.util.tty.colify import colify


 def _print_ref_counts():
     """Print out all ref counts for the graph used here, for debugging"""
@@ -25,20 +25,17 @@
 """\
 This test verifies that the Spack directory layout works properly.
 """
-import unittest
-import tempfile
-import shutil
 import os
+import shutil
+import tempfile

-from llnl.util.filesystem import *

 import spack
-from spack.spec import Spec
-from spack.repository import RepoPath
+from llnl.util.filesystem import *
 from spack.directory_layout import YamlDirectoryLayout
+from spack.repository import RepoPath
+from spack.spec import Spec
 from spack.test.mock_packages_test import *


 # number of packages to test (to reduce test time)
 max_packages = 10
@@ -23,19 +23,12 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-import unittest
-import shutil
-import tempfile
-
-from llnl.util.filesystem import *

 import spack
-from spack.version import ver
-from spack.stage import Stage
-from spack.util.executable import which
-
+from llnl.util.filesystem import *
 from spack.test.mock_packages_test import *
 from spack.test.mock_repo import MockGitRepo
+from spack.version import ver


 class GitFetchTest(MockPackagesTest):
@@ -52,22 +45,15 @@ def setUp(self):
         spec.concretize()
         self.pkg = spack.repo.get(spec, new=True)

     def tearDown(self):
         """Destroy the stage space used by this test."""
         super(GitFetchTest, self).tearDown()
-
-        if self.repo.stage is not None:
-            self.repo.stage.destroy()
-
-        self.pkg.do_clean()
+        self.repo.destroy()

     def assert_rev(self, rev):
         """Check that the current git revision is equal to the supplied rev."""
         self.assertEqual(self.repo.rev_hash('HEAD'), self.repo.rev_hash(rev))

     def try_fetch(self, rev, test_file, args):
         """Tries to:
            1. Fetch the repo using a fetch strategy constructed with
@@ -79,26 +65,27 @@ def try_fetch(self, rev, test_file, args):
         """
         self.pkg.versions[ver('git')] = args

-        self.pkg.do_stage()
-        self.assert_rev(rev)
+        with self.pkg.stage:
+            self.pkg.do_stage()
+            self.assert_rev(rev)

             file_path = join_path(self.pkg.stage.source_path, test_file)
             self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
             self.assertTrue(os.path.isfile(file_path))

             os.unlink(file_path)
             self.assertFalse(os.path.isfile(file_path))

             untracked_file = 'foobarbaz'
             touch(untracked_file)
             self.assertTrue(os.path.isfile(untracked_file))
             self.pkg.do_restage()
             self.assertFalse(os.path.isfile(untracked_file))

             self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
             self.assertTrue(os.path.isfile(file_path))

             self.assert_rev(rev)

     def test_fetch_master(self):
@@ -23,16 +23,12 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-import unittest
-
-from llnl.util.filesystem import *

 import spack
-
 from spack.version import ver
-from spack.stage import Stage
-from spack.util.executable import which
-from spack.test.mock_packages_test import *
 from spack.test.mock_repo import MockHgRepo
+from llnl.util.filesystem import *
+from spack.test.mock_packages_test import *


 class HgFetchTest(MockPackagesTest):
@@ -49,16 +45,10 @@ def setUp(self):
         spec.concretize()
         self.pkg = spack.repo.get(spec, new=True)

     def tearDown(self):
         """Destroy the stage space used by this test."""
         super(HgFetchTest, self).tearDown()
-
-        if self.repo.stage is not None:
-            self.repo.stage.destroy()
-
-        self.pkg.do_clean()
+        self.repo.destroy()

     def try_fetch(self, rev, test_file, args):
         """Tries to:
@@ -71,26 +61,27 @@ def try_fetch(self, rev, test_file, args):
         """
         self.pkg.versions[ver('hg')] = args

-        self.pkg.do_stage()
-        self.assertEqual(self.repo.get_rev(), rev)
+        with self.pkg.stage:
+            self.pkg.do_stage()
+            self.assertEqual(self.repo.get_rev(), rev)

             file_path = join_path(self.pkg.stage.source_path, test_file)
             self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
             self.assertTrue(os.path.isfile(file_path))

             os.unlink(file_path)
             self.assertFalse(os.path.isfile(file_path))

             untracked = 'foobarbaz'
             touch(untracked)
             self.assertTrue(os.path.isfile(untracked))
             self.pkg.do_restage()
             self.assertFalse(os.path.isfile(untracked))

             self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
             self.assertTrue(os.path.isfile(file_path))

             self.assertEqual(self.repo.get_rev(), rev)

     def test_fetch_default(self):
@@ -22,18 +22,13 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
-import unittest
 import shutil
 import tempfile

-from llnl.util.filesystem import *

 import spack
-from spack.stage import Stage
-from spack.fetch_strategy import URLFetchStrategy
+from llnl.util.filesystem import *
 from spack.directory_layout import YamlDirectoryLayout
-from spack.util.executable import which
+from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
 from spack.test.mock_packages_test import *
 from spack.test.mock_repo import MockArchive

@@ -59,9 +54,7 @@ def setUp(self):

     def tearDown(self):
         super(InstallTest, self).tearDown()
-
-        if self.repo.stage is not None:
-            self.repo.stage.destroy()
+        self.repo.destroy()

         # Turn checksumming back on
         spack.do_checksum = True
@@ -81,7 +74,10 @@ def test_install_and_uninstall(self):
         pkg = spack.repo.get(spec)

         # Fake the URL for the package so it downloads from a file.
-        pkg.fetcher = URLFetchStrategy(self.repo.url)
+        fetcher = FetchStrategyComposite()
+        fetcher.append(URLFetchStrategy(self.repo.url))
+        pkg.fetcher = fetcher

         try:
             pkg.do_install()
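FetchStrategyComposite presumably composes fetchers the same way StageComposite composes stages; the test only needs it so that pkg.fetcher exposes the composite interface. A sketch of the substitution under that assumption (the file URL is hypothetical):

    fetcher = FetchStrategyComposite()
    fetcher.append(URLFetchStrategy('file:///tmp/archive.tar.gz'))
    pkg.fetcher = fetcher  # package code can now delegate to the children uniformly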
@@ -24,8 +24,6 @@
 ##############################################################################
 import os
 import unittest
-import shutil
-import tempfile

 from llnl.util.filesystem import *
 from llnl.util.link_tree import LinkTree
@@ -38,6 +36,7 @@ class LinkTreeTest(unittest.TestCase):

     def setUp(self):
         self.stage = Stage('link-tree-test')
+        self.stage.create()

         with working_dir(self.stage.path):
             touchp('source/1')
|
|||||||
source_path = os.path.join(self.stage.path, 'source')
|
source_path = os.path.join(self.stage.path, 'source')
|
||||||
self.link_tree = LinkTree(source_path)
|
self.link_tree = LinkTree(source_path)
|
||||||
|
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
if self.stage:
|
self.stage.destroy()
|
||||||
self.stage.destroy()
|
|
||||||
|
|
||||||
|
|
||||||
def check_file_link(self, filename):
|
def check_file_link(self, filename):
|
||||||
|
@@ -25,15 +25,13 @@
 """
 These tests ensure that our lock works correctly.
 """
-import unittest
-import os
-import tempfile
 import shutil
+import tempfile
+import unittest
 from multiprocessing import Process

-from llnl.util.lock import *
 from llnl.util.filesystem import join_path, touch
+from llnl.util.lock import *
 from spack.util.multiproc import Barrier

 # This is the longest a failed test will take, as the barriers will
@@ -28,13 +28,13 @@
 This just tests whether the right args are getting passed to make.
 """
 import os
-import unittest
-import tempfile
 import shutil
+import tempfile
+import unittest

 from llnl.util.filesystem import *
-from spack.util.environment import path_put_first
 from spack.build_environment import MakeExecutable
+from spack.util.environment import path_put_first


 class MakeExecutableTest(unittest.TestCase):
@@ -23,11 +23,10 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import os
-from filecmp import dircmp

 import spack
 import spack.mirror
-from spack.util.compression import decompressor_for
+from filecmp import dircmp
 from spack.test.mock_packages_test import *
 from spack.test.mock_repo import *

@@ -44,8 +43,16 @@ def setUp(self):
         self.repos = {}

+    def tearDown(self):
+        """Destroy all the stages created by the repos in setup."""
+        super(MirrorTest, self).tearDown()
+        for repo in self.repos.values():
+            repo.destroy()
+        self.repos.clear()

     def set_up_package(self, name, MockRepoClass, url_attr):
-        """Use this to set up a mock package to be mirrored.
+        """Set up a mock package to be mirrored.
            Each package needs us to:
              1. Set up a mock repo/archive to fetch from.
              2. Point the package's version args at that repo.
@@ -65,22 +72,15 @@ def set_up_package(self, name, MockRepoClass, url_attr):
         pkg.versions[v][url_attr] = repo.url

-    def tearDown(self):
-        """Destroy all the stages created by the repos in setup."""
-        super(MirrorTest, self).tearDown()
-
-        for name, repo in self.repos.items():
-            if repo.stage:
-                pass #repo.stage.destroy()
-
-        self.repos.clear()

     def check_mirror(self):
-        stage = Stage('spack-mirror-test')
-        mirror_root = join_path(stage.path, 'test-mirror')
-
-        try:
+        with Stage('spack-mirror-test') as stage:
+            mirror_root = join_path(stage.path, 'test-mirror')
+
+            # Register the mirror with the spack config.
+            mirrors = {'spack-mirror-test': 'file://' + mirror_root}
+            spack.config.update_config('mirrors', mirrors)
+
             os.chdir(stage.path)
             spack.mirror.create(
                 mirror_root, self.repos, no_checksum=True)
@@ -88,7 +88,7 @@ def check_mirror(self):
             # Stage directory exists
             self.assertTrue(os.path.isdir(mirror_root))

-            # subdirs for each package
+            # check that there are subdirs for each package
             for name in self.repos:
                 subdir = join_path(mirror_root, name)
                 self.assertTrue(os.path.isdir(subdir))
@@ -96,40 +96,28 @@ def check_mirror(self):
                 files = os.listdir(subdir)
                 self.assertEqual(len(files), 1)

-            # Decompress archive in the mirror
-            archive = files[0]
-            archive_path = join_path(subdir, archive)
-            decomp = decompressor_for(archive_path)
-
-            with working_dir(subdir):
-                decomp(archive_path)
-
-                # Find the untarred archive directory.
-                files = os.listdir(subdir)
-                self.assertEqual(len(files), 2)
-                self.assertTrue(archive in files)
-                files.remove(archive)
-
-                expanded_archive = join_path(subdir, files[0])
-                self.assertTrue(os.path.isdir(expanded_archive))
-
-                # Compare the original repo with the expanded archive
-                repo = self.repos[name]
-                if not 'svn' in name:
-                    original_path = repo.path
-                else:
-                    co = 'checked_out'
-                    svn('checkout', repo.url, co)
-                    original_path = join_path(subdir, co)
-
-                dcmp = dircmp(original_path, expanded_archive)
-
-                # make sure there are no new files in the expanded tarball
-                self.assertFalse(dcmp.right_only)
-                self.assertTrue(all(l in exclude for l in dcmp.left_only))
-
-        finally:
-            pass #stage.destroy()
+            # Now try to fetch each package.
+            for name, mock_repo in self.repos.items():
+                spec = Spec(name).concretized()
+                pkg = spec.package
+
+                saved_checksum_setting = spack.do_checksum
+                with pkg.stage:
+                    # Stage the archive from the mirror and cd to it.
+                    spack.do_checksum = False
+                    pkg.do_stage(mirror_only=True)
+                    # Compare the original repo with the expanded archive
+                    original_path = mock_repo.path
+                    if 'svn' in name:
+                        # have to check out the svn repo to compare.
+                        original_path = join_path(mock_repo.path, 'checked_out')
+                        svn('checkout', mock_repo.url, original_path)
+                    dcmp = dircmp(original_path, pkg.stage.source_path)
+                    # make sure there are no new files in the expanded tarball
+                    self.assertFalse(dcmp.right_only)
+                    # and that all original files are present.
+                    self.assertTrue(all(l in exclude for l in dcmp.left_only))
+                spack.do_checksum = saved_checksum_setting

     def test_git_mirror(self):
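The comparison leans on filecmp.dircmp: right_only lists entries present only in the staged tree, left_only those present only in the original. A self-contained sketch of the check, with temporary directories standing in for the repos:

    import os
    import tempfile
    from filecmp import dircmp

    left = tempfile.mkdtemp()
    right = tempfile.mkdtemp()
    open(os.path.join(left, 'same.txt'), 'w').close()
    open(os.path.join(right, 'same.txt'), 'w').close()
    open(os.path.join(right, 'extra.txt'), 'w').close()

    d = dircmp(left, right)
    assert d.right_only == ['extra.txt']  # a new file appeared in the staged copy
    assert d.left_only == []              # nothing from the original is missing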
@@ -22,17 +22,15 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import sys
 import os
 import shutil
-import unittest
 import tempfile
-from ordereddict_backport import OrderedDict
+import unittest

-from llnl.util.filesystem import mkdirp

 import spack
 import spack.config
+from llnl.util.filesystem import mkdirp
+from ordereddict_backport import OrderedDict
 from spack.repository import RepoPath
 from spack.spec import Spec
@@ -26,13 +26,9 @@
 import shutil

 from llnl.util.filesystem import *

-import spack
-from spack.version import ver
 from spack.stage import Stage
 from spack.util.executable import which


 #
 # VCS Systems used by mock repo code.
 #
@@ -55,6 +51,12 @@ def __init__(self, stage_name, repo_name):
         mkdirp(self.path)

+    def destroy(self):
+        """Destroy resources associated with this mock repo."""
+        if self.stage:
+            self.stage.destroy()

 class MockArchive(MockRepo):
     """Creates a very simple archive directory with a configure script and a
        makefile that installs to a prefix.  Tars it up into an archive."""
@@ -25,14 +25,11 @@
 """
 Test for multi_method dispatch.
 """
-import unittest

 import spack
 from spack.multimethod import *
-from spack.version import *
-from spack.spec import Spec
-from spack.multimethod import when
 from spack.test.mock_packages_test import *
+from spack.version import *


 class MultiMethodTest(MockPackagesTest):
@@ -6,7 +6,7 @@
 # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
 # LLNL-CODE-647188
 #
-# For details, see https://llnl.github.io/spack
+# For details, see https://software.llnl.gov/spack
 # Please also see the LICENSE file for our notice and the LGPL.
 #
 # This program is free software; you can redistribute it and/or modify
@@ -23,6 +23,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import unittest
+
 from spack.util.naming import NamespaceTrie
@@ -22,10 +22,8 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import unittest
-
-import spack
-from spack.spec import Spec, CompilerSpec
+from spack.spec import Spec
 from spack.test.mock_packages_test import *

 class ConcretizeTest(MockPackagesTest):
@@ -22,14 +22,12 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import unittest
-
-from llnl.util.filesystem import join_path
 import spack
+from llnl.util.filesystem import join_path
 from spack.repository import Repo
-from spack.util.naming import mod_to_class
 from spack.test.mock_packages_test import *
+from spack.util.naming import mod_to_class


 class PackagesTest(MockPackagesTest):
lib/spack/spack/test/pattern.py (new file, 104 lines)
@@ -0,0 +1,104 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import unittest
+
+import spack.util.pattern as pattern
+
+
+class CompositeTest(unittest.TestCase):
+
+    def setUp(self):
+        class Base:
+            counter = 0
+
+            def add(self):
+                raise NotImplemented('add not implemented')
+
+            def subtract(self):
+                raise NotImplemented('subtract not implemented')
+
+        class One(Base):
+            def add(self):
+                Base.counter += 1
+
+            def subtract(self):
+                Base.counter -= 1
+
+        class Two(Base):
+            def add(self):
+                Base.counter += 2
+
+            def subtract(self):
+                Base.counter -= 2
+
+        self.Base = Base
+        self.One = One
+        self.Two = Two
+
+    def test_composite_from_method_list(self):
+
+        @pattern.composite(method_list=['add', 'subtract'])
+        class CompositeFromMethodList:
+            pass
+
+        composite = CompositeFromMethodList()
+        composite.append(self.One())
+        composite.append(self.Two())
+        composite.add()
+        self.assertEqual(self.Base.counter, 3)
+        composite.pop()
+        composite.subtract()
+        self.assertEqual(self.Base.counter, 2)
+
+    def test_composite_from_interface(self):
+
+        @pattern.composite(interface=self.Base)
+        class CompositeFromInterface:
+            pass
+
+        composite = CompositeFromInterface()
+        composite.append(self.One())
+        composite.append(self.Two())
+        composite.add()
+        self.assertEqual(self.Base.counter, 3)
+        composite.pop()
+        composite.subtract()
+        self.assertEqual(self.Base.counter, 2)
+
+    def test_error_conditions(self):
+
+        def wrong_container():
+            @pattern.composite(interface=self.Base, container=2)
+            class CompositeFromInterface:
+                pass
+
+        def no_methods():
+            @pattern.composite()
+            class CompositeFromInterface:
+                pass
+
+        self.assertRaises(TypeError, wrong_container)
+        self.assertRaises(TypeError, no_methods)
@@ -28,12 +28,11 @@
 Spack was originally 2.7, but enough systems in 2014 are still using
 2.6 on their frontend nodes that we need 2.6 to get adopted.
 """
-import unittest
 import os
 import re
+import unittest

 import llnl.util.tty as tty

 import pyqver2
 import spack
@@ -31,8 +31,6 @@
 import spack
 import spack.package

-from llnl.util.lang import list_modules

 from spack.spec import Spec
 from spack.test.mock_packages_test import *
@@ -22,7 +22,6 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import unittest
 from spack.spec import *
 from spack.test.mock_packages_test import *
@@ -23,9 +23,10 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import unittest

 import spack.spec
-from spack.spec import *
 from spack.parse import Token
+from spack.spec import *

 # Sample output for a complex lexing.
 complex_lex = [Token(ID, 'mvapich_foo'),
@@ -25,15 +25,13 @@
 """\
 Test that the Stage class works correctly.
 """
-import unittest
-import shutil
 import os
-import getpass
+import shutil
+import unittest
 from contextlib import *

-from llnl.util.filesystem import *
-
 import spack
+from llnl.util.filesystem import *
 from spack.stage import Stage
 from spack.util.executable import which
@@ -192,116 +190,90 @@ def check_destroy(self, stage, stage_name):

     def test_setup_and_destroy_name_with_tmp(self):
         with use_tmp(True):
-            stage = Stage(archive_url, name=stage_name)
-            self.check_setup(stage, stage_name)
-
-            stage.destroy()
+            with Stage(archive_url, name=stage_name) as stage:
+                self.check_setup(stage, stage_name)
             self.check_destroy(stage, stage_name)

     def test_setup_and_destroy_name_without_tmp(self):
         with use_tmp(False):
-            stage = Stage(archive_url, name=stage_name)
-            self.check_setup(stage, stage_name)
-
-            stage.destroy()
+            with Stage(archive_url, name=stage_name) as stage:
+                self.check_setup(stage, stage_name)
             self.check_destroy(stage, stage_name)

     def test_setup_and_destroy_no_name_with_tmp(self):
         with use_tmp(True):
-            stage = Stage(archive_url)
-            self.check_setup(stage, None)
-
-            stage.destroy()
+            with Stage(archive_url) as stage:
+                self.check_setup(stage, None)
             self.check_destroy(stage, None)

     def test_setup_and_destroy_no_name_without_tmp(self):
         with use_tmp(False):
-            stage = Stage(archive_url)
-            self.check_setup(stage, None)
-
-            stage.destroy()
+            with Stage(archive_url) as stage:
+                self.check_setup(stage, None)
             self.check_destroy(stage, None)

     def test_chdir(self):
-        stage = Stage(archive_url, name=stage_name)
-        stage.chdir()
-        self.check_setup(stage, stage_name)
-        self.check_chdir(stage, stage_name)
-
-        stage.destroy()
+        with Stage(archive_url, name=stage_name) as stage:
+            stage.chdir()
+            self.check_setup(stage, stage_name)
+            self.check_chdir(stage, stage_name)
         self.check_destroy(stage, stage_name)

     def test_fetch(self):
-        stage = Stage(archive_url, name=stage_name)
-        stage.fetch()
-        self.check_setup(stage, stage_name)
-        self.check_chdir(stage, stage_name)
-        self.check_fetch(stage, stage_name)
-
-        stage.destroy()
+        with Stage(archive_url, name=stage_name) as stage:
+            stage.fetch()
+            self.check_setup(stage, stage_name)
+            self.check_chdir(stage, stage_name)
+            self.check_fetch(stage, stage_name)
         self.check_destroy(stage, stage_name)

     def test_expand_archive(self):
-        stage = Stage(archive_url, name=stage_name)
-        stage.fetch()
-        self.check_setup(stage, stage_name)
-        self.check_fetch(stage, stage_name)
-        stage.expand_archive()
-        self.check_expand_archive(stage, stage_name)
-
-        stage.destroy()
+        with Stage(archive_url, name=stage_name) as stage:
+            stage.fetch()
+            self.check_setup(stage, stage_name)
+            self.check_fetch(stage, stage_name)
+            stage.expand_archive()
+            self.check_expand_archive(stage, stage_name)
         self.check_destroy(stage, stage_name)

     def test_expand_archive(self):
-        stage = Stage(archive_url, name=stage_name)
-        stage.fetch()
-        self.check_setup(stage, stage_name)
-        self.check_fetch(stage, stage_name)
-        stage.expand_archive()
-        stage.chdir_to_source()
-        self.check_expand_archive(stage, stage_name)
-        self.check_chdir_to_source(stage, stage_name)
-
-        stage.destroy()
+        with Stage(archive_url, name=stage_name) as stage:
+            stage.fetch()
+            self.check_setup(stage, stage_name)
+            self.check_fetch(stage, stage_name)
+            stage.expand_archive()
+            stage.chdir_to_source()
+            self.check_expand_archive(stage, stage_name)
+            self.check_chdir_to_source(stage, stage_name)
         self.check_destroy(stage, stage_name)

     def test_restage(self):
-        stage = Stage(archive_url, name=stage_name)
-        stage.fetch()
-        stage.expand_archive()
-        stage.chdir_to_source()
-        self.check_expand_archive(stage, stage_name)
-        self.check_chdir_to_source(stage, stage_name)
-
-        # Try to make a file in the old archive dir
-        with open('foobar', 'w') as file:
-            file.write("this file is to be destroyed.")
-
-        self.assertTrue('foobar' in os.listdir(stage.source_path))
-
-        # Make sure the file is not there after restage.
-        stage.restage()
-        self.check_chdir(stage, stage_name)
-        self.check_fetch(stage, stage_name)
-        stage.chdir_to_source()
-        self.check_chdir_to_source(stage, stage_name)
-        self.assertFalse('foobar' in os.listdir(stage.source_path))
-
-        stage.destroy()
+        with Stage(archive_url, name=stage_name) as stage:
+            stage.fetch()
+            stage.expand_archive()
+            stage.chdir_to_source()
+            self.check_expand_archive(stage, stage_name)
+            self.check_chdir_to_source(stage, stage_name)
+
+            # Try to make a file in the old archive dir
+            with open('foobar', 'w') as file:
+                file.write("this file is to be destroyed.")
+
+            self.assertTrue('foobar' in os.listdir(stage.source_path))
+
+            # Make sure the file is not there after restage.
+            stage.restage()
+            self.check_chdir(stage, stage_name)
+            self.check_fetch(stage, stage_name)
+            stage.chdir_to_source()
+            self.check_chdir_to_source(stage, stage_name)
+            self.assertFalse('foobar' in os.listdir(stage.source_path))
        self.check_destroy(stage, stage_name)
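Note: each rewritten test above now drives Stage through a with statement, so the stage is torn down even when an assertion fails mid-test. A minimal sketch of the context-manager protocol the tests rely on (illustrative only; the real Stage.__enter__/__exit__ are not shown in this diff, and StageLike is a made-up name):

class StageLike(object):
    """Illustrative stand-in for a Stage-like context manager."""

    def __enter__(self):
        # The real implementation would create the stage directory here.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Runs on normal exit and when an assertion raises, so cleanup
        # can no longer be skipped by a failing test.
        self.destroy()

    def destroy(self):
        pass  # the real implementation removes the stage directory

with StageLike() as stage:
    pass  # fetch/expand/checks would happen here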
@@ -24,18 +24,12 @@
 import os
 import re
-import unittest
-import shutil
-import tempfile
-
-from llnl.util.filesystem import *
-
 import spack
-from spack.version import ver
-from spack.stage import Stage
-from spack.util.executable import which
-from spack.test.mock_packages_test import *
 from spack.test.mock_repo import svn, MockSvnRepo
+from spack.version import ver
+from spack.test.mock_packages_test import *
+from llnl.util.filesystem import *


 class SvnFetchTest(MockPackagesTest):
@@ -51,16 +45,10 @@ def setUp(self):
         spec.concretize()
         self.pkg = spack.repo.get(spec, new=True)

     def tearDown(self):
         """Destroy the stage space used by this test."""
         super(SvnFetchTest, self).tearDown()
-
-        if self.repo.stage is not None:
-            self.repo.stage.destroy()
-
-        self.pkg.do_clean()
+        self.repo.destroy()

     def assert_rev(self, rev):
         """Check that the current revision is equal to the supplied rev."""
@@ -73,7 +61,6 @@ def get_rev():
             return match.group(1)
         self.assertEqual(get_rev(), rev)

-
     def try_fetch(self, rev, test_file, args):
         """Tries to:
         1. Fetch the repo using a fetch strategy constructed with
|
|||||||
"""
|
"""
|
||||||
self.pkg.versions[ver('svn')] = args
|
self.pkg.versions[ver('svn')] = args
|
||||||
|
|
||||||
self.pkg.do_stage()
|
with self.pkg.stage:
|
||||||
self.assert_rev(rev)
|
self.pkg.do_stage()
|
||||||
|
self.assert_rev(rev)
|
||||||
|
|
||||||
file_path = join_path(self.pkg.stage.source_path, test_file)
|
file_path = join_path(self.pkg.stage.source_path, test_file)
|
||||||
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
|
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
|
||||||
self.assertTrue(os.path.isfile(file_path))
|
self.assertTrue(os.path.isfile(file_path))
|
||||||
|
|
||||||
os.unlink(file_path)
|
os.unlink(file_path)
|
||||||
self.assertFalse(os.path.isfile(file_path))
|
self.assertFalse(os.path.isfile(file_path))
|
||||||
|
|
||||||
untracked = 'foobarbaz'
|
untracked = 'foobarbaz'
|
||||||
touch(untracked)
|
touch(untracked)
|
||||||
self.assertTrue(os.path.isfile(untracked))
|
self.assertTrue(os.path.isfile(untracked))
|
||||||
self.pkg.do_restage()
|
self.pkg.do_restage()
|
||||||
self.assertFalse(os.path.isfile(untracked))
|
self.assertFalse(os.path.isfile(untracked))
|
||||||
|
|
||||||
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
|
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
|
||||||
self.assertTrue(os.path.isfile(file_path))
|
self.assertTrue(os.path.isfile(file_path))
|
||||||
|
|
||||||
self.assert_rev(rev)
|
self.assert_rev(rev)
|
||||||
|
|
||||||
|
|
||||||
def test_fetch_default(self):
|
def test_fetch_default(self):
|
||||||
|
@@ -6,7 +6,7 @@
 # Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
 # LLNL-CODE-647188
 #
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://software.llnl.gov/spack
 # Please also see the LICENSE file for our notice and the LGPL.
 #
 # This program is free software; you can redistribute it and/or modify
@@ -22,10 +22,10 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-from nose.plugins import Plugin
-
 import os

+from nose.plugins import Plugin
+

 class Tally(Plugin):
     name = 'tally'
@@ -22,10 +22,11 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import unittest
 import itertools
+import unittest

 import spack

 test_install = __import__("spack.cmd.test-install",
                           fromlist=["BuildId", "create_test_output", "TestResult"])
@@ -25,10 +25,7 @@
 """\
 Tests ability of spack to extrapolate URL versions from existing versions.
 """
-import spack
 import spack.url as url
-from spack.spec import Spec
-from spack.version import ver
 from spack.test.mock_packages_test import *
@@ -27,8 +27,8 @@
 detection in Homebrew.
 """
 import unittest

 import spack.url as url
-from pprint import pprint
+


 class UrlParseTest(unittest.TestCase):
@@ -27,7 +27,6 @@
 """
 import unittest

-import spack
 import spack.url as url
@@ -28,6 +28,7 @@
 where it makes sense.
 """
 import unittest
+
 from spack.version import *
@@ -26,6 +26,7 @@
 Test Spack's custom YAML format.
 """
 import unittest
+
 import spack.util.spack_yaml as syaml

 test_file = """\
@@ -225,7 +225,7 @@ def parse_version_offset(path):
         (r'_((\d+\.)+\d+[a-z]?)[.]orig$', stem),

         # e.g. http://www.openssl.org/source/openssl-0.9.8s.tar.gz
-        (r'-([^-]+(-alpha|-beta)?)', stem),
+        (r'-v?([^-]+(-alpha|-beta)?)', stem),

         # e.g. astyle_1.23_macosx.tar.gz
         (r'_([^_]+(_alpha|_beta)?)', stem),
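Note: the only change in this hunk is the optional v? before the capture group, so stems like libfoo-v1.2.3 now parse like libfoo-1.2.3. A quick standalone check of just this pattern (illustrative; run outside of parse_version_offset):

import re

# Updated pattern: an optional 'v' may precede the version.
pattern = re.compile(r'-v?([^-]+(-alpha|-beta)?)')

for stem in ['openssl-0.9.8s', 'libfoo-v1.2.3']:
    match = pattern.search(stem)
    print(match.group(1))  # prints '0.9.8s', then '1.2.3'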
@@ -63,3 +63,10 @@ def pop_keys(dictionary, *keys):
     for key in keys:
         if key in dictionary:
             dictionary.pop(key)
+
+
+def dump_environment(path):
+    """Dump the current environment out to a file."""
+    with open(path, 'w') as env_file:
+        for key, val in sorted(os.environ.items()):
+            env_file.write("%s=%s\n" % (key, val))
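Note: a tiny usage sketch of the new helper (the file path below is arbitrary, not something this diff prescribes):

import os
import tempfile

# Hypothetical call site: snapshot the environment for later debugging.
path = os.path.join(tempfile.gettempdir(), 'spack-build.env')
dump_environment(path)
# The file now holds one sorted 'KEY=value' line per environment variable.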
lib/spack/spack/util/pattern.py (new file, 116 lines)
@@ -0,0 +1,116 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import inspect
import collections
import functools


def composite(interface=None, method_list=None, container=list):
    """Returns a class decorator that patches a class, adding all the
    methods it needs to be a composite for a given interface.

    :param interface: class exposing the interface to which the composite
        object must conform. Only non-private and non-special methods will
        be taken into account

    :param method_list: names of methods that should be part of the composite

    :param container: container for the composite object (default = list).
        Must fulfill the MutableSequence contract. The composite class will
        expose the container API to manage object composition

    :return: class decorator
    """
    # Check if container fulfills the MutableSequence contract and raise an
    # exception if it doesn't. The patched class returned by the decorator
    # will inherit from the container class to expose the interface needed
    # to manage objects composition.
    if not issubclass(container, collections.MutableSequence):
        raise TypeError("Container must fulfill the MutableSequence contract")

    # Check if at least one of the 'interface' or the 'method_list'
    # arguments are defined.
    if interface is None and method_list is None:
        raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")

    def cls_decorator(cls):
        # Retrieve the base class of the composite. Inspect its methods and
        # decide which ones will be overridden.
        def no_special_no_private(x):
            return inspect.ismethod(x) and not x.__name__.startswith('_')

        # Patch the behavior of each of the methods in the previous list.
        # This is done by associating an instance of the descriptor below
        # with any method that needs to be patched.
        class IterateOver(object):
            """Descriptor used to patch methods in a composite. It iterates
            over all the items in the instance containing the associated
            attribute and calls for each of them an attribute with the same
            name.
            """
            def __init__(self, name, func=None):
                self.name = name
                self.func = func

            def __get__(self, instance, owner):
                def getter(*args, **kwargs):
                    for item in instance:
                        getattr(item, self.name)(*args, **kwargs)
                # If we are using this descriptor to wrap a method from an
                # interface, then we must conditionally use the
                # `functools.wraps` decorator to set the appropriate fields.
                if self.func is not None:
                    getter = functools.wraps(self.func)(getter)
                return getter

        dictionary_for_type_call = {}
        # Construct a dictionary with the methods explicitly passed as names.
        if method_list is not None:
            # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list}
            method_list_dict = {}
            for name in method_list:
                method_list_dict[name] = IterateOver(name)
            dictionary_for_type_call.update(method_list_dict)
        # Construct a dictionary with the methods inspected from the interface.
        if interface is not None:
            # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
            interface_methods = {}
            for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
                interface_methods[name] = method
            # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
            interface_methods_dict = {}
            for name, method in interface_methods.iteritems():
                interface_methods_dict[name] = IterateOver(name, method)
            dictionary_for_type_call.update(interface_methods_dict)
        # Get the methods that are defined in the scope of the composite
        # class and override any previous definition.
        # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
        cls_method = {}
        for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
            cls_method[name] = method
        dictionary_for_type_call.update(cls_method)
        # Generate the new class on the fly and return it.
        # FIXME : inherit from interface if we start to use ABC classes?
        wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
        return wrapper_class

    return cls_decorator
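Note: a minimal sketch of what the new decorator produces, mirroring the unit tests earlier in this diff (the Job/JobGroup names are made up for illustration):

import spack.util.pattern as pattern

class Job(object):
    def run(self):
        print('running one job')

@pattern.composite(method_list=['run'])
class JobGroup(object):
    pass

group = JobGroup()   # the wrapper class also inherits from list
group.append(Job())
group.append(Job())
group.run()          # forwards run() to every contained Job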
@@ -86,12 +86,12 @@ def _spider(args):

         if not "Content-type" in resp.headers:
             tty.debug("ignoring page " + url)
-            return pages
+            return pages, links

         if not resp.headers["Content-type"].startswith('text/html'):
             tty.debug("ignoring page " + url + " with content type " +
                       resp.headers["Content-type"])
-            return pages
+            return pages, links

         # Do the real GET request when we know it's just HTML.
         req.get_method = lambda: "GET"
@@ -173,7 +173,7 @@ def spider(root_url, **kwargs):
     performance over a sequential fetch.
     """
     max_depth = kwargs.setdefault('depth', 1)
     pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False))
     return pages, links
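Note: the point of this fix is that every exit path of _spider must return the same (pages, links) shape, or callers that unpack the result fail far from the cause. A contrived illustration of the failure mode (names are made up):

def fetch_index(ok):
    pages, links = {}, set()
    if not ok:
        return pages  # BUG: a bare dict instead of a 2-tuple
    return pages, links

pages, links = fetch_index(ok=True)   # fine
pages, links = fetch_index(ok=False)  # ValueError: a dict doesn't unpack into two names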
var/spack/repos/builtin/packages/SuiteSparse/package.py (new file, 27 lines)
@@ -0,0 +1,27 @@
from spack import *


class Suitesparse(Package):
    """
    SuiteSparse is a suite of sparse matrix algorithms
    """
    homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
    url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'

    version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')

    depends_on('blas')
    depends_on('lapack')

    depends_on('metis@5.1.0', when='@4.5.1')

    def install(self, spec, prefix):
        # The build system of SuiteSparse is quite old-fashioned: it's
        # basically a plain Makefile which includes a header
        # (SuiteSparse_config/SuiteSparse_config.mk) with a lot of
        # convoluted logic in it. Any kind of customization will need to go
        # through filtering of that file.

        # FIXME : this actually uses the current workaround
        # FIXME : (blas / lapack always provide libblas and liblapack as aliases)
        make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack')
var/spack/repos/builtin/packages/arpack-ng/package.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from spack import *


class ArpackNg(Package):
    """
    ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems.

    Important Features:

    * Reverse Communication Interface.
    * Single and Double Precision Real Arithmetic Versions for Symmetric,
      Non-symmetric, Standard or Generalized Problems.
    * Single and Double Precision Complex Arithmetic Versions for Standard or
      Generalized Problems.
    * Routines for Banded Matrices - Standard or Generalized Problems.
    * Routines for The Singular Value Decomposition.
    * Example driver routines that may be used as templates to implement numerous
      Shift-Invert strategies for all problem types, data types and precision.

    This project is a joint project between Debian, Octave and Scilab in order to
    provide a common and maintained version of arpack.

    Indeed, no single release has been published by Rice university for the last
    few years and since many software (Octave, Scilab, R, Matlab...) forked it and
    implemented their own modifications, arpack-ng aims to tackle this by providing
    a common repository and maintained versions.

    arpack-ng is replacing arpack almost everywhere.
    """
    homepage = 'https://github.com/opencollab/arpack-ng'
    url = 'https://github.com/opencollab/arpack-ng/archive/3.3.0.tar.gz'

    version('3.3.0', 'ed3648a23f0a868a43ef44c97a21bad5')

    variant('shared', default=True, description='Enables the build of shared libraries')
    variant('mpi', default=False, description='Activates MPI support')

    depends_on('blas')
    depends_on('lapack')
    depends_on('mpi', when='+mpi')

    def install(self, spec, prefix):
        # Apparently autotools are not bootstrapped
        bootstrap = Executable('./bootstrap')

        options = ['--prefix=%s' % prefix]

        if '+mpi' in spec:
            options.append('--enable-mpi')

        if '~shared' in spec:
            options.append('--enable-shared=no')

        bootstrap()
        configure(*options)
        make()
        make('install')
@@ -6,6 +6,7 @@ class Autoconf(Package):
     url = "http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz"

     version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b')
+    version('2.62', '6c1f3b3734999035d77da5024aab4fbd')

     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
@@ -5,7 +5,9 @@ class Automake(Package):
     homepage = "http://www.gnu.org/software/automake/"
     url = "http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz"

+    version('1.15', '716946a105ca228ab545fc37a70df3a3')
     version('1.14.1', 'd052a3e884631b9c7892f2efce542d75')
+    version('1.11.6', '0286dc30295b62985ca51919202ecfcc')

     depends_on('autoconf')
var/spack/repos/builtin/packages/blitz/package.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from spack import *

class Blitz(Package):
    """N-dimensional arrays for C++"""
    homepage = "http://github.com/blitzpp/blitz"
    url = "https://github.com/blitzpp/blitz/tarball/1.0.0"

    version('1.0.0', '9f040b9827fe22228a892603671a77af')

    # No dependencies

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)
        make()
        make("install")
var/spack/repos/builtin/packages/boost/boost_11856.patch (new file, 34 lines)
@@ -0,0 +1,34 @@
--- a/libs/container/src/pool_resource.cpp	2015-11-06 12:49:55.000000000 -0800
+++ b/libs/container/src/pool_resource.cpp	2015-12-22 07:54:36.202131121 -0800
@@ -32,11 +32,11 @@
 class pool_data_t
    : public block_slist_base<>
 {
-   typedef block_slist_base<> block_slist_base;
+   typedef block_slist_base<> block_slist_base_t;

    public:
    explicit pool_data_t(std::size_t initial_blocks_per_chunk)
-      : block_slist_base(), next_blocks_per_chunk(initial_blocks_per_chunk)
+      : block_slist_base_t(), next_blocks_per_chunk(initial_blocks_per_chunk)
    {  slist_algo::init_header(&free_slist);  }

    void *allocate_block() BOOST_NOEXCEPT
@@ -59,7 +59,7 @@
    void release(memory_resource &upstream)
    {
       slist_algo::init_header(&free_slist);
-      this->block_slist_base::release(upstream);
+      this->block_slist_base_t::release(upstream);
       next_blocks_per_chunk = pool_options_minimum_max_blocks_per_chunk;
    }

@@ -72,7 +72,7 @@

    //Minimum block size is at least max_align, so all pools allocate sizes that are multiple of max_align,
    //meaning that all blocks are max_align-aligned.
-   char *p = static_cast<char *>(block_slist_base::allocate(blocks_per_chunk*pool_block, mr));
+   char *p = static_cast<char *>(block_slist_base_t::allocate(blocks_per_chunk*pool_block, mr));

    //Create header types. This is no-throw
    for(std::size_t i = 0, max = blocks_per_chunk; i != max; ++i){
@@ -1,4 +1,5 @@
 from spack import *
+import spack

 class Boost(Package):
     """Boost provides free peer-reviewed portable C++ source
@@ -44,15 +45,50 @@ class Boost(Package):
     version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5')
     version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0')

-    variant('debug', default=False, description='Switch to the debug version of Boost')
-    variant('python', default=False, description='Activate the component Boost.Python')
-    variant('mpi', default=False, description='Activate the component Boost.MPI')
-    variant('compression', default=True, description='Activate the compression Boost.iostreams')
+    default_install_libs = set(['atomic',
+                                'chrono',
+                                'date_time',
+                                'filesystem',
+                                'graph',
+                                'iostreams',
+                                'locale',
+                                'log',
+                                'math',
+                                'program_options',
+                                'random',
+                                'regex',
+                                'serialization',
+                                'signals',
+                                'system',
+                                'test',
+                                'thread',
+                                'wave'])
+
+    # mpi/python are not installed by default because they pull in many
+    # dependencies and/or because there is a great deal of customization
+    # possible (and it would be difficult to choose sensible defaults)
+    default_noinstall_libs = set(['mpi', 'python'])
+
+    all_libs = default_install_libs | default_noinstall_libs
+
+    for lib in all_libs:
+        variant(lib, default=(lib not in default_noinstall_libs),
+                description="Compile with {0} library".format(lib))
+
+    variant('debug', default=False, description='Switch to the debug version of Boost')
+    variant('shared', default=True, description="Additionally build shared libraries")
+    variant('multithreaded', default=True, description="Build multi-threaded versions of libraries")
+    variant('singlethreaded', default=True, description="Build single-threaded versions of libraries")
+    variant('icu_support', default=False, description="Include ICU support (for regex/locale libraries)")
+
+    depends_on('icu', when='+icu_support')
     depends_on('python', when='+python')
     depends_on('mpi', when='+mpi')
-    depends_on('bzip2', when='+compression')
-    depends_on('zlib', when='+compression')
+    depends_on('bzip2', when='+iostreams')
+    depends_on('zlib', when='+iostreams')
+
+    # Patch fix from https://svn.boost.org/trac/boost/ticket/11856
+    patch('boost_11856.patch', when='@1.60.0%gcc@4.4.7')

     def url_for_version(self, version):
         """Handle Boost's weird URLs, which write the version two different ways."""
@@ -77,22 +113,20 @@ def determine_toolset(self, spec):
         # fallback to gcc if no toolset found
         return 'gcc'

-    def determine_bootstrap_options(self, spec, options):
-        options.append('--with-toolset=%s' % self.determine_toolset(spec))
+    def determine_bootstrap_options(self, spec, withLibs, options):
+        boostToolsetId = self.determine_toolset(spec)
+        options.append('--with-toolset=%s' % boostToolsetId)
+        options.append("--with-libraries=%s" % ','.join(withLibs))

-        without_libs = []
-        if '~mpi' in spec:
-            without_libs.append('mpi')
-        if '~python' in spec:
-            without_libs.append('python')
-        else:
+        if '+python' in spec:
             options.append('--with-python=%s' %
                            join_path(spec['python'].prefix.bin, 'python'))

-        if without_libs:
-            options.append('--without-libraries=%s' % ','.join(without_libs))
-
         with open('user-config.jam', 'w') as f:
+            compiler_wrapper = join_path(spack.build_env_path, 'c++')
+            f.write("using {0} : : {1} ;\n".format(boostToolsetId,
+                                                   compiler_wrapper))
+
             if '+mpi' in spec:
                 f.write('using mpi : %s ;\n' %
                         join_path(spec['mpi'].prefix.bin, 'mpicxx'))
@@ -107,12 +141,10 @@ def determine_b2_options(self, spec, options):
         else:
             options.append('variant=release')

-        if '~compression' in spec:
-            options.extend([
-                '-s', 'NO_BZIP2=1',
-                '-s', 'NO_ZLIB=1'])
+        if '+icu_support' in spec:
+            options.extend(['-s', 'ICU_PATH=%s' % spec['icu'].prefix])

-        if '+compression' in spec:
+        if '+iostreams' in spec:
             options.extend([
                 '-s', 'BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include,
                 '-s', 'BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib,
@@ -120,20 +152,46 @@ def determine_b2_options(self, spec, options):
                 '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib,
             ])

+        linkTypes = ['static']
+        if '+shared' in spec:
+            linkTypes.append('shared')
+
+        threadingOpts = []
+        if '+multithreaded' in spec:
+            threadingOpts.append('multi')
+        if '+singlethreaded' in spec:
+            threadingOpts.append('single')
+        if not threadingOpts:
+            raise RuntimeError("At least one of {singlethreaded, multithreaded} must be enabled")
+
         options.extend([
             'toolset=%s' % self.determine_toolset(spec),
-            'link=static,shared',
-            'threading=single,multi',
+            'link=%s' % ','.join(linkTypes),
             '--layout=tagged'])

+        return threadingOpts
+
     def install(self, spec, prefix):
+        withLibs = list()
+        for lib in Boost.all_libs:
+            if "+{0}".format(lib) in spec:
+                withLibs.append(lib)
+        if not withLibs:
+            # if no libraries are specified for compilation, then you don't
+            # have to configure/build anything, just copy over to the prefix
+            # directory.
+            src = join_path(self.stage.source_path, 'boost')
+            mkdirp(join_path(prefix, 'include'))
+            dst = join_path(prefix, 'include', 'boost')
+            install_tree(src, dst)
+            return
+
         # to make Boost find the user-config.jam
         env['BOOST_BUILD_PATH'] = './'

         bootstrap = Executable('./bootstrap.sh')

         bootstrap_options = ['--prefix=%s' % prefix]
-        self.determine_bootstrap_options(spec, bootstrap_options)
+        self.determine_bootstrap_options(spec, withLibs, bootstrap_options)

         bootstrap(*bootstrap_options)
@@ -143,6 +201,10 @@ def install(self, spec, prefix):
         b2 = Executable(b2name)
         b2_options = ['-j', '%s' % make_jobs]

-        self.determine_b2_options(spec, b2_options)
+        threadingOpts = self.determine_b2_options(spec, b2_options)

-        b2('install', *b2_options)
+        # In theory it could be done on one call but it fails on
+        # Boost.MPI if the threading options are not separated.
+        for threadingOpt in threadingOpts:
+            b2('install', 'threading=%s' % threadingOpt, *b2_options)
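Note: the for lib in all_libs loop in the Boost hunk runs while the class body executes, so each library becomes a spec variant automatically. A stripped-down sketch of the pattern (the variant recorder below is a stand-in, not Spack's real directive machinery):

registered = {}

def variant(name, default, description):
    # Stand-in for Spack's directive: just record what was declared.
    registered[name] = (default, description)

class FakeBoost(object):
    default_noinstall_libs = set(['mpi', 'python'])
    all_libs = set(['mpi', 'python', 'regex', 'thread'])

    # Executed once per library while the class body is evaluated.
    for lib in all_libs:
        variant(lib, default=(lib not in default_noinstall_libs),
                description="Compile with {0} library".format(lib))

print(registered['mpi'][0])     # False: off by default
print(registered['thread'][0])  # True: installed by default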
var/spack/repos/builtin/packages/caliper/package.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from spack import *

class Caliper(Package):
    """
    Caliper is a generic context annotation system. It gives programmers the
    ability to provide arbitrary program context information to (performance)
    tools at runtime.
    """

    homepage = "https://github.com/LLNL/Caliper"
    url = ""

    version('master', git='ssh://git@github.com:LLNL/Caliper.git')

    variant('mpi', default=False, description='Enable MPI function wrappers.')

    depends_on('libunwind')
    depends_on('papi')
    depends_on('mpi', when='+mpi')

    def install(self, spec, prefix):
        with working_dir('build', create=True):
            cmake('..', *std_cmake_args)
            make()
            make("install")
@@ -46,6 +46,7 @@ class Cgal(Package):
     depends_on('mpfr')
     depends_on('gmp')
     depends_on('zlib')
+    depends_on('cmake')

     # FIXME : Qt5 dependency missing (needs Qt5 and OpenGL)
     # FIXME : Optional third party libraries missing
|
@ -28,20 +28,22 @@ class Cmake(Package):
|
|||||||
"""A cross-platform, open-source build system. CMake is a family of
|
"""A cross-platform, open-source build system. CMake is a family of
|
||||||
tools designed to build, test and package software."""
|
tools designed to build, test and package software."""
|
||||||
homepage = 'https://www.cmake.org'
|
homepage = 'https://www.cmake.org'
|
||||||
|
url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
|
||||||
|
|
||||||
version('2.8.10.2', '097278785da7182ec0aea8769d06860c',
|
version('3.4.3', '4cb3ff35b2472aae70f542116d616e63')
|
||||||
url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz')
|
version('3.4.0', 'cd3034e0a44256a0917e254167217fc8')
|
||||||
|
version('3.3.1', '52638576f4e1e621fed6c3410d3a1b12')
|
||||||
version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f',
|
version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f')
|
||||||
url = 'http://www.cmake.org/files/v3.0/cmake-3.0.2.tar.gz')
|
version('2.8.10.2', '097278785da7182ec0aea8769d06860c')
|
||||||
|
|
||||||
version('3.4.0', 'cd3034e0a44256a0917e254167217fc8',
|
|
||||||
url = 'http://cmake.org/files/v3.4/cmake-3.4.0.tar.gz')
|
|
||||||
|
|
||||||
variant('ncurses', default=True, description='Enables the build of the ncurses gui')
|
variant('ncurses', default=True, description='Enables the build of the ncurses gui')
|
||||||
|
|
||||||
depends_on('ncurses', when='+ncurses')
|
depends_on('ncurses', when='+ncurses')
|
||||||
|
|
||||||
|
def url_for_version(self, version):
|
||||||
|
"""Handle CMake's version-based custom URLs."""
|
||||||
|
return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (version.up_to(2), version)
|
||||||
|
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
def install(self, spec, prefix):
|
||||||
configure('--prefix=' + prefix,
|
configure('--prefix=' + prefix,
|
||||||
'--parallel=' + str(make_jobs),
|
'--parallel=' + str(make_jobs),
|
||||||
|
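Note: with the single url plus url_for_version, the version() lines above no longer need per-version URLs. A plain-string sketch of what the method computes (assuming Version.up_to(2) yields the major.minor prefix, as its name suggests):

def cmake_url(version):
    # 'version' is a dotted string here; Spack's Version.up_to(2)
    # performs the same truncation on a Version object.
    major_minor = '.'.join(version.split('.')[:2])
    return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (major_minor, version)

print(cmake_url('3.4.3'))     # https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz
print(cmake_url('2.8.10.2'))  # https://cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz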
var/spack/repos/builtin/packages/cmocka/package.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from spack import *

class Cmocka(Package):
    """Unit-testing framework in pure C"""
    homepage = "https://cmocka.org/"
    url = "https://cmocka.org/files/1.0/cmocka-1.0.1.tar.xz"

    version('1.0.1', 'ed861e501a21a92b2af63e466df2015e')
    parallel = False

    def install(self, spec, prefix):
        with working_dir('spack-build', create=True):
            cmake('..', *std_cmake_args)

            make()
            make("install")
var/spack/repos/builtin/packages/cryptopp/package.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import glob
from spack import *

class Cryptopp(Package):
    """Crypto++ is an open-source C++ library of cryptographic schemes. The
    library supports a number of different cryptography algorithms, including
    authenticated encryption schemes (GCM, CCM), hash functions (SHA-1, SHA2),
    public-key encryption (RSA, DSA), and a few obsolete/historical encryption
    algorithms (MD5, Panama)."""

    homepage = "http://www.cryptopp.com/"
    url = "http://www.cryptopp.com/cryptopp563.zip"

    version('5.6.3', '3c5b70e2ec98b7a24988734446242d07')
    version('5.6.2', '7ed022585698df48e65ce9218f6c6a67')

    def install(self, spec, prefix):
        make()

        mkdirp(prefix.include)
        for hfile in glob.glob('*.h*'):
            install(hfile, prefix.include)

        mkdirp(prefix.lib)
        install('libcryptopp.a', prefix.lib)

    def url_for_version(self, version):
        version_tuple = tuple(v for v in iter(version))
        version_string = reduce(lambda vs, nv: vs + str(nv), version_tuple, "")

        return "%scryptopp%s.zip" % (Cryptopp.homepage, version_string)
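Note: Crypto++ drops the dots from the version in its archive names, which is what the reduce above implements. In isolation (a sketch; reduce is a builtin in the Python 2 this tree targets):

homepage = "http://www.cryptopp.com/"
version_tuple = (5, 6, 3)  # what iterating a Version('5.6.3') yields
version_string = reduce(lambda vs, nv: vs + str(nv), version_tuple, "")
print("%scryptopp%s.zip" % (homepage, version_string))
# -> http://www.cryptopp.com/cryptopp563.zip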
var/spack/repos/builtin/packages/dakota/package.py (new file, 55 lines)
@@ -0,0 +1,55 @@
from spack import *


class Dakota(Package):
    """
    The Dakota toolkit provides a flexible, extensible interface between analysis codes and iterative systems
    analysis methods. Dakota contains algorithms for:

    - optimization with gradient and non gradient-based methods;
    - uncertainty quantification with sampling, reliability, stochastic expansion, and epistemic methods;
    - parameter estimation with nonlinear least squares methods;
    - sensitivity/variance analysis with design of experiments and parameter study methods.

    These capabilities may be used on their own or as components within advanced strategies such as hybrid optimization,
    surrogate-based optimization, mixed integer nonlinear programming, or optimization under uncertainty.
    """

    homepage = 'https://dakota.sandia.gov/'
    url = 'https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-6.3-public.src.tar.gz'
    _url_str = 'https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-{version}-public.src.tar.gz'

    version('6.3', '05a58d209fae604af234c894c3f73f6d')

    variant('debug', default=False, description='Builds a debug version of the libraries')
    variant('shared', default=True, description='Enables the build of shared libraries')
    variant('mpi', default=True, description='Activates MPI support')

    depends_on('blas')
    depends_on('lapack')
    depends_on('mpi', when='+mpi')

    depends_on('python')
    depends_on('boost')

    def url_for_version(self, version):
        return Dakota._url_str.format(version=version)

    def install(self, spec, prefix):
        options = []
        options.extend(std_cmake_args)

        options.extend(['-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
                        '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF')])

        if '+mpi' in spec:
            options.extend(['-DDAKOTA_HAVE_MPI:BOOL=ON',
                            '-DMPI_CXX_COMPILER:STRING=%s' % join_path(spec['mpi'].prefix.bin, 'mpicxx')])

        build_directory = join_path(self.stage.path, 'spack-build')
        source_directory = self.stage.source_path

        with working_dir(build_directory, create=True):
            cmake(source_directory, *options)
            make()
            make("install")
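Note: same URL-templating idea as the CMake and Crypto++ hunks, this time via str.format (a trivial check, outside Spack):

_url_str = ('https://dakota.sandia.gov/sites/default/files/distributions/'
            'public/dakota-{version}-public.src.tar.gz')
print(_url_str.format(version='6.3'))  # ends in dakota-6.3-public.src.tar.gz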
@@ -41,13 +41,14 @@ class Eigen(Package):
     variant('metis', default=True, description='Enables metis backend')
     variant('scotch', default=True, description='Enables scotch backend')
     variant('fftw', default=True, description='Enables FFTW backend')
+    variant('suitesparse', default=True, description='Enables SuiteSparse support')

-    # TODO : dependency on SuiteSparse, googlehash, superlu, adolc missing
+    # TODO : dependency on googlehash, superlu, adolc missing

     depends_on('metis', when='+metis')
     depends_on('scotch', when='+scotch')
     depends_on('fftw', when='+fftw')
+    depends_on('SuiteSparse', when='+suitesparse')
     depends_on('mpfr@2.3.0:')  # Eigen 3.2.7 requires at least 2.3.0
     depends_on('gmp')
var/spack/repos/builtin/packages/espresso/package.py (new file, 65 lines)
@@ -0,0 +1,65 @@
from spack import *

import os

class Espresso(Package):
    """
    QE is an integrated suite of Open-Source computer codes for electronic-structure calculations and materials
    modeling at the nanoscale. It is based on density-functional theory, plane waves, and pseudopotentials.
    """
    homepage = 'http://quantum-espresso.org'
    url = 'http://www.qe-forge.org/gf/download/frsrelease/204/912/espresso-5.3.0.tar.gz'

    version('5.3.0', '6848fcfaeb118587d6be36bd10b7f2c3')

    variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support')
    variant('openmp', default=False, description='Enables openMP support')
    variant('scalapack', default=True, description='Enables scalapack support')
    variant('elpa', default=True, description='Use elpa as an eigenvalue solver')

    depends_on('blas')
    depends_on('lapack')

    depends_on('mpi', when='+mpi')
    depends_on('fftw~mpi', when='~mpi')
    depends_on('fftw+mpi', when='+mpi')
    depends_on('scalapack', when='+scalapack+mpi')  # TODO : + mpi needed to avoid false dependencies installation

    def check_variants(self, spec):
        error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active'
        if '+scalapack' in spec and '~mpi' in spec:
            raise RuntimeError(error.format(variant='scalapack'))
        if '+elpa' in spec and ('~mpi' in spec or '~scalapack' in spec):
            raise RuntimeError(error.format(variant='elpa'))

    def install(self, spec, prefix):
        self.check_variants(spec)

        options = ['-prefix=%s' % prefix]

        if '+mpi' in spec:
            options.append('--enable-parallel')

        if '+openmp' in spec:
            options.append('--enable-openmp')

        if '+scalapack' in spec:
            options.append('--with-scalapack=yes')

        if '+elpa' in spec:
            options.append('--with-elpa=yes')

        # Add a list of directories to search
        search_list = []
        for name, dependency_spec in spec.dependencies.iteritems():
            search_list.extend([dependency_spec.prefix.lib,
                                dependency_spec.prefix.lib64])

        search_list = " ".join(search_list)
        options.append('LIBDIRS=%s' % search_list)
        options.append('F90=%s' % os.environ['FC'])

        configure(*options)
        make('all')
        make('install')
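Note: the LIBDIRS assembly in the install above flattens every dependency's lib and lib64 directory into one space-separated string for QE's configure. A toy version of that step (the prefixes are made-up paths):

prefixes = ['/opt/spack/blas', '/opt/spack/lapack']  # made-up install prefixes

search_list = []
for p in prefixes:
    search_list.extend([p + '/lib', p + '/lib64'])

print('LIBDIRS=%s' % " ".join(search_list))
# -> LIBDIRS=/opt/spack/blas/lib /opt/spack/blas/lib64 /opt/spack/lapack/lib /opt/spack/lapack/lib64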
var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch (new file, 12 lines)
@@ -0,0 +1,12 @@
diff --git a/cmake-exodus b/cmake-exodus
index 787fd9d..ed073a2 100755
--- a/cmake-exodus
+++ b/cmake-exodus
@@ -1,4 +1,6 @@
-EXTRA_ARGS=$@
+#!/bin/bash
+
+EXTRA_ARGS=-DSEACASProj_ENABLE_CXX11=OFF

 ### Change this to point to the compilers you want to use
 CC=gcc
var/spack/repos/builtin/packages/exodusii/package.py (new file, 49 lines)
@@ -0,0 +1,49 @@
from spack import *

# TODO: Add support for a C++11 enabled installation that filters out the
# TODO: "C++11-Disabled" flag (but only if the spec compiler supports C++11).

# TODO: Add support for parallel installation that uses MPI.

# TODO: Create installation options for NetCDF that support larger page size
# TODO: suggested by Exodus (see the repository "README" file).

class Exodusii(Package):
    """Exodus II is a C++/Fortran library developed to store and retrieve data for
    finite element analyses. It's used for preprocessing (problem definition),
    postprocessing (results visualization), and data transfer between codes.
    An Exodus II data file is a random access, machine independent, binary
    file that is written and read via C, C++, or Fortran API routines."""

    homepage = "https://github.com/gsjaardema/seacas"
    url = "https://github.com/gsjaardema/seacas/archive/master.zip"

    version('2016-02-08', git='https://github.com/gsjaardema/seacas.git', commit='dcf3529')

    # TODO: Make this a build dependency once build dependencies are supported
    # (see: https://github.com/LLNL/spack/pull/378).
    depends_on('cmake@2.8.7:')
    depends_on('hdf5~shared~mpi')
    depends_on('netcdf~mpi')

    patch('exodus-cmake.patch')

    def patch(self):
        ff = FileFilter('cmake-exodus')

        ff.filter('CMAKE_INSTALL_PREFIX:PATH=${ACCESS}',
                  'CMAKE_INSTALL_PREFIX:PATH=%s' % self.spec.prefix, string=True)
        ff.filter('NetCDF_DIR:PATH=${TPL}',
                  'NetCDF_DIR:PATH=%s' % self.spec['netcdf'].prefix, string=True)
        ff.filter('HDF5_ROOT:PATH=${TPL}',
                  'HDF5_ROOT:PATH=%s' % self.spec['hdf5'].prefix, string=True)

    def install(self, spec, prefix):
        mkdirp('build')
        cd('build')

        cmake_exodus = Executable('../cmake-exodus')
        cmake_exodus()

        make()
        make('install')
@@ -7,6 +7,7 @@ class Expat(Package):

     version('2.1.0', 'dd7dab7a5fea97d2a6a43f511449b7cd')

+    depends_on('cmake')

     def install(self, spec, prefix):
@@ -47,6 +47,8 @@ class Fftw(Package):

     depends_on('mpi', when='+mpi')

+    # TODO : add support for architecture specific optimizations as soon as targets are supported
+
     def install(self, spec, prefix):
         options = ['--prefix=%s' % prefix,
                    '--enable-shared',
Some files were not shown because too many files have changed in this diff.