Merge remote-tracking branch 'upstream/develop' into develop
This commit is contained in:
12
.codecov.yml
Normal file
12
.codecov.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
coverage:
|
||||
precision: 2
|
||||
round: nearest
|
||||
range: 60...100
|
||||
|
||||
ignore:
|
||||
- lib/spack/spack/test/.*
|
||||
- lib/spack/env/.*
|
||||
- lib/spack/docs/.*
|
||||
- lib/spack/external/.*
|
||||
|
||||
comment: off
|
47
.travis.yml
47
.travis.yml
@@ -1,40 +1,37 @@
|
||||
#=============================================================================
|
||||
# Project settings
|
||||
#=============================================================================
|
||||
language: python
|
||||
|
||||
# Only build master and develop on push; do not build every branch.
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
- develop
|
||||
- /^releases\/.*$/
|
||||
|
||||
#=============================================================================
|
||||
# Build matrix
|
||||
#=============================================================================
|
||||
python:
|
||||
- 2.6
|
||||
- 2.7
|
||||
|
||||
env:
|
||||
- TEST_SUITE=unit
|
||||
- TEST_SUITE=flake8
|
||||
- TEST_SUITE=doc
|
||||
|
||||
matrix:
|
||||
# Flake8 and Sphinx no longer support Python 2.6, and one run is enough.
|
||||
exclude:
|
||||
- python: 2.6
|
||||
env: TEST_SUITE=flake8
|
||||
- python: 2.6
|
||||
env: TEST_SUITE=doc
|
||||
# Explicitly include an OS X build with homebrew's python.
|
||||
# Works around Python issues on Travis for OSX, described here:
|
||||
# http://blog.fizyk.net.pl/blog/running-python-tests-on-traviss-osx-workers.html
|
||||
include:
|
||||
- os: osx
|
||||
language: generic
|
||||
env: TEST_SUITE=unit
|
||||
- python: '2.6'
|
||||
os: linux
|
||||
language: python
|
||||
env: TEST_SUITE=unit
|
||||
- python: '2.7'
|
||||
os: linux
|
||||
language: python
|
||||
env: TEST_SUITE=unit
|
||||
- python: '2.7'
|
||||
os: linux
|
||||
language: python
|
||||
env: TEST_SUITE=flake8
|
||||
- python: '2.7'
|
||||
os: linux
|
||||
language: python
|
||||
env: TEST_SUITE=doc
|
||||
- os: osx
|
||||
language: generic
|
||||
env: [ TEST_SUITE=unit, PYTHON_VERSION=2.7 ]
|
||||
|
||||
#=============================================================================
|
||||
# Environment
|
||||
@@ -60,7 +57,7 @@ before_install:
|
||||
|
||||
# Install various dependencies
|
||||
install:
|
||||
- pip install --upgrade coveralls
|
||||
- pip install --upgrade codecov
|
||||
- pip install --upgrade flake8
|
||||
- pip install --upgrade sphinx
|
||||
- pip install --upgrade mercurial
|
||||
@@ -79,7 +76,7 @@ before_script:
|
||||
script: share/spack/qa/run-$TEST_SUITE-tests
|
||||
|
||||
after_success:
|
||||
- if [[ $TEST_SUITE == unit && $TRAVIS_PYTHON_VERSION == 2.7 && $TRAVIS_OS_NAME == "linux" ]]; then coveralls; fi
|
||||
- if [[ $TEST_SUITE == unit && $TRAVIS_PYTHON_VERSION == 2.7 && $TRAVIS_OS_NAME == "linux" ]]; then codecov --env PY_VERSION; fi
|
||||
|
||||
#=============================================================================
|
||||
# Notifications
|
||||
|
@@ -2,7 +2,7 @@
|
||||
============
|
||||
|
||||
[](https://travis-ci.org/LLNL/spack)
|
||||
[](https://coveralls.io/github/LLNL/spack?branch=develop)
|
||||
[](https://codecov.io/gh/LLNL/spack)
|
||||
|
||||
Spack is a package management tool designed to support multiple
|
||||
versions and configurations of software on a wide variety of platforms
|
||||
|
14
bin/spack
14
bin/spack
@@ -102,19 +102,19 @@ spec expressions:
|
||||
[^DEPENDENCY [CONSTRAINTS] ...]"""))
|
||||
|
||||
parser.add_argument('-d', '--debug', action='store_true',
|
||||
help="Write out debug logs during compile")
|
||||
help="write out debug logs during compile")
|
||||
parser.add_argument('-D', '--pdb', action='store_true',
|
||||
help="Run spack under the pdb debugger")
|
||||
help="run spack under the pdb debugger")
|
||||
parser.add_argument('-k', '--insecure', action='store_true',
|
||||
help="Do not check ssl certificates when downloading.")
|
||||
help="do not check ssl certificates when downloading")
|
||||
parser.add_argument('-m', '--mock', action='store_true',
|
||||
help="Use mock packages instead of real ones.")
|
||||
help="use mock packages instead of real ones")
|
||||
parser.add_argument('-p', '--profile', action='store_true',
|
||||
help="Profile execution using cProfile.")
|
||||
help="profile execution using cProfile")
|
||||
parser.add_argument('-v', '--verbose', action='store_true',
|
||||
help="Print additional output during builds")
|
||||
help="print additional output during builds")
|
||||
parser.add_argument('-s', '--stacktrace', action='store_true',
|
||||
help="Add stacktrace information to all printed statements")
|
||||
help="add stacktrace information to all printed statements")
|
||||
parser.add_argument('-V', '--version', action='version',
|
||||
version="%s" % spack.spack_version)
|
||||
|
||||
|
@@ -75,7 +75,10 @@ This allows you to develop iteratively: make a change, test that change, make
|
||||
another change, test that change, etc. To get a list of all available unit
|
||||
tests, run:
|
||||
|
||||
.. command-output:: spack test --collect-only
|
||||
.. command-output:: spack test --list
|
||||
|
||||
A more detailed list of available unit tests can be found by running
|
||||
``spack test --long-list``.
|
||||
|
||||
Unit tests are crucial to making sure bugs aren't introduced into Spack. If you
|
||||
are modifying core Spack libraries or adding new functionality, please consider
|
||||
|
@@ -300,6 +300,42 @@ Stage objects
|
||||
Writing commands
|
||||
----------------
|
||||
|
||||
Adding a new command to Spack is easy. Simply add a ``<name>.py`` file to
|
||||
``lib/spack/spack/cmd/``, where ``<name>`` is the name of the subcommand.
|
||||
At the bare minimum, two functions are required in this file:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
``setup_parser()``
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Unless your command doesn't accept any arguments, a ``setup_parser()``
|
||||
function is required to define what arguments and flags your command takes.
|
||||
See the `Argparse documentation <https://docs.python.org/2.7/library/argparse.html>`_
|
||||
for more details on how to add arguments.
|
||||
|
||||
Some commands have a set of subcommands, like ``spack compiler find`` or
|
||||
``spack module refresh``. You can add subparsers to your parser to handle
|
||||
this. Check out ``spack edit --command compiler`` for an example of this.
|
||||
|
||||
A lot of commands take the same arguments and flags. These arguments should
|
||||
be defined in ``lib/spack/spack/cmd/common/arguments.py`` so that they don't
|
||||
need to be redefined in multiple commands.
|
||||
|
||||
^^^^^^^^^^^^
|
||||
``<name>()``
|
||||
^^^^^^^^^^^^
|
||||
|
||||
In order to run your command, Spack searches for a function with the same
|
||||
name as your command in ``<name>.py``. This is the main method for your
|
||||
command, and can call other helper methods to handle common tasks.
|
||||
|
||||
Remember, before adding a new command, think to yourself whether or not this
|
||||
new command is actually necessary. Sometimes, the functionality you desire
|
||||
can be added to an existing command. Also remember to add unit tests for
|
||||
your command. If it isn't used very frequently, changes to the rest of
|
||||
Spack can cause your command to break without sufficient unit tests to
|
||||
prevent this from happening.
|
||||
|
||||
----------
|
||||
Unit tests
|
||||
----------
|
||||
@@ -312,14 +348,80 @@ Unit testing
|
||||
Developer commands
|
||||
------------------
|
||||
|
||||
.. _cmd-spack-doc:
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
``spack doc``
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
.. _cmd-spack-test:
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
``spack test``
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
.. _cmd-spack-url:
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
``spack url``
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
A package containing a single URL can be used to download several different
|
||||
versions of the package. If you've ever wondered how this works, all of the
|
||||
magic is in :mod:`spack.url`. This module contains methods for extracting
|
||||
the name and version of a package from its URL. The name is used by
|
||||
``spack create`` to guess the name of the package. By determining the version
|
||||
from the URL, Spack can replace it with other versions to determine where to
|
||||
download them from.
|
||||
|
||||
The regular expressions in ``parse_name_offset`` and ``parse_version_offset``
|
||||
are used to extract the name and version, but they aren't perfect. In order
|
||||
to debug Spack's URL parsing support, the ``spack url`` command can be used.
|
||||
|
||||
"""""""""""""""""""
|
||||
``spack url parse``
|
||||
"""""""""""""""""""
|
||||
|
||||
If you need to debug a single URL, you can use the following command:
|
||||
|
||||
.. command-output:: spack url parse http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz
|
||||
|
||||
You'll notice that the name and version of this URL are correctly detected,
|
||||
and you can even see which regular expressions it was matched to. However,
|
||||
you'll notice that when it substitutes the version number in, it doesn't
|
||||
replace the ``2.2`` with ``9.9`` where we would expect ``9.9.9b`` to live.
|
||||
This particular package may require a ``list_url`` or ``url_for_version``
|
||||
function.
|
||||
|
||||
This command also accepts a ``--spider`` flag. If provided, Spack searches
|
||||
for other versions of the package and prints the matching URLs.
|
||||
|
||||
""""""""""""""""""
|
||||
``spack url list``
|
||||
""""""""""""""""""
|
||||
|
||||
This command lists every URL in every package in Spack. If given the
|
||||
``--color`` and ``--extrapolation`` flags, it also colors the part of
|
||||
the string that it detected to be the name and version. The
|
||||
``--incorrect-name`` and ``--incorrect-version`` flags can be used to
|
||||
print URLs that were not being parsed correctly.
|
||||
|
||||
""""""""""""""""""
|
||||
``spack url test``
|
||||
""""""""""""""""""
|
||||
|
||||
This command attempts to parse every URL for every package in Spack
|
||||
and prints a summary of how many of them are being correctly parsed.
|
||||
It also prints a histogram showing which regular expressions are being
|
||||
matched and how frequently:
|
||||
|
||||
.. command-output:: spack url test
|
||||
|
||||
This command is essential for anyone adding or changing the regular
|
||||
expressions that parse names and versions. By running this command
|
||||
before and after the change, you can make sure that your regular
|
||||
expression fixes more packages than it breaks.
|
||||
|
||||
---------
|
||||
Profiling
|
||||
---------
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -45,7 +45,7 @@ Add a new compiler
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack automatically scans the environment to search for available
|
||||
compilers on first use. On a Ubuntu 14.04 a fresh clone will show
|
||||
compilers on first use. On Ubuntu 14.04, a fresh clone will show
|
||||
something like this:
|
||||
|
||||
.. code-block:: console
|
||||
@@ -58,9 +58,10 @@ something like this:
|
||||
-- gcc ----------------------------------------------------------
|
||||
gcc@4.8
|
||||
|
||||
For the purpose of building a limited set of packages with some features
|
||||
that will help showcasing the capabilities of
|
||||
module customization the first thing we need is to build a new compiler:
|
||||
In order to showcase the capabilities of module customization, we will want to
|
||||
build a limited set of packages with multiple compilers. If you do not already
|
||||
have multiple compilers listed by ``spack compilers``, you should build one
|
||||
with Spack:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -85,7 +86,7 @@ Then we can use shell support for modules to add it to the list of known compile
|
||||
-- gcc ----------------------------------------------------------
|
||||
gcc@6.2.0 gcc@4.8
|
||||
|
||||
Note that the final 7 digits hash at the end of the generated module may vary depending
|
||||
Note that the 7-digit hash at the end of the generated module may vary depending
|
||||
on architecture or package version.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -96,16 +97,11 @@ Next you should install a few modules that will be used in the tutorial:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install netlib-scalapack ^openmpi ^openblas
|
||||
# ...
|
||||
|
||||
The packages you need to install are:
|
||||
|
||||
- ``netlib-scalapack ^openmpi ^openblas``
|
||||
- ``netlib-scalapack ^mpich ^openblas``
|
||||
- ``netlib-scalapack ^openmpi ^netlib-lapack``
|
||||
- ``netlib-scalapack ^mpich ^netlib-lapack``
|
||||
- ``py-scipy ^openblas``
|
||||
$ spack install netlib-scalapack ^openmpi ^openblas
|
||||
$ spack install netlib-scalapack ^mpich ^openblas
|
||||
$ spack install netlib-scalapack ^openmpi ^netlib-lapack
|
||||
$ spack install netlib-scalapack ^mpich ^netlib-lapack
|
||||
$ spack install py-scipy ^openblas
|
||||
|
||||
In the end your environment should look something like:
|
||||
|
||||
@@ -500,7 +496,7 @@ Regenerating the module files should result in something like:
|
||||
Fortran, and Java.
|
||||
]])
|
||||
|
||||
As you see the ``gcc`` module has the environment variable ``GCC_ROOT`` set.
|
||||
As you can see, the ``gcc`` module has the environment variable ``GCC_ROOT`` set.
|
||||
|
||||
Sometimes it's also useful to apply environment modifications selectively and target
|
||||
only certain packages. You can, for instance set the common variables ``CC``, ``CXX``,
|
||||
@@ -727,7 +723,7 @@ Core/Compiler/MPI
|
||||
|
||||
The most common hierarchy is the so called ``Core/Compiler/MPI``. To have an idea
|
||||
how a hierarchy is organized you may refer to the
|
||||
`Lmod guide <https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/user-guide/module-hierarchy>`_.
|
||||
`Lmod guide <http://lmod.readthedocs.io/en/latest/080_hierarchy.html>`_.
|
||||
Since ``lmod`` is not enabled by default, you need to add it to the list of
|
||||
enabled module file generators. The other things you need to do are:
|
||||
|
||||
@@ -782,9 +778,10 @@ After modifications the configuration file will be:
|
||||
purpose of overriding the default list of enabled generators so
|
||||
that only ``lmod`` will be active (see :ref:`the reference
|
||||
manual <config-overrides>` for a more detailed explanation of
|
||||
config scopes).
|
||||
config scopes). If a single colon is used, it will append instead
|
||||
of override.
|
||||
|
||||
The directive ``core_compilers`` accepts a list of compilers : everything built
|
||||
The directive ``core_compilers`` accepts a list of compilers; everything built
|
||||
using these compilers will create a module in the ``Core`` part of the hierarchy. It is
|
||||
common practice to put the OS provided compilers in the list and only build common utilities
|
||||
and other compilers in ``Core``.
|
||||
|
8
lib/spack/env/cc
vendored
8
lib/spack/env/cc
vendored
@@ -98,25 +98,25 @@ case "$command" in
|
||||
cpp)
|
||||
mode=cpp
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|icc|pgcc|xlc)
|
||||
cc|c89|c99|gcc|clang|icc|pgcc|xlc|xlc_r)
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
;;
|
||||
c++|CC|g++|clang++|icpc|pgc++|xlc++)
|
||||
c++|CC|g++|clang++|icpc|pgc++|xlc++|xlc++_r)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
lang_flags=CXX
|
||||
;;
|
||||
ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
|
||||
ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|xlf90_r|nagfor)
|
||||
command="$SPACK_FC"
|
||||
language="Fortran 90"
|
||||
comp="FC"
|
||||
lang_flags=F
|
||||
;;
|
||||
f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
|
||||
f77|gfortran|ifort|pgfortran|xlf|xlf_r|nagfor|ftn)
|
||||
command="$SPACK_F77"
|
||||
language="Fortran 77"
|
||||
comp="F77"
|
||||
|
1
lib/spack/env/xl_r/xlc++_r
vendored
Symbolic link
1
lib/spack/env/xl_r/xlc++_r
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
1
lib/spack/env/xl_r/xlc_r
vendored
Symbolic link
1
lib/spack/env/xl_r/xlc_r
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
1
lib/spack/env/xl_r/xlf90_r
vendored
Symbolic link
1
lib/spack/env/xl_r/xlf90_r
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
1
lib/spack/env/xl_r/xlf_r
vendored
Symbolic link
1
lib/spack/env/xl_r/xlf_r
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../cc
|
@@ -455,7 +455,12 @@ def fix_darwin_install_name(path):
|
||||
# fix all dependencies:
|
||||
for dep in deps:
|
||||
for loc in libs:
|
||||
if dep == os.path.basename(loc):
|
||||
# We really want to check for either
|
||||
# dep == os.path.basename(loc) or
|
||||
# dep == join_path(builddir, os.path.basename(loc)),
|
||||
# but we don't know builddir (nor how symbolic links look
|
||||
# in builddir). We thus only compare the basenames.
|
||||
if os.path.basename(dep) == os.path.basename(loc):
|
||||
subprocess.Popen(
|
||||
["install_name_tool", "-change", dep, loc, lib],
|
||||
stdout=subprocess.PIPE).communicate()[0]
|
||||
|
@@ -46,6 +46,7 @@
|
||||
module_path = join_path(lib_path, "spack")
|
||||
platform_path = join_path(module_path, 'platforms')
|
||||
compilers_path = join_path(module_path, "compilers")
|
||||
build_systems_path = join_path(module_path, 'build_systems')
|
||||
operating_system_path = join_path(module_path, 'operating_systems')
|
||||
test_path = join_path(module_path, "test")
|
||||
hooks_path = join_path(module_path, "hooks")
|
||||
@@ -88,7 +89,7 @@
|
||||
# Initialize various data structures & objects at the core of Spack.
|
||||
#-----------------------------------------------------------------------------
|
||||
# Version information
|
||||
spack_version = Version("0.9.1")
|
||||
spack_version = Version("0.10.0")
|
||||
|
||||
|
||||
# Set up the default packages database.
|
||||
@@ -155,13 +156,24 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
__all__ = []
|
||||
|
||||
from spack.package import Package
|
||||
from spack.package import Package, run_before, run_after, on_package_attributes
|
||||
from spack.build_systems.makefile import MakefilePackage
|
||||
from spack.build_systems.autotools import AutotoolsPackage
|
||||
from spack.build_systems.cmake import CMakePackage
|
||||
from spack.build_systems.python import PythonPackage
|
||||
from spack.build_systems.r import RPackage
|
||||
__all__ += ['Package', 'CMakePackage', 'AutotoolsPackage', 'MakefilePackage',
|
||||
'RPackage']
|
||||
|
||||
__all__ += [
|
||||
'run_before',
|
||||
'run_after',
|
||||
'on_package_attributes',
|
||||
'Package',
|
||||
'CMakePackage',
|
||||
'AutotoolsPackage',
|
||||
'MakefilePackage',
|
||||
'PythonPackage',
|
||||
'RPackage'
|
||||
]
|
||||
|
||||
from spack.version import Version, ver
|
||||
__all__ += ['Version', 'ver']
|
||||
|
@@ -31,36 +31,68 @@
|
||||
from subprocess import check_call
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
from spack.package import PackageBase
|
||||
from llnl.util.filesystem import working_dir, join_path, force_remove
|
||||
from spack.package import PackageBase, run_after, run_before
|
||||
from spack.util.executable import Executable
|
||||
|
||||
|
||||
class AutotoolsPackage(PackageBase):
|
||||
"""Specialized class for packages that are built using GNU Autotools
|
||||
"""Specialized class for packages built using GNU Autotools.
|
||||
|
||||
This class provides four phases that can be overridden:
|
||||
|
||||
* autoreconf
|
||||
* configure
|
||||
* build
|
||||
* install
|
||||
1. :py:meth:`~.AutotoolsPackage.autoreconf`
|
||||
2. :py:meth:`~.AutotoolsPackage.configure`
|
||||
3. :py:meth:`~.AutotoolsPackage.build`
|
||||
4. :py:meth:`~.AutotoolsPackage.install`
|
||||
|
||||
They all have sensible defaults and for many packages the only thing
|
||||
necessary will be to override ``configure_args``
|
||||
necessary will be to override the helper method :py:meth:`.configure_args`.
|
||||
For a finer tuning you may also override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:attr:`~.AutotoolsPackage.build_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | build phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:attr:`~.AutotoolsPackage.install_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | install phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.AutotoolsPackage.check` | Run build time |
|
||||
| | tests if required |
|
||||
+-----------------------------------------------+--------------------+
|
||||
|
||||
Additionally, you may specify make targets for build and install
|
||||
phases by overriding ``build_targets`` and ``install_targets``
|
||||
"""
|
||||
#: Phases of a GNU Autotools package
|
||||
phases = ['autoreconf', 'configure', 'build', 'install']
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = 'AutotoolsPackage'
|
||||
#: Whether or not to update ``config.guess`` on old architectures
|
||||
patch_config_guess = True
|
||||
|
||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build`
|
||||
#: phase
|
||||
build_targets = []
|
||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.install`
|
||||
#: phase
|
||||
install_targets = ['install']
|
||||
|
||||
def do_patch_config_guess(self):
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ['check']
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ['installcheck']
|
||||
|
||||
#: Set to true to force the autoreconf step even if configure is present
|
||||
force_autoreconf = False
|
||||
#: Options to be passed to autoreconf when using the default implementation
|
||||
autoreconf_extra_args = []
|
||||
|
||||
def _do_patch_config_guess(self):
|
||||
"""Some packages ship with an older config.guess and need to have
|
||||
this updated when installed on a newer architecture."""
|
||||
|
||||
@@ -86,7 +118,7 @@ def do_patch_config_guess(self):
|
||||
check_call([my_config_guess], stdout=PIPE, stderr=PIPE)
|
||||
# The package's config.guess already runs OK, so just use it
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
return True
|
||||
@@ -104,7 +136,7 @@ def do_patch_config_guess(self):
|
||||
check_call([config_guess], stdout=PIPE, stderr=PIPE)
|
||||
shutil.copyfile(config_guess, my_config_guess)
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Look for the system's config.guess
|
||||
@@ -121,83 +153,152 @@ def do_patch_config_guess(self):
|
||||
check_call([config_guess], stdout=PIPE, stderr=PIPE)
|
||||
shutil.copyfile(config_guess, my_config_guess)
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
def build_directory(self):
|
||||
"""Override to provide another place to build the package"""
|
||||
@property
|
||||
def configure_directory(self):
|
||||
"""Returns the directory where 'configure' resides.
|
||||
|
||||
:return: directory where to find configure
|
||||
"""
|
||||
return self.stage.source_path
|
||||
|
||||
@property
|
||||
def configure_abs_path(self):
|
||||
# Absolute path to configure
|
||||
configure_abs_path = join_path(
|
||||
os.path.abspath(self.configure_directory), 'configure'
|
||||
)
|
||||
return configure_abs_path
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
"""Override to provide another place to build the package"""
|
||||
return self.configure_directory
|
||||
|
||||
def patch(self):
|
||||
"""Perform any required patches."""
|
||||
"""Patches config.guess if
|
||||
:py:attr:``~.AutotoolsPackage.patch_config_guess`` is True
|
||||
|
||||
:raise RuntimeError: if something goes wrong when patching
|
||||
``config.guess``
|
||||
"""
|
||||
|
||||
if self.patch_config_guess and self.spec.satisfies(
|
||||
'arch=linux-rhel7-ppc64le'):
|
||||
if not self.do_patch_config_guess():
|
||||
'arch=linux-rhel7-ppc64le'
|
||||
):
|
||||
if not self._do_patch_config_guess():
|
||||
raise RuntimeError('Failed to find suitable config.guess')
|
||||
|
||||
@run_before('autoreconf')
|
||||
def delete_configure_to_force_update(self):
|
||||
if self.force_autoreconf:
|
||||
force_remove(self.configure_abs_path)
|
||||
|
||||
def autoreconf(self, spec, prefix):
|
||||
"""Not needed usually, configure should be already there"""
|
||||
pass
|
||||
# If configure exists nothing needs to be done
|
||||
if os.path.exists(self.configure_abs_path):
|
||||
return
|
||||
# Else try to regenerate it
|
||||
autotools = ['m4', 'autoconf', 'automake', 'libtool']
|
||||
missing = [x for x in autotools if x not in spec]
|
||||
if missing:
|
||||
msg = 'Cannot generate configure: missing dependencies {0}'
|
||||
raise RuntimeError(msg.format(missing))
|
||||
tty.msg('Configure script not found: trying to generate it')
|
||||
tty.warn('*********************************************************')
|
||||
tty.warn('* If the default procedure fails, consider implementing *')
|
||||
tty.warn('* a custom AUTORECONF phase in the package *')
|
||||
tty.warn('*********************************************************')
|
||||
with working_dir(self.configure_directory):
|
||||
m = inspect.getmodule(self)
|
||||
# This part should be redundant in principle, but
|
||||
# won't hurt
|
||||
m.libtoolize()
|
||||
m.aclocal()
|
||||
# This line is what is needed most of the time
|
||||
# --install, --verbose, --force
|
||||
autoreconf_args = ['-ivf']
|
||||
if 'pkg-config' in spec:
|
||||
autoreconf_args += [
|
||||
'-I',
|
||||
join_path(spec['pkg-config'].prefix, 'share', 'aclocal'),
|
||||
]
|
||||
autoreconf_args += self.autoreconf_extra_args
|
||||
m.autoreconf(*autoreconf_args)
|
||||
|
||||
@PackageBase.sanity_check('autoreconf')
|
||||
def is_configure_or_die(self):
|
||||
@run_after('autoreconf')
|
||||
def set_configure_or_die(self):
|
||||
"""Checks the presence of a ``configure`` file after the
|
||||
autoreconf phase"""
|
||||
with working_dir(self.build_directory()):
|
||||
if not os.path.exists('configure'):
|
||||
raise RuntimeError(
|
||||
'configure script not found in {0}'.format(os.getcwd()))
|
||||
autoreconf phase. If it is found sets a module attribute
|
||||
appropriately, otherwise raises an error.
|
||||
|
||||
:raises RuntimeError: if a configure script is not found in
|
||||
:py:meth:`~.configure_directory`
|
||||
"""
|
||||
# Check if a configure script is there. If not raise a RuntimeError.
|
||||
if not os.path.exists(self.configure_abs_path):
|
||||
msg = 'configure script not found in {0}'
|
||||
raise RuntimeError(msg.format(self.configure_directory))
|
||||
|
||||
# Monkey-patch the configure script in the corresponding module
|
||||
inspect.getmodule(self).configure = Executable(
|
||||
self.configure_abs_path
|
||||
)
|
||||
|
||||
def configure_args(self):
|
||||
"""Method to be overridden. Should return an iterable containing
|
||||
all the arguments that must be passed to configure, except ``--prefix``
|
||||
"""Produces a list containing all the arguments that must be passed to
|
||||
configure, except ``--prefix`` which will be pre-pended to the list.
|
||||
|
||||
:return: list of arguments for configure
|
||||
"""
|
||||
return []
|
||||
|
||||
def configure(self, spec, prefix):
|
||||
"""Runs configure with the arguments specified in ``configure_args``
|
||||
and an appropriately set prefix
|
||||
"""Runs configure with the arguments specified in :py:meth:`.configure_args`
|
||||
and an appropriately set prefix.
|
||||
"""
|
||||
options = ['--prefix={0}'.format(prefix)] + self.configure_args()
|
||||
|
||||
with working_dir(self.build_directory()):
|
||||
with working_dir(self.build_directory, create=True):
|
||||
inspect.getmodule(self).configure(*options)
|
||||
|
||||
def build(self, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
with working_dir(self.build_directory()):
|
||||
"""Makes the build targets specified by
|
||||
:py:attr:``~.AutotoolsPackage.build_targets``
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self).make(*self.build_targets)
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with working_dir(self.build_directory()):
|
||||
"""Makes the install targets specified by
|
||||
:py:attr:``~.AutotoolsPackage.install_targets``
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self).make(*self.install_targets)
|
||||
|
||||
@PackageBase.sanity_check('build')
|
||||
@PackageBase.on_package_attributes(run_tests=True)
|
||||
def _run_default_function(self):
|
||||
"""This function is run after build if ``self.run_tests == True``
|
||||
|
||||
It will search for a method named ``check`` and run it. A sensible
|
||||
default is provided in the base class.
|
||||
"""
|
||||
try:
|
||||
fn = getattr(self, 'check')
|
||||
tty.msg('Trying default sanity checks [check]')
|
||||
fn()
|
||||
except AttributeError:
|
||||
tty.msg('Skipping default sanity checks [method `check` not implemented]') # NOQA: ignore=E501
|
||||
run_after('build')(PackageBase._run_default_build_time_test_callbacks)
|
||||
|
||||
def check(self):
|
||||
"""Default test: search the Makefile for targets ``test`` and ``check``
|
||||
and run them if found.
|
||||
"""Searches the Makefile for targets ``test`` and ``check``
|
||||
and runs them if found.
|
||||
"""
|
||||
with working_dir(self.build_directory()):
|
||||
with working_dir(self.build_directory):
|
||||
self._if_make_target_execute('test')
|
||||
self._if_make_target_execute('check')
|
||||
|
||||
run_after('install')(PackageBase._run_default_install_time_test_callbacks)
|
||||
|
||||
def installcheck(self):
|
||||
"""Searches the Makefile for an ``installcheck`` target
|
||||
and runs it if found.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
self._if_make_target_execute('installcheck')
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
||||
|
@@ -26,52 +26,76 @@
|
||||
import inspect
|
||||
import platform
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack.build_environment
|
||||
from llnl.util.filesystem import working_dir, join_path
|
||||
from spack.directives import depends_on
|
||||
from spack.package import PackageBase
|
||||
from spack.package import PackageBase, run_after
|
||||
|
||||
|
||||
class CMakePackage(PackageBase):
|
||||
"""Specialized class for packages that are built using CMake
|
||||
"""Specialized class for packages built using CMake
|
||||
|
||||
This class provides three phases that can be overridden:
|
||||
|
||||
* cmake
|
||||
* build
|
||||
* install
|
||||
1. :py:meth:`~.CMakePackage.cmake`
|
||||
2. :py:meth:`~.CMakePackage.build`
|
||||
3. :py:meth:`~.CMakePackage.install`
|
||||
|
||||
They all have sensible defaults and for many packages the only thing
|
||||
necessary will be to override ``cmake_args``
|
||||
necessary will be to override :py:meth:`~.CMakePackage.cmake_args`.
|
||||
For a finer tuning you may also override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:meth:`~.CMakePackage.build_type` | Specify the value |
|
||||
| | for the |
|
||||
| | CMAKE_BUILD_TYPE |
|
||||
| | variable |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the |
|
||||
| | root CMakeLists.txt|
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.CMakePackage.build_directory` | Directory where to |
|
||||
| | build the package |
|
||||
+-----------------------------------------------+--------------------+
|
||||
|
||||
|
||||
Additionally, you may specify make targets for build and install
|
||||
phases by overriding ``build_targets`` and ``install_targets``
|
||||
"""
|
||||
#: Phases of a CMake package
|
||||
phases = ['cmake', 'build', 'install']
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = 'CMakePackage'
|
||||
|
||||
build_targets = []
|
||||
install_targets = ['install']
|
||||
|
||||
build_time_test_callbacks = ['check']
|
||||
|
||||
depends_on('cmake', type='build')
|
||||
|
||||
def build_type(self):
|
||||
"""Override to provide the correct build_type in case a complex
|
||||
logic is needed
|
||||
"""Returns the correct value for the ``CMAKE_BUILD_TYPE`` variable
|
||||
|
||||
:return: value for ``CMAKE_BUILD_TYPE``
|
||||
"""
|
||||
return 'RelWithDebInfo'
|
||||
|
||||
@property
|
||||
def root_cmakelists_dir(self):
|
||||
"""Directory where to find the root CMakeLists.txt"""
|
||||
"""Returns the location of the root CMakeLists.txt
|
||||
|
||||
:return: directory containing the root CMakeLists.txt
|
||||
"""
|
||||
return self.stage.source_path
|
||||
|
||||
@property
|
||||
def std_cmake_args(self):
|
||||
"""Standard cmake arguments provided as a property for
|
||||
convenience of package writers
|
||||
|
||||
:return: standard cmake arguments
|
||||
"""
|
||||
# standard CMake arguments
|
||||
return CMakePackage._std_args(self)
|
||||
@@ -96,57 +120,52 @@ def _std_args(pkg):
|
||||
args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths))
|
||||
return args
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
"""Override to provide another place to build the package"""
|
||||
"""Returns the directory to use when building the package
|
||||
|
||||
:return: directory where to build the package
|
||||
"""
|
||||
return join_path(self.stage.source_path, 'spack-build')
|
||||
|
||||
def cmake_args(self):
|
||||
"""Method to be overridden. Should return an iterable containing
|
||||
all the arguments that must be passed to configure, except:
|
||||
"""Produces a list containing all the arguments that must be passed to
|
||||
cmake, except:
|
||||
|
||||
* CMAKE_INSTALL_PREFIX
|
||||
* CMAKE_BUILD_TYPE
|
||||
* CMAKE_INSTALL_PREFIX
|
||||
* CMAKE_BUILD_TYPE
|
||||
|
||||
which will be set automatically.
|
||||
|
||||
:return: list of arguments for cmake
|
||||
"""
|
||||
return []
|
||||
|
||||
def cmake(self, spec, prefix):
|
||||
"""Run cmake in the build directory"""
|
||||
options = [self.root_cmakelists_dir()] + self.std_cmake_args + \
|
||||
"""Runs ``cmake`` in the build directory"""
|
||||
options = [self.root_cmakelists_dir] + self.std_cmake_args + \
|
||||
self.cmake_args()
|
||||
with working_dir(self.build_directory(), create=True):
|
||||
with working_dir(self.build_directory, create=True):
|
||||
inspect.getmodule(self).cmake(*options)
|
||||
|
||||
def build(self, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
with working_dir(self.build_directory()):
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self).make(*self.build_targets)
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with working_dir(self.build_directory()):
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self).make(*self.install_targets)
|
||||
|
||||
@PackageBase.sanity_check('build')
|
||||
@PackageBase.on_package_attributes(run_tests=True)
|
||||
def _run_default_function(self):
|
||||
"""This function is run after build if ``self.run_tests == True``
|
||||
|
||||
It will search for a method named ``check`` and run it. A sensible
|
||||
default is provided in the base class.
|
||||
"""
|
||||
try:
|
||||
fn = getattr(self, 'check')
|
||||
tty.msg('Trying default build sanity checks [check]')
|
||||
fn()
|
||||
except AttributeError:
|
||||
tty.msg('Skipping default build sanity checks [method `check` not implemented]') # NOQA: ignore=E501
|
||||
run_after('build')(PackageBase._run_default_build_time_test_callbacks)
|
||||
|
||||
def check(self):
|
||||
"""Default test: search the Makefile for the target ``test``
|
||||
and run them if found.
|
||||
"""Searches the CMake-generated Makefile for the target ``test``
|
||||
and runs it if found.
|
||||
"""
|
||||
with working_dir(self.build_directory()):
|
||||
with working_dir(self.build_directory):
|
||||
self._if_make_target_execute('test')
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
||||
|
@@ -27,7 +27,7 @@
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
from spack.package import PackageBase
|
||||
from spack.package import PackageBase, run_after
|
||||
|
||||
|
||||
class MakefilePackage(PackageBase):
|
||||
@@ -35,38 +35,70 @@ class MakefilePackage(PackageBase):
|
||||
|
||||
This class provides three phases that can be overridden:
|
||||
|
||||
* edit
|
||||
* build
|
||||
* install
|
||||
1. :py:meth:`~.MakefilePackage.edit`
|
||||
2. :py:meth:`~.MakefilePackage.build`
|
||||
3. :py:meth:`~.MakefilePackage.install`
|
||||
|
||||
It is necessary to override the 'edit' phase, while 'build' and 'install'
|
||||
have sensible defaults.
|
||||
It is usually necessary to override the :py:meth:`~.MakefilePackage.edit`
|
||||
phase, while :py:meth:`~.MakefilePackage.build` and
|
||||
:py:meth:`~.MakefilePackage.install` have sensible defaults.
|
||||
For a finer tuning you may override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:attr:`~.MakefilePackage.build_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | build phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:attr:`~.MakefilePackage.install_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | install phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.MakefilePackage.build_directory` | Directory where the|
|
||||
| | Makefile is located|
|
||||
+-----------------------------------------------+--------------------+
|
||||
"""
|
||||
#: Phases of a package that is built with an hand-written Makefile
|
||||
phases = ['edit', 'build', 'install']
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = 'MakefilePackage'
|
||||
|
||||
#: Targets for ``make`` during the :py:meth:`~.MakefilePackage.build`
|
||||
#: phase
|
||||
build_targets = []
|
||||
#: Targets for ``make`` during the :py:meth:`~.MakefilePackage.install`
|
||||
#: phase
|
||||
install_targets = ['install']
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
"""Directory where the main Makefile is located"""
|
||||
"""Returns the directory containing the main Makefile
|
||||
|
||||
:return: build directory
|
||||
"""
|
||||
return self.stage.source_path
|
||||
|
||||
def edit(self, spec, prefix):
|
||||
"""This phase cannot be defaulted for obvious reasons..."""
|
||||
"""Edits the Makefile before calling make. This phase cannot
|
||||
be defaulted.
|
||||
"""
|
||||
tty.msg('Using default implementation: skipping edit phase.')
|
||||
|
||||
def build(self, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
with working_dir(self.build_directory()):
|
||||
"""Calls make, passing :py:attr:`~.MakefilePackage.build_targets`
|
||||
as targets.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self).make(*self.build_targets)
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with working_dir(self.build_directory()):
|
||||
"""Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
|
||||
as targets.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self).make(*self.install_targets)
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
||||
|
310
lib/spack/spack/build_systems/python.py
Normal file
310
lib/spack/spack/build_systems/python.py
Normal file
@@ -0,0 +1,310 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
|
||||
import inspect
|
||||
|
||||
from spack.directives import extends
|
||||
from spack.package import PackageBase, run_after
|
||||
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
|
||||
class PythonPackage(PackageBase):
|
||||
"""Specialized class for packages that are built using Python
|
||||
setup.py files
|
||||
|
||||
This class provides the following phases that can be overridden:
|
||||
|
||||
* build
|
||||
* build_py
|
||||
* build_ext
|
||||
* build_clib
|
||||
* build_scripts
|
||||
* clean
|
||||
* install
|
||||
* install_lib
|
||||
* install_headers
|
||||
* install_scripts
|
||||
* install_data
|
||||
* sdist
|
||||
* register
|
||||
* bdist
|
||||
* bdist_dumb
|
||||
* bdist_rpm
|
||||
* bdist_wininst
|
||||
* upload
|
||||
* check
|
||||
|
||||
These are all standard setup.py commands and can be found by running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python setup.py --help-commands
|
||||
|
||||
By default, only the 'build' and 'install' phases are run, but if you
|
||||
need to run more phases, simply modify your ``phases`` list like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
phases = ['build_ext', 'install', 'bdist']
|
||||
|
||||
Each phase provides a function <phase> that runs:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python --no-user-cfg setup.py <phase>
|
||||
|
||||
Each phase also has a <phase_args> function that can pass arguments to
|
||||
this call. All of these functions are empty except for the ``install_args``
|
||||
function, which passes ``--prefix=/path/to/installation/directory``.
|
||||
|
||||
If you need to run a phase which is not a standard setup.py command,
|
||||
you'll need to define a function for it like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def configure(self, spec, prefix):
|
||||
self.setup_py('configure')
|
||||
"""
|
||||
# Default phases
|
||||
phases = ['build', 'install']
|
||||
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
build_system_class = 'PythonPackage'
|
||||
|
||||
extends('python')
|
||||
|
||||
def setup_file(self):
|
||||
"""Returns the name of the setup file to use."""
|
||||
return 'setup.py'
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
"""The directory containing the ``setup.py`` file."""
|
||||
return self.stage.source_path
|
||||
|
||||
def python(self, *args):
|
||||
inspect.getmodule(self).python(*args)
|
||||
|
||||
def setup_py(self, *args):
|
||||
setup = self.setup_file()
|
||||
|
||||
with working_dir(self.build_directory):
|
||||
self.python(setup, '--no-user-cfg', *args)
|
||||
|
||||
# The following phases and their descriptions come from:
|
||||
# $ python setup.py --help-commands
|
||||
# Only standard commands are included here, but some packages
|
||||
# define extra commands as well
|
||||
|
||||
def build(self, spec, prefix):
|
||||
"""Build everything needed to install."""
|
||||
args = self.build_args(spec, prefix)
|
||||
|
||||
self.setup_py('build', *args)
|
||||
|
||||
def build_args(self, spec, prefix):
|
||||
"""Arguments to pass to build."""
|
||||
return []
|
||||
|
||||
def build_py(self, spec, prefix):
|
||||
'''"Build" pure Python modules (copy to build directory).'''
|
||||
args = self.build_py_args(spec, prefix)
|
||||
|
||||
self.setup_py('build_py', *args)
|
||||
|
||||
def build_py_args(self, spec, prefix):
|
||||
"""Arguments to pass to build_py."""
|
||||
return []
|
||||
|
||||
def build_ext(self, spec, prefix):
|
||||
"""Build C/C++ extensions (compile/link to build directory)."""
|
||||
args = self.build_ext_args(spec, prefix)
|
||||
|
||||
self.setup_py('build_ext', *args)
|
||||
|
||||
def build_ext_args(self, spec, prefix):
|
||||
"""Arguments to pass to build_ext."""
|
||||
return []
|
||||
|
||||
def build_clib(self, spec, prefix):
|
||||
"""Build C/C++ libraries used by Python extensions."""
|
||||
args = self.build_clib_args(spec, prefix)
|
||||
|
||||
self.setup_py('build_clib', *args)
|
||||
|
||||
def build_clib_args(self, spec, prefix):
|
||||
"""Arguments to pass to build_clib."""
|
||||
return []
|
||||
|
||||
def build_scripts(self, spec, prefix):
|
||||
'''"Build" scripts (copy and fixup #! line).'''
|
||||
args = self.build_scripts_args(spec, prefix)
|
||||
|
||||
self.setup_py('build_scripts', *args)
|
||||
|
||||
def clean(self, spec, prefix):
|
||||
"""Clean up temporary files from 'build' command."""
|
||||
args = self.clean_args(spec, prefix)
|
||||
|
||||
self.setup_py('clean', *args)
|
||||
|
||||
def clean_args(self, spec, prefix):
|
||||
"""Arguments to pass to clean."""
|
||||
return []
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Install everything from build directory."""
|
||||
args = self.install_args(spec, prefix)
|
||||
|
||||
self.setup_py('install', *args)
|
||||
|
||||
def install_args(self, spec, prefix):
|
||||
"""Arguments to pass to install."""
|
||||
return ['--prefix={0}'.format(prefix)]
|
||||
|
||||
def install_lib(self, spec, prefix):
|
||||
"""Install all Python modules (extensions and pure Python)."""
|
||||
args = self.install_lib_args(spec, prefix)
|
||||
|
||||
self.setup_py('install_lib', *args)
|
||||
|
||||
def install_lib_args(self, spec, prefix):
|
||||
"""Arguments to pass to install_lib."""
|
||||
return []
|
||||
|
||||
def install_headers(self, spec, prefix):
|
||||
"""Install C/C++ header files."""
|
||||
args = self.install_headers_args(spec, prefix)
|
||||
|
||||
self.setup_py('install_headers', *args)
|
||||
|
||||
def install_headers_args(self, spec, prefix):
|
||||
"""Arguments to pass to install_headers."""
|
||||
return []
|
||||
|
||||
def install_scripts(self, spec, prefix):
|
||||
"""Install scripts (Python or otherwise)."""
|
||||
args = self.install_scripts_args(spec, prefix)
|
||||
|
||||
self.setup_py('install_scripts', *args)
|
||||
|
||||
def install_scripts_args(self, spec, prefix):
|
||||
"""Arguments to pass to install_scripts."""
|
||||
return []
|
||||
|
||||
def install_data(self, spec, prefix):
|
||||
"""Install data files."""
|
||||
args = self.install_data_args(spec, prefix)
|
||||
|
||||
self.setup_py('install_data', *args)
|
||||
|
||||
def install_data_args(self, spec, prefix):
|
||||
"""Arguments to pass to install_data."""
|
||||
return []
|
||||
|
||||
def sdist(self, spec, prefix):
|
||||
"""Create a source distribution (tarball, zip file, etc.)."""
|
||||
args = self.sdist_args(spec, prefix)
|
||||
|
||||
self.setup_py('sdist', *args)
|
||||
|
||||
def sdist_args(self, spec, prefix):
|
||||
"""Arguments to pass to sdist."""
|
||||
return []
|
||||
|
||||
def register(self, spec, prefix):
|
||||
"""Register the distribution with the Python package index."""
|
||||
args = self.register_args(spec, prefix)
|
||||
|
||||
self.setup_py('register', *args)
|
||||
|
||||
def register_args(self, spec, prefix):
|
||||
"""Arguments to pass to register."""
|
||||
return []
|
||||
|
||||
def bdist(self, spec, prefix):
|
||||
"""Create a built (binary) distribution."""
|
||||
args = self.bdist_args(spec, prefix)
|
||||
|
||||
self.setup_py('bdist', *args)
|
||||
|
||||
def bdist_args(self, spec, prefix):
|
||||
"""Arguments to pass to bdist."""
|
||||
return []
|
||||
|
||||
def bdist_dumb(self, spec, prefix):
|
||||
'''Create a "dumb" built distribution.'''
|
||||
args = self.bdist_dumb_args(spec, prefix)
|
||||
|
||||
self.setup_py('bdist_dumb', *args)
|
||||
|
||||
def bdist_dumb_args(self, spec, prefix):
|
||||
"""Arguments to pass to bdist_dumb."""
|
||||
return []
|
||||
|
||||
def bdist_rpm(self, spec, prefix):
|
||||
"""Create an RPM distribution."""
|
||||
args = self.bdist_rpm(spec, prefix)
|
||||
|
||||
self.setup_py('bdist_rpm', *args)
|
||||
|
||||
def bdist_rpm_args(self, spec, prefix):
|
||||
"""Arguments to pass to bdist_rpm."""
|
||||
return []
|
||||
|
||||
def bdist_wininst(self, spec, prefix):
|
||||
"""Create an executable installer for MS Windows."""
|
||||
args = self.bdist_wininst_args(spec, prefix)
|
||||
|
||||
self.setup_py('bdist_wininst', *args)
|
||||
|
||||
def bdist_wininst_args(self, spec, prefix):
|
||||
"""Arguments to pass to bdist_wininst."""
|
||||
return []
|
||||
|
||||
def upload(self, spec, prefix):
|
||||
"""Upload binary package to PyPI."""
|
||||
args = self.upload_args(spec, prefix)
|
||||
|
||||
self.setup_py('upload', *args)
|
||||
|
||||
def upload_args(self, spec, prefix):
|
||||
"""Arguments to pass to upload."""
|
||||
return []
|
||||
|
||||
def check(self, spec, prefix):
|
||||
"""Perform some checks on the package."""
|
||||
args = self.check_args(spec, prefix)
|
||||
|
||||
self.setup_py('check', *args)
|
||||
|
||||
def check_args(self, spec, prefix):
|
||||
"""Arguments to pass to check."""
|
||||
return []
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
@@ -26,7 +26,7 @@
|
||||
import inspect
|
||||
|
||||
from spack.directives import extends
|
||||
from spack.package import PackageBase
|
||||
from spack.package import PackageBase, run_after
|
||||
|
||||
|
||||
class RPackage(PackageBase):
|
||||
@@ -34,25 +34,25 @@ class RPackage(PackageBase):
|
||||
|
||||
This class provides a single phase that can be overridden:
|
||||
|
||||
* install
|
||||
1. :py:meth:`~.RPackage.install`
|
||||
|
||||
It has sensible defaults and for many packages the only thing
|
||||
It has sensible defaults, and for many packages the only thing
|
||||
necessary will be to add dependencies
|
||||
"""
|
||||
phases = ['install']
|
||||
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = 'RPackage'
|
||||
|
||||
extends('r')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Install the R package"""
|
||||
"""Installs an R package."""
|
||||
inspect.getmodule(self).R(
|
||||
'CMD', 'INSTALL',
|
||||
'--library={0}'.format(self.module.r_lib_dir),
|
||||
self.stage.source_path)
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
||||
|
@@ -39,10 +39,10 @@
|
||||
#
|
||||
# Settings for commands that modify configuration
|
||||
#
|
||||
# Commands that modify confguration By default modify the *highest*
|
||||
# Commands that modify configuration by default modify the *highest*
|
||||
# priority scope.
|
||||
default_modify_scope = spack.config.highest_precedence_scope().name
|
||||
# Commands that list confguration list *all* scopes by default.
|
||||
# Commands that list configuration list *all* scopes by default.
|
||||
default_list_scope = None
|
||||
|
||||
# cmd has a submodule called "list" so preserve the python list module
|
||||
@@ -61,7 +61,6 @@
|
||||
if file.endswith(".py") and not re.search(ignore_files, file):
|
||||
cmd = re.sub(r'.py$', '', file)
|
||||
commands.append(cmd)
|
||||
commands.append('test')
|
||||
commands.sort()
|
||||
|
||||
|
||||
|
@@ -27,16 +27,16 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "Activate a package extension."
|
||||
description = "activate a package extension"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true',
|
||||
help="Activate without first activating dependencies.")
|
||||
help="activate without first activating dependencies")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="spec of package extension to activate.")
|
||||
help="spec of package extension to activate")
|
||||
|
||||
|
||||
def activate(parser, args):
|
||||
|
@@ -24,14 +24,14 @@
|
||||
##############################################################################
|
||||
import spack.architecture as architecture
|
||||
|
||||
description = "Print architecture information about this machine."
|
||||
description = "print architecture information about this machine"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
parts = subparser.add_mutually_exclusive_group()
|
||||
parts.add_argument(
|
||||
'-p', '--platform', action='store_true', default=False,
|
||||
help="Print only the platform.")
|
||||
help="print only the platform")
|
||||
|
||||
|
||||
def arch(parser, args):
|
||||
|
@@ -32,7 +32,7 @@
|
||||
|
||||
_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
|
||||
|
||||
description = "Create a new installation of spack in another prefix"
|
||||
description = "create a new installation of spack in another prefix"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
@@ -26,11 +26,12 @@
|
||||
|
||||
from spack import *
|
||||
|
||||
description = 'Stops at build stage when installing a package, if possible'
|
||||
description = 'stops at build stage when installing a package, if possible'
|
||||
|
||||
build_system_to_phase = {
|
||||
CMakePackage: 'build',
|
||||
AutotoolsPackage: 'build'
|
||||
AutotoolsPackage: 'build',
|
||||
PythonPackage: 'build'
|
||||
}
|
||||
|
||||
|
||||
|
@@ -25,7 +25,7 @@
|
||||
import spack.cmd.location
|
||||
import spack.modules
|
||||
|
||||
description = "cd to spack directories in the shell."
|
||||
description = "cd to spack directories in the shell"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
@@ -22,6 +22,8 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
|
||||
@@ -30,93 +32,133 @@
|
||||
import spack.cmd
|
||||
import spack.util.crypto
|
||||
from spack.stage import Stage, FailedDownloadError
|
||||
from spack.util.naming import *
|
||||
from spack.version import *
|
||||
|
||||
description = "Checksum available versions of a package."
|
||||
description = "checksum available versions of a package"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'package', metavar='PACKAGE', help='Package to list versions for')
|
||||
'package',
|
||||
help='package to checksum versions for')
|
||||
subparser.add_argument(
|
||||
'--keep-stage', action='store_true', dest='keep_stage',
|
||||
help="Don't clean up staging area when command completes.")
|
||||
'--keep-stage', action='store_true',
|
||||
help="don't clean up staging area when command completes")
|
||||
subparser.add_argument(
|
||||
'versions', nargs=argparse.REMAINDER,
|
||||
help='Versions to generate checksums for')
|
||||
help='versions to generate checksums for')
|
||||
|
||||
|
||||
def get_checksums(versions, urls, **kwargs):
|
||||
# Allow commands like create() to do some analysis on the first
|
||||
# archive after it is downloaded.
|
||||
def get_checksums(url_dict, name, **kwargs):
|
||||
"""Fetches and checksums archives from URLs.
|
||||
|
||||
This function is called by both ``spack checksum`` and ``spack create``.
|
||||
The ``first_stage_function`` kwarg allows ``spack create`` to determine
|
||||
things like the build system of the archive.
|
||||
|
||||
:param dict url_dict: A dictionary of the form: version -> URL
|
||||
:param str name: The name of the package
|
||||
:param callable first_stage_function: Function to run on first staging area
|
||||
:param bool keep_stage: Don't clean up staging area when command completes
|
||||
|
||||
:returns: A multi-line string containing versions and corresponding hashes
|
||||
:rtype: str
|
||||
"""
|
||||
first_stage_function = kwargs.get('first_stage_function', None)
|
||||
keep_stage = kwargs.get('keep_stage', False)
|
||||
|
||||
sorted_versions = sorted(url_dict.keys(), reverse=True)
|
||||
|
||||
# Find length of longest string in the list for padding
|
||||
max_len = max(len(str(v)) for v in sorted_versions)
|
||||
num_ver = len(sorted_versions)
|
||||
|
||||
tty.msg("Found {0} version{1} of {2}:".format(
|
||||
num_ver, '' if num_ver == 1 else 's', name),
|
||||
"",
|
||||
*spack.cmd.elide_list(
|
||||
["{0:{1}} {2}".format(v, max_len, url_dict[v])
|
||||
for v in sorted_versions]))
|
||||
print()
|
||||
|
||||
archives_to_fetch = tty.get_number(
|
||||
"How many would you like to checksum?", default=1, abort='q')
|
||||
|
||||
if not archives_to_fetch:
|
||||
tty.die("Aborted.")
|
||||
|
||||
versions = sorted_versions[:archives_to_fetch]
|
||||
urls = [url_dict[v] for v in versions]
|
||||
|
||||
tty.msg("Downloading...")
|
||||
hashes = []
|
||||
version_hashes = []
|
||||
i = 0
|
||||
for url, version in zip(urls, versions):
|
||||
try:
|
||||
with Stage(url, keep=keep_stage) as stage:
|
||||
# Fetch the archive
|
||||
stage.fetch()
|
||||
if i == 0 and first_stage_function:
|
||||
# Only run first_stage_function the first time,
|
||||
# no need to run it every time
|
||||
first_stage_function(stage, url)
|
||||
|
||||
hashes.append((version, spack.util.crypto.checksum(
|
||||
# Checksum the archive and add it to the list
|
||||
version_hashes.append((version, spack.util.crypto.checksum(
|
||||
hashlib.md5, stage.archive_file)))
|
||||
i += 1
|
||||
except FailedDownloadError as e:
|
||||
tty.msg("Failed to fetch %s" % url)
|
||||
except FailedDownloadError:
|
||||
tty.msg("Failed to fetch {0}".format(url))
|
||||
except Exception as e:
|
||||
tty.msg('Something failed on %s, skipping.\n (%s)' % (url, e))
|
||||
tty.msg("Something failed on {0}, skipping.".format(url),
|
||||
" ({0})".format(e))
|
||||
|
||||
return hashes
|
||||
if not version_hashes:
|
||||
tty.die("Could not fetch any versions for {0}".format(name))
|
||||
|
||||
# Find length of longest string in the list for padding
max_len = max(len(str(v)) for v, h in version_hashes)

# Generate the version directives to put in a package.py
version_lines = "\n".join([
" version('{0}', {1}'{2}')".format(
v, ' ' * (max_len - len(str(v))), h) for v, h in version_hashes
])

num_hash = len(version_hashes)
tty.msg("Checksummed {0} version{1} of {2}".format(
num_hash, '' if num_hash == 1 else 's', name))

return version_lines
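For reference, the padding logic that produces those version() directives can be exercised on its own; the (version, md5) pairs below are made-up examples, not real checksums:

# Made-up (version, md5) pairs purely to illustrate the alignment logic.
version_hashes = [('2.11.1', '72b6798e9a0a2e321b4d35642e14e5da'),
                  ('2.9', 'ca3bbd26ab365bbf7c4e0a183ba5b097')]

max_len = max(len(str(v)) for v, h in version_hashes)
version_lines = "\n".join([
    "    version('{0}', {1}'{2}')".format(
        v, ' ' * (max_len - len(str(v))), h) for v, h in version_hashes
])
print(version_lines)
#     version('2.11.1', '72b6798e9a0a2e321b4d35642e14e5da')
#     version('2.9',    'ca3bbd26ab365bbf7c4e0a183ba5b097')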
|
||||
|
||||
|
||||
def checksum(parser, args):
|
||||
# get the package we're going to generate checksums for
|
||||
# Make sure the user provided a package and not a URL
|
||||
if not valid_fully_qualified_module_name(args.package):
|
||||
tty.die("`spack checksum` accepts package names, not URLs. "
|
||||
"Use `spack md5 <url>` instead.")
|
||||
|
||||
# Get the package we're going to generate checksums for
|
||||
pkg = spack.repo.get(args.package)
|
||||
|
||||
# If the user asked for specific versions, use those.
|
||||
if args.versions:
|
||||
versions = {}
|
||||
# If the user asked for specific versions, use those
|
||||
url_dict = {}
|
||||
for version in args.versions:
|
||||
version = ver(version)
|
||||
if not isinstance(version, Version):
|
||||
tty.die("Cannot generate checksums for version lists or " +
|
||||
"version ranges. Use unambiguous versions.")
|
||||
versions[version] = pkg.url_for_version(version)
|
||||
tty.die("Cannot generate checksums for version lists or "
|
||||
"version ranges. Use unambiguous versions.")
|
||||
url_dict[version] = pkg.url_for_version(version)
|
||||
else:
|
||||
versions = pkg.fetch_remote_versions()
|
||||
if not versions:
|
||||
tty.die("Could not fetch any versions for %s" % pkg.name)
|
||||
# Otherwise, see what versions we can find online
|
||||
url_dict = pkg.fetch_remote_versions()
|
||||
if not url_dict:
|
||||
tty.die("Could not find any versions for {0}".format(pkg.name))
|
||||
|
||||
sorted_versions = sorted(versions, reverse=True)
|
||||
version_lines = get_checksums(
|
||||
url_dict, pkg.name, keep_stage=args.keep_stage)
|
||||
|
||||
# Find length of longest string in the list for padding
|
||||
maxlen = max(len(str(v)) for v in versions)
|
||||
|
||||
tty.msg("Found %s versions of %s" % (len(versions), pkg.name),
|
||||
*spack.cmd.elide_list(
|
||||
["{0:{1}} {2}".format(v, maxlen, versions[v])
|
||||
for v in sorted_versions]))
|
||||
print
|
||||
archives_to_fetch = tty.get_number(
|
||||
"How many would you like to checksum?", default=5, abort='q')
|
||||
|
||||
if not archives_to_fetch:
|
||||
tty.msg("Aborted.")
|
||||
return
|
||||
|
||||
version_hashes = get_checksums(
|
||||
sorted_versions[:archives_to_fetch],
|
||||
[versions[v] for v in sorted_versions[:archives_to_fetch]],
|
||||
keep_stage=args.keep_stage)
|
||||
|
||||
if not version_hashes:
|
||||
tty.die("Could not fetch any versions for %s" % pkg.name)
|
||||
|
||||
version_lines = [
|
||||
" version('%s', '%s')" % (v, h) for v, h in version_hashes
|
||||
]
|
||||
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
|
||||
print()
|
||||
print(version_lines)
|
||||
|
@@ -29,7 +29,7 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "Remove build stage and source tarball for packages."
|
||||
description = "remove build stage and source tarball for packages"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
@@ -76,32 +76,32 @@ def _specs(self, **kwargs):
|
||||
|
||||
_arguments['constraint'] = Args(
|
||||
'constraint', nargs=argparse.REMAINDER, action=ConstraintAction,
|
||||
help='Constraint to select a subset of installed packages')
|
||||
help='constraint to select a subset of installed packages')
|
||||
|
||||
_arguments['module_type'] = Args(
|
||||
'-m', '--module-type', help='Type of module files',
|
||||
'-m', '--module-type', help='type of module files',
|
||||
default='tcl', choices=spack.modules.module_types)
|
||||
|
||||
_arguments['yes_to_all'] = Args(
|
||||
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
|
||||
help='Assume "yes" is the answer to every confirmation request.')
|
||||
help='assume "yes" is the answer to every confirmation request')
|
||||
|
||||
_arguments['recurse_dependencies'] = Args(
|
||||
'-r', '--dependencies', action='store_true', dest='recurse_dependencies',
|
||||
help='Recursively traverse spec dependencies')
|
||||
help='recursively traverse spec dependencies')
|
||||
|
||||
_arguments['clean'] = Args(
|
||||
'--clean', action='store_false', dest='dirty',
|
||||
help='Clean environment before installing package.')
|
||||
help='clean environment before installing package')
|
||||
|
||||
_arguments['dirty'] = Args(
|
||||
'--dirty', action='store_true', dest='dirty',
|
||||
help='Do NOT clean environment before installing.')
|
||||
help='do NOT clean environment before installing')
|
||||
|
||||
_arguments['long'] = Args(
|
||||
'-l', '--long', action='store_true',
|
||||
help='Show dependency hashes as well as versions.')
|
||||
help='show dependency hashes as well as versions')
|
||||
|
||||
_arguments['very_long'] = Args(
|
||||
'-L', '--very-long', action='store_true',
|
||||
help='Show full dependency hashes as well as versions.')
|
||||
help='show full dependency hashes as well as versions')
|
||||
|
@@ -35,7 +35,7 @@
|
||||
from spack.spec import CompilerSpec, ArchSpec
|
||||
from spack.util.environment import get_path
|
||||
|
||||
description = "Manage compilers"
|
||||
description = "manage compilers"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -47,35 +47,35 @@ def setup_parser(subparser):
|
||||
# Find
|
||||
find_parser = sp.add_parser(
|
||||
'find', aliases=['add'],
|
||||
help='Search the system for compilers to add to Spack configuration.')
|
||||
help='search the system for compilers to add to Spack configuration')
|
||||
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
|
||||
find_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
help="configuration scope to modify")
|
||||
|
||||
# Remove
|
||||
remove_parser = sp.add_parser(
|
||||
'remove', aliases=['rm'], help='Remove compiler by spec.')
|
||||
'remove', aliases=['rm'], help='remove compiler by spec')
|
||||
remove_parser.add_argument(
|
||||
'-a', '--all', action='store_true',
|
||||
help='Remove ALL compilers that match spec.')
|
||||
help='remove ALL compilers that match spec')
|
||||
remove_parser.add_argument('compiler_spec')
|
||||
remove_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
help="configuration scope to modify")
|
||||
|
||||
# List
|
||||
list_parser = sp.add_parser('list', help='list available compilers')
|
||||
list_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
help="configuration scope to read from")
|
||||
|
||||
# Info
|
||||
info_parser = sp.add_parser('info', help='Show compiler paths.')
|
||||
info_parser = sp.add_parser('info', help='show compiler paths')
|
||||
info_parser.add_argument('compiler_spec')
|
||||
info_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
help="configuration scope to read from")
|
||||
|
||||
|
||||
def compiler_find(args):
|
||||
|
@@ -25,12 +25,12 @@
|
||||
import spack
|
||||
from spack.cmd.compiler import compiler_list
|
||||
|
||||
description = "List available compilers. Same as 'spack compiler list'."
|
||||
description = "list available compilers, same as 'spack compiler list'"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('--scope', choices=spack.config.config_scopes,
|
||||
help="Configuration scope to read/modify.")
|
||||
help="configuration scope to read/modify")
|
||||
|
||||
|
||||
def compilers(parser, args):
|
||||
|
@@ -24,27 +24,27 @@
|
||||
##############################################################################
|
||||
import spack.config
|
||||
|
||||
description = "Get and set configuration options."
|
||||
description = "get and set configuration options"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
# User can only choose one
|
||||
subparser.add_argument('--scope', choices=spack.config.config_scopes,
|
||||
help="Configuration scope to read/modify.")
|
||||
help="configuration scope to read/modify")
|
||||
|
||||
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
|
||||
|
||||
get_parser = sp.add_parser('get', help='Print configuration values.')
|
||||
get_parser = sp.add_parser('get', help='print configuration values')
|
||||
get_parser.add_argument('section',
|
||||
help="Configuration section to print. "
|
||||
"Options: %(choices)s.",
|
||||
help="configuration section to print. "
|
||||
"options: %(choices)s",
|
||||
metavar='SECTION',
|
||||
choices=spack.config.section_schemas)
|
||||
|
||||
edit_parser = sp.add_parser('edit', help='Edit configuration file.')
|
||||
edit_parser = sp.add_parser('edit', help='edit configuration file')
|
||||
edit_parser.add_argument('section',
|
||||
help="Configuration section to edit. "
|
||||
"Options: %(choices)s.",
|
||||
help="configuration section to edit. "
|
||||
"options: %(choices)s",
|
||||
metavar='SECTION',
|
||||
choices=spack.config.section_schemas)
|
||||
|
||||
|
@@ -31,7 +31,7 @@
|
||||
|
||||
from spack import *
|
||||
|
||||
description = 'Stops at configuration stage when installing a package, if possible' # NOQA: ignore=E501
|
||||
description = 'stops at configuration stage when installing a package, if possible' # NOQA: ignore=E501
|
||||
|
||||
|
||||
build_system_to_phase = {
|
||||
@@ -49,7 +49,7 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-v', '--verbose',
|
||||
action='store_true',
|
||||
help="Print additional output during builds"
|
||||
help="print additional output during builds"
|
||||
)
|
||||
|
||||
|
||||
|
@@ -26,7 +26,6 @@
|
||||
|
||||
import os
|
||||
import re
|
||||
import string
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
@@ -35,15 +34,14 @@
|
||||
import spack.url
|
||||
import spack.util.web
|
||||
from llnl.util.filesystem import mkdirp
|
||||
from ordereddict_backport import OrderedDict
|
||||
from spack.repository import Repo, RepoError
|
||||
from spack.repository import Repo
|
||||
from spack.spec import Spec
|
||||
from spack.util.executable import which
|
||||
from spack.util.naming import *
|
||||
|
||||
description = "Create a new package file from an archive URL"
|
||||
description = "create a new package file"
|
||||
|
||||
package_template = string.Template("""\
|
||||
package_template = '''\
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
@@ -73,11 +71,11 @@
|
||||
# next to all the things you'll want to change. Once you've handled
|
||||
# them, you can save this file and test your package like this:
|
||||
#
|
||||
# spack install ${name}
|
||||
# spack install {name}
|
||||
#
|
||||
# You can edit this file again by typing:
|
||||
#
|
||||
# spack edit ${name}
|
||||
# spack edit {name}
|
||||
#
|
||||
# See the Spack documentation for more information on packaging.
|
||||
# If you submit this package back to Spack as a pull request,
|
||||
@@ -86,23 +84,24 @@
|
||||
from spack import *
|
||||
|
||||
|
||||
class ${class_name}(${base_class_name}):
|
||||
""\"FIXME: Put a proper description of your package here.""\"
|
||||
class {class_name}({base_class_name}):
|
||||
"""FIXME: Put a proper description of your package here."""
|
||||
|
||||
# FIXME: Add a proper url for your package's homepage here.
|
||||
homepage = "http://www.example.com"
|
||||
url = "${url}"
|
||||
url = "{url}"
|
||||
|
||||
${versions}
|
||||
{versions}
|
||||
|
||||
${dependencies}
|
||||
{dependencies}
|
||||
|
||||
${body}
|
||||
""")
|
||||
{body}
|
||||
'''
|
||||
|
||||
|
||||
class DefaultGuess(object):
|
||||
class PackageTemplate(object):
|
||||
"""Provides the default values to be used for the package file template"""
|
||||
|
||||
base_class_name = 'Package'
|
||||
|
||||
dependencies = """\
|
||||
@@ -115,57 +114,89 @@ def install(self, spec, prefix):
|
||||
make()
|
||||
make('install')"""
|
||||
|
||||
def __init__(self, name, url, version_hash_tuples):
|
||||
self.name = name
|
||||
def __init__(self, name, url, versions):
|
||||
self.name = name
|
||||
self.class_name = mod_to_class(name)
|
||||
self.url = url
|
||||
self.version_hash_tuples = version_hash_tuples
|
||||
self.url = url
|
||||
self.versions = versions
|
||||
|
||||
@property
|
||||
def versions(self):
|
||||
"""Adds a version() call to the package for each version found."""
|
||||
max_len = max(len(str(v)) for v, h in self.version_hash_tuples)
|
||||
format = " version(%%-%ds, '%%s')" % (max_len + 2)
|
||||
return '\n'.join(
|
||||
format % ("'%s'" % v, h) for v, h in self.version_hash_tuples
|
||||
)
|
||||
def write(self, pkg_path):
"""Writes the new package file."""

# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
pkg_file.write(package_template.format(
name=self.name,
class_name=self.class_name,
base_class_name=self.base_class_name,
url=self.url,
versions=self.versions,
dependencies=self.dependencies,
body=self.body))
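Because the templates are now plain strings with {field} placeholders, the write step is ordinary str.format substitution. A toy version, with every field filled in by hand (all values illustrative):

# Toy template with the same placeholder names; values are illustrative only.
package_template = '''\
class {class_name}({base_class_name}):
    """FIXME: Put a proper description of your package here."""

    homepage = "http://www.example.com"
    url      = "{url}"

{versions}

{dependencies}

{body}
'''

print(package_template.format(
    class_name='Example',
    base_class_name='AutotoolsPackage',
    url='http://www.example.com/example-1.2.3.tar.gz',
    versions="    version('1.2.3', '0123456789abcdef0123456789abcdef')",
    dependencies="    # depends_on('foo')",
    body="    def configure_args(self):\n        return []"))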
|
||||
|
||||
|
||||
class AutotoolsGuess(DefaultGuess):
|
||||
"""Provides appropriate overrides for autotools-based packages"""
|
||||
class AutotoolsPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Autotools-based packages
|
||||
that *do* come with a ``configure`` script"""
|
||||
|
||||
base_class_name = 'AutotoolsPackage'
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('m4', type='build')
|
||||
# depends_on('autoconf', type='build')
|
||||
# depends_on('automake', type='build')
|
||||
# depends_on('libtool', type='build')
|
||||
# depends_on('foo')"""
|
||||
|
||||
body = """\
|
||||
def configure_args(self):
|
||||
# FIXME: Add arguments other than --prefix
|
||||
# FIXME: If not needed delete the function
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
|
||||
class CMakeGuess(DefaultGuess):
|
||||
"""Provides appropriate overrides for cmake-based packages"""
|
||||
class AutoreconfPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Autotools-based packages
|
||||
that *do not* come with a ``configure`` script"""
|
||||
|
||||
base_class_name = 'AutotoolsPackage'
|
||||
|
||||
dependencies = """\
|
||||
depends_on('autoconf', type='build')
|
||||
depends_on('automake', type='build')
|
||||
depends_on('libtool', type='build')
|
||||
depends_on('m4', type='build')
|
||||
|
||||
# FIXME: Add additional dependencies if required.
|
||||
# depends_on('foo')"""
|
||||
|
||||
body = """\
|
||||
def autoreconf(self, spec, prefix):
|
||||
# FIXME: Modify the autoreconf method as necessary
|
||||
autoreconf('--install', '--verbose', '--force')
|
||||
|
||||
def configure_args(self):
|
||||
# FIXME: Add arguments other than --prefix
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
|
||||
class CMakePackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for CMake-based packages"""
|
||||
|
||||
base_class_name = 'CMakePackage'
|
||||
|
||||
body = """\
|
||||
def cmake_args(self):
|
||||
# FIXME: Add arguments other than
|
||||
# FIXME: CMAKE_INSTALL_PREFIX and CMAKE_BUILD_TYPE
|
||||
# FIXME: If not needed delete the function
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
|
||||
class SconsGuess(DefaultGuess):
|
||||
"""Provides appropriate overrides for scons-based packages"""
|
||||
class SconsPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for SCons-based packages"""
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add additional dependencies if required.
|
||||
depends_on('scons', type='build')"""
|
||||
@@ -177,8 +208,9 @@ def install(self, spec, prefix):
|
||||
scons('install')"""
|
||||
|
||||
|
||||
class BazelGuess(DefaultGuess):
|
||||
"""Provides appropriate overrides for bazel-based packages"""
|
||||
class BazelPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for Bazel-based packages"""
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add additional dependencies if required.
|
||||
depends_on('bazel', type='build')"""
|
||||
@@ -189,27 +221,36 @@ def install(self, spec, prefix):
|
||||
bazel()"""
|
||||
|
||||
|
||||
class PythonGuess(DefaultGuess):
|
||||
class PythonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for python extensions"""
|
||||
dependencies = """\
|
||||
extends('python')
|
||||
base_class_name = 'PythonPackage'
|
||||
|
||||
# FIXME: Add additional dependencies if required.
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('py-setuptools', type='build')
|
||||
# depends_on('py-foo', type=('build', 'run'))"""
|
||||
|
||||
body = """\
|
||||
def install(self, spec, prefix):
|
||||
# FIXME: Add logic to build and install here.
|
||||
setup_py('install', '--prefix={0}'.format(prefix))"""
|
||||
def build_args(self):
|
||||
# FIXME: Add arguments other than --prefix
|
||||
# FIXME: If not needed delete the function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
def __init__(self, name, *args):
name = 'py-{0}'.format(name)
super(PythonGuess, self).__init__(name, *args)
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith('py-'):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = 'py-{0}'.format(name)

super(PythonPackageTemplate, self).__init__(name, *args)
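The prefix guard above keeps `--name py-numpy` from becoming py-py-numpy. The same check, stripped down to a plain function for illustration:

def python_package_name(name):
    # Only prepend 'py-' when the user has not already done so.
    if not name.startswith('py-'):
        name = 'py-{0}'.format(name)
    return name

print(python_package_name('numpy'))     # -> py-numpy
print(python_package_name('py-numpy'))  # -> py-numpy (not py-py-numpy)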
|
||||
|
||||
|
||||
class RGuess(DefaultGuess):
|
||||
class RPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for R extensions"""
|
||||
base_class_name = 'RPackage'
|
||||
|
||||
dependencies = """\
|
||||
# FIXME: Add dependencies if required.
|
||||
# depends_on('r-foo', type=('build', 'run'))"""
|
||||
@@ -218,12 +259,18 @@ class RGuess(DefaultGuess):
|
||||
# FIXME: Override install() if necessary."""
|
||||
|
||||
def __init__(self, name, *args):
|
||||
name = 'r-{0}'.format(name)
|
||||
super(RGuess, self).__init__(name, *args)
|
||||
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
|
||||
if not name.startswith('r-'):
|
||||
# Make it more obvious that we are renaming the package
|
||||
tty.msg("Changing package name from {0} to r-{0}".format(name))
|
||||
name = 'r-{0}'.format(name)
|
||||
|
||||
super(RPackageTemplate, self).__init__(name, *args)
|
||||
|
||||
|
||||
class OctaveGuess(DefaultGuess):
|
||||
class OctavePackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for octave packages"""
|
||||
|
||||
dependencies = """\
|
||||
extends('octave')
|
||||
|
||||
@@ -240,43 +287,59 @@ def install(self, spec, prefix):
|
||||
prefix, self.stage.archive_file))"""
|
||||
|
||||
def __init__(self, name, *args):
|
||||
name = 'octave-{0}'.format(name)
|
||||
super(OctaveGuess, self).__init__(name, *args)
|
||||
# If the user provided `--name octave-splines`, don't rename it
|
||||
# octave-octave-splines
|
||||
if not name.startswith('octave-'):
|
||||
# Make it more obvious that we are renaming the package
|
||||
tty.msg("Changing package name from {0} to octave-{0}".format(name)) # noqa
|
||||
name = 'octave-{0}'.format(name)
|
||||
|
||||
super(OctavePackageTemplate, self).__init__(name, *args)
|
||||
|
||||
|
||||
templates = {
'autotools': AutotoolsPackageTemplate,
'autoreconf': AutoreconfPackageTemplate,
'cmake': CMakePackageTemplate,
'scons': SconsPackageTemplate,
'bazel': BazelPackageTemplate,
'python': PythonPackageTemplate,
'r': RPackageTemplate,
'octave': OctavePackageTemplate,
'generic': PackageTemplate
}
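Template selection is now a plain dictionary dispatch from a template name to a class. A reduced sketch with stand-in classes (not the full set shown above):

# Stand-ins for the real *PackageTemplate classes, just to show the dispatch.
class PackageTemplate(object):
    base_class_name = 'Package'

class CMakePackageTemplate(PackageTemplate):
    base_class_name = 'CMakePackage'

templates = {
    'cmake': CMakePackageTemplate,
    'generic': PackageTemplate,
}

PackageClass = templates['cmake']
print(PackageClass.base_class_name)  # -> CMakePackage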
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('url', nargs='?', help="url of package archive")
|
||||
subparser.add_argument(
|
||||
'url', nargs='?',
|
||||
help="url of package archive")
|
||||
subparser.add_argument(
|
||||
'--keep-stage', action='store_true',
|
||||
help="Don't clean up staging area when command completes.")
|
||||
help="don't clean up staging area when command completes")
|
||||
subparser.add_argument(
|
||||
'-n', '--name', dest='alternate_name', default=None, metavar='NAME',
|
||||
help="Override the autodetected name for the created package.")
|
||||
'-n', '--name',
|
||||
help="name of the package to create")
|
||||
subparser.add_argument(
|
||||
'-r', '--repo', default=None,
|
||||
help="Path to a repository where the package should be created.")
|
||||
'-t', '--template', metavar='TEMPLATE', choices=templates.keys(),
|
||||
help="build system template to use. options: %(choices)s")
|
||||
subparser.add_argument(
|
||||
'-r', '--repo',
|
||||
help="path to a repository where the package should be created")
|
||||
subparser.add_argument(
|
||||
'-N', '--namespace',
|
||||
help="Specify a namespace for the package. Must be the namespace of "
|
||||
"a repository registered with Spack.")
|
||||
help="specify a namespace for the package. must be the namespace of "
|
||||
"a repository registered with Spack")
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true', dest='force',
|
||||
help="Overwrite any existing package file with the same name.")
|
||||
|
||||
setup_parser.subparser = subparser
|
||||
'-f', '--force', action='store_true',
|
||||
help="overwrite any existing package file with the same name")
|
||||
|
||||
|
||||
class BuildSystemGuesser(object):
|
||||
|
||||
_choices = {
|
||||
'autotools': AutotoolsGuess,
|
||||
'cmake': CMakeGuess,
|
||||
'scons': SconsGuess,
|
||||
'bazel': BazelGuess,
|
||||
'python': PythonGuess,
|
||||
'r': RGuess,
|
||||
'octave': OctaveGuess
|
||||
}
|
||||
class BuildSystemGuesser:
|
||||
"""An instance of BuildSystemGuesser provides a callable object to be used
|
||||
during ``spack create``. By passing this object to ``spack checksum``, we
|
||||
can take a peek at the fetched tarball and discern the build system it uses
|
||||
"""
|
||||
|
||||
def __call__(self, stage, url):
|
||||
"""Try to guess the type of build system used by a project based on
|
||||
@@ -293,12 +356,14 @@ def __call__(self, stage, url):
|
||||
# uses. If the regular expression matches a file contained in the
|
||||
# archive, the corresponding build system is assumed.
|
||||
clues = [
|
||||
(r'/configure$', 'autotools'),
|
||||
(r'/CMakeLists.txt$', 'cmake'),
|
||||
(r'/SConstruct$', 'scons'),
|
||||
(r'/setup.py$', 'python'),
|
||||
(r'/NAMESPACE$', 'r'),
|
||||
(r'/WORKSPACE$', 'bazel')
|
||||
(r'/configure$', 'autotools'),
|
||||
(r'/configure.(in|ac)$', 'autoreconf'),
|
||||
(r'/Makefile.am$', 'autoreconf'),
|
||||
(r'/CMakeLists.txt$', 'cmake'),
|
||||
(r'/SConstruct$', 'scons'),
|
||||
(r'/setup.py$', 'python'),
|
||||
(r'/NAMESPACE$', 'r'),
|
||||
(r'/WORKSPACE$', 'bazel')
|
||||
]
|
||||
|
||||
# Peek inside the compressed file.
|
||||
@@ -319,65 +384,174 @@ def __call__(self, stage, url):
|
||||
|
||||
# Determine the build system based on the files contained
# in the archive.
build_system = 'unknown'
build_system = 'generic'
for pattern, bs in clues:
if any(re.search(pattern, l) for l in lines):
build_system = bs
break

self.build_system = build_system
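The same clue-matching loop can be tried against a hypothetical archive listing (no real tarball involved); the file names below are invented for the example:

import re

# Hypothetical listing of an archive's contents; only the loop mirrors the diff.
lines = ['example-1.2.3/README.md', 'example-1.2.3/CMakeLists.txt']

clues = [
    (r'/configure$',         'autotools'),
    (r'/configure.(in|ac)$', 'autoreconf'),
    (r'/Makefile.am$',       'autoreconf'),
    (r'/CMakeLists.txt$',    'cmake'),
    (r'/SConstruct$',        'scons'),
    (r'/setup.py$',          'python'),
    (r'/NAMESPACE$',         'r'),
    (r'/WORKSPACE$',         'bazel'),
]

build_system = 'generic'
for pattern, bs in clues:
    if any(re.search(pattern, l) for l in lines):
        build_system = bs
        break

print(build_system)  # -> cmake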
|
||||
|
||||
def make_guess(self, name, url, ver_hash_tuples):
|
||||
cls = self._choices.get(self.build_system, DefaultGuess)
|
||||
return cls(name, url, ver_hash_tuples)
|
||||
|
||||
def get_name(args):
|
||||
"""Get the name of the package based on the supplied arguments.
|
||||
|
||||
def guess_name_and_version(url, args):
|
||||
# Try to deduce name and version of the new package from the URL
|
||||
version = spack.url.parse_version(url)
|
||||
if not version:
|
||||
tty.die("Couldn't guess a version string from %s" % url)
|
||||
If a name was provided, always use that. Otherwise, if a URL was
|
||||
provided, extract the name from that. Otherwise, use a default.
|
||||
|
||||
# Try to guess a name. If it doesn't work, allow the user to override.
|
||||
if args.alternate_name:
|
||||
name = args.alternate_name
|
||||
else:
|
||||
:param argparse.Namespace args: The arguments given to ``spack create``
|
||||
|
||||
:returns: The name of the package
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
# Default package name
|
||||
name = 'example'
|
||||
|
||||
if args.name:
|
||||
# Use a user-supplied name if one is present
|
||||
name = args.name
|
||||
tty.msg("Using specified package name: '{0}'".format(name))
|
||||
elif args.url:
|
||||
# Try to guess the package name based on the URL
|
||||
try:
|
||||
name = spack.url.parse_name(url, version)
|
||||
name = spack.url.parse_name(args.url)
|
||||
tty.msg("This looks like a URL for {0}".format(name))
|
||||
except spack.url.UndetectableNameError:
|
||||
# Use a user-supplied name if one is present
|
||||
tty.die("Couldn't guess a name for this package. Try running:", "",
|
||||
"spack create --name <name> <url>")
|
||||
tty.die("Couldn't guess a name for this package.",
|
||||
" Please report this bug. In the meantime, try running:",
|
||||
" `spack create --name <name> <url>`")
|
||||
|
||||
if not valid_fully_qualified_module_name(name):
|
||||
tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'")
|
||||
tty.die("Package name can only contain a-z, 0-9, and '-'")
|
||||
|
||||
return name, version
|
||||
return name
|
||||
|
||||
|
||||
def find_repository(spec, args):
|
||||
# figure out namespace for spec
|
||||
def get_url(args):
|
||||
"""Get the URL to use.
|
||||
|
||||
Use a default URL if none is provided.
|
||||
|
||||
:param argparse.Namespace args: The arguments given to ``spack create``
|
||||
|
||||
:returns: The URL of the package
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
# Default URL
|
||||
url = 'http://www.example.com/example-1.2.3.tar.gz'
|
||||
|
||||
if args.url:
|
||||
# Use a user-supplied URL if one is present
|
||||
url = args.url
|
||||
|
||||
return url
|
||||
|
||||
|
||||
def get_versions(args, name):
|
||||
"""Returns a list of versions and hashes for a package.
|
||||
|
||||
Also returns a BuildSystemGuesser object.
|
||||
|
||||
Returns default values if no URL is provided.
|
||||
|
||||
:param argparse.Namespace args: The arguments given to ``spack create``
|
||||
:param str name: The name of the package
|
||||
|
||||
:returns: Versions and hashes, and a BuildSystemGuesser object
|
||||
:rtype: str and BuildSystemGuesser
|
||||
"""
|
||||
|
||||
# Default version, hash, and guesser
|
||||
versions = """\
|
||||
# FIXME: Add proper versions and checksums here.
|
||||
# version('1.2.3', '0123456789abcdef0123456789abcdef')"""
|
||||
|
||||
guesser = BuildSystemGuesser()
|
||||
|
||||
if args.url:
|
||||
# Find available versions
|
||||
url_dict = spack.util.web.find_versions_of_archive(args.url)
|
||||
|
||||
if not url_dict:
|
||||
# If no versions were found, revert to what the user provided
|
||||
version = spack.url.parse_version(args.url)
|
||||
url_dict = {version: args.url}
|
||||
|
||||
versions = spack.cmd.checksum.get_checksums(
|
||||
url_dict, name, first_stage_function=guesser,
|
||||
keep_stage=args.keep_stage)
|
||||
|
||||
return versions, guesser
|
||||
|
||||
|
||||
def get_build_system(args, guesser):
|
||||
"""Determine the build system template.
|
||||
|
||||
If a template is specified, always use that. Otherwise, if a URL
|
||||
is provided, download the tarball and peek inside to guess what
|
||||
build system it uses. Otherwise, use a generic template by default.
|
||||
|
||||
:param argparse.Namespace args: The arguments given to ``spack create``
|
||||
:param BuildSystemGuesser guesser: The first_stage_function given to \
|
||||
``spack checksum`` which records the build system it detects
|
||||
|
||||
:returns: The name of the build system template to use
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
# Default template
|
||||
template = 'generic'
|
||||
|
||||
if args.template:
|
||||
# Use a user-supplied template if one is present
|
||||
template = args.template
|
||||
tty.msg("Using specified package template: '{0}'".format(template))
|
||||
elif args.url:
|
||||
# Use whatever build system the guesser detected
|
||||
template = guesser.build_system
|
||||
if template == 'generic':
|
||||
tty.warn("Unable to detect a build system. "
|
||||
"Using a generic package template.")
|
||||
else:
|
||||
msg = "This package looks like it uses the {0} build system"
|
||||
tty.msg(msg.format(template))
|
||||
|
||||
return template
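The precedence here is: an explicit --template wins, then the guesser's result when a URL was given, else 'generic'. A compact sketch of just that decision, using a bare argparse.Namespace in place of the real parsed arguments:

from argparse import Namespace

def choose_template(args, guessed='generic'):
    # Mirror of the precedence: -t/--template wins, then the URL-based guess.
    template = 'generic'
    if args.template:
        template = args.template
    elif args.url:
        template = guessed
    return template

print(choose_template(Namespace(template='python', url=None)))    # -> python
print(choose_template(Namespace(template=None,
                                url='http://x/y-1.0.tar.gz'),
                      guessed='cmake'))                           # -> cmake
print(choose_template(Namespace(template=None, url=None)))        # -> generic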
|
||||
|
||||
|
||||
def get_repository(args, name):
|
||||
"""Returns a Repo object that will allow us to determine the path where
|
||||
the new package file should be created.
|
||||
|
||||
:param argparse.Namespace args: The arguments given to ``spack create``
|
||||
:param str name: The name of the package to create
|
||||
|
||||
:returns: A Repo object capable of determining the path to the package file
|
||||
:rtype: Repo
|
||||
"""
|
||||
spec = Spec(name)
|
||||
# Figure out namespace for spec
|
||||
if spec.namespace and args.namespace and spec.namespace != args.namespace:
|
||||
tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace,
|
||||
args.namespace))
|
||||
tty.die("Namespaces '{0}' and '{1}' do not match.".format(
|
||||
spec.namespace, args.namespace))
|
||||
|
||||
if not spec.namespace and args.namespace:
|
||||
spec.namespace = args.namespace
|
||||
|
||||
# Figure out where the new package should live.
|
||||
# Figure out where the new package should live
|
||||
repo_path = args.repo
|
||||
if repo_path is not None:
|
||||
try:
|
||||
repo = Repo(repo_path)
|
||||
if spec.namespace and spec.namespace != repo.namespace:
|
||||
tty.die("Can't create package with namespace %s in repo with "
|
||||
"namespace %s" % (spec.namespace, repo.namespace))
|
||||
except RepoError as e:
|
||||
tty.die(str(e))
|
||||
repo = Repo(repo_path)
|
||||
if spec.namespace and spec.namespace != repo.namespace:
|
||||
tty.die("Can't create package with namespace {0} in repo with "
|
||||
"namespace {0}".format(spec.namespace, repo.namespace))
|
||||
else:
|
||||
if spec.namespace:
|
||||
repo = spack.repo.get_repo(spec.namespace, None)
|
||||
if not repo:
|
||||
tty.die("Unknown namespace: %s" % spec.namespace)
|
||||
tty.die("Unknown namespace: '{0}'".format(spec.namespace))
|
||||
else:
|
||||
repo = spack.repo.first_repo()
|
||||
|
||||
@@ -388,84 +562,30 @@ def find_repository(spec, args):
|
||||
return repo
|
||||
|
||||
|
||||
def fetch_tarballs(url, name, version):
|
||||
"""Try to find versions of the supplied archive by scraping the web.
|
||||
Prompts the user to select how many to download if many are found."""
|
||||
versions = spack.util.web.find_versions_of_archive(url)
|
||||
rkeys = sorted(versions.keys(), reverse=True)
|
||||
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
|
||||
|
||||
archives_to_fetch = 1
|
||||
if not versions:
|
||||
# If the fetch failed for some reason, revert to what the user provided
|
||||
versions = {version: url}
|
||||
elif len(versions) > 1:
|
||||
tty.msg("Found %s versions of %s:" % (len(versions), name),
|
||||
*spack.cmd.elide_list(
|
||||
["%-10s%s" % (v, u) for v, u in versions.iteritems()]))
|
||||
print('')
|
||||
archives_to_fetch = tty.get_number(
|
||||
"Include how many checksums in the package file?",
|
||||
default=5, abort='q')
|
||||
|
||||
if not archives_to_fetch:
|
||||
tty.die("Aborted.")
|
||||
|
||||
sorted_versions = sorted(versions.keys(), reverse=True)
|
||||
sorted_urls = [versions[v] for v in sorted_versions]
|
||||
return sorted_versions[:archives_to_fetch], sorted_urls[:archives_to_fetch]
|
||||
|
||||
|
||||
def create(parser, args):
|
||||
url = args.url
|
||||
if not url:
|
||||
setup_parser.subparser.print_help()
|
||||
return
|
||||
# Gather information about the package to be created
|
||||
name = get_name(args)
|
||||
url = get_url(args)
|
||||
versions, guesser = get_versions(args, name)
|
||||
build_system = get_build_system(args, guesser)
|
||||
|
||||
# Figure out a name and repo for the package.
|
||||
name, version = guess_name_and_version(url, args)
|
||||
spec = Spec(name)
|
||||
repo = find_repository(spec, args)
|
||||
# Create the package template object
|
||||
PackageClass = templates[build_system]
|
||||
package = PackageClass(name, url, versions)
|
||||
tty.msg("Created template for {0} package".format(package.name))
|
||||
|
||||
tty.msg("This looks like a URL for %s version %s" % (name, version))
|
||||
tty.msg("Creating template for package %s" % name)
|
||||
|
||||
# Fetch tarballs (prompting user if necessary)
|
||||
versions, urls = fetch_tarballs(url, name, version)
|
||||
|
||||
# Try to guess what build system is used.
|
||||
guesser = BuildSystemGuesser()
|
||||
ver_hash_tuples = spack.cmd.checksum.get_checksums(
|
||||
versions, urls,
|
||||
first_stage_function=guesser,
|
||||
keep_stage=args.keep_stage)
|
||||
|
||||
if not ver_hash_tuples:
|
||||
tty.die("Could not fetch any tarballs for %s" % name)
|
||||
|
||||
guess = guesser.make_guess(name, url, ver_hash_tuples)
|
||||
|
||||
# Create a directory for the new package.
|
||||
pkg_path = repo.filename_for_package_name(guess.name)
|
||||
# Create a directory for the new package
|
||||
repo = get_repository(args, name)
|
||||
pkg_path = repo.filename_for_package_name(package.name)
|
||||
if os.path.exists(pkg_path) and not args.force:
|
||||
tty.die("%s already exists." % pkg_path)
|
||||
tty.die('{0} already exists.'.format(pkg_path),
|
||||
' Try running `spack create --force` to overwrite it.')
|
||||
else:
|
||||
mkdirp(os.path.dirname(pkg_path))
|
||||
|
||||
# Write out a template for the file
|
||||
with open(pkg_path, "w") as pkg_file:
|
||||
pkg_file.write(
|
||||
package_template.substitute(
|
||||
name=guess.name,
|
||||
class_name=guess.class_name,
|
||||
base_class_name=guess.base_class_name,
|
||||
url=guess.url,
|
||||
versions=guess.versions,
|
||||
dependencies=guess.dependencies,
|
||||
body=guess.body
|
||||
)
|
||||
)
|
||||
# Write the new package file
|
||||
package.write(pkg_path)
|
||||
tty.msg("Created package file: {0}".format(pkg_path))
|
||||
|
||||
# If everything checks out, go ahead and edit.
|
||||
# Open up the new package file in your $EDITOR
|
||||
spack.editor(pkg_path)
|
||||
tty.msg("Created package %s" % pkg_path)
|
||||
|
@@ -30,20 +30,20 @@
|
||||
import spack.store
|
||||
from spack.graph import topological_sort
|
||||
|
||||
description = "Deactivate a package extension."
|
||||
description = "deactivate a package extension"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true',
|
||||
help="Run deactivation even if spec is NOT currently activated.")
|
||||
help="run deactivation even if spec is NOT currently activated")
|
||||
subparser.add_argument(
|
||||
'-a', '--all', action='store_true',
|
||||
help="Deactivate all extensions of an extendable package, or "
|
||||
"deactivate an extension AND its dependencies.")
|
||||
help="deactivate all extensions of an extendable package, or "
|
||||
"deactivate an extension AND its dependencies")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="spec of package extension to deactivate.")
|
||||
help="spec of package extension to deactivate")
|
||||
|
||||
|
||||
def deactivate(parser, args):
|
||||
|
@@ -33,13 +33,13 @@
|
||||
import spack
|
||||
from spack.util.executable import which
|
||||
|
||||
description = "Debugging commands for troubleshooting Spack."
|
||||
description = "debugging commands for troubleshooting Spack"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command')
|
||||
sp.add_parser('create-db-tarball',
|
||||
help="Create a tarball of Spack's installation metadata.")
|
||||
help="create a tarball of Spack's installation metadata")
|
||||
|
||||
|
||||
def _debug_tarball_suffix():
|
||||
|
@@ -30,13 +30,13 @@
|
||||
import spack.store
|
||||
import spack.cmd
|
||||
|
||||
description = "Show installed packages that depend on another."
|
||||
description = "show installed packages that depend on another"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs to list dependencies of.")
|
||||
help="specs to list dependencies of")
|
||||
|
||||
|
||||
def dependents(parser, args):
|
||||
|
@@ -31,28 +31,27 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
from spack.cmd.edit import edit_package
|
||||
from spack.stage import DIYStage
|
||||
|
||||
description = "Do-It-Yourself: build from an existing source directory."
|
||||
description = "do-it-yourself: build from an existing source directory"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
|
||||
help="Do not try to install dependencies of requested packages.")
|
||||
help="don't try to install dependencies of requested packages")
|
||||
subparser.add_argument(
|
||||
'--keep-prefix', action='store_true',
|
||||
help="Don't remove the install prefix if installation fails.")
|
||||
help="do not remove the install prefix if installation fails")
|
||||
subparser.add_argument(
|
||||
'--skip-patch', action='store_true',
|
||||
help="Skip patching for the DIY build.")
|
||||
help="skip patching for the DIY build")
|
||||
subparser.add_argument(
|
||||
'-q', '--quiet', action='store_true', dest='quiet',
|
||||
help="Do not display verbose build output while installing.")
|
||||
help="do not display verbose build output while installing")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs to use for install. Must contain package AND version.")
|
||||
help="specs to use for install. must contain package AND version")
|
||||
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
|
||||
@@ -68,15 +67,8 @@ def diy(self, args):
|
||||
|
||||
spec = specs[0]
|
||||
if not spack.repo.exists(spec.name):
|
||||
tty.warn("No such package: %s" % spec.name)
|
||||
create = tty.get_yes_or_no("Create this package?", default=False)
|
||||
if not create:
|
||||
tty.msg("Exiting without creating.")
|
||||
sys.exit(1)
|
||||
else:
|
||||
tty.msg("Running 'spack edit -f %s'" % spec.name)
|
||||
edit_package(spec.name, spack.repo.first_repo(), None, True)
|
||||
return
|
||||
tty.die("No package for '{0}' was found.".format(spec.name),
|
||||
" Use `spack create` to create a new package")
|
||||
|
||||
if not spec.versions.concrete:
|
||||
tty.die(
|
||||
|
@@ -23,11 +23,11 @@
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
|
||||
description = "Run pydoc from within spack."
|
||||
description = "run pydoc from within spack"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('entity', help="Run pydoc help on entity")
|
||||
subparser.add_argument('entity', help="run pydoc help on entity")
|
||||
|
||||
|
||||
def doc(parser, args):
|
||||
|
@@ -23,39 +23,26 @@
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import os
|
||||
import string
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import mkdirp, join_path
|
||||
from llnl.util.filesystem import join_path
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
from spack.spec import Spec
|
||||
from spack.repository import Repo
|
||||
from spack.util.naming import mod_to_class
|
||||
|
||||
description = "Open package files in $EDITOR"
|
||||
|
||||
# When -f is supplied, we'll create a very minimal skeleton.
|
||||
package_template = string.Template("""\
|
||||
from spack import *
|
||||
|
||||
class ${class_name}(Package):
|
||||
""\"Description""\"
|
||||
|
||||
homepage = "http://www.example.com"
|
||||
url = "http://www.example.com/${name}-1.0.tar.gz"
|
||||
|
||||
version('1.0', '0123456789abcdef0123456789abcdef')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
configure("--prefix=%s" % prefix)
|
||||
make()
|
||||
make("install")
|
||||
""")
|
||||
description = "open package files in $EDITOR"
|
||||
|
||||
|
||||
def edit_package(name, repo_path, namespace, force=False):
|
||||
def edit_package(name, repo_path, namespace):
|
||||
"""Opens the requested package file in your favorite $EDITOR.
|
||||
|
||||
:param str name: The name of the package
|
||||
:param str repo_path: The path to the repository containing this package
|
||||
:param str namespace: A valid namespace registered with Spack
|
||||
"""
|
||||
# Find the location of the package
|
||||
if repo_path:
|
||||
repo = Repo(repo_path)
|
||||
elif namespace:
|
||||
@@ -67,68 +54,67 @@ def edit_package(name, repo_path, namespace, force=False):
|
||||
spec = Spec(name)
|
||||
if os.path.exists(path):
|
||||
if not os.path.isfile(path):
|
||||
tty.die("Something's wrong. '%s' is not a file!" % path)
|
||||
tty.die("Something is wrong. '{0}' is not a file!".format(path))
|
||||
if not os.access(path, os.R_OK | os.W_OK):
|
||||
tty.die("Insufficient permissions on '%s'!" % path)
|
||||
elif not force:
|
||||
tty.die("No package '%s'. Use spack create, or supply -f/--force "
|
||||
"to edit a new file." % spec.name)
|
||||
else:
|
||||
mkdirp(os.path.dirname(path))
|
||||
with open(path, "w") as pkg_file:
|
||||
pkg_file.write(
|
||||
package_template.substitute(
|
||||
name=spec.name, class_name=mod_to_class(spec.name)))
|
||||
tty.die("No package for '{0}' was found.".format(spec.name),
|
||||
" Use `spack create` to create a new package")
|
||||
|
||||
spack.editor(path)
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', dest='force', action='store_true',
|
||||
help="Open a new file in $EDITOR even if package doesn't exist.")
|
||||
|
||||
excl_args = subparser.add_mutually_exclusive_group()
|
||||
|
||||
# Various filetypes you can edit directly from the cmd line.
|
||||
# Various types of Spack files that can be edited
|
||||
# Edits package files by default
|
||||
excl_args.add_argument(
|
||||
'-b', '--build-system', dest='path', action='store_const',
|
||||
const=spack.build_systems_path,
|
||||
help="Edit the build system with the supplied name.")
|
||||
excl_args.add_argument(
|
||||
'-c', '--command', dest='path', action='store_const',
|
||||
const=spack.cmd.command_path,
|
||||
help="Edit the command with the supplied name.")
|
||||
help="edit the command with the supplied name")
|
||||
excl_args.add_argument(
|
||||
'-t', '--test', dest='path', action='store_const',
|
||||
const=spack.test_path, help="Edit the test with the supplied name.")
|
||||
const=spack.test_path,
|
||||
help="edit the test with the supplied name")
|
||||
excl_args.add_argument(
|
||||
'-m', '--module', dest='path', action='store_const',
|
||||
const=spack.module_path,
|
||||
help="Edit the main spack module with the supplied name.")
|
||||
help="edit the main spack module with the supplied name")
|
||||
|
||||
# Options for editing packages
|
||||
excl_args.add_argument(
|
||||
'-r', '--repo', default=None,
|
||||
help="Path to repo to edit package in.")
|
||||
help="path to repo to edit package in")
|
||||
excl_args.add_argument(
|
||||
'-N', '--namespace', default=None,
|
||||
help="Namespace of package to edit.")
|
||||
help="namespace of package to edit")
|
||||
|
||||
subparser.add_argument(
|
||||
'name', nargs='?', default=None, help="name of package to edit")
|
||||
'name', nargs='?', default=None,
|
||||
help="name of package to edit")
|
||||
|
||||
|
||||
def edit(parser, args):
|
||||
name = args.name
|
||||
|
||||
# By default, edit package files
|
||||
path = spack.packages_path
|
||||
|
||||
# If `--command`, `--test`, or `--module` is chosen, edit those instead
|
||||
if args.path:
|
||||
path = args.path
|
||||
if name:
|
||||
path = join_path(path, name + ".py")
|
||||
if not args.force and not os.path.exists(path):
|
||||
tty.die("No command named '%s'." % name)
|
||||
if not os.path.exists(path):
|
||||
tty.die("No command for '{0}' was found.".format(name))
|
||||
spack.editor(path)
|
||||
|
||||
elif name:
|
||||
edit_package(name, args.repo, args.namespace, args.force)
|
||||
edit_package(name, args.repo, args.namespace)
|
||||
else:
|
||||
# By default open the directory where packages or commands live.
|
||||
# By default open the directory where packages live
|
||||
spack.editor(path)
|
||||
|
@@ -28,13 +28,13 @@
|
||||
import spack.cmd
|
||||
import spack.build_environment as build_env
|
||||
|
||||
description = "Run a command with the install environment for a spec."
|
||||
description = "run a command with the install environment for a spec"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs of package environment to emulate.")
|
||||
help="specs of package environment to emulate")
|
||||
|
||||
|
||||
def env(parser, args):
|
||||
|
@@ -32,24 +32,24 @@
|
||||
import spack.cmd.find
|
||||
import spack.store
|
||||
|
||||
description = "List extensions for package."
|
||||
description = "list extensions for package"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument(
|
||||
'-l', '--long', action='store_true', dest='long',
|
||||
help='Show dependency hashes as well as versions.')
|
||||
help='show dependency hashes as well as versions')
|
||||
format_group.add_argument(
|
||||
'-p', '--paths', action='store_const', dest='mode', const='paths',
|
||||
help='Show paths to extension install directories')
|
||||
help='show paths to extension install directories')
|
||||
format_group.add_argument(
|
||||
'-d', '--deps', action='store_const', dest='mode', const='deps',
|
||||
help='Show full dependency DAG of extensions')
|
||||
help='show full dependency DAG of extensions')
|
||||
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to list extensions for')
|
||||
help='spec of package to list extensions for')
|
||||
|
||||
|
||||
def extensions(parser, args):
|
||||
|
@@ -27,19 +27,19 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "Fetch archives for packages"
|
||||
description = "fetch archives for packages"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check packages against checksum")
|
||||
help="do not check packages against checksum")
|
||||
subparser.add_argument(
|
||||
'-m', '--missing', action='store_true',
|
||||
help="Also fetch all missing dependencies")
|
||||
help="also fetch all missing dependencies")
|
||||
subparser.add_argument(
|
||||
'-D', '--dependencies', action='store_true',
|
||||
help="Also fetch all dependencies")
|
||||
help="also fetch all dependencies")
|
||||
subparser.add_argument(
|
||||
'packages', nargs=argparse.REMAINDER,
|
||||
help="specs of packages to fetch")
|
||||
|
@@ -29,7 +29,7 @@
|
||||
|
||||
from spack.cmd import display_specs
|
||||
|
||||
description = "Find installed spack packages"
|
||||
description = "find installed spack packages"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -39,56 +39,56 @@ def setup_parser(subparser):
|
||||
dest='mode',
|
||||
const='short',
|
||||
default='short',
|
||||
help='Show only specs (default)')
|
||||
help='show only specs (default)')
|
||||
format_group.add_argument('-p', '--paths',
|
||||
action='store_const',
|
||||
dest='mode',
|
||||
const='paths',
|
||||
help='Show paths to package install directories')
|
||||
help='show paths to package install directories')
|
||||
format_group.add_argument(
|
||||
'-d', '--deps',
|
||||
action='store_const',
|
||||
dest='mode',
|
||||
const='deps',
|
||||
help='Show full dependency DAG of installed packages')
|
||||
help='show full dependency DAG of installed packages')
|
||||
|
||||
arguments.add_common_arguments(subparser, ['long', 'very_long'])
|
||||
|
||||
subparser.add_argument('-f', '--show-flags',
|
||||
action='store_true',
|
||||
dest='show_flags',
|
||||
help='Show spec compiler flags.')
|
||||
help='show spec compiler flags')
|
||||
implicit_explicit = subparser.add_mutually_exclusive_group()
|
||||
implicit_explicit.add_argument(
|
||||
'-e', '--explicit',
|
||||
action='store_true',
|
||||
help='Show only specs that were installed explicitly')
|
||||
help='show only specs that were installed explicitly')
|
||||
implicit_explicit.add_argument(
|
||||
'-E', '--implicit',
|
||||
action='store_true',
|
||||
help='Show only specs that were installed as dependencies')
|
||||
help='show only specs that were installed as dependencies')
|
||||
subparser.add_argument(
|
||||
'-u', '--unknown',
|
||||
action='store_true',
|
||||
dest='unknown',
|
||||
help='Show only specs Spack does not have a package for.')
|
||||
help='show only specs Spack does not have a package for')
|
||||
subparser.add_argument(
|
||||
'-m', '--missing',
|
||||
action='store_true',
|
||||
dest='missing',
|
||||
help='Show missing dependencies as well as installed specs.')
|
||||
help='show missing dependencies as well as installed specs')
|
||||
subparser.add_argument(
|
||||
'-v', '--variants',
|
||||
action='store_true',
|
||||
dest='variants',
|
||||
help='Show variants in output (can be long)')
|
||||
help='show variants in output (can be long)')
|
||||
subparser.add_argument('-M', '--only-missing',
|
||||
action='store_true',
|
||||
dest='only_missing',
|
||||
help='Show only missing dependencies.')
|
||||
help='show only missing dependencies')
|
||||
subparser.add_argument('-N', '--namespace',
|
||||
action='store_true',
|
||||
help='Show fully qualified package names.')
|
||||
help='show fully qualified package names')
|
||||
|
||||
arguments.add_common_arguments(subparser, ['constraint'])
|
||||
|
||||
|
@@ -34,7 +34,7 @@
|
||||
import spack
|
||||
from spack.util.executable import *
|
||||
|
||||
description = "Runs source code style checks on Spack. Requires flake8."
|
||||
description = "runs source code style checks on Spack. requires flake8"
|
||||
flake8 = None
|
||||
include_untracked = True
|
||||
|
||||
@@ -138,17 +138,17 @@ def filter_file(source, dest, output=False):
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-k', '--keep-temp', action='store_true',
|
||||
help="Do not delete temporary directory where flake8 runs. "
|
||||
"Use for debugging, to see filtered files.")
|
||||
help="do not delete temporary directory where flake8 runs. "
|
||||
"use for debugging, to see filtered files")
|
||||
subparser.add_argument(
|
||||
'-o', '--output', action='store_true',
|
||||
help="Send filtered files to stdout as well as temp files.")
|
||||
help="send filtered files to stdout as well as temp files")
|
||||
subparser.add_argument(
|
||||
'-r', '--root-relative', action='store_true', default=False,
|
||||
help="print root-relative paths (default is cwd-relative)")
|
||||
subparser.add_argument(
|
||||
'-U', '--no-untracked', dest='untracked', action='store_false',
|
||||
default=True, help="Exclude untracked files from checks.")
|
||||
default=True, help="exclude untracked files from checks")
|
||||
subparser.add_argument(
|
||||
'files', nargs=argparse.REMAINDER, help="specific files to check")
|
||||
|
||||
|
@@ -32,7 +32,7 @@
|
||||
from spack.spec import *
|
||||
from spack.graph import *
|
||||
|
||||
description = "Generate graphs of package dependency relationships."
|
||||
description = "generate graphs of package dependency relationships"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -41,31 +41,31 @@ def setup_parser(subparser):
|
||||
method = subparser.add_mutually_exclusive_group()
|
||||
method.add_argument(
|
||||
'-a', '--ascii', action='store_true',
|
||||
help="Draw graph as ascii to stdout (default).")
|
||||
help="draw graph as ascii to stdout (default)")
|
||||
method.add_argument(
|
||||
'-d', '--dot', action='store_true',
|
||||
help="Generate graph in dot format and print to stdout.")
|
||||
help="generate graph in dot format and print to stdout")
|
||||
|
||||
subparser.add_argument(
|
||||
'-n', '--normalize', action='store_true',
|
||||
help="Skip concretization; only print normalized spec.")
|
||||
help="skip concretization; only print normalized spec")
|
||||
|
||||
subparser.add_argument(
|
||||
'-s', '--static', action='store_true',
|
||||
help="Use static information from packages, not dynamic spec info.")
|
||||
help="use static information from packages, not dynamic spec info")
|
||||
|
||||
subparser.add_argument(
|
||||
'-i', '--installed', action='store_true',
|
||||
help="Graph all installed specs in dot format (implies --dot).")
|
||||
help="graph all installed specs in dot format (implies --dot)")
|
||||
|
||||
subparser.add_argument(
|
||||
'-t', '--deptype', action='store',
|
||||
help="Comma-separated list of deptypes to traverse. default=%s."
|
||||
help="comma-separated list of deptypes to traverse. default=%s"
|
||||
% ','.join(alldeps))
|
||||
|
||||
subparser.add_argument(
|
||||
'specs', nargs=argparse.REMAINDER,
|
||||
help="specs of packages to graph.")
|
||||
help="specs of packages to graph")
|
||||
|
||||
|
||||
def graph(parser, args):
|
||||
|
@@ -22,7 +22,7 @@
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
description = "Get help on spack and its commands"
|
||||
description = "get help on spack and its commands"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
@@ -27,7 +27,7 @@
|
||||
import spack
|
||||
import spack.fetch_strategy as fs
|
||||
|
||||
description = "Get detailed information on a particular package"
|
||||
description = "get detailed information on a particular package"
|
||||
|
||||
|
||||
def padder(str_list, extra=0):
|
||||
@@ -43,7 +43,7 @@ def pad(string):
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'name', metavar="PACKAGE", help="Name of package to get info for.")
|
||||
'name', metavar="PACKAGE", help="name of package to get info for")
|
||||
|
||||
|
||||
def print_text_info(pkg):
|
||||
|
@@ -40,7 +40,7 @@
|
||||
from spack.fetch_strategy import FetchError
|
||||
from spack.package import PackageBase
|
||||
|
||||
description = "Build and install packages"
|
||||
description = "build and install packages"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -49,29 +49,29 @@ def setup_parser(subparser):
|
||||
default='package,dependencies',
|
||||
dest='things_to_install',
|
||||
choices=['package', 'dependencies'],
|
||||
help="""Select the mode of installation.
|
||||
The default is to install the package along with all its dependencies.
|
||||
Alternatively one can decide to install only the package or only
|
||||
the dependencies."""
|
||||
help="""select the mode of installation.
|
||||
the default is to install the package along with all its dependencies.
|
||||
alternatively one can decide to install only the package or only
|
||||
the dependencies"""
|
||||
)
|
||||
subparser.add_argument(
|
||||
'-j', '--jobs', action='store', type=int,
|
||||
help="Explicitly set number of make jobs. Default is #cpus.")
|
||||
help="explicitly set number of make jobs. default is #cpus")
|
||||
subparser.add_argument(
|
||||
'--keep-prefix', action='store_true', dest='keep_prefix',
|
||||
help="Don't remove the install prefix if installation fails.")
|
||||
help="don't remove the install prefix if installation fails")
|
||||
subparser.add_argument(
|
||||
'--keep-stage', action='store_true', dest='keep_stage',
|
||||
help="Don't remove the build stage if installation succeeds.")
|
||||
help="don't remove the build stage if installation succeeds")
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check packages against checksum")
|
||||
help="do not check packages against checksum")
|
||||
subparser.add_argument(
|
||||
'-v', '--verbose', action='store_true', dest='verbose',
|
||||
help="Display verbose build output while installing.")
|
||||
help="display verbose build output while installing")
|
||||
subparser.add_argument(
|
||||
'--fake', action='store_true', dest='fake',
|
||||
help="Fake install. Just remove prefix and create a fake file.")
|
||||
help="fake install. just remove prefix and create a fake file")
|
||||
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
|
||||
@@ -83,18 +83,18 @@ def setup_parser(subparser):
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--run-tests', action='store_true', dest='run_tests',
|
||||
help="Run package level tests during installation."
|
||||
help="run package level tests during installation"
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--log-format',
|
||||
default=None,
|
||||
choices=['junit'],
|
||||
help="Format to be used for log files."
|
||||
help="format to be used for log files"
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--log-file',
|
||||
default=None,
|
||||
help="Filename for the log file. If not passed a default will be used."
|
||||
help="filename for the log file. if not passed a default will be used"
|
||||
)
|
||||
|
||||
|
||||
|
@@ -33,7 +33,7 @@
|
||||
import spack
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
description = "Print available spack packages to stdout in different formats"
|
||||
description = "print available spack packages to stdout in different formats"
|
||||
|
||||
formatters = {}
|
||||
|
||||
@@ -47,13 +47,13 @@ def formatter(func):
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'filter', nargs=argparse.REMAINDER,
|
||||
help='Optional case-insensitive glob patterns to filter results.')
|
||||
help='optional case-insensitive glob patterns to filter results')
|
||||
subparser.add_argument(
|
||||
'-d', '--search-description', action='store_true', default=False,
|
||||
help='Filtering will also search the description for a match.')
|
||||
help='filtering will also search the description for a match')
|
||||
subparser.add_argument(
|
||||
'--format', default='name_only', choices=formatters,
|
||||
help='Format to be used to print the output [default: name_only]')
|
||||
help='format to be used to print the output [default: name_only]')
|
||||
|
||||
|
||||
def filter_by_name(pkgs, args):
|
||||
|
@@ -25,7 +25,7 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description = "Add package to environment using modules."
|
||||
description = "add package to environment using modules"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -33,8 +33,8 @@ def setup_parser(subparser):
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="Spec of package to load with modules. "
|
||||
"(If -, read specs from STDIN)")
|
||||
help="spec of package to load with modules "
|
||||
"(if -, read specs from STDIN)")
|
||||
|
||||
|
||||
def load(parser, args):
|
||||
|
@@ -29,7 +29,7 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "Print out locations of various directories used by Spack"
|
||||
description = "print out locations of various directories used by Spack"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -38,34 +38,34 @@ def setup_parser(subparser):
|
||||
|
||||
directories.add_argument(
|
||||
'-m', '--module-dir', action='store_true',
|
||||
help="Spack python module directory.")
|
||||
help="spack python module directory")
|
||||
directories.add_argument(
|
||||
'-r', '--spack-root', action='store_true',
|
||||
help="Spack installation root.")
|
||||
help="spack installation root")
|
||||
|
||||
directories.add_argument(
|
||||
'-i', '--install-dir', action='store_true',
|
||||
help="Install prefix for spec (spec need not be installed).")
|
||||
help="install prefix for spec (spec need not be installed)")
|
||||
directories.add_argument(
|
||||
'-p', '--package-dir', action='store_true',
|
||||
help="Directory enclosing a spec's package.py file.")
|
||||
help="directory enclosing a spec's package.py file")
|
||||
directories.add_argument(
|
||||
'-P', '--packages', action='store_true',
|
||||
help="Top-level packages directory for Spack.")
|
||||
help="top-level packages directory for Spack")
|
||||
directories.add_argument(
|
||||
'-s', '--stage-dir', action='store_true',
|
||||
help="Stage directory for a spec.")
|
||||
help="stage directory for a spec")
|
||||
directories.add_argument(
|
||||
'-S', '--stages', action='store_true',
|
||||
help="Top level Stage directory.")
|
||||
help="top level stage directory")
|
||||
directories.add_argument(
|
||||
'-b', '--build-dir', action='store_true',
|
||||
help="Checked out or expanded source directory for a spec "
|
||||
"(requires it to be staged first).")
|
||||
help="checked out or expanded source directory for a spec "
|
||||
"(requires it to be staged first)")
|
||||
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="spec of package to fetch directory for.")
|
||||
help="spec of package to fetch directory for")
|
||||
|
||||
|
||||
def location(parser, args):
|
||||
|
@@ -31,13 +31,13 @@
|
||||
import spack.util.crypto
|
||||
from spack.stage import Stage, FailedDownloadError
|
||||
|
||||
description = "Calculate md5 checksums for files/urls."
|
||||
description = "calculate md5 checksums for files/urls"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
setup_parser.parser = subparser
|
||||
subparser.add_argument('files', nargs=argparse.REMAINDER,
|
||||
help="Files/urls to checksum.")
|
||||
help="files/urls to checksum")
|
||||
|
||||
|
||||
def compute_md5_checksum(url):
|
||||
|
@@ -37,13 +37,13 @@
|
||||
from spack.error import SpackError
|
||||
from spack.util.spack_yaml import syaml_dict
|
||||
|
||||
description = "Manage mirrors."
|
||||
description = "manage mirrors"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check fetched packages against checksum")
|
||||
help="do not check fetched packages against checksum")
|
||||
|
||||
sp = subparser.add_subparsers(
|
||||
metavar='SUBCOMMAND', dest='mirror_command')
|
||||
@@ -51,30 +51,30 @@ def setup_parser(subparser):
|
||||
# Create
|
||||
create_parser = sp.add_parser('create', help=mirror_create.__doc__)
|
||||
create_parser.add_argument('-d', '--directory', default=None,
|
||||
help="Directory in which to create mirror.")
|
||||
help="directory in which to create mirror")
|
||||
create_parser.add_argument(
|
||||
'specs', nargs=argparse.REMAINDER,
|
||||
help="Specs of packages to put in mirror")
|
||||
help="specs of packages to put in mirror")
|
||||
create_parser.add_argument(
|
||||
'-f', '--file', help="File with specs of packages to put in mirror.")
|
||||
'-f', '--file', help="file with specs of packages to put in mirror")
|
||||
create_parser.add_argument(
|
||||
'-D', '--dependencies', action='store_true',
|
||||
help="Also fetch all dependencies")
|
||||
help="also fetch all dependencies")
|
||||
create_parser.add_argument(
|
||||
'-o', '--one-version-per-spec', action='store_const',
|
||||
const=1, default=0,
|
||||
help="Only fetch one 'preferred' version per spec, not all known.")
|
||||
help="only fetch one 'preferred' version per spec, not all known")
|
||||
|
||||
scopes = spack.config.config_scopes
|
||||
|
||||
# Add
|
||||
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
|
||||
add_parser.add_argument('name', help="Mnemonic name for mirror.")
|
||||
add_parser.add_argument('name', help="mnemonic name for mirror")
|
||||
add_parser.add_argument(
|
||||
'url', help="URL of mirror directory from 'spack mirror create'.")
|
||||
'url', help="url of mirror directory from 'spack mirror create'")
|
||||
add_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
help="configuration scope to modify")
|
||||
|
||||
# Remove
|
||||
remove_parser = sp.add_parser('remove', aliases=['rm'],
|
||||
@@ -82,13 +82,13 @@ def setup_parser(subparser):
|
||||
remove_parser.add_argument('name')
|
||||
remove_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
help="configuration scope to modify")
|
||||
|
||||
# List
|
||||
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
|
||||
list_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
help="configuration scope to read from")
|
||||
|
||||
|
||||
def mirror_add(args):
|
||||
|
@@ -35,7 +35,7 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
from spack.modules import module_types
|
||||
|
||||
description = "Manipulate module files"
|
||||
description = "manipulate module files"
|
||||
|
||||
# Dictionary that will be populated with the list of sub-commands
|
||||
# Each sub-command must be callable and accept 3 arguments :
|
||||
@@ -57,10 +57,10 @@ def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
|
||||
|
||||
# spack module refresh
|
||||
refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
|
||||
refresh_parser = sp.add_parser('refresh', help='regenerate module files')
|
||||
refresh_parser.add_argument(
|
||||
'--delete-tree',
|
||||
help='Delete the module file tree before refresh',
|
||||
help='delete the module file tree before refresh',
|
||||
action='store_true'
|
||||
)
|
||||
arguments.add_common_arguments(
|
||||
@@ -68,11 +68,11 @@ def setup_parser(subparser):
|
||||
)
|
||||
|
||||
# spack module find
|
||||
find_parser = sp.add_parser('find', help='Find module files for packages')
|
||||
find_parser = sp.add_parser('find', help='find module files for packages')
|
||||
arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
|
||||
|
||||
# spack module rm
|
||||
rm_parser = sp.add_parser('rm', help='Remove module files')
|
||||
rm_parser = sp.add_parser('rm', help='remove module files')
|
||||
arguments.add_common_arguments(
|
||||
rm_parser, ['constraint', 'module_type', 'yes_to_all']
|
||||
)
|
||||
@@ -80,19 +80,19 @@ def setup_parser(subparser):
|
||||
# spack module loads
|
||||
loads_parser = sp.add_parser(
|
||||
'loads',
|
||||
help='Prompt the list of modules associated with a constraint'
|
||||
help='prompt the list of modules associated with a constraint'
|
||||
)
|
||||
loads_parser.add_argument(
|
||||
'--input-only', action='store_false', dest='shell',
|
||||
help='Generate input for module command (instead of a shell script)'
|
||||
help='generate input for module command (instead of a shell script)'
|
||||
)
|
||||
loads_parser.add_argument(
|
||||
'-p', '--prefix', dest='prefix', default='',
|
||||
help='Prepend to module names when issuing module load commands'
|
||||
help='prepend to module names when issuing module load commands'
|
||||
)
|
||||
loads_parser.add_argument(
|
||||
'-x', '--exclude', dest='exclude', action='append', default=[],
|
||||
help="Exclude package from output; may be specified multiple times"
|
||||
help="exclude package from output; may be specified multiple times"
|
||||
)
|
||||
arguments.add_common_arguments(
|
||||
loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
|
||||
|
@@ -29,13 +29,13 @@
|
||||
import spack
|
||||
|
||||
|
||||
description = "Patch expanded archive sources in preparation for install"
|
||||
description = "patch expanded archive sources in preparation for install"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check downloaded packages against checksum")
|
||||
help="do not check downloaded packages against checksum")
|
||||
subparser.add_argument(
|
||||
'packages', nargs=argparse.REMAINDER,
|
||||
help="specs of packages to stage")
|
||||
|
@@ -31,7 +31,7 @@
|
||||
import spack
|
||||
from spack.util.executable import *
|
||||
|
||||
description = "Query packages associated with particular git revisions."
|
||||
description = "query packages associated with particular git revisions"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -40,35 +40,35 @@ def setup_parser(subparser):
|
||||
|
||||
add_parser = sp.add_parser('add', help=pkg_add.__doc__)
|
||||
add_parser.add_argument('packages', nargs=argparse.REMAINDER,
|
||||
help="Names of packages to add to git repo.")
|
||||
help="names of packages to add to git repo")
|
||||
|
||||
list_parser = sp.add_parser('list', help=pkg_list.__doc__)
|
||||
list_parser.add_argument('rev', default='HEAD', nargs='?',
|
||||
help="Revision to list packages for.")
|
||||
help="revision to list packages for")
|
||||
|
||||
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
|
||||
diff_parser.add_argument(
|
||||
'rev1', nargs='?', default='HEAD^',
|
||||
help="Revision to compare against.")
|
||||
help="revision to compare against")
|
||||
diff_parser.add_argument(
|
||||
'rev2', nargs='?', default='HEAD',
|
||||
help="Revision to compare to rev1 (default is HEAD).")
|
||||
help="revision to compare to rev1 (default is HEAD)")
|
||||
|
||||
add_parser = sp.add_parser('added', help=pkg_added.__doc__)
|
||||
add_parser.add_argument(
|
||||
'rev1', nargs='?', default='HEAD^',
|
||||
help="Revision to compare against.")
|
||||
help="revision to compare against")
|
||||
add_parser.add_argument(
|
||||
'rev2', nargs='?', default='HEAD',
|
||||
help="Revision to compare to rev1 (default is HEAD).")
|
||||
help="revision to compare to rev1 (default is HEAD)")
|
||||
|
||||
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
|
||||
rm_parser.add_argument(
|
||||
'rev1', nargs='?', default='HEAD^',
|
||||
help="Revision to compare against.")
|
||||
help="revision to compare against")
|
||||
rm_parser.add_argument(
|
||||
'rev2', nargs='?', default='HEAD',
|
||||
help="Revision to compare to rev1 (default is HEAD).")
|
||||
help="revision to compare to rev1 (default is HEAD)")
|
||||
|
||||
|
||||
def get_git():
|
||||
|
@@ -29,13 +29,13 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "List packages that provide a particular virtual package"
|
||||
description = "list packages that provide a particular virtual package"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
|
||||
help='Find packages that provide this virtual package')
|
||||
help='find packages that provide this virtual package')
|
||||
|
||||
|
||||
def providers(parser, args):
|
||||
|
@@ -25,22 +25,22 @@
|
||||
import spack
|
||||
import spack.stage as stage
|
||||
|
||||
description = "Remove temporary build files and/or downloaded archives"
|
||||
description = "remove temporary build files and/or downloaded archives"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-s', '--stage', action='store_true', default=True,
|
||||
help="Remove all temporary build stages (default).")
|
||||
help="remove all temporary build stages (default)")
|
||||
subparser.add_argument(
|
||||
'-d', '--downloads', action='store_true',
|
||||
help="Remove cached downloads.")
|
||||
help="remove cached downloads")
|
||||
subparser.add_argument(
|
||||
'-m', '--misc-cache', action='store_true',
|
||||
help="Remove long-lived caches, like the virtual package index.")
|
||||
help="remove long-lived caches, like the virtual package index")
|
||||
subparser.add_argument(
|
||||
'-a', '--all', action='store_true',
|
||||
help="Remove all of the above.")
|
||||
help="remove all of the above")
|
||||
|
||||
|
||||
def purge(parser, args):
|
||||
|
@@ -31,15 +31,15 @@
|
||||
import spack
|
||||
|
||||
|
||||
description = "launch an interpreter as spack would launch a command"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-c', dest='python_command', help='Command to execute.')
|
||||
'-c', dest='python_command', help='command to execute')
|
||||
subparser.add_argument(
|
||||
'python_args', nargs=argparse.REMAINDER,
|
||||
help="File to run plus arguments.")
|
||||
|
||||
|
||||
description = "Launch an interpreter as spack would launch a command"
|
||||
help="file to run plus arguments")
|
||||
|
||||
|
||||
def python(parser, args):
|
||||
|
@@ -24,7 +24,7 @@
|
||||
##############################################################################
|
||||
import spack
|
||||
import spack.store
|
||||
description = "Rebuild Spack's package database."
|
||||
description = "rebuild Spack's package database"
|
||||
|
||||
|
||||
def reindex(parser, args):
|
||||
|
@@ -30,7 +30,7 @@
|
||||
import spack.config
|
||||
from spack.repository import *
|
||||
|
||||
description = "Manage package source repositories."
|
||||
description = "manage package source repositories"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -40,34 +40,34 @@ def setup_parser(subparser):
|
||||
# Create
|
||||
create_parser = sp.add_parser('create', help=repo_create.__doc__)
|
||||
create_parser.add_argument(
|
||||
'directory', help="Directory to create the repo in.")
|
||||
'directory', help="directory to create the repo in")
|
||||
create_parser.add_argument(
|
||||
'namespace', help="Namespace to identify packages in the repository. "
|
||||
"Defaults to the directory name.", nargs='?')
|
||||
'namespace', help="namespace to identify packages in the repository. "
|
||||
"defaults to the directory name", nargs='?')
|
||||
|
||||
# List
|
||||
list_parser = sp.add_parser('list', help=repo_list.__doc__)
|
||||
list_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_list_scope,
|
||||
help="Configuration scope to read from.")
|
||||
help="configuration scope to read from")
|
||||
|
||||
# Add
|
||||
add_parser = sp.add_parser('add', help=repo_add.__doc__)
|
||||
add_parser.add_argument(
|
||||
'path', help="Path to a Spack package repository directory.")
|
||||
'path', help="path to a Spack package repository directory")
|
||||
add_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
help="configuration scope to modify")
|
||||
|
||||
# Remove
|
||||
remove_parser = sp.add_parser(
|
||||
'remove', help=repo_remove.__doc__, aliases=['rm'])
|
||||
remove_parser.add_argument(
|
||||
'path_or_namespace',
|
||||
help="Path or namespace of a Spack package repository.")
|
||||
help="path or namespace of a Spack package repository")
|
||||
remove_parser.add_argument(
|
||||
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
|
||||
help="Configuration scope to modify.")
|
||||
help="configuration scope to modify")
|
||||
|
||||
|
||||
def repo_create(args):
|
||||
|
@@ -29,7 +29,7 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "Revert checked out package source code."
|
||||
description = "revert checked out package source code"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
|
@@ -36,22 +36,21 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
from llnl.util.filesystem import set_executable
|
||||
from spack import which
|
||||
from spack.cmd.edit import edit_package
|
||||
from spack.stage import DIYStage
|
||||
|
||||
description = "Create a configuration script and module, but don't build."
|
||||
description = "create a configuration script and module, but don't build"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
|
||||
help="Do not try to install dependencies of requested packages.")
|
||||
help="do not try to install dependencies of requested packages")
|
||||
subparser.add_argument(
|
||||
'-v', '--verbose', action='store_true', dest='verbose',
|
||||
help="Display verbose build output while installing.")
|
||||
help="display verbose build output while installing")
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help="specs to use for install. Must contain package AND version.")
|
||||
help="specs to use for install. must contain package AND version")
|
||||
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
|
||||
@@ -134,16 +133,8 @@ def setup(self, args):
|
||||
with spack.store.db.write_transaction():
|
||||
spec = specs[0]
|
||||
if not spack.repo.exists(spec.name):
|
||||
tty.warn("No such package: %s" % spec.name)
|
||||
create = tty.get_yes_or_no("Create this package?", default=False)
|
||||
if not create:
|
||||
tty.msg("Exiting without creating.")
|
||||
sys.exit(1)
|
||||
else:
|
||||
tty.msg("Running 'spack edit -f %s'" % spec.name)
|
||||
edit_package(spec.name, spack.repo.first_repo(), None, True)
|
||||
return
|
||||
|
||||
tty.die("No package for '{0}' was found.".format(spec.name),
|
||||
" Use `spack create` to create a new package")
|
||||
if not spec.versions.concrete:
|
||||
tty.die(
|
||||
"spack setup spec must have a single, concrete version. "
|
||||
|
@@ -28,29 +28,29 @@
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
|
||||
description = "print out abstract and concrete versions of a spec."
|
||||
description = "print out abstract and concrete versions of a spec"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
arguments.add_common_arguments(subparser, ['long', 'very_long'])
|
||||
subparser.add_argument(
|
||||
'-y', '--yaml', action='store_true', default=False,
|
||||
help='Print concrete spec as YAML.')
|
||||
help='print concrete spec as YAML')
|
||||
subparser.add_argument(
|
||||
'-c', '--cover', action='store',
|
||||
default='nodes', choices=['nodes', 'edges', 'paths'],
|
||||
help='How extensively to traverse the DAG. (default: nodes).')
|
||||
help='how extensively to traverse the DAG (default: nodes)')
|
||||
subparser.add_argument(
|
||||
'-N', '--namespaces', action='store_true', default=False,
|
||||
help='Show fully qualified package names.')
|
||||
help='show fully qualified package names')
|
||||
subparser.add_argument(
|
||||
'-I', '--install-status', action='store_true', default=False,
|
||||
help='Show install status of packages. Packages can be: '
|
||||
help='show install status of packages. packages can be: '
|
||||
'installed [+], missing and needed by an installed package [-], '
|
||||
'or not installed (no annotation).')
|
||||
'or not installed (no annotation)')
|
||||
subparser.add_argument(
|
||||
'-t', '--types', action='store_true', default=False,
|
||||
help='Show dependency types.')
|
||||
help='show dependency types')
|
||||
subparser.add_argument(
|
||||
'specs', nargs=argparse.REMAINDER, help="specs of packages")
|
||||
|
||||
|
@@ -28,16 +28,16 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
|
||||
description = "Expand downloaded archive in preparation for install"
|
||||
description = "expand downloaded archive in preparation for install"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-n', '--no-checksum', action='store_true', dest='no_checksum',
|
||||
help="Do not check downloaded packages against checksum")
|
||||
help="do not check downloaded packages against checksum")
|
||||
subparser.add_argument(
|
||||
'-p', '--path', dest='path',
|
||||
help="Path to stage package, does not add to spack tree")
|
||||
help="path to stage package, does not add to spack tree")
|
||||
|
||||
subparser.add_argument(
|
||||
'specs', nargs=argparse.REMAINDER, help="specs of packages to stage")
|
||||
|
@@ -34,24 +34,24 @@
|
||||
|
||||
import spack
|
||||
|
||||
description = "A thin wrapper around the pytest command."
|
||||
description = "a thin wrapper around the pytest command"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-H', '--pytest-help', action='store_true', default=False,
|
||||
help="print full pytest help message, showing advanced options.")
|
||||
help="print full pytest help message, showing advanced options")
|
||||
|
||||
list_group = subparser.add_mutually_exclusive_group()
|
||||
list_group.add_argument(
|
||||
'-l', '--list', action='store_true', default=False,
|
||||
help="list basic test names.")
|
||||
help="list basic test names")
|
||||
list_group.add_argument(
|
||||
'-L', '--long-list', action='store_true', default=False,
|
||||
help="list the entire hierarchy of tests.")
|
||||
help="list the entire hierarchy of tests")
|
||||
subparser.add_argument(
|
||||
'tests', nargs=argparse.REMAINDER,
|
||||
help="list of tests to run (will be passed to pytest -k).")
|
||||
help="list of tests to run (will be passed to pytest -k)")
|
||||
|
||||
|
||||
def do_list(args, unknown_args):
|
||||
|
@@ -32,7 +32,7 @@
|
||||
import spack.store
|
||||
import spack.repository
|
||||
|
||||
description = "Remove an installed package"
|
||||
description = "remove an installed package"
|
||||
|
||||
error_message = """You can either:
|
||||
a) Use a more specific spec, or
|
||||
@@ -50,24 +50,24 @@
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true', dest='force',
|
||||
help="Remove regardless of whether other packages depend on this one.")
|
||||
help="remove regardless of whether other packages depend on this one")
|
||||
|
||||
subparser.add_argument(
|
||||
'-a', '--all', action='store_true', dest='all',
|
||||
help="USE CAREFULLY. Remove ALL installed packages that match each "
|
||||
help="USE CAREFULLY. remove ALL installed packages that match each "
|
||||
"supplied spec. i.e., if you say uninstall `libelf`,"
|
||||
" ALL versions of `libelf` are uninstalled. If no spec is "
|
||||
"supplied all installed software will be uninstalled. This "
|
||||
"is both useful and dangerous, like rm -r.")
|
||||
" ALL versions of `libelf` are uninstalled. if no spec is "
|
||||
"supplied all installed software will be uninstalled. this "
|
||||
"is both useful and dangerous, like rm -r")
|
||||
|
||||
subparser.add_argument(
|
||||
'-d', '--dependents', action='store_true', dest='dependents',
|
||||
help='Also uninstall any packages that depend on the ones given '
|
||||
'via command line.')
|
||||
help='also uninstall any packages that depend on the ones given '
|
||||
'via command line')
|
||||
|
||||
subparser.add_argument(
|
||||
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
|
||||
help='Assume "yes" is the answer to every confirmation requested')
|
||||
help='assume "yes" is the answer to every confirmation requested')
|
||||
|
||||
subparser.add_argument(
|
||||
'packages',
|
||||
|
@@ -25,7 +25,7 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description = "Remove package from environment using module."
|
||||
description = "remove package from environment using module"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -33,7 +33,7 @@ def setup_parser(subparser):
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to unload with modules.')
|
||||
help='spec of package to unload with modules')
|
||||
|
||||
|
||||
def unload(parser, args):
|
||||
|
@@ -25,7 +25,7 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description = "Remove package from environment using dotkit."
|
||||
description = "remove package from environment using dotkit"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -33,7 +33,7 @@ def setup_parser(subparser):
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to unuse with dotkit.')
|
||||
help='spec of package to unuse with dotkit')
|
||||
|
||||
|
||||
def unuse(parser, args):
|
||||
|
319
lib/spack/spack/cmd/url.py
Normal file
@@ -0,0 +1,319 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from __future__ import division, print_function
|
||||
|
||||
from collections import defaultdict
|
||||
|
||||
import spack
|
||||
|
||||
from llnl.util import tty
|
||||
from spack.url import *
|
||||
from spack.util.web import find_versions_of_archive
|
||||
|
||||
description = "debugging tool for url parsing"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
|
||||
|
||||
# Parse
|
||||
parse_parser = sp.add_parser('parse', help='attempt to parse a url')
|
||||
|
||||
parse_parser.add_argument(
|
||||
'url',
|
||||
help='url to parse')
|
||||
parse_parser.add_argument(
|
||||
'-s', '--spider', action='store_true',
|
||||
help='spider the source page for versions')
|
||||
|
||||
# List
|
||||
list_parser = sp.add_parser('list', help='list urls in all packages')
|
||||
|
||||
list_parser.add_argument(
|
||||
'-c', '--color', action='store_true',
|
||||
help='color the parsed version and name in the urls shown '
|
||||
'(versions will be cyan, name red)')
|
||||
list_parser.add_argument(
|
||||
'-e', '--extrapolation', action='store_true',
|
||||
help='color the versions used for extrapolation as well '
|
||||
'(additional versions will be green, names magenta)')
|
||||
|
||||
excl_args = list_parser.add_mutually_exclusive_group()
|
||||
|
||||
excl_args.add_argument(
|
||||
'-n', '--incorrect-name', action='store_true',
|
||||
help='only list urls for which the name was incorrectly parsed')
|
||||
excl_args.add_argument(
|
||||
'-v', '--incorrect-version', action='store_true',
|
||||
help='only list urls for which the version was incorrectly parsed')
|
||||
|
||||
# Test
|
||||
sp.add_parser(
|
||||
'test', help='print a summary of how well we are parsing package urls')
|
||||
|
||||
|
||||
def url(parser, args):
|
||||
action = {
|
||||
'parse': url_parse,
|
||||
'list': url_list,
|
||||
'test': url_test
|
||||
}
|
||||
|
||||
action[args.subcommand](args)
|
||||
|
||||
|
||||
def url_parse(args):
|
||||
url = args.url
|
||||
|
||||
tty.msg('Parsing URL: {0}'.format(url))
|
||||
print()
|
||||
|
||||
ver, vs, vl, vi, vregex = parse_version_offset(url)
|
||||
tty.msg('Matched version regex {0:>2}: r{1!r}'.format(vi, vregex))
|
||||
|
||||
name, ns, nl, ni, nregex = parse_name_offset(url, ver)
|
||||
tty.msg('Matched name regex {0:>2}: r{1!r}'.format(ni, nregex))
|
||||
|
||||
print()
|
||||
tty.msg('Detected:')
|
||||
try:
|
||||
print_name_and_version(url)
|
||||
except UrlParseError as e:
|
||||
tty.error(str(e))
|
||||
|
||||
print(' name: {0}'.format(name))
|
||||
print(' version: {0}'.format(ver))
|
||||
print()
|
||||
|
||||
tty.msg('Substituting version 9.9.9b:')
|
||||
newurl = substitute_version(url, '9.9.9b')
|
||||
print_name_and_version(newurl)
|
||||
|
||||
if args.spider:
|
||||
print()
|
||||
tty.msg('Spidering for versions:')
|
||||
versions = find_versions_of_archive(url)
|
||||
|
||||
max_len = max(len(str(v)) for v in versions)
|
||||
|
||||
for v in sorted(versions):
|
||||
print('{0:{1}} {2}'.format(v, max_len, versions[v]))
|
||||
|
||||
|
||||
def url_list(args):
|
||||
urls = set()
|
||||
|
||||
# Gather set of URLs from all packages
|
||||
for pkg in spack.repo.all_packages():
|
||||
url = getattr(pkg.__class__, 'url', None)
|
||||
urls = url_list_parsing(args, urls, url, pkg)
|
||||
|
||||
for params in pkg.versions.values():
|
||||
url = params.get('url', None)
|
||||
urls = url_list_parsing(args, urls, url, pkg)
|
||||
|
||||
# Print URLs
|
||||
for url in sorted(urls):
|
||||
if args.color or args.extrapolation:
|
||||
print(color_url(url, subs=args.extrapolation, errors=True))
|
||||
else:
|
||||
print(url)
|
||||
|
||||
# Return the number of URLs that were printed, only for testing purposes
|
||||
return len(urls)
|
||||
|
||||
|
||||
def url_test(args):
|
||||
# Collect statistics on how many URLs were correctly parsed
|
||||
total_urls = 0
|
||||
correct_names = 0
|
||||
correct_versions = 0
|
||||
|
||||
# Collect statistics on which regexes were matched and how often
|
||||
name_regex_dict = dict()
|
||||
name_count_dict = defaultdict(int)
|
||||
version_regex_dict = dict()
|
||||
version_count_dict = defaultdict(int)
|
||||
|
||||
tty.msg('Generating a summary of URL parsing in Spack...')
|
||||
|
||||
# Loop through all packages
|
||||
for pkg in spack.repo.all_packages():
|
||||
urls = set()
|
||||
|
||||
url = getattr(pkg.__class__, 'url', None)
|
||||
if url:
|
||||
urls.add(url)
|
||||
|
||||
for params in pkg.versions.values():
|
||||
url = params.get('url', None)
|
||||
if url:
|
||||
urls.add(url)
|
||||
|
||||
# Calculate statistics
|
||||
for url in urls:
|
||||
total_urls += 1
|
||||
|
||||
# Parse versions
|
||||
version = None
|
||||
try:
|
||||
version, vs, vl, vi, vregex = parse_version_offset(url)
|
||||
version_regex_dict[vi] = vregex
|
||||
version_count_dict[vi] += 1
|
||||
if version_parsed_correctly(pkg, version):
|
||||
correct_versions += 1
|
||||
except UndetectableVersionError:
|
||||
pass
|
||||
|
||||
# Parse names
|
||||
try:
|
||||
name, ns, nl, ni, nregex = parse_name_offset(url, version)
|
||||
name_regex_dict[ni] = nregex
|
||||
name_count_dict[ni] += 1
|
||||
if name_parsed_correctly(pkg, name):
|
||||
correct_names += 1
|
||||
except UndetectableNameError:
|
||||
pass
|
||||
|
||||
print()
|
||||
print(' Total URLs found: {0}'.format(total_urls))
|
||||
print(' Names correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})'.format(
|
||||
correct_names, total_urls, correct_names / total_urls))
|
||||
print(' Versions correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})'.format(
|
||||
correct_versions, total_urls, correct_versions / total_urls))
|
||||
print()
|
||||
|
||||
tty.msg('Statistics on name regular expressions:')
|
||||
|
||||
print()
|
||||
print(' Index Count Regular Expression')
|
||||
for ni in name_regex_dict:
|
||||
print(' {0:>3}: {1:>6} r{2!r}'.format(
|
||||
ni, name_count_dict[ni], name_regex_dict[ni]))
|
||||
print()
|
||||
|
||||
tty.msg('Statistics on version regular expressions:')
|
||||
|
||||
print()
|
||||
print(' Index Count Regular Expression')
|
||||
for vi in version_regex_dict:
|
||||
print(' {0:>3}: {1:>6} r{2!r}'.format(
|
||||
vi, version_count_dict[vi], version_regex_dict[vi]))
|
||||
print()
|
||||
|
||||
# Return statistics, only for testing purposes
|
||||
return (total_urls, correct_names, correct_versions,
|
||||
name_count_dict, version_count_dict)
|
||||
|
||||
|
||||
def print_name_and_version(url):
|
||||
"""Prints a URL. Underlines the detected name with dashes and
|
||||
the detected version with tildes.
|
||||
|
||||
:param str url: The url to parse
|
||||
"""
|
||||
name, ns, nl, ntup, ver, vs, vl, vtup = substitution_offsets(url)
|
||||
underlines = [' '] * max(ns + nl, vs + vl)
|
||||
for i in range(ns, ns + nl):
|
||||
underlines[i] = '-'
|
||||
for i in range(vs, vs + vl):
|
||||
underlines[i] = '~'
|
||||
|
||||
print(' {0}'.format(url))
|
||||
print(' {0}'.format(''.join(underlines)))
|
||||
|
||||
|
||||
def url_list_parsing(args, urls, url, pkg):
|
||||
"""Helper function for :func:`url_list`.
|
||||
|
||||
:param argparse.Namespace args: The arguments given to ``spack url list``
|
||||
:param set urls: List of URLs that have already been added
|
||||
:param url: A URL to potentially add to ``urls`` depending on ``args``
|
||||
:type url: str or None
|
||||
:param spack.package.PackageBase pkg: The Spack package
|
||||
:returns: The updated ``urls`` list
|
||||
:rtype: set
|
||||
"""
|
||||
if url:
|
||||
if args.incorrect_name:
|
||||
# Only add URLs whose name was incorrectly parsed
|
||||
try:
|
||||
name = parse_name(url)
|
||||
if not name_parsed_correctly(pkg, name):
|
||||
urls.add(url)
|
||||
except UndetectableNameError:
|
||||
urls.add(url)
|
||||
elif args.incorrect_version:
|
||||
# Only add URLs whose version was incorrectly parsed
|
||||
try:
|
||||
version = parse_version(url)
|
||||
if not version_parsed_correctly(pkg, version):
|
||||
urls.add(url)
|
||||
except UndetectableVersionError:
|
||||
urls.add(url)
|
||||
else:
|
||||
urls.add(url)
|
||||
|
||||
return urls
|
||||
|
||||
|
||||
def name_parsed_correctly(pkg, name):
|
||||
"""Determine if the name of a package was correctly parsed.
|
||||
|
||||
:param spack.package.PackageBase pkg: The Spack package
|
||||
:param str name: The name that was extracted from the URL
|
||||
:returns: True if the name was correctly parsed, else False
|
||||
:rtype: bool
|
||||
"""
|
||||
pkg_name = pkg.name
|
||||
|
||||
# After determining a name, `spack create` determines a build system.
|
||||
# Some build systems prepend a special string to the front of the name.
|
||||
# Since this can't be guessed from the URL, it would be unfair to say
|
||||
# that these names are incorrectly parsed, so we remove them.
|
||||
if pkg_name.startswith('r-'):
|
||||
pkg_name = pkg_name[2:]
|
||||
elif pkg_name.startswith('py-'):
|
||||
pkg_name = pkg_name[3:]
|
||||
elif pkg_name.startswith('octave-'):
|
||||
pkg_name = pkg_name[7:]
|
||||
|
||||
return name == pkg_name
|
||||
|
||||
|
||||
def version_parsed_correctly(pkg, version):
|
||||
"""Determine if the version of a package was correctly parsed.
|
||||
|
||||
:param spack.package.PackageBase pkg: The Spack package
|
||||
:param str version: The version that was extracted from the URL
|
||||
:returns: True if the version was correctly parsed, else False
|
||||
:rtype: bool
|
||||
"""
|
||||
# If the version parsed from the URL is listed in a version()
|
||||
# directive, we assume it was correctly parsed
|
||||
for pkg_version in pkg.versions:
|
||||
if str(pkg_version) == str(version):
|
||||
return True
|
||||
return False
|
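A short sketch of how the helpers in the new url.py fit together; the URL and the expected results below are illustrative assumptions, not captured Spack output:

    # Hypothetical walk-through of the url.py helpers above.
    from spack.url import (parse_name_offset, parse_version_offset,
                           substitute_version)

    url = 'http://example.com/libfoo-1.2.3.tar.gz'   # made-up URL

    # Both offset helpers return (value, start, length, regex index, regex).
    ver, vs, vl, vi, vregex = parse_version_offset(url)     # ver should be '1.2.3'
    name, ns, nl, ni, nregex = parse_name_offset(url, ver)  # name should be 'libfoo'

    # url_parse() uses substitute_version() as a sanity check on the offsets.
    print(substitute_version(url, '9.9.9b'))
    # expected: http://example.com/libfoo-9.9.9b.tar.gz

    # Note that name_parsed_correctly() strips build-system prefixes, so a
    # package named 'py-libfoo' still counts as correct if the URL yields 'libfoo'.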
@@ -1,79 +0,0 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
import spack.url
|
||||
from spack.util.web import find_versions_of_archive
|
||||
|
||||
description = "Show parsing of a URL, optionally spider web for versions."
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('url', help="url of a package archive")
|
||||
subparser.add_argument(
|
||||
'-s', '--spider', action='store_true',
|
||||
help="Spider the source page for versions.")
|
||||
|
||||
|
||||
def print_name_and_version(url):
|
||||
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
|
||||
underlines = [" "] * max(ns + nl, vs + vl)
|
||||
for i in range(ns, ns + nl):
|
||||
underlines[i] = '-'
|
||||
for i in range(vs, vs + vl):
|
||||
underlines[i] = '~'
|
||||
|
||||
print " %s" % url
|
||||
print " %s" % ''.join(underlines)
|
||||
|
||||
|
||||
def url_parse(parser, args):
|
||||
url = args.url
|
||||
|
||||
ver, vs, vl = spack.url.parse_version_offset(url, debug=True)
|
||||
name, ns, nl = spack.url.parse_name_offset(url, ver, debug=True)
|
||||
print
|
||||
|
||||
tty.msg("Detected:")
|
||||
try:
|
||||
print_name_and_version(url)
|
||||
except spack.url.UrlParseError as e:
|
||||
tty.error(str(e))
|
||||
|
||||
print ' name: %s' % name
|
||||
print ' version: %s' % ver
|
||||
|
||||
print
|
||||
tty.msg("Substituting version 9.9.9b:")
|
||||
newurl = spack.url.substitute_version(url, '9.9.9b')
|
||||
print_name_and_version(newurl)
|
||||
|
||||
if args.spider:
|
||||
print
|
||||
tty.msg("Spidering for versions:")
|
||||
versions = find_versions_of_archive(url)
|
||||
for v in sorted(versions):
|
||||
print "%-20s%s" % (v, versions[v])
|
@@ -25,7 +25,7 @@
|
||||
import argparse
|
||||
import spack.modules
|
||||
|
||||
description = "Add package to environment using dotkit."
|
||||
description = "add package to environment using dotkit"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
@@ -33,7 +33,7 @@ def setup_parser(subparser):
|
||||
message with -h. """
|
||||
subparser.add_argument(
|
||||
'spec', nargs=argparse.REMAINDER,
|
||||
help='Spec of package to use with dotkit.')
|
||||
help='spec of package to use with dotkit')
|
||||
|
||||
|
||||
def use(parser, args):
|
||||
|
@@ -26,12 +26,12 @@
|
||||
import llnl.util.tty as tty
|
||||
import spack
|
||||
|
||||
description = "List available versions of a package"
|
||||
description = "list available versions of a package"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument('package', metavar='PACKAGE',
|
||||
help='Package to list versions for')
|
||||
help='package to list versions for')
|
||||
|
||||
|
||||
def versions(parser, args):
|
||||
|
@@ -69,7 +69,7 @@
|
||||
import spack.cmd
|
||||
import llnl.util.tty as tty
|
||||
|
||||
description = "Produce a single-rooted directory view of a spec."
|
||||
description = "produce a single-rooted directory view of a spec"
|
||||
|
||||
|
||||
def setup_parser(sp):
|
||||
@@ -77,40 +77,40 @@ def setup_parser(sp):
|
||||
|
||||
sp.add_argument(
|
||||
'-v', '--verbose', action='store_true', default=False,
|
||||
help="Display verbose output.")
|
||||
help="display verbose output")
|
||||
sp.add_argument(
|
||||
'-e', '--exclude', action='append', default=[],
|
||||
help="Exclude packages with names matching the given regex pattern.")
|
||||
help="exclude packages with names matching the given regex pattern")
|
||||
sp.add_argument(
|
||||
'-d', '--dependencies', choices=['true', 'false', 'yes', 'no'],
|
||||
default='true',
|
||||
help="Follow dependencies.")
|
||||
help="follow dependencies")
|
||||
|
||||
ssp = sp.add_subparsers(metavar='ACTION', dest='action')
|
||||
|
||||
specs_opts = dict(metavar='spec', nargs='+',
|
||||
help="Seed specs of the packages to view.")
|
||||
help="seed specs of the packages to view")
|
||||
|
||||
# The action parameterizes the command but in keeping with Spack
|
||||
# patterns we make it a subcommand.
|
||||
file_system_view_actions = [
|
||||
ssp.add_parser(
|
||||
'symlink', aliases=['add', 'soft'],
|
||||
help='Add package files to a filesystem view via symbolic links.'),
|
||||
help='add package files to a filesystem view via symbolic links'),
|
||||
ssp.add_parser(
|
||||
'hardlink', aliases=['hard'],
|
||||
help='Add packages files to a filesystem via via hard links.'),
|
||||
help='add package files to a filesystem view via hard links'),
|
||||
ssp.add_parser(
|
||||
'remove', aliases=['rm'],
|
||||
help='Remove packages from a filesystem view.'),
|
||||
help='remove packages from a filesystem view'),
|
||||
ssp.add_parser(
|
||||
'statlink', aliases=['status', 'check'],
|
||||
help='Check status of packages in a filesystem view.')
|
||||
help='check status of packages in a filesystem view')
|
||||
]
|
||||
# All these options and arguments are common to every action.
|
||||
for act in file_system_view_actions:
|
||||
act.add_argument('path', nargs=1,
|
||||
help="Path to file system view directory.")
|
||||
help="path to file system view directory")
|
||||
act.add_argument('specs', **specs_opts)
|
||||
|
||||
return
|
||||
|
@@ -40,6 +40,7 @@
|
||||
|
||||
_imported_compilers_module = 'spack.compilers'
|
||||
_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
|
||||
_flags_instance_vars = ['cflags', 'cppflags', 'cxxflags', 'fflags']
|
||||
_other_instance_vars = ['modules', 'operating_system', 'environment',
|
||||
'extra_rpaths']
|
||||
_cache_config_file = []
|
||||
@@ -60,6 +61,9 @@ def _to_dict(compiler):
|
||||
d['paths'] = dict((attr, getattr(compiler, attr, None))
|
||||
for attr in _path_instance_vars)
|
||||
d['flags'] = dict((fname, fvals) for fname, fvals in compiler.flags)
|
||||
d['flags'].update(dict((attr, getattr(compiler, attr, None))
|
||||
for attr in _flags_instance_vars
|
||||
if hasattr(compiler, attr)))
|
||||
d['operating_system'] = str(compiler.operating_system)
|
||||
d['target'] = str(compiler.target)
|
||||
d['modules'] = compiler.modules if compiler.modules else []
|
||||
|
@@ -29,17 +29,16 @@
|
||||
|
||||
class Xl(Compiler):
|
||||
# Subclasses use possible names of C compiler
|
||||
cc_names = ['xlc', 'xlc_r']
|
||||
cc_names = ['xlc']
|
||||
|
||||
# Subclasses use possible names of C++ compiler
|
||||
cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']
|
||||
cxx_names = ['xlC', 'xlc++']
|
||||
|
||||
# Subclasses use possible names of Fortran 77 compiler
|
||||
f77_names = ['xlf', 'xlf_r']
|
||||
f77_names = ['xlf']
|
||||
|
||||
# Subclasses use possible names of Fortran 90 compiler
|
||||
fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
|
||||
'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']
|
||||
fc_names = ['xlf90', 'xlf95', 'xlf2003', 'xlf2008']
|
||||
|
||||
# Named wrapper links within spack.build_env_path
|
||||
link_paths = {'cc': 'xl/xlc',
|
||||
@@ -62,6 +61,14 @@ def cxx11_flag(self):
|
||||
def pic_flag(self):
|
||||
return "-qpic"
|
||||
|
||||
@property
|
||||
def fflags(self):
|
||||
# The -qzerosize flag is effective only for the Fortran 77
|
||||
# compilers and allows the use of zero size objects.
|
||||
# For Fortran 90 and beyond, it is set by default and has no impact.
|
||||
# Its use has no negative side effects.
|
||||
return "-qzerosize"
|
||||
|
||||
@classmethod
|
||||
def default_version(cls, comp):
|
||||
"""The '-qversion' is the standard option fo XL compilers.
|
||||
|
122
lib/spack/spack/compilers/xl_r.py
Normal file
@@ -0,0 +1,122 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2016, International Business Machines Corporation
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Serban Maerean, serban@us.ibm.com based on a similar file,
|
||||
# spack/lib/spack/spack/compilers/xl.py, produced by Todd Gamblin,
|
||||
# tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from spack.compiler import *
|
||||
import llnl.util.tty as tty
|
||||
from spack.version import ver
|
||||
|
||||
|
||||
class XlR(Compiler):
|
||||
# Subclasses use possible names of C compiler
|
||||
cc_names = ['xlc_r']
|
||||
|
||||
# Subclasses use possible names of C++ compiler
|
||||
cxx_names = ['xlC_r', 'xlc++_r']
|
||||
|
||||
# Subclasses use possible names of Fortran 77 compiler
|
||||
f77_names = ['xlf_r']
|
||||
|
||||
# Subclasses use possible names of Fortran 90 compiler
|
||||
fc_names = ['xlf90_r', 'xlf95_r', 'xlf2003_r', 'xlf2008_r']
|
||||
|
||||
# Named wrapper links within spack.build_env_path
|
||||
link_paths = {'cc': 'xl_r/xlc_r',
|
||||
'cxx': 'xl_r/xlc++_r',
|
||||
'f77': 'xl_r/xlf_r',
|
||||
'fc': 'xl_r/xlf90_r'}
|
||||
|
||||
@property
|
||||
def openmp_flag(self):
|
||||
return "-qsmp=omp"
|
||||
|
||||
@property
|
||||
def cxx11_flag(self):
|
||||
if self.version < ver('13.1'):
|
||||
tty.die("Only xlC 13.1 and above have some c++11 support.")
|
||||
else:
|
||||
return "-qlanglvl=extended0x"
|
||||
|
||||
@property
|
||||
def pic_flag(self):
|
||||
return("-qpic")
|
||||
|
||||
@property
|
||||
def fflags(self):
|
||||
# The -qzerosize flag is effective only for the Fortran 77
|
||||
# compilers and allows the use of zero size objects.
|
||||
# For Fortran 90 and beyond, it is set by default and has no impact.
|
||||
# Its use has no negative side effects.
|
||||
return "-qzerosize"
|
||||
|
||||
@classmethod
|
||||
def default_version(cls, comp):
|
||||
"""The '-qversion' is the standard option fo XL compilers.
|
||||
Output looks like this::
|
||||
|
||||
IBM XL C/C++ for Linux, V11.1 (5724-X14)
|
||||
Version: 11.01.0000.0000
|
||||
|
||||
or::
|
||||
|
||||
IBM XL Fortran for Linux, V13.1 (5724-X16)
|
||||
Version: 13.01.0000.0000
|
||||
|
||||
or::
|
||||
|
||||
IBM XL C/C++ for AIX, V11.1 (5724-X13)
|
||||
Version: 11.01.0000.0009
|
||||
|
||||
or::
|
||||
|
||||
IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0
|
||||
Version: 09.00.0000.0017
|
||||
"""
|
||||
|
||||
return get_compiler_version(
|
||||
comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')
|
||||
|
||||
@classmethod
|
||||
def fc_version(cls, fc):
|
||||
"""The fortran and C/C++ versions of the XL compiler are always
|
||||
two units apart. By this we mean that the fortran release that
|
||||
goes with XL C/C++ 11.1 is 13.1. Having such a difference in
|
||||
version number is confusing spack quite a lot. Most notably
|
||||
if you keep the versions as is the default xl compiler will
|
||||
only have fortran and no C/C++. So we associate the Fortran
|
||||
compiler with the version associated to the C/C++ compiler.
|
||||
One last stumble. Version numbers over 10 have at least a .1
|
||||
those under 10 a .0. There is no xlf 9.x or under currently
|
||||
available. BG/P and BG/L can such a compiler mix and possibly
|
||||
older version of AIX and linux on power.
|
||||
"""
|
||||
fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
|
||||
cver = float(fver) - 2
|
||||
if cver < 10:
|
||||
cver = cver - 0.1
|
||||
return str(cver)
|
||||
|
||||
@classmethod
|
||||
def f77_version(cls, f77):
|
||||
return cls.fc_version(f77)
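Illustration only (not part of the commit): the offset arithmetic in fc_version maps the version reported by the Fortran front end onto the matching C/C++ release. A minimal sketch, with a hypothetical helper mirroring that arithmetic:

# Hypothetical helper reproducing the arithmetic above, for illustration only.
def fortran_to_c_version(fver):
    cver = float(fver) - 2          # Fortran releases run two units ahead
    if cver < 10:
        cver = cver - 0.1           # releases below 10 end in .0, not .1
    return str(cver)

assert fortran_to_c_version('15.1') == '13.1'   # xlf_r 15.1 pairs with xlC 13.1
assert fortran_to_c_version('11.1') == '9.0'    # xlf_r 11.1 pairs with xlC 9.0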
|
@@ -289,9 +289,10 @@ def extends(spec, **kwargs):
|
||||
|
||||
"""
|
||||
def _execute(pkg):
|
||||
if pkg.extendees:
|
||||
msg = 'Packages can extend at most one other package.'
|
||||
raise DirectiveError(msg)
|
||||
# if pkg.extendees:
|
||||
# directive = 'extends'
|
||||
# msg = 'Packages can extend at most one other package.'
|
||||
# raise DirectiveError(directive, msg)
|
||||
|
||||
when = kwargs.pop('when', pkg.name)
|
||||
_depends_on(pkg, spec, when=when)
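For context, a hedged illustration (not part of this hunk): a package opts into the single-extendee rule enforced above via the extends directive. A minimal, hypothetical extension package might look like this:

from spack import *


class PyExample(Package):
    """Hypothetical package, shown only to illustrate the directive."""
    homepage = "https://example.com/py-example"
    url = "https://example.com/py-example-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder checksum

    # A package may extend at most one other package; a second extends()
    # here would raise the DirectiveError constructed above.
    extends('python')

    def install(self, spec, prefix):
        python('setup.py', 'install', '--prefix=%s' % prefix)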
|
||||
@@ -344,8 +345,9 @@ def variant(name, default=False, description=""):
|
||||
|
||||
def _execute(pkg):
|
||||
if not re.match(spack.spec.identifier_re, name):
|
||||
msg = 'Invalid variant name in {0}: \'{1}\''
|
||||
raise DirectiveError(msg.format(pkg.name, name))
|
||||
directive = 'variant'
|
||||
msg = "Invalid variant name in {0}: '{1}'"
|
||||
raise DirectiveError(directive, msg.format(pkg.name, name))
|
||||
|
||||
pkg.variants[name] = Variant(default, description)
|
||||
return _execute
|
||||
|
@@ -138,7 +138,7 @@ class AsciiGraph(object):
|
||||
def __init__(self):
|
||||
# These can be set after initialization or after a call to
|
||||
# graph() to change behavior.
|
||||
self.node_character = '*'
|
||||
self.node_character = 'o'
|
||||
self.debug = False
|
||||
self.indent = 0
|
||||
self.deptype = alldeps
|
||||
@@ -364,7 +364,7 @@ def _expand_right_line(self, index):
|
||||
self._set_state(EXPAND_RIGHT, index)
|
||||
self._out.write("\n")
|
||||
|
||||
def write(self, spec, **kwargs):
|
||||
def write(self, spec, color=None, out=None):
|
||||
"""Write out an ascii graph of the provided spec.
|
||||
|
||||
Arguments:
|
||||
@@ -378,14 +378,13 @@ def write(self, spec, **kwargs):
|
||||
based on output file.
|
||||
|
||||
"""
|
||||
out = kwargs.get('out', None)
|
||||
if not out:
|
||||
if out is None:
|
||||
out = sys.stdout
|
||||
|
||||
color = kwargs.get('color', None)
|
||||
if not color:
|
||||
if color is None:
|
||||
color = out.isatty()
|
||||
self._out = ColorStream(sys.stdout, color=color)
|
||||
|
||||
self._out = ColorStream(out, color=color)
|
||||
|
||||
# We'll traverse the spec in topo order as we graph it.
|
||||
topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)
|
||||
|
@@ -78,13 +78,14 @@ class InstallPhase(object):
|
||||
search for execution. The method is retrieved at __get__ time, so that
|
||||
it can be overridden by subclasses of whatever class declared the phases.
|
||||
|
||||
It also provides hooks to execute prerequisite and sanity checks.
|
||||
It also provides hooks to execute arbitrary callbacks before and after
|
||||
the phase.
|
||||
"""
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.preconditions = []
|
||||
self.sanity_checks = []
|
||||
self.run_before = []
|
||||
self.run_after = []
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
# The caller is a class that is trying to customize
|
||||
@@ -101,14 +102,13 @@ def phase_wrapper(spec, prefix):
|
||||
self._on_phase_start(instance)
|
||||
# Execute phase pre-conditions,
|
||||
# and give them the chance to fail
|
||||
for check in self.preconditions:
|
||||
# Do something sensible at some point
|
||||
check(instance)
|
||||
for callback in self.run_before:
|
||||
callback(instance)
|
||||
phase(spec, prefix)
|
||||
# Execute phase sanity_checks,
|
||||
# and give them the chance to fail
|
||||
for check in self.sanity_checks:
|
||||
check(instance)
|
||||
for callback in self.run_after:
|
||||
callback(instance)
|
||||
# Check instance attributes at the end of a phase
|
||||
self._on_phase_exit(instance)
|
||||
return phase_wrapper
|
||||
@@ -129,8 +129,8 @@ def copy(self):
|
||||
# This bug-fix was not back-ported in Python 2.6
|
||||
# http://bugs.python.org/issue1515
|
||||
other = InstallPhase(self.name)
|
||||
other.preconditions.extend(self.preconditions)
|
||||
other.sanity_checks.extend(self.sanity_checks)
|
||||
other.run_before.extend(self.run_before)
|
||||
other.run_after.extend(self.run_after)
|
||||
return other
|
||||
|
||||
|
||||
@@ -142,22 +142,23 @@ class PackageMeta(spack.directives.DirectiveMetaMixin):
|
||||
"""
|
||||
phase_fmt = '_InstallPhase_{0}'
|
||||
|
||||
_InstallPhase_sanity_checks = {}
|
||||
_InstallPhase_preconditions = {}
|
||||
_InstallPhase_run_before = {}
|
||||
_InstallPhase_run_after = {}
|
||||
|
||||
def __new__(mcs, name, bases, attr_dict):
|
||||
|
||||
def __new__(meta, name, bases, attr_dict):
|
||||
# Check if phases is in attr dict, then set
|
||||
# install phases wrappers
|
||||
if 'phases' in attr_dict:
|
||||
# Turn the strings in 'phases' into InstallPhase instances
|
||||
# and add them as private attributes
|
||||
_InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']] # NOQA: ignore=E501
|
||||
for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']): # NOQA: ignore=E501
|
||||
attr_dict[phase_name] = InstallPhase(callback_name)
|
||||
attr_dict['_InstallPhase_phases'] = _InstallPhase_phases
|
||||
|
||||
def _append_checks(check_name):
|
||||
def _flush_callbacks(check_name):
|
||||
# Name of the attribute I am going to check it exists
|
||||
attr_name = PackageMeta.phase_fmt.format(check_name)
|
||||
checks = getattr(meta, attr_name)
|
||||
checks = getattr(mcs, attr_name)
|
||||
if checks:
|
||||
for phase_name, funcs in checks.items():
|
||||
try:
|
||||
@@ -180,57 +181,61 @@ def _append_checks(check_name):
|
||||
PackageMeta.phase_fmt.format(phase_name)]
|
||||
getattr(phase, check_name).extend(funcs)
|
||||
# Clear the attribute for the next class
|
||||
setattr(meta, attr_name, {})
|
||||
|
||||
@classmethod
|
||||
def _register_checks(cls, check_type, *args):
|
||||
def _register_sanity_checks(func):
|
||||
attr_name = PackageMeta.phase_fmt.format(check_type)
|
||||
check_list = getattr(meta, attr_name)
|
||||
for item in args:
|
||||
checks = check_list.setdefault(item, [])
|
||||
checks.append(func)
|
||||
setattr(meta, attr_name, check_list)
|
||||
return func
|
||||
return _register_sanity_checks
|
||||
|
||||
@staticmethod
|
||||
def on_package_attributes(**attrs):
|
||||
def _execute_under_condition(func):
|
||||
@functools.wraps(func)
|
||||
def _wrapper(instance):
|
||||
# If all the attributes have the value we require, then
|
||||
# execute
|
||||
if all([getattr(instance, key, None) == value for key, value in attrs.items()]): # NOQA: ignore=E501
|
||||
func(instance)
|
||||
return _wrapper
|
||||
return _execute_under_condition
|
||||
|
||||
@classmethod
|
||||
def precondition(cls, *args):
|
||||
return cls._register_checks('preconditions', *args)
|
||||
|
||||
@classmethod
|
||||
def sanity_check(cls, *args):
|
||||
return cls._register_checks('sanity_checks', *args)
|
||||
|
||||
if all([not hasattr(x, '_register_checks') for x in bases]):
|
||||
attr_dict['_register_checks'] = _register_checks
|
||||
|
||||
if all([not hasattr(x, 'sanity_check') for x in bases]):
|
||||
attr_dict['sanity_check'] = sanity_check
|
||||
|
||||
if all([not hasattr(x, 'precondition') for x in bases]):
|
||||
attr_dict['precondition'] = precondition
|
||||
|
||||
if all([not hasattr(x, 'on_package_attributes') for x in bases]):
|
||||
attr_dict['on_package_attributes'] = on_package_attributes
|
||||
setattr(mcs, attr_name, {})
|
||||
|
||||
# Preconditions
|
||||
_append_checks('preconditions')
|
||||
_flush_callbacks('run_before')
|
||||
# Sanity checks
|
||||
_append_checks('sanity_checks')
|
||||
return super(PackageMeta, meta).__new__(meta, name, bases, attr_dict)
|
||||
_flush_callbacks('run_after')
|
||||
return super(PackageMeta, mcs).__new__(mcs, name, bases, attr_dict)
|
||||
|
||||
@staticmethod
|
||||
def register_callback(check_type, *phases):
|
||||
def _decorator(func):
|
||||
attr_name = PackageMeta.phase_fmt.format(check_type)
|
||||
check_list = getattr(PackageMeta, attr_name)
|
||||
for item in phases:
|
||||
checks = check_list.setdefault(item, [])
|
||||
checks.append(func)
|
||||
setattr(PackageMeta, attr_name, check_list)
|
||||
return func
|
||||
return _decorator
|
||||
|
||||
|
||||
def run_before(*phases):
|
||||
"""Registers a method of a package to be run before a given phase"""
|
||||
return PackageMeta.register_callback('run_before', *phases)
|
||||
|
||||
|
||||
def run_after(*phases):
|
||||
"""Registers a method of a package to be run after a given phase"""
|
||||
return PackageMeta.register_callback('run_after', *phases)
|
||||
|
||||
|
||||
def on_package_attributes(**attr_dict):
|
||||
"""Executes the decorated method only if at the moment of calling
|
||||
the instance has attributes that are equal to certain values.
|
||||
|
||||
:param dict attr_dict: dictionary mapping attribute names to their
|
||||
required values
|
||||
"""
|
||||
def _execute_under_condition(func):
|
||||
|
||||
@functools.wraps(func)
|
||||
def _wrapper(instance, *args, **kwargs):
|
||||
# If all the attributes have the value we require, then execute
|
||||
has_all_attributes = all(
|
||||
[hasattr(instance, key) for key in attr_dict]
|
||||
)
|
||||
if has_all_attributes:
|
||||
has_the_right_values = all(
|
||||
[getattr(instance, key) == value for key, value in attr_dict.items()] # NOQA: ignore=E501
|
||||
)
|
||||
if has_the_right_values:
|
||||
func(instance, *args, **kwargs)
|
||||
return _wrapper
|
||||
|
||||
return _execute_under_condition
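A minimal sketch of how a package combines these decorators (hypothetical package and method names; the mock cmake-client package further down in this diff exercises the same machinery):

class Example(Package):
    """Hypothetical package illustrating phase callbacks."""
    phases = ['install']

    def install(self, spec, prefix):
        make('install')

    # Registered to run after the 'install' phase, but the body only
    # executes when the package instance has run_tests == True.
    @run_after('install')
    @on_package_attributes(run_tests=True)
    def check_install(self):
        make('test')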
|
||||
|
||||
|
||||
class PackageBase(object):
|
||||
@@ -1555,10 +1560,9 @@ def do_activate(self, force=False):
|
||||
|
||||
# Activate any package dependencies that are also extensions.
|
||||
if not force:
|
||||
for spec in self.spec.traverse(root=False, deptype='run'):
|
||||
if spec.package.extends(self.extendee_spec):
|
||||
if not spec.package.activated:
|
||||
spec.package.do_activate(force=force)
|
||||
for spec in self.dependency_activations():
|
||||
if not spec.package.activated:
|
||||
spec.package.do_activate(force=force)
|
||||
|
||||
self.extendee_spec.package.activate(self, **self.extendee_args)
|
||||
|
||||
@@ -1566,6 +1570,10 @@ def do_activate(self, force=False):
|
||||
tty.msg("Activated extension %s for %s" %
|
||||
(self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
|
||||
|
||||
def dependency_activations(self):
|
||||
return (spec for spec in self.spec.traverse(root=False, deptype='run')
|
||||
if spec.package.extends(self.extendee_spec))
|
||||
|
||||
def activate(self, extension, **kwargs):
|
||||
"""Symlinks all files from the extension into extendee's install dir.
|
||||
|
||||
@@ -1704,15 +1712,61 @@ def rpath_args(self):
|
||||
"""
|
||||
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
|
||||
|
||||
build_time_test_callbacks = None
|
||||
|
||||
@on_package_attributes(run_tests=True)
|
||||
def _run_default_build_time_test_callbacks(self):
|
||||
"""Tries to call all the methods that are listed in the attribute
|
||||
``build_time_test_callbacks`` if ``self.run_tests is True``.
|
||||
|
||||
If ``build_time_test_callbacks is None`` returns immediately.
|
||||
"""
|
||||
if self.build_time_test_callbacks is None:
|
||||
return
|
||||
|
||||
for name in self.build_time_test_callbacks:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
tty.msg('RUN-TESTS: build-time tests [{0}]'.format(name))
|
||||
fn()
|
||||
except AttributeError:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]'
|
||||
tty.warn(msg.format(name))
|
||||
|
||||
install_time_test_callbacks = None
|
||||
|
||||
@on_package_attributes(run_tests=True)
|
||||
def _run_default_install_time_test_callbacks(self):
|
||||
"""Tries to call all the methods that are listed in the attribute
|
||||
``install_time_test_callbacks`` if ``self.run_tests is True``.
|
||||
|
||||
If ``install_time_test_callbacks is None`` returns immediately.
|
||||
"""
|
||||
if self.install_time_test_callbacks is None:
|
||||
return
|
||||
|
||||
for name in self.install_time_test_callbacks:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
tty.msg('RUN-TESTS: install-time tests [{0}]'.format(name))
|
||||
fn()
|
||||
except AttributeError:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]'
|
||||
tty.warn(msg.format(name))
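A sketch of how these hooks are meant to be wired up by a build-system base class (assumed usage; the concrete build-system classes are not part of this hunk). The class lists method names, and the default callbacks above look them up and call them after the corresponding phase when self.run_tests is true:

class HypotheticalBuildSystemPackage(PackageBase):
    """Hypothetical base class wiring the default test callbacks."""
    phases = ['build', 'install']

    # Method names looked up on the package and called by the
    # _run_default_*_test_callbacks helpers defined above.
    build_time_test_callbacks = ['check']
    install_time_test_callbacks = ['installcheck']

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)
    run_after('install')(PackageBase._run_default_install_time_test_callbacks)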
|
||||
|
||||
|
||||
class Package(PackageBase):
|
||||
"""General purpose class with a single ``install``
|
||||
phase that needs to be coded by packagers.
|
||||
"""
|
||||
#: The one and only phase
|
||||
phases = ['install']
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
#: This attribute is used in UI queries that require to know which
|
||||
#: build-system class we are using
|
||||
build_system_class = 'Package'
|
||||
# This will be used as a registration decorator in user
|
||||
# packages, if need be
|
||||
PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
||||
|
||||
|
||||
def install_dependency_symlinks(pkg, spec, prefix):
|
||||
|
@@ -152,7 +152,9 @@
|
||||
'UnsatisfiableArchitectureSpecError',
|
||||
'UnsatisfiableProviderSpecError',
|
||||
'UnsatisfiableDependencySpecError',
|
||||
'AmbiguousHashError']
|
||||
'AmbiguousHashError',
|
||||
'InvalidHashError',
|
||||
'RedundantSpecError']
|
||||
|
||||
# Valid pattern for an identifier in Spack
|
||||
identifier_re = r'\w[\w-]*'
|
||||
@@ -1388,8 +1390,9 @@ def _replace_with(self, concrete):
|
||||
dependent = dep_spec.parent
|
||||
deptypes = dep_spec.deptypes
|
||||
|
||||
# remove self from all dependents.
|
||||
del dependent._dependencies[self.name]
|
||||
# remove self from all dependents, unless it is already removed
|
||||
if self.name in dependent._dependencies:
|
||||
del dependent._dependencies[self.name]
|
||||
|
||||
# add the replacement, unless it is already a dep of dependent.
|
||||
if concrete.name not in dependent._dependencies:
|
||||
@@ -1992,7 +1995,7 @@ def _autospec(self, spec_like):
|
||||
except SpecError:
|
||||
return parse_anonymous_spec(spec_like, self.name)
|
||||
|
||||
def satisfies(self, other, deps=True, strict=False):
|
||||
def satisfies(self, other, deps=True, strict=False, strict_deps=False):
|
||||
"""Determine if this spec satisfies all constraints of another.
|
||||
|
||||
There are two senses for satisfies:
|
||||
@@ -2008,8 +2011,8 @@ def satisfies(self, other, deps=True, strict=False):
|
||||
other = self._autospec(other)
|
||||
|
||||
# The only way to satisfy a concrete spec is to match its hash exactly.
|
||||
if other._concrete:
|
||||
return self._concrete and self.dag_hash() == other.dag_hash()
|
||||
if other.concrete:
|
||||
return self.concrete and self.dag_hash() == other.dag_hash()
|
||||
|
||||
# A concrete provider can satisfy a virtual dependency.
|
||||
if not self.virtual and other.virtual:
|
||||
@@ -2066,7 +2069,8 @@ def satisfies(self, other, deps=True, strict=False):
|
||||
# If we need to descend into dependencies, do it, otherwise we're done.
|
||||
if deps:
|
||||
deps_strict = strict
|
||||
if not (self.name and other.name):
|
||||
if self.concrete and not other.name:
|
||||
# We're dealing with existing specs
|
||||
deps_strict = True
|
||||
return self.satisfies_dependencies(other, strict=deps_strict)
|
||||
else:
|
||||
@@ -2082,9 +2086,10 @@ def satisfies_dependencies(self, other, strict=False):
|
||||
if other._dependencies and not self._dependencies:
|
||||
return False
|
||||
|
||||
alldeps = set(d.name for d in self.traverse(root=False))
|
||||
if not all(dep.name in alldeps
|
||||
for dep in other.traverse(root=False)):
|
||||
selfdeps = self.traverse(root=False)
|
||||
otherdeps = other.traverse(root=False)
|
||||
if not all(any(d.satisfies(dep) for d in selfdeps)
|
||||
for dep in otherdeps):
|
||||
return False
|
||||
|
||||
elif not self._dependencies or not other._dependencies:
|
||||
@@ -2696,30 +2701,28 @@ def do_parse(self):
|
||||
specs = []
|
||||
|
||||
try:
|
||||
while self.next or self.previous:
|
||||
while self.next:
|
||||
# TODO: clean this parsing up a bit
|
||||
if self.previous:
|
||||
# We picked up the name of this spec while finishing the
|
||||
# previous spec
|
||||
specs.append(self.spec(self.previous.value))
|
||||
self.previous = None
|
||||
elif self.accept(ID):
|
||||
if self.accept(ID):
|
||||
self.previous = self.token
|
||||
if self.accept(EQ):
|
||||
# We're either parsing an anonymous spec beginning
|
||||
# with a key-value pair or adding a key-value pair
|
||||
# to the last spec
|
||||
# We're parsing an anonymous spec beginning with a
|
||||
# key-value pair.
|
||||
if not specs:
|
||||
specs.append(self.spec(None))
|
||||
self.expect(VAL)
|
||||
# Raise an error if the previous spec is already
|
||||
# concrete (assigned by hash)
|
||||
if specs[-1]._hash:
|
||||
raise RedundantSpecError(specs[-1],
|
||||
'key-value pair')
|
||||
specs[-1]._add_flag(
|
||||
self.previous.value, self.token.value)
|
||||
self.previous = None
|
||||
else:
|
||||
# We're parsing a new spec by name
|
||||
value = self.previous.value
|
||||
self.previous = None
|
||||
specs.append(self.spec(value))
|
||||
specs.append(self.spec(self.token.value))
|
||||
elif self.accept(HASH):
|
||||
# We're finding a spec by hash
|
||||
specs.append(self.spec_by_hash())
|
||||
@@ -2727,27 +2730,38 @@ def do_parse(self):
|
||||
elif self.accept(DEP):
|
||||
if not specs:
|
||||
# We're parsing an anonymous spec beginning with a
|
||||
# dependency
|
||||
self.previous = self.token
|
||||
# dependency. Push the token to recover after creating
|
||||
# anonymous spec
|
||||
self.push_tokens([self.token])
|
||||
specs.append(self.spec(None))
|
||||
self.previous = None
|
||||
if self.accept(HASH):
|
||||
# We're finding a dependency by hash for an anonymous
|
||||
# spec
|
||||
dep = self.spec_by_hash()
|
||||
else:
|
||||
# We're adding a dependency to the last spec
|
||||
self.expect(ID)
|
||||
dep = self.spec(self.token.value)
|
||||
if self.accept(HASH):
|
||||
# We're finding a dependency by hash for an
|
||||
# anonymous spec
|
||||
dep = self.spec_by_hash()
|
||||
else:
|
||||
# We're adding a dependency to the last spec
|
||||
self.expect(ID)
|
||||
dep = self.spec(self.token.value)
|
||||
|
||||
# command line deps get empty deptypes now.
|
||||
# Real deptypes are assigned later per packages.
|
||||
specs[-1]._add_dependency(dep, ())
|
||||
# Raise an error if the previous spec is already
|
||||
# concrete (assigned by hash)
|
||||
if specs[-1]._hash:
|
||||
raise RedundantSpecError(specs[-1], 'dependency')
|
||||
# command line deps get empty deptypes now.
|
||||
# Real deptypes are assigned later per packages.
|
||||
specs[-1]._add_dependency(dep, ())
|
||||
|
||||
else:
|
||||
# If the next token can be part of a valid anonymous spec,
|
||||
# create the anonymous spec
|
||||
if self.next.type in (AT, ON, OFF, PCT):
|
||||
# Raise an error if the previous spec is already
|
||||
# concrete (assigned by hash)
|
||||
if specs and specs[-1]._hash:
|
||||
raise RedundantSpecError(specs[-1],
|
||||
'compiler, version, '
|
||||
'or variant')
|
||||
specs.append(self.spec(None))
|
||||
else:
|
||||
self.unexpected_token()
|
||||
@@ -2782,13 +2796,13 @@ def spec_by_hash(self):
|
||||
|
||||
if len(matches) != 1:
|
||||
raise AmbiguousHashError(
|
||||
"Multiple packages specify hash %s." % self.token.value,
|
||||
*matches)
|
||||
"Multiple packages specify hash beginning '%s'."
|
||||
% self.token.value, *matches)
|
||||
|
||||
return matches[0]
|
||||
|
||||
def spec(self, name):
|
||||
"""Parse a spec out of the input. If a spec is supplied, then initialize
|
||||
"""Parse a spec out of the input. If a spec is supplied, initialize
|
||||
and return it instead of creating a new one."""
|
||||
if name:
|
||||
spec_namespace, dot, spec_name = name.rpartition('.')
|
||||
@@ -2822,16 +2836,6 @@ def spec(self, name):
|
||||
# unspecified or not.
|
||||
added_version = False
|
||||
|
||||
if self.previous and self.previous.value == DEP:
|
||||
if self.accept(HASH):
|
||||
spec.add_dependency(self.spec_by_hash())
|
||||
else:
|
||||
self.expect(ID)
|
||||
if self.accept(EQ):
|
||||
raise SpecParseError(spack.parse.ParseError(
|
||||
"", "", "Expected dependency received anonymous spec"))
|
||||
spec.add_dependency(self.spec(self.token.value))
|
||||
|
||||
while self.next:
|
||||
if self.accept(AT):
|
||||
vlist = self.version_list()
|
||||
@@ -2857,13 +2861,25 @@ def spec(self, name):
|
||||
self.previous = None
|
||||
else:
|
||||
# We've found the start of a new spec. Go back to do_parse
|
||||
# and read this token again.
|
||||
self.push_tokens([self.token])
|
||||
self.previous = None
|
||||
break
|
||||
|
||||
elif self.accept(HASH):
|
||||
# Get spec by hash and confirm it matches what we already have
|
||||
hash_spec = self.spec_by_hash()
|
||||
if hash_spec.satisfies(spec):
|
||||
spec = hash_spec
|
||||
break
|
||||
else:
|
||||
raise InvalidHashError(spec, hash_spec.dag_hash())
|
||||
|
||||
else:
|
||||
break
|
||||
|
||||
# If there was no version in the spec, consider it an open range
|
||||
if not added_version:
|
||||
if not added_version and not spec._hash:
|
||||
spec.versions = VersionList(':')
|
||||
|
||||
return spec
|
||||
@@ -3135,6 +3151,21 @@ def __init__(self, provided, required):
|
||||
|
||||
class AmbiguousHashError(SpecError):
|
||||
def __init__(self, msg, *specs):
|
||||
super(AmbiguousHashError, self).__init__(msg)
|
||||
for spec in specs:
|
||||
print(' ', spec.format('$.$@$%@+$+$=$#'))
|
||||
specs_str = '\n ' + '\n '.join(spec.format('$.$@$%@+$+$=$#')
|
||||
for spec in specs)
|
||||
super(AmbiguousHashError, self).__init__(msg + specs_str)
|
||||
|
||||
|
||||
class InvalidHashError(SpecError):
|
||||
def __init__(self, spec, hash):
|
||||
super(InvalidHashError, self).__init__(
|
||||
"The spec specified by %s does not match provided spec %s"
|
||||
% (hash, spec))
|
||||
|
||||
|
||||
class RedundantSpecError(SpecError):
|
||||
def __init__(self, spec, addition):
|
||||
super(RedundantSpecError, self).__init__(
|
||||
"Attempting to add %s to spec %s which is already concrete."
|
||||
" This is likely the result of adding to a spec specified by hash."
|
||||
% (addition, spec))
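For reference, a rough sketch of the spec syntax these errors guard (illustration only; 'abc123' is a made-up hash prefix and the lookups succeed only if it abbreviates the hash of something actually installed):

import spack.spec as sp

# '/abc123' selects an installed spec by DAG hash;
# 'callpath ^/abc123' selects an installed dependency by hash.
s = sp.Spec('/abc123')
t = sp.Spec('callpath ^/abc123')

# sp.Spec('/abc123 @1.0')  -> RedundantSpecError: the hash already pins
#                             a concrete spec, so '@1.0' is redundant
# sp.Spec('zlib/abc123')   -> InvalidHashError if abc123 is not the hash
#                             of an installed zlib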
|
||||
|
@@ -32,12 +32,13 @@
|
||||
@pytest.fixture(
|
||||
scope='function',
|
||||
params=[
|
||||
('configure', 'autotools'),
|
||||
('configure', 'autotools'),
|
||||
('CMakeLists.txt', 'cmake'),
|
||||
('SConstruct', 'scons'),
|
||||
('setup.py', 'python'),
|
||||
('NAMESPACE', 'r'),
|
||||
('foobar', 'unknown')
|
||||
('SConstruct', 'scons'),
|
||||
('setup.py', 'python'),
|
||||
('NAMESPACE', 'r'),
|
||||
('WORKSPACE', 'bazel'),
|
||||
('foobar', 'generic')
|
||||
]
|
||||
)
|
||||
def url_and_build_system(request, tmpdir):
|
||||
|
116
lib/spack/spack/test/cmd/url.py
Normal file
@@ -0,0 +1,116 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import argparse
|
||||
import pytest
|
||||
|
||||
from spack.cmd.url import *
|
||||
|
||||
|
||||
@pytest.fixture(scope='module')
|
||||
def parser():
|
||||
"""Returns the parser for the ``url`` command"""
|
||||
parser = argparse.ArgumentParser()
|
||||
setup_parser(parser)
|
||||
return parser
|
||||
|
||||
|
||||
class MyPackage:
|
||||
def __init__(self, name, versions):
|
||||
self.name = name
|
||||
self.versions = versions
|
||||
|
||||
|
||||
def test_name_parsed_correctly():
|
||||
# Expected True
|
||||
assert name_parsed_correctly(MyPackage('netcdf', []), 'netcdf')
|
||||
assert name_parsed_correctly(MyPackage('r-devtools', []), 'devtools')
|
||||
assert name_parsed_correctly(MyPackage('py-numpy', []), 'numpy')
|
||||
assert name_parsed_correctly(MyPackage('octave-splines', []), 'splines')
|
||||
|
||||
# Expected False
|
||||
assert not name_parsed_correctly(MyPackage('', []), 'hdf5')
|
||||
assert not name_parsed_correctly(MyPackage('hdf5', []), '')
|
||||
assert not name_parsed_correctly(MyPackage('imagemagick', []), 'ImageMagick') # noqa
|
||||
assert not name_parsed_correctly(MyPackage('yaml-cpp', []), 'yamlcpp')
|
||||
assert not name_parsed_correctly(MyPackage('yamlcpp', []), 'yaml-cpp')
|
||||
assert not name_parsed_correctly(MyPackage('r-py-parser', []), 'parser')
|
||||
assert not name_parsed_correctly(MyPackage('oce', []), 'oce-0.18.0') # noqa
|
||||
|
||||
|
||||
def test_version_parsed_correctly():
|
||||
# Expected True
|
||||
assert version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.3')
|
||||
assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4a')
|
||||
assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4b')
|
||||
|
||||
# Expected False
|
||||
assert not version_parsed_correctly(MyPackage('', []), '1.2.3')
|
||||
assert not version_parsed_correctly(MyPackage('', ['1.2.3']), '')
|
||||
assert not version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.4')
|
||||
assert not version_parsed_correctly(MyPackage('', ['3.4a']), '3.4')
|
||||
assert not version_parsed_correctly(MyPackage('', ['3.4']), '3.4b')
|
||||
assert not version_parsed_correctly(MyPackage('', ['0.18.0']), 'oce-0.18.0') # noqa
|
||||
|
||||
|
||||
def test_url_parse(parser):
|
||||
args = parser.parse_args(['parse', 'http://zlib.net/fossils/zlib-1.2.10.tar.gz'])
|
||||
url(parser, args)
|
||||
|
||||
|
||||
@pytest.mark.xfail
|
||||
def test_url_parse_xfail(parser):
|
||||
# No version in URL
|
||||
args = parser.parse_args(['parse', 'http://www.netlib.org/voronoi/triangle.zip'])
|
||||
url(parser, args)
|
||||
|
||||
|
||||
def test_url_list(parser):
|
||||
args = parser.parse_args(['list'])
|
||||
total_urls = url_list(args)
|
||||
|
||||
# The following two options should not change the number of URLs printed.
|
||||
args = parser.parse_args(['list', '--color', '--extrapolation'])
|
||||
colored_urls = url_list(args)
|
||||
assert colored_urls == total_urls
|
||||
|
||||
# The following two options should print fewer URLs than the default.
|
||||
# If they print the same number of URLs, something is horribly broken.
|
||||
# If they say we missed 0 URLs, something is probably broken too.
|
||||
args = parser.parse_args(['list', '--incorrect-name'])
|
||||
incorrect_name_urls = url_list(args)
|
||||
assert 0 < incorrect_name_urls < total_urls
|
||||
|
||||
args = parser.parse_args(['list', '--incorrect-version'])
|
||||
incorrect_version_urls = url_list(args)
|
||||
assert 0 < incorrect_version_urls < total_urls
|
||||
|
||||
|
||||
def test_url_test(parser):
|
||||
args = parser.parse_args(['test'])
|
||||
(total_urls, correct_names, correct_versions,
|
||||
name_count_dict, version_count_dict) = url_test(args)
|
||||
|
||||
assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls # noqa
|
||||
assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls # noqa
|
138
lib/spack/spack/test/graph.py
Normal file
@@ -0,0 +1,138 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/llnl/spack
|
||||
# Please also see the LICENSE file for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
from StringIO import StringIO
|
||||
|
||||
from spack.spec import Spec
|
||||
from spack.graph import AsciiGraph, topological_sort, graph_dot
|
||||
|
||||
|
||||
def test_topo_sort(builtin_mock):
|
||||
"""Test topo sort gives correct order."""
|
||||
s = Spec('mpileaks').normalized()
|
||||
|
||||
topo = topological_sort(s)
|
||||
|
||||
assert topo.index('mpileaks') < topo.index('callpath')
|
||||
assert topo.index('mpileaks') < topo.index('mpi')
|
||||
assert topo.index('mpileaks') < topo.index('dyninst')
|
||||
assert topo.index('mpileaks') < topo.index('libdwarf')
|
||||
assert topo.index('mpileaks') < topo.index('libelf')
|
||||
|
||||
assert topo.index('callpath') < topo.index('mpi')
|
||||
assert topo.index('callpath') < topo.index('dyninst')
|
||||
assert topo.index('callpath') < topo.index('libdwarf')
|
||||
assert topo.index('callpath') < topo.index('libelf')
|
||||
|
||||
assert topo.index('dyninst') < topo.index('libdwarf')
|
||||
assert topo.index('dyninst') < topo.index('libelf')
|
||||
|
||||
assert topo.index('libdwarf') < topo.index('libelf')
|
||||
|
||||
|
||||
def test_static_graph_mpileaks(builtin_mock):
|
||||
"""Test a static spack graph for a simple package."""
|
||||
s = Spec('mpileaks').normalized()
|
||||
|
||||
stream = StringIO()
|
||||
graph_dot([s], static=True, out=stream)
|
||||
|
||||
dot = stream.getvalue()
|
||||
|
||||
assert ' "mpileaks" [label="mpileaks"]\n' in dot
|
||||
assert ' "dyninst" [label="dyninst"]\n' in dot
|
||||
assert ' "callpath" [label="callpath"]\n' in dot
|
||||
assert ' "libelf" [label="libelf"]\n' in dot
|
||||
assert ' "libdwarf" [label="libdwarf"]\n' in dot
|
||||
|
||||
assert ' "dyninst" -> "libdwarf"\n' in dot
|
||||
assert ' "callpath" -> "dyninst"\n' in dot
|
||||
assert ' "mpileaks" -> "mpi"\n' in dot
|
||||
assert ' "libdwarf" -> "libelf"\n' in dot
|
||||
assert ' "callpath" -> "mpi"\n' in dot
|
||||
assert ' "mpileaks" -> "callpath"\n' in dot
|
||||
assert ' "dyninst" -> "libelf"\n' in dot
|
||||
|
||||
|
||||
def test_dynamic_dot_graph_mpileaks(builtin_mock):
|
||||
"""Test dynamically graphing the mpileaks package."""
|
||||
s = Spec('mpileaks').normalized()
|
||||
|
||||
stream = StringIO()
|
||||
graph_dot([s], static=False, out=stream)
|
||||
|
||||
dot = stream.getvalue()
|
||||
|
||||
mpileaks_hash, mpileaks_lbl = s.dag_hash(), s.format('$_$#')
|
||||
mpi_hash, mpi_lbl = s['mpi'].dag_hash(), s['mpi'].format('$_$#')
|
||||
callpath_hash, callpath_lbl = (
|
||||
s['callpath'].dag_hash(), s['callpath'].format('$_$#'))
|
||||
dyninst_hash, dyninst_lbl = (
|
||||
s['dyninst'].dag_hash(), s['dyninst'].format('$_$#'))
|
||||
libdwarf_hash, libdwarf_lbl = (
|
||||
s['libdwarf'].dag_hash(), s['libdwarf'].format('$_$#'))
|
||||
libelf_hash, libelf_lbl = (
|
||||
s['libelf'].dag_hash(), s['libelf'].format('$_$#'))
|
||||
|
||||
assert ' "%s" [label="%s"]\n' % (mpileaks_hash, mpileaks_lbl) in dot
|
||||
assert ' "%s" [label="%s"]\n' % (callpath_hash, callpath_lbl) in dot
|
||||
assert ' "%s" [label="%s"]\n' % (mpi_hash, mpi_lbl) in dot
|
||||
assert ' "%s" [label="%s"]\n' % (dyninst_hash, dyninst_lbl) in dot
|
||||
assert ' "%s" [label="%s"]\n' % (libdwarf_hash, libdwarf_lbl) in dot
|
||||
assert ' "%s" [label="%s"]\n' % (libelf_hash, libelf_lbl) in dot
|
||||
|
||||
assert ' "%s" -> "%s"\n' % (dyninst_hash, libdwarf_hash) in dot
|
||||
assert ' "%s" -> "%s"\n' % (callpath_hash, dyninst_hash) in dot
|
||||
assert ' "%s" -> "%s"\n' % (mpileaks_hash, mpi_hash) in dot
|
||||
assert ' "%s" -> "%s"\n' % (libdwarf_hash, libelf_hash) in dot
|
||||
assert ' "%s" -> "%s"\n' % (callpath_hash, mpi_hash) in dot
|
||||
assert ' "%s" -> "%s"\n' % (mpileaks_hash, callpath_hash) in dot
|
||||
assert ' "%s" -> "%s"\n' % (dyninst_hash, libelf_hash) in dot
|
||||
|
||||
|
||||
def test_ascii_graph_mpileaks(builtin_mock):
|
||||
"""Test dynamically graphing the mpileaks package."""
|
||||
s = Spec('mpileaks').normalized()
|
||||
|
||||
stream = StringIO()
|
||||
graph = AsciiGraph()
|
||||
graph.write(s, out=stream, color=False)
|
||||
string = stream.getvalue()
|
||||
|
||||
# Some lines in spack graph still have trailing space
|
||||
# TODO: fix this.
|
||||
string = '\n'.join([line.rstrip() for line in string.split('\n')])
|
||||
|
||||
assert string == r'''o mpileaks
|
||||
|\
|
||||
| o callpath
|
||||
|/|
|
||||
o | mpi
|
||||
/
|
||||
o dyninst
|
||||
|\
|
||||
| o libdwarf
|
||||
|/
|
||||
o libelf
|
||||
'''
|
@@ -108,6 +108,13 @@ def test_inheritance_of_diretives():
|
||||
assert 'mpi' in s
|
||||
|
||||
|
||||
def test_dependency_extensions():
|
||||
s = Spec('extension2')
|
||||
s.concretize()
|
||||
deps = set(x.name for x in s.package.dependency_activations())
|
||||
assert deps == set(['extension1'])
|
||||
|
||||
|
||||
def test_import_class_from_package(builtin_mock):
|
||||
from spack.pkg.builtin.mock.mpich import Mpich # noqa
|
||||
|
||||
|
@@ -287,6 +287,14 @@ def test_unsatisfiable_compiler_flag(self):
|
||||
# 'mpich' is concrete:
|
||||
check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True)
|
||||
|
||||
def test_copy_satisfies_transitive(self):
|
||||
spec = Spec('dttop')
|
||||
spec.concretize()
|
||||
copy = spec.copy()
|
||||
for s in spec.traverse():
|
||||
assert s.satisfies(copy[s.name])
|
||||
assert copy[s.name].satisfies(s)
|
||||
|
||||
def test_unsatisfiable_compiler_flag_mismatch(self):
|
||||
# No match in specs
|
||||
check_unsatisfiable(
|
||||
|
@@ -132,6 +132,13 @@ def test_package_names(self):
|
||||
self.check_parse("mvapich_foo")
|
||||
self.check_parse("_mvapich_foo")
|
||||
|
||||
def test_anonymous_specs(self):
|
||||
self.check_parse("%intel")
|
||||
self.check_parse("@2.7")
|
||||
self.check_parse("^zlib")
|
||||
self.check_parse("+foo")
|
||||
self.check_parse("arch=test-None-None", "platform=test")
|
||||
|
||||
def test_simple_dependence(self):
|
||||
self.check_parse("openmpi^hwloc")
|
||||
self.check_parse("openmpi^hwloc^libunwind")
|
||||
@@ -218,6 +225,115 @@ def test_parse_errors(self):
|
||||
errors = ['x@@1.2', 'x ^y@@1.2', 'x@1.2::', 'x::']
|
||||
self._check_raises(SpecParseError, errors)
|
||||
|
||||
def test_spec_by_hash(self, database):
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
# Make sure the database is still the shape we expect
|
||||
assert len(specs) > 3
|
||||
|
||||
self.check_parse(str(specs[0]), '/' + hashes[0])
|
||||
self.check_parse(str(specs[1]), '/ ' + hashes[1][:5])
|
||||
self.check_parse(str(specs[2]), specs[2].name + '/' + hashes[2])
|
||||
self.check_parse(str(specs[3]),
|
||||
specs[3].name + '@' + str(specs[3].version) +
|
||||
' /' + hashes[3][:6])
|
||||
|
||||
def test_dep_spec_by_hash(self, database):
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
# Make sure the database is still the shape we expect
|
||||
assert len(specs) > 10
|
||||
assert specs[4].name in specs[10]
|
||||
assert specs[-1].name in specs[10]
|
||||
|
||||
spec1 = sp.Spec(specs[10].name + '^/' + hashes[4])
|
||||
assert specs[4].name in spec1 and spec1[specs[4].name] == specs[4]
|
||||
spec2 = sp.Spec(specs[10].name + '%' + str(specs[10].compiler) +
|
||||
' ^ / ' + hashes[-1])
|
||||
assert (specs[-1].name in spec2 and
|
||||
spec2[specs[-1].name] == specs[-1] and
|
||||
spec2.compiler == specs[10].compiler)
|
||||
spec3 = sp.Spec(specs[10].name + '^/' + hashes[4][:4] +
|
||||
'^ / ' + hashes[-1][:5])
|
||||
assert (specs[-1].name in spec3 and
|
||||
spec3[specs[-1].name] == specs[-1] and
|
||||
specs[4].name in spec3 and spec3[specs[4].name] == specs[4])
|
||||
|
||||
def test_multiple_specs_with_hash(self, database):
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
assert len(specs) > 3
|
||||
|
||||
output = sp.parse(specs[0].name + '/' + hashes[0] + '/' + hashes[1])
|
||||
assert len(output) == 2
|
||||
output = sp.parse('/' + hashes[0] + '/' + hashes[1])
|
||||
assert len(output) == 2
|
||||
output = sp.parse('/' + hashes[0] + '/' + hashes[1] +
|
||||
' ' + specs[2].name)
|
||||
assert len(output) == 3
|
||||
output = sp.parse('/' + hashes[0] +
|
||||
' ' + specs[1].name + ' ' + specs[2].name)
|
||||
assert len(output) == 3
|
||||
output = sp.parse('/' + hashes[0] + ' ' +
|
||||
specs[1].name + ' / ' + hashes[1])
|
||||
assert len(output) == 2
|
||||
|
||||
def test_ambiguous_hash(self, database):
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
# Make sure the database is as expected
|
||||
assert hashes[1][:1] == hashes[2][:1] == 'b'
|
||||
|
||||
ambiguous_hashes = ['/b',
|
||||
specs[1].name + '/b',
|
||||
specs[0].name + '^/b',
|
||||
specs[0].name + '^' + specs[1].name + '/b']
|
||||
self._check_raises(AmbiguousHashError, ambiguous_hashes)
|
||||
|
||||
def test_invalid_hash(self, database):
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
# Make sure the database is as expected
|
||||
assert (hashes[0] != hashes[3] and
|
||||
hashes[1] != hashes[4] and len(specs) > 4)
|
||||
|
||||
inputs = [specs[0].name + '/' + hashes[3],
|
||||
specs[1].name + '^' + specs[4].name + '/' + hashes[0],
|
||||
specs[1].name + '^' + specs[4].name + '/' + hashes[1]]
|
||||
self._check_raises(InvalidHashError, inputs)
|
||||
|
||||
def test_nonexistent_hash(self, database):
|
||||
# This test uses database to make sure we don't accidentally access
|
||||
# real installs, however unlikely
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
# Make sure the database is as expected
|
||||
assert 'abc123' not in [h[:6] for h in hashes]
|
||||
|
||||
nonexistant_hashes = ['/abc123',
|
||||
specs[0].name + '/abc123']
|
||||
self._check_raises(SystemExit, nonexistant_hashes)
|
||||
|
||||
def test_redundant_spec(self, database):
|
||||
specs = database.mock.db.query()
|
||||
hashes = [s._hash for s in specs] # Preserves order of elements
|
||||
|
||||
# Make sure the database is as expected
|
||||
assert len(specs) > 3
|
||||
|
||||
redundant_specs = ['/' + hashes[0] + '%' + str(specs[0].compiler),
|
||||
specs[1].name + '/' + hashes[1] +
|
||||
'@' + str(specs[1].version),
|
||||
specs[2].name + '/' + hashes[2] + '^ libelf',
|
||||
'/' + hashes[3] + ' cflags="-O3 -fPIC"']
|
||||
self._check_raises(RedundantSpecError, redundant_specs)
|
||||
|
||||
def test_duplicate_variant(self):
|
||||
duplicates = [
|
||||
'x@1.2+debug+debug',
|
||||
|
@@ -364,3 +364,8 @@ def test_luaposix_version(self):
|
||||
self.check(
|
||||
'luaposix', '33.4.0',
|
||||
'https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz')
|
||||
|
||||
def test_sionlib_version(self):
|
||||
self.check(
|
||||
'sionlib', '1.7.1',
|
||||
'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1')
|
||||
|
@@ -28,17 +28,17 @@
|
||||
download location of the package, and figure out version and name information
|
||||
from there.
|
||||
|
||||
Example: when spack is given the following URL:
|
||||
**Example:** when spack is given the following URL:
|
||||
|
||||
ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz
|
||||
https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.12/src/hdf-4.2.12.tar.gz
|
||||
|
||||
It can figure out that the package name is ruby, and that it is at version
|
||||
1.9.1-p243. This is useful for making the creation of packages simple: a user
|
||||
It can figure out that the package name is ``hdf``, and that it is at version
|
||||
``4.2.12``. This is useful for making the creation of packages simple: a user
|
||||
just supplies a URL and skeleton code is generated automatically.
|
||||
|
||||
Spack can also figure out that it can most likely download 1.8.1 at this URL:
|
||||
Spack can also figure out that it can most likely download 4.2.6 at this URL:
|
||||
|
||||
ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.8.1.tar.gz
|
||||
https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.6/src/hdf-4.2.6.tar.gz
|
||||
|
||||
This is useful if a user asks for a package at a particular version number;
|
||||
spack doesn't need anyone to tell it where to get the tarball even though
|
||||
@@ -104,24 +104,23 @@ def strip_query_and_fragment(path):
|
||||
def split_url_extension(path):
|
||||
"""Some URLs have a query string, e.g.:
|
||||
|
||||
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
|
||||
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
|
||||
3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
|
||||
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
|
||||
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
|
||||
3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
|
||||
|
||||
In (1), the query string needs to be stripped to get at the
|
||||
extension, but in (2) & (3), the filename is IN a single final query
|
||||
argument.
|
||||
In (1), the query string needs to be stripped to get at the
|
||||
extension, but in (2) & (3), the filename is IN a single final query
|
||||
argument.
|
||||
|
||||
This strips the URL into three pieces: prefix, ext, and suffix.
|
||||
The suffix contains anything that was stripped off the URL to
|
||||
get at the file extension. In (1), it will be '?raw=true', but
|
||||
in (2), it will be empty. In (3) the suffix is a parameter that follows
|
||||
after the file extension, e.g.:
|
||||
This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
|
||||
The suffix contains anything that was stripped off the URL to
|
||||
get at the file extension. In (1), it will be ``'?raw=true'``, but
|
||||
in (2), it will be empty. In (3) the suffix is a parameter that follows
|
||||
after the file extension, e.g.:
|
||||
|
||||
1. ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')
|
||||
2. ('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin',
|
||||
'.tar.gz', None)
|
||||
3. ('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')
|
||||
1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
|
||||
2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
|
||||
3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
|
||||
"""
|
||||
prefix, ext, suffix = path, '', ''
|
||||
|
||||
@@ -149,7 +148,7 @@ def determine_url_file_extension(path):
|
||||
"""This returns the type of archive a URL refers to. This is
|
||||
sometimes confusing because of URLs like:
|
||||
|
||||
(1) https://github.com/petdance/ack/tarball/1.93_02
|
||||
(1) https://github.com/petdance/ack/tarball/1.93_02
|
||||
|
||||
Where the URL doesn't actually contain the filename. We need
|
||||
to know what type it is so that we can appropriately name files
|
||||
@@ -166,19 +165,44 @@ def determine_url_file_extension(path):
|
||||
return ext
|
||||
|
||||
|
||||
def parse_version_offset(path, debug=False):
|
||||
"""Try to extract a version string from a filename or URL. This is taken
|
||||
largely from Homebrew's Version class."""
|
||||
def parse_version_offset(path):
|
||||
"""Try to extract a version string from a filename or URL.
|
||||
|
||||
:param str path: The filename or URL for the package
|
||||
|
||||
:return: A tuple containing:
|
||||
version of the package,
|
||||
first index of version,
|
||||
length of version string,
|
||||
the index of the matching regex, and
|
||||
the matching regex
|
||||
|
||||
:rtype: tuple
|
||||
|
||||
:raises UndetectableVersionError: If the URL does not match any regexes
|
||||
"""
|
||||
original_path = path
|
||||
|
||||
# path: The prefix of the URL, everything before the ext and suffix
|
||||
# ext: The file extension
|
||||
# suffix: Any kind of query string that begins with a '?'
|
||||
path, ext, suffix = split_url_extension(path)
|
||||
|
||||
# Allow matches against the basename, to avoid including parent
|
||||
# dirs in version name. Remember the offset of the stem in the path
|
||||
# stem: Everything from path after the final '/'
|
||||
stem = os.path.basename(path)
|
||||
offset = len(path) - len(stem)
|
||||
|
||||
version_types = [
|
||||
# List of the following format:
|
||||
#
|
||||
# [
|
||||
# (regex, string),
|
||||
# ...
|
||||
# ]
|
||||
#
|
||||
# The first regex that matches string will be used to determine
|
||||
# the version of the package. Therefore, hyperspecific regexes should
|
||||
# come first while generic, catch-all regexes should come last.
|
||||
version_regexes = [
|
||||
# GitHub tarballs, e.g. v1.2.3
|
||||
(r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+)$', path),
|
||||
|
||||
@@ -213,7 +237,9 @@ def parse_version_offset(path, debug=False):
|
||||
# Search dotted versions:
|
||||
# e.g., https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
|
||||
# e.g., https://example.com/org/repo/repository/archive.tar.bz2?ref=SomePrefix-2.1.1
|
||||
# e.g., http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
|
||||
(r'\?ref=(?:.*-|v)*((\d+\.)+\d+).*$', suffix),
|
||||
(r'\?version=((\d+\.)+\d+)', suffix),
|
||||
|
||||
# e.g. boost_1_39_0
|
||||
(r'((\d+_)+\d+)$', stem),
|
||||
@@ -256,16 +282,13 @@ def parse_version_offset(path, debug=False):
|
||||
(r'\/(\d\.\d+)\/', path),
|
||||
|
||||
# e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
|
||||
(r'\.v(\d+[a-z]?)', stem)]
|
||||
(r'\.v(\d+[a-z]?)', stem)
|
||||
]
|
||||
|
||||
for i, vtype in enumerate(version_types):
|
||||
regex, match_string = vtype
|
||||
for i, version_regex in enumerate(version_regexes):
|
||||
regex, match_string = version_regex
|
||||
match = re.search(regex, match_string)
|
||||
if match and match.group(1) is not None:
|
||||
if debug:
|
||||
tty.msg("Parsing URL: %s" % path,
|
||||
" Matched regex %d: r'%s'" % (i, regex))
|
||||
|
||||
version = match.group(1)
|
||||
start = match.start(1)
|
||||
|
||||
@@ -273,30 +296,74 @@ def parse_version_offset(path, debug=False):
|
||||
if match_string is stem:
|
||||
start += offset
|
||||
|
||||
return version, start, len(version)
|
||||
return version, start, len(version), i, regex
|
||||
|
||||
raise UndetectableVersionError(original_path)
|
||||
|
||||
|
||||
def parse_version(path, debug=False):
|
||||
"""Given a URL or archive name, extract a version from it and return
|
||||
a version object.
|
||||
def parse_version(path):
|
||||
"""Try to extract a version string from a filename or URL.
|
||||
|
||||
:param str path: The filename or URL for the package
|
||||
|
||||
:return: The version of the package
|
||||
:rtype: spack.version.Version
|
||||
|
||||
:raises UndetectableVersionError: If the URL does not match any regexes
|
||||
"""
|
||||
ver, start, l = parse_version_offset(path, debug=debug)
|
||||
return Version(ver)
|
||||
version, start, length, i, regex = parse_version_offset(path)
|
||||
return Version(version)
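A quick usage sketch (the zlib URL below is the one exercised by the new spack url parse test added in this commit):

from spack.url import parse_version, parse_version_offset

v = parse_version('http://zlib.net/fossils/zlib-1.2.10.tar.gz')
# v == Version('1.2.10')

version, start, length, i, regex = parse_version_offset(
    'http://zlib.net/fossils/zlib-1.2.10.tar.gz')
# version is the matched string '1.2.10'; start and length locate it in
# the URL; i and regex report which entry of version_regexes matched.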
|
||||
|
||||
|
||||
def parse_name_offset(path, v=None, debug=False):
|
||||
def parse_name_offset(path, v=None):
|
||||
"""Try to determine the name of a package from its filename or URL.
|
||||
|
||||
:param str path: The filename or URL for the package
|
||||
:param str v: The version of the package
|
||||
|
||||
:return: A tuple containing:
|
||||
name of the package,
|
||||
first index of name,
|
||||
length of name,
|
||||
the index of the matching regex, and
|
||||
the matching regex
|
||||
|
||||
:rtype: tuple
|
||||
|
||||
:raises UndetectableNameError: If the URL does not match any regexes
|
||||
"""
|
||||
original_path = path
|
||||
|
||||
# We really need to know the version of the package
|
||||
# This helps us prevent collisions between the name and version
|
||||
if v is None:
|
||||
v = parse_version(path, debug=debug)
|
||||
try:
|
||||
v = parse_version(path)
|
||||
except UndetectableVersionError:
|
||||
# Not all URLs contain a version. We still want to be able
|
||||
# to determine a name if possible.
|
||||
v = ''
|
||||
|
||||
# path: The prefix of the URL, everything before the ext and suffix
|
||||
# ext: The file extension
|
||||
# suffix: Any kind of query string that begins with a '?'
|
||||
path, ext, suffix = split_url_extension(path)
|
||||
|
||||
# Allow matching with either path or stem, as with the version.
|
||||
# stem: Everything from path after the final '/'
|
||||
stem = os.path.basename(path)
|
||||
offset = len(path) - len(stem)
|
||||
|
||||
name_types = [
|
||||
# List of the following format:
|
||||
#
|
||||
# [
|
||||
# (regex, string),
|
||||
# ...
|
||||
# ]
|
||||
#
|
||||
# The first regex that matches string will be used to determine
|
||||
# the name of the package. Therefore, hyperspecific regexes should
|
||||
# come first while generic, catch-all regexes should come last.
|
||||
name_regexes = [
|
||||
(r'/sourceforge/([^/]+)/', path),
|
||||
(r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' %
|
||||
(v, v), path),
|
||||
@@ -304,6 +371,7 @@ def parse_name_offset(path, v=None, debug=False):
|
||||
(r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
|
||||
(r'github.com/[^/]+/([^/]+)/archive', path),
|
||||
(r'[^/]+/([^/]+)/repository/archive', path), # gitlab
|
||||
(r'([^/]+)/download.php', path),
|
||||
|
||||
(r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
|
||||
(r'([^/]+)%s' % v, stem),
|
||||
@@ -313,10 +381,11 @@ def parse_name_offset(path, v=None, debug=False):
|
||||
(r'/([^/]+)%s' % v, path),
|
||||
|
||||
(r'^([^/]+)[_.-]v?%s' % v, path),
|
||||
(r'^([^/]+)%s' % v, path)]
|
||||
(r'^([^/]+)%s' % v, path)
|
||||
]
|
||||
|
||||
for i, name_type in enumerate(name_types):
|
||||
regex, match_string = name_type
|
||||
for i, name_regex in enumerate(name_regexes):
|
||||
regex, match_string = name_regex
|
||||
match = re.search(regex, match_string)
|
||||
if match:
|
||||
name = match.group(1)
|
||||
@@ -330,17 +399,38 @@ def parse_name_offset(path, v=None, debug=False):
|
||||
name = name.lower()
|
||||
name = re.sub('[_.]', '-', name)
|
||||
|
||||
return name, start, len(name)
|
||||
return name, start, len(name), i, regex
|
||||
|
||||
raise UndetectableNameError(path)
|
||||
raise UndetectableNameError(original_path)
|
||||
|
||||
|
||||
def parse_name(path, ver=None):
|
||||
name, start, l = parse_name_offset(path, ver)
|
||||
"""Try to determine the name of a package from its filename or URL.
|
||||
|
||||
:param str path: The filename or URL for the package
|
||||
:param str ver: The version of the package
|
||||
|
||||
:return: The name of the package
|
||||
:rtype: str
|
||||
|
||||
:raises UndetectableNameError: If the URL does not match any regexes
|
||||
"""
|
||||
name, start, length, i, regex = parse_name_offset(path, ver)
|
||||
return name
|
||||
|
||||
|
||||
def parse_name_and_version(path):
|
||||
"""Try to determine the name of a package and extract its version
|
||||
from its filename or URL.
|
||||
|
||||
:param str path: The filename or URL for the package
|
||||
|
||||
:return: A tuple containing:
|
||||
The name of the package
|
||||
The version of the package
|
||||
|
||||
:rtype: tuple
|
||||
"""
|
||||
ver = parse_version(path)
|
||||
name = parse_name(path, ver)
|
||||
return (name, ver)
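Following the hdf example from the module docstring, a short sketch of the combined helper:

from spack.url import parse_name_and_version

name, version = parse_name_and_version(
    'https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.12/src/hdf-4.2.12.tar.gz')
# name == 'hdf', version == Version('4.2.12'), matching the example in
# the module docstring at the top of this file.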
|
||||
@@ -368,12 +458,12 @@ def cumsum(elts, init=0, fn=lambda x: x):
|
||||
|
||||
def substitution_offsets(path):
|
||||
"""This returns offsets for substituting versions and names in the
|
||||
provided path. It is a helper for substitute_version().
|
||||
provided path. It is a helper for :func:`substitute_version`.
|
||||
"""
|
||||
# Get name and version offsets
|
||||
try:
|
||||
ver, vs, vl = parse_version_offset(path)
|
||||
name, ns, nl = parse_name_offset(path, ver)
|
||||
ver, vs, vl, vi, vregex = parse_version_offset(path)
|
||||
name, ns, nl, ni, nregex = parse_name_offset(path, ver)
|
||||
except UndetectableNameError:
|
||||
return (None, -1, -1, (), ver, vs, vl, (vs,))
|
||||
except UndetectableVersionError:
|
||||
@@ -441,21 +531,22 @@ def wildcard_version(path):
|
||||
|
||||
def substitute_version(path, new_version):
|
||||
"""Given a URL or archive name, find the version in the path and
|
||||
substitute the new version for it. Replace all occurrences of
|
||||
the version *if* they don't overlap with the package name.
|
||||
substitute the new version for it. Replace all occurrences of
|
||||
the version *if* they don't overlap with the package name.
|
||||
|
||||
Simple example::
|
||||
substitute_version('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '2.9.3')
|
||||
->'http://www.mr511.de/software/libelf-2.9.3.tar.gz'
|
||||
Simple example:
|
||||
|
||||
Complex examples::
|
||||
substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz', 2.1)
|
||||
-> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'
|
||||
.. code-block:: python
|
||||
|
||||
# In this string, the "2" in mvapich2 is NOT replaced.
|
||||
substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.tar.gz', 2.1)
|
||||
-> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'
|
||||
substitute_version('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '2.9.3')
|
||||
>>> 'http://www.mr511.de/software/libelf-2.9.3.tar.gz'
|
||||
|
||||
Complex example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
substitute_version('https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.12/src/hdf-4.2.12.tar.gz', '2.3')
|
||||
>>> 'https://www.hdfgroup.org/ftp/HDF/releases/HDF2.3/src/hdf-2.3.tar.gz'
|
||||
"""
|
||||
(name, ns, nl, noffs,
|
||||
ver, vs, vl, voffs) = substitution_offsets(path)
|
||||
@@ -474,17 +565,16 @@ def substitute_version(path, new_version):
def color_url(path, **kwargs):
    """Color the parts of the url according to Spack's parsing.

    Colors are:
        Cyan: The version found by parse_version_offset().
        Red: The name found by parse_name_offset().
    Colors are:
      | Cyan: The version found by :func:`parse_version_offset`.
      | Red: The name found by :func:`parse_name_offset`.

        Green: Instances of version string from substitute_version().
        Magenta: Instances of the name (protected from substitution).

    Optional args:
        errors=True    Append parse errors at end of string.
        subs=True      Color substitutions as well as parsed name/version.
      | Green: Instances of version string from :func:`substitute_version`.
      | Magenta: Instances of the name (protected from substitution).

    :param str path: The filename or URL for the package
    :keyword bool errors: Append parse errors at end of string.
    :keyword bool subs: Color substitutions as well as parsed name/version.
    """
    errors = kwargs.get('errors', False)
    subs = kwargs.get('subs', False)
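A short hedged illustration of the keyword interface documented above; the URL is arbitrary and the actual coloring depends on the terminal:

    # Sketch only: print a sample URL with Spack's parse coloring,
    # using both documented keyword options.
    from spack.url import color_url

    print(color_url('http://www.example.com/foo-1.2.3.tar.gz',
                    errors=True, subs=True))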
@@ -167,6 +167,11 @@ function _spack_pathadd {
    fi
}

# Export spack function so it is available in subshells (only works with bash)
if [ -n "${BASH_VERSION:-}" ]; then
    export -f spack
fi

#
# Figure out where this file is. Below code needs to be portable to
# bash and zsh.
@@ -22,9 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import os

from spack import *


def check(condition, msg):
    """Raise an install error if condition is False."""
@@ -39,6 +40,28 @@ class CmakeClient(CMakePackage):

    version('1.0', '4cb3ff35b2472aae70f542116d616e63')

    callback_counter = 0

    flipped = False
    run_this = True
    check_this_is_None = None
    did_something = False

    @run_after('cmake')
    @run_before('cmake', 'build', 'install')
    def increment(self):
        self.callback_counter += 1

    @run_after('cmake')
    @on_package_attributes(run_this=True, check_this_is_None=None)
    def flip(self):
        self.flipped = True

    @run_after('cmake')
    @on_package_attributes(does_not_exist=None)
    def do_not_execute(self):
        self.did_something = True

    def setup_environment(self, spack_env, run_env):
        spack_cc  # Ensure spack module-scope variable is available
        check(from_cmake == "from_cmake",
@@ -67,11 +90,15 @@ def setup_dependent_package(self, module, dspec):
              "setup_dependent_package.")

    def cmake(self, spec, prefix):
        pass
        assert self.callback_counter == 1

    build = cmake
    def build(self, spec, prefix):
        assert self.did_something is False
        assert self.flipped is True
        assert self.callback_counter == 3

    def install(self, spec, prefix):
        assert self.callback_counter == 4
        # check that cmake is in the global scope.
        global cmake
        check(cmake is not None, "No cmake was in environment!")
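For readers less familiar with the phase hooks exercised above, a minimal hedged sketch of how run_before/run_after and on_package_attributes compose in a package; the class, counter, and attribute names here are illustrative, not from the mock package:

    # Illustrative only: a hypothetical package using the same callback
    # decorators the mock test above relies on.
    class CallbackDemo(CMakePackage):
        """Counts phase callbacks around the cmake and build phases."""

        counter = 0
        ready = True

        @run_before('build')   # fires immediately before build()
        @run_after('cmake')    # ...and immediately after cmake()
        def bump(self):
            self.counter += 1

        @run_after('install')
        @on_package_attributes(ready=True)   # skipped unless self.ready is True
        def verify(self):
            # By install time the callback has fired twice in this sketch.
            assert self.counter == 2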
39
var/spack/repos/builtin.mock/packages/extendee/package.py
Normal file
@@ -0,0 +1,39 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Extendee(Package):
    """A package with extensions"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/extendee-1.0.tar.gz"

    extendable = True

    version('1.0', 'hash-extendee-1.0')

    def install(self, spec, prefix):
        pass
39
var/spack/repos/builtin.mock/packages/extension1/package.py
Normal file
@@ -0,0 +1,39 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Extension1(Package):
    """A package which extends another package"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/extension1-1.0.tar.gz"

    extends('extendee')

    version('1.0', 'hash-extension1-1.0')

    def install(self, spec, prefix):
        pass
41
var/spack/repos/builtin.mock/packages/extension2/package.py
Normal file
@@ -0,0 +1,41 @@
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Extension2(Package):
    """A package which extends another package. It also depends on another
    package which extends the same package."""

    homepage = "http://www.example.com"
    url = "http://www.example.com/extension2-1.0.tar.gz"

    extends('extendee')
    depends_on('extension1', type=('build', 'run'))

    version('1.0', 'hash-extension2-1.0')

    def install(self, spec, prefix):
        pass
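The three mock packages above exercise Spack's extension mechanism (extendable, extends, and a dependency between two extensions of the same package). A hedged real-world analogue of the same layout; the name, URL, and checksum are made up, and setup_py is assumed to be the helper the python package exposes to its extensions in this era of Spack:

    # Hypothetical only: a package extending python the way extension1 and
    # extension2 extend extendee in the mock repo above.
    class PyExample(Package):
        """Made-up Python extension package."""

        homepage = "http://www.example.com"
        url = "http://www.example.com/py-example-1.0.tar.gz"

        extends('python')
        depends_on('py-setuptools', type='build')

        version('1.0', '0123456789abcdef0123456789abcdef')

        def install(self, spec, prefix):
            setup_py('install', '--prefix={0}'.format(prefix))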
@@ -36,6 +36,8 @@ class AdolC(Package):
    version('2.6.2', '0f9547584c99c0673e4f81cf64e8d865')
    version('2.6.1', '1032b28427d6e399af4610e78c0f087b')

    variant('advanced_branching', default=False,
            description='Enable advanced branching to reduce retaping')
    variant('doc', default=True, description='Install documentation')
    variant('openmp', default=False, description='Enable OpenMP support')
    variant('sparse', default=False, description='Enable sparse drivers')
@@ -45,10 +47,13 @@ class AdolC(Package):
    patch('openmp_exam_261.patch', when='@2.6.1')

    def install(self, spec, prefix):
        make_args = ['--prefix=%s' % prefix]
        make_args = ['--prefix=%s' % prefix,
                     '--enable-atrig-erf']

        # --with-cflags=FLAGS   use CFLAGS=FLAGS (default: -O3 -Wall -ansi)
        # --with-cxxflags=FLAGS use CXXFLAGS=FLAGS (default: -O3 -Wall)
        if '+advanced_branching' in spec:
            make_args.extend([
                '--enable-advanced-branching'
            ])

        if '+openmp' in spec:
            if spec.satisfies('%gcc'):
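As an aside on the spec checks above, '+variant' in spec and spec.satisfies('%gcc') are the usual ways a package inspects its concretized spec; a hedged sketch with made-up flags:

    # Illustrative only: wrap the two standard spec queries in a helper.
    # `spec` is assumed to be a concretized Spec passed in by install().
    def extra_configure_args(spec):
        args = []
        if '+openmp' in spec:            # is the variant enabled?
            args.append('--with-openmp-flag=-fopenmp')
        if spec.satisfies('%gcc'):       # was it concretized with GCC?
            args.append('--with-cflags=-O3')
        return args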
@@ -25,7 +25,7 @@
from spack import *


class Applewmproto(Package):
class Applewmproto(AutotoolsPackage):
    """Apple Rootless Window Management Extension.

    This extension defines a protocol that allows X window managers
@@ -39,8 +39,3 @@ class Applewmproto(Package):

    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('util-macros', type='build')

    def install(self, spec, prefix):
        configure('--prefix={0}'.format(prefix))

        make('install')
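This conversion, and the two that follow, drop the hand-written install() because AutotoolsPackage supplies default configure, build, and install phases; a converted package usually only overrides what differs, roughly as sketched below (class name and flag are made up):

    # Illustrative only: the base class runs configure, make, make install;
    # configure_args is the standard hook for extra ./configure options.
    class Example(AutotoolsPackage):
        """Hypothetical autotools-based package."""

        def configure_args(self):
            return ['--enable-shared']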
@@ -25,7 +25,7 @@
from spack import *


class Appres(Package):
class Appres(AutotoolsPackage):
    """The appres program prints the resources seen by an application (or
    subhierarchy of an application) with the specified class and instance
    names. It can be used to determine which resources a particular
@@ -42,9 +42,3 @@ class Appres(Package):
    depends_on('xproto@7.0.17:', type='build')
    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('util-macros', type='build')

    def install(self, spec, prefix):
        configure('--prefix={0}'.format(prefix))

        make()
        make('install')
@@ -25,7 +25,7 @@
from spack import *


class Asciidoc(Package):
class Asciidoc(AutotoolsPackage):
    """A presentable text document format for writing articles, UNIX man
    pages and other small to medium sized documents."""

@@ -38,9 +38,3 @@ class Asciidoc(Package):
    depends_on('libxslt')
    depends_on('docbook-xml')
    depends_on('docbook-xsl')

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)

        make()
        make("install")
@@ -39,6 +39,7 @@ class Astyle(MakefilePackage):

    parallel = False

    @property
    def build_directory(self):
        return join_path(self.stage.source_path, 'build', self.compiler.name)
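For context on the hunk above, MakefilePackage runs its default phases from build_directory, so the property points them at a compiler-specific subdirectory. A hedged sketch of the same pattern with a hard-coded path:

    # Illustrative only: redirect the default make-based phases; the 'gcc'
    # subdirectory is a made-up example.
    class Demo(MakefilePackage):

        @property
        def build_directory(self):
            return join_path(self.stage.source_path, 'build', 'gcc')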
Some files were not shown because too many files have changed in this diff.