API Docs: fix broken reference targets

This commit is contained in:
Adam J. Stewart 2021-07-03 17:10:13 -05:00 committed by Todd Gamblin
parent c37df94932
commit b8afc0fd29
57 changed files with 510 additions and 471 deletions

View File

@ -2,7 +2,7 @@
# #
# You can set these variables from the command line. # You can set these variables from the command line.
SPHINXOPTS = -W SPHINXOPTS = -W --keep-going
SPHINXBUILD = sphinx-build SPHINXBUILD = sphinx-build
PAPER = PAPER =
BUILDDIR = _build BUILDDIR = _build

View File

@ -130,8 +130,8 @@ Adding flags to cmake
To add additional flags to the ``cmake`` call, simply override the To add additional flags to the ``cmake`` call, simply override the
``cmake_args`` function. The following example defines values for the flags ``cmake_args`` function. The following example defines values for the flags
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
and without the :py:meth:`~.CMakePackage.define` and and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
:py:meth:`~.CMakePackage.define_from_variant` helper functions: :meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
.. code-block:: python .. code-block:: python

View File

@ -101,11 +101,14 @@ def setup(sphinx):
# Add any Sphinx extension module names here, as strings. They can be extensions # Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', extensions = [
'sphinx.ext.graphviz', 'sphinx.ext.autodoc',
'sphinx.ext.napoleon', 'sphinx.ext.graphviz',
'sphinx.ext.todo', 'sphinx.ext.intersphinx',
'sphinxcontrib.programoutput'] 'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinxcontrib.programoutput',
]
# Set default graphviz options # Set default graphviz options
graphviz_dot_args = [ graphviz_dot_args = [
@ -164,6 +167,19 @@ def setup(sphinx):
# directories to ignore when looking for source files. # directories to ignore when looking for source files.
exclude_patterns = ['_build', '_spack_root', '.spack-env'] exclude_patterns = ['_build', '_spack_root', '.spack-env']
nitpicky = True
nitpick_ignore = [
# Python classes that intersphinx is unable to resolve
('py:class', 'argparse.HelpFormatter'),
('py:class', 'contextlib.contextmanager'),
('py:class', 'module'),
('py:class', '_io.BufferedReader'),
('py:class', 'unittest.case.TestCase'),
('py:class', '_frozen_importlib_external.SourceFileLoader'),
# Spack classes that are private and we don't want to expose
('py:class', 'spack.provider_index._IndexBase'),
]
# The reST default role (used for this markup: `text`) to use for all documents. # The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None #default_role = None
@ -358,3 +374,11 @@ class SpackStyle(DefaultStyle):
# How to display URL addresses: 'footnote', 'no', or 'inline'. # How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote' #texinfo_show_urls = 'footnote'
# -- Extension configuration -------------------------------------------------
# sphinx.ext.intersphinx
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
}

View File

@ -108,9 +108,9 @@ with a high level view of Spack's directory structure:
spack/ <- spack module; contains Python code spack/ <- spack module; contains Python code
analyzers/ <- modules to run analysis on installed packages analyzers/ <- modules to run analysis on installed packages
build_systems/ <- modules for different build systems build_systems/ <- modules for different build systems
cmd/ <- each file in here is a spack subcommand cmd/ <- each file in here is a spack subcommand
compilers/ <- compiler description files compilers/ <- compiler description files
container/ <- module for spack containerize container/ <- module for spack containerize
hooks/ <- hook modules to run at different points hooks/ <- hook modules to run at different points
modules/ <- modules for lmod, tcl, etc. modules/ <- modules for lmod, tcl, etc.
@ -151,24 +151,22 @@ Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.package` :mod:`spack.package`
Contains the :class:`Package <spack.package.Package>` class, which Contains the :class:`~spack.package.Package` class, which
is the superclass for all packages in Spack. Methods on ``Package`` is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle implement all phases of the :ref:`package lifecycle
<package-lifecycle>` and manage the build process. <package-lifecycle>` and manage the build process.
:mod:`spack.packages` :mod:`spack.util.naming`
Contains all of the packages in Spack and methods for managing them. Contains functions for mapping between Spack package names,
Functions like :func:`packages.get <spack.packages.get>` and Python module names, and Python class names. Functions like
:func:`class_name_for_package_name :func:`~spack.util.naming.mod_to_class` handle mapping package
<packages.class_name_for_package_name>` handle mapping package module module names to class names.
names to class names and dynamically instantiating packages by name
from module files.
:mod:`spack.relations` :mod:`spack.directives`
*Relations* are relationships between packages, like *Directives* are functions that can be called inside a package definition
:func:`depends_on <spack.relations.depends_on>` and :func:`provides to modify the package, like :func:`~spack.directives.depends_on`
<spack.relations.provides>`. See :ref:`dependencies` and and :func:`~spack.directives.provides`. See :ref:`dependencies`
:ref:`virtual-dependencies`. and :ref:`virtual-dependencies`.
:mod:`spack.multimethod` :mod:`spack.multimethod`
Implementation of the :func:`@when <spack.multimethod.when>` Implementation of the :func:`@when <spack.multimethod.when>`
@ -180,31 +178,27 @@ Spec-related modules
^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^
:mod:`spack.spec` :mod:`spack.spec`
Contains :class:`Spec <spack.spec.Spec>` and :class:`SpecParser Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
<spack.spec.SpecParser>`. Also implements most of the logic for Also implements most of the logic for normalization and concretization
normalization and concretization of specs. of specs.
:mod:`spack.parse` :mod:`spack.parse`
Contains some base classes for implementing simple recursive descent Contains some base classes for implementing simple recursive descent
parsers: :class:`Parser <spack.parse.Parser>` and :class:`Lexer parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
<spack.parse.Lexer>`. Used by :class:`SpecParser Used by :class:`~spack.spec.SpecParser`.
<spack.spec.SpecParser>`.
:mod:`spack.concretize` :mod:`spack.concretize`
Contains :class:`DefaultConcretizer Contains :class:`~spack.concretize.Concretizer` implementation,
<spack.concretize.DefaultConcretizer>` implementation, which allows which allows site administrators to change Spack's :ref:`concretization-policies`.
site administrators to change Spack's :ref:`concretization-policies`.
:mod:`spack.version` :mod:`spack.version`
Implements a simple :class:`Version <spack.version.Version>` class Implements a simple :class:`~spack.version.Version` class with simple
with simple comparison semantics. Also implements comparison semantics. Also implements :class:`~spack.version.VersionRange`
:class:`VersionRange <spack.version.VersionRange>` and and :class:`~spack.version.VersionList`. All three are comparable with each
:class:`VersionList <spack.version.VersionList>`. All three are other and offer union and intersection operations. Spack uses these classes
comparable with each other and offer union and intersection to compare versions and to manage version constraints on specs. Comparison
operations. Spack uses these classes to compare versions and to semantics are similar to the ``LooseVersion`` class in ``distutils`` and to
manage version constraints on specs. Comparison semantics are the way RPM compares version strings.
similar to the ``LooseVersion`` class in ``distutils`` and to the
way RPM compares version strings.
:mod:`spack.compilers` :mod:`spack.compilers`
Submodules contain descriptors for all valid compilers in Spack. Submodules contain descriptors for all valid compilers in Spack.
@ -232,7 +226,7 @@ Build environment
:mod:`spack.stage` :mod:`spack.stage`
Handles creating temporary directories for builds. Handles creating temporary directories for builds.
:mod:`spack.compilation` :mod:`spack.build_environment`
This contains utility functions used by the compiler wrapper script, This contains utility functions used by the compiler wrapper script,
``cc``. ``cc``.
@ -257,22 +251,19 @@ Unit tests
Implements Spack's test suite. Add a module and put its name in Implements Spack's test suite. Add a module and put its name in
the test suite in ``__init__.py`` to add more unit tests. the test suite in ``__init__.py`` to add more unit tests.
:mod:`spack.test.mock_packages`
This is a fake package hierarchy used to mock up packages for
Spack's test suite.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Research and Monitoring Modules Research and Monitoring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.monitor` :mod:`spack.monitor`
Contains :class:`SpackMonitor <spack.monitor.SpackMonitor>`. This is accessed Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
from the ``spack install`` and ``spack analyze`` commands to send build the ``spack install`` and ``spack analyze`` commands to send build and
and package metadada up to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server. package metadata up to a `Spack Monitor
<https://github.com/spack/spack-monitor>`_ server.
:mod:`spack.analyzers` :mod:`spack.analyzers`
A module folder with a :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>` A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
that provides base functions to run, save, and (optionally) upload analysis that provides base functions to run, save, and (optionally) upload analysis
results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server. results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
@ -286,7 +277,7 @@ Other Modules
tarball URLs. tarball URLs.
:mod:`spack.error` :mod:`spack.error`
:class:`SpackError <spack.error.SpackError>`, the base class for :class:`~spack.error.SpackError`, the base class for
Spack's exception hierarchy. Spack's exception hierarchy.
:mod:`llnl.util.tty` :mod:`llnl.util.tty`
@ -335,8 +326,8 @@ Writing analyzers
To write an analyzer, you should add a new python file to the To write an analyzer, you should add a new python file to the
analyzers module directory at ``lib/spack/spack/analyzers`` . analyzers module directory at ``lib/spack/spack/analyzers`` .
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
to add an analyzer class ``Myanalyzer`` you woul write to to add an analyzer class ``Myanalyzer`` you would write to
``spack/analyzers/myanalyzer.py`` and import and ``spack/analyzers/myanalyzer.py`` and import and
use the base as follows: use the base as follows:
.. code-block:: python .. code-block:: python
@ -347,7 +338,7 @@ use the base as follows:
Note that the class name is your module file name, all lowercase Note that the class name is your module file name, all lowercase
except for the first capital letter. You can look at other analyzers in except for the first capital letter. You can look at other analyzers in
that analyzer directory for examples. The guide here will tell you about the basic functions needed. that analyzer directory for examples. The guide here will tell you about the basic functions needed.
^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^
@ -356,13 +347,13 @@ Analyzer Output Directory
By default, when you run ``spack analyze run`` an analyzer output directory will By default, when you run ``spack analyze run`` an analyzer output directory will
be created in your spack user directory in your ``$HOME``. The reason we output here be created in your spack user directory in your ``$HOME``. The reason we output here
is because the install directory might not always be writable. is because the install directory might not always be writable.
.. code-block:: console .. code-block:: console
~/.spack/ ~/.spack/
analyzers analyzers
Result files will be written here, organized in subfolders in the same structure Result files will be written here, organized in subfolders in the same structure
as the package, with each analyzer owning its own subfolder. For example: as the package, with each analyzer owning its own subfolder. For example:
@ -380,11 +371,11 @@ as the package, with each analyzer owning it's own subfolder. for example:
│   └── spack-analyzer-install-files.json │   └── spack-analyzer-install-files.json
└── libabigail └── libabigail
└── lib └── lib
└── spack-analyzer-libabigail-libz.so.1.2.11.xml └── spack-analyzer-libabigail-libz.so.1.2.11.xml
Notice that for the libabigail analyzer, since results are generated per object, Notice that for the libabigail analyzer, since results are generated per object,
we honor the object's folder in case there are equivalently named files in we honor the object's folder in case there are equivalently named files in
different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor. different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.
@ -426,7 +417,7 @@ and then return the object with a key as the analyzer name. The result data
should be a list of objects, each with a name, ``analyzer_name``, ``install_file``, should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
and one of ``value`` or ``binary_value``. The install file should be for a relative and one of ``value`` or ``binary_value``. The install file should be for a relative
path, and not the absolute path. For example, let's say we extract a metric called path, and not the absolute path. For example, let's say we extract a metric called
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``. ``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
We might have data that looks like this: We might have data that looks like this:
.. code-block:: python .. code-block:: python
@ -482,7 +473,7 @@ Saving Analyzer Results
The analyzer will have ``save_result`` called, with the result object generated The analyzer will have ``save_result`` called, with the result object generated
to save it to the filesystem, and if the user has added the ``--monitor`` flag to save it to the filesystem, and if the user has added the ``--monitor`` flag
to upload it to a monitor server. If your result follows an accepted result to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join is useful to know about the ``output_dir`` property, which you can join
@ -548,7 +539,7 @@ each one (separately) to the monitor:
Notice that this function, if you define it, requires a result object (generated by Notice that this function, if you define it, requires a result object (generated by
``run()``, a monitor (if you want to send), and a boolean ``overwrite`` to be used ``run()``, a monitor (if you want to send), and a boolean ``overwrite`` to be used
to check if a result exists first, and not write to it if the result exists and to check if a result exists first, and not write to it if the result exists and
overwrite is False. Also notice that since we already saved these files to the analyzer metadata folder, we return early if a monitor isn't defined, because this function serves to send results to the monitor. If you haven't saved anything to the analyzer metadata folder overwrite is False. Also notice that since we already saved these files to the analyzer metadata folder, we return early if a monitor isn't defined, because this function serves to send results to the monitor. If you haven't saved anything to the analyzer metadata folder
yet, you might want to do that here. You should also use ``tty.info`` to give yet, you might want to do that here. You should also use ``tty.info`` to give
the user a message of "Writing result to $DIRNAME." the user a message of "Writing result to $DIRNAME."
@ -616,7 +607,7 @@ types of hooks in the ``__init__.py``, and then python files in that folder
can use hook functions. The files are automatically parsed, so if you write can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py`` a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
you can then write hook functions in that file that will be automatically detected, you can then write hook functions in that file that will be automatically detected,
and run whenever your hook is called. This section will cover the basic kind and run whenever your hook is called. This section will cover the basic kind
of hooks, and how to write them. of hooks, and how to write them.
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
@ -624,7 +615,7 @@ Types of Hooks
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
The following hooks are currently implemented to make it easy for you, The following hooks are currently implemented to make it easy for you,
the developer, to add hooks at different stages of a spack install or similar. the developer, to add hooks at different stages of a spack install or similar.
If there is a hook that you would like and is missing, you can propose to add a new one. If there is a hook that you would like and is missing, you can propose to add a new one.
""""""""""""""""""""" """""""""""""""""""""
@ -632,9 +623,9 @@ If there is a hook that you would like and is missing, you can propose to add a
""""""""""""""""""""" """""""""""""""""""""
A ``pre_install`` hook is run within an install subprocess, directly before A ``pre_install`` hook is run within an install subprocess, directly before
the install starts. It expects a single argument of a spec, and is run in the install starts. It expects a single argument of a spec, and is run in
a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages these are not hooks a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages these are not hooks
as we have defined them here, but rather callback functions associated with as we have defined them here, but rather callback functions associated with
a package install. a package install.
@ -657,7 +648,7 @@ here.
This hook is run at the beginning of ``lib/spack/spack/installer.py``, This hook is run at the beginning of ``lib/spack/spack/installer.py``,
in the install function of a ``PackageInstaller``, in the install function of a ``PackageInstaller``,
and importantly is not part of a build process, but before it. This is when and importantly is not part of a build process, but before it. This is when
we have just newly grabbed the task, and are preparing to install. If you we have just newly grabbed the task, and are preparing to install. If you
write a hook of this type, you should provide the spec to it. write a hook of this type, you should provide the spec to it.
.. code-block:: python .. code-block:: python
@ -666,7 +657,7 @@ write a hook of this type, you should provide the spec to it.
"""On start of an install, we want to... """On start of an install, we want to...
""" """
print('on_install_start') print('on_install_start')
"""""""""""""""""""""""""""" """"""""""""""""""""""""""""
``on_install_success(spec)`` ``on_install_success(spec)``
@ -744,8 +735,8 @@ to trigger after anything is written to a logger. You would add it as follows:
post_install = HookRunner('post_install') post_install = HookRunner('post_install')
# hooks related to logging # hooks related to logging
post_log_write = HookRunner('post_log_write') # <- here is my new hook! post_log_write = HookRunner('post_log_write') # <- here is my new hook!
You then need to decide what arguments your hook would expect. Since this is You then need to decide what arguments your hook would expect. Since this is
related to logging, let's say that you want a message and level. That means related to logging, let's say that you want a message and level. That means
@ -775,7 +766,7 @@ In this example, we use it outside of a logger that is already defined:
This is not to say that this would be the best way to implement an integration This is not to say that this would be the best way to implement an integration
with the logger (you'd probably want to write a custom logger, or you could with the logger (you'd probably want to write a custom logger, or you could
have the hook defined within the logger) but serves as an example of writing a hook. have the hook defined within the logger) but serves as an example of writing a hook.
---------- ----------
Unit tests Unit tests
@ -905,7 +896,7 @@ just like you would with the normal ``python`` command.
^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^
Spack blame is a way to quickly see contributors to packages or files Spack blame is a way to quickly see contributors to packages or files
in the spack repository. You should provide a target package name or in the spack repository. You should provide a target package name or
file name to the command. Here is an example asking to see contributions file name to the command. Here is an example asking to see contributions
for the package "python": for the package "python":
@ -915,8 +906,8 @@ for the package "python":
LAST_COMMIT LINES % AUTHOR EMAIL LAST_COMMIT LINES % AUTHOR EMAIL
2 weeks ago 3 0.3 Mickey Mouse <cheddar@gmouse.org> 2 weeks ago 3 0.3 Mickey Mouse <cheddar@gmouse.org>
a month ago 927 99.7 Minnie Mouse <swiss@mouse.org> a month ago 927 99.7 Minnie Mouse <swiss@mouse.org>
2 weeks ago 930 100.0 2 weeks ago 930 100.0
By default, you will get a table view (shown above) sorted by date of contribution, By default, you will get a table view (shown above) sorted by date of contribution,
@ -1287,7 +1278,7 @@ Publishing a release on GitHub
#. Create the release in GitHub. #. Create the release in GitHub.
* Go to * Go to
`github.com/spack/spack/releases <https://github.com/spack/spack/releases>`_ `github.com/spack/spack/releases <https://github.com/spack/spack/releases>`_
and click ``Draft a new release``. and click ``Draft a new release``.

View File

@ -2884,52 +2884,52 @@ The package base class, usually specialized for a given build system, determines
actual set of entities available for overriding. actual set of entities available for overriding.
The classes that are currently provided by Spack are: The classes that are currently provided by Spack are:
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| **Base Class** | **Purpose** | | **Base Class** | **Purpose** |
+===============================+==================================+ +==========================================================+==================================+
| :py:class:`.Package` | General base class not | | :class:`~spack.package.Package` | General base class not |
| | specialized for any build system | | | specialized for any build system |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.MakefilePackage` | Specialized class for packages | | :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages |
| | built invoking | | | built invoking |
| | hand-written Makefiles | | | hand-written Makefiles |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.AutotoolsPackage` | Specialized class for packages | | :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages |
| | built using GNU Autotools | | | built using GNU Autotools |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.CMakePackage` | Specialized class for packages | | :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages |
| | built using CMake | | | built using CMake |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.CudaPackage` | A helper class for packages that | | :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
| | use CUDA | | | use CUDA |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.QMakePackage` | Specialized class for packages | | :class:`~spack.build_systems.qmake.QMakePackage` | Specialized class for packages |
| | build using QMake | | | built using QMake |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.ROCmPackage` | A helper class for packages that | | :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
| | use ROCm | | | use ROCm |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.SConsPackage` | Specialized class for packages | | :class:`~spack.build_systems.scons.SConsPackage` | Specialized class for packages |
| | built using SCons | | | built using SCons |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.WafPackage` | Specialized class for packages | | :class:`~spack.build_systems.waf.WafPackage` | Specialized class for packages |
| | built using Waf | | | built using Waf |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.RPackage` | Specialized class for | | :class:`~spack.build_systems.r.RPackage` | Specialized class for |
| | :py:class:`.R` extensions | | | R extensions |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.OctavePackage` | Specialized class for | | :class:`~spack.build_systems.octave.OctavePackage` | Specialized class for |
| | :py:class:`.Octave` packages | | | Octave packages |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.PythonPackage` | Specialized class for | | :class:`~spack.build_systems.python.PythonPackage` | Specialized class for |
| | :py:class:`.Python` extensions | | | Python extensions |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.PerlPackage` | Specialized class for | | :class:`~spack.build_systems.perl.PerlPackage` | Specialized class for |
| | :py:class:`.Perl` extensions | | | Perl extensions |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :py:class:`.IntelPackage` | Specialized class for licensed | | :class:`~spack.build_systems.intel.IntelPackage` | Specialized class for licensed |
| | Intel software | | | Intel software |
+-------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
.. note:: .. note::
@ -2939,7 +2939,7 @@ The classes that are currently provided by Spack are:
rare cases where manual intervention is needed we need to stress that a rare cases where manual intervention is needed we need to stress that a
package base class depends on the *build system* being used, not the language of the package. package base class depends on the *build system* being used, not the language of the package.
For example, a Python extension installed with CMake would ``extends('python')`` and For example, a Python extension installed with CMake would ``extends('python')`` and
subclass from :py:class:`.CMakePackage`. subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
Installation pipeline Installation pipeline
@ -4079,7 +4079,7 @@ prefix **before** ``make install``. Builds like this can falsely report
success when an error occurs before the installation is complete. Simple success when an error occurs before the installation is complete. Simple
sanity checks can be used to identify files and or directories that are sanity checks can be used to identify files and or directories that are
required of a successful installation. Spack checks for the presence of required of a successful installation. Spack checks for the presence of
the files and directories after ``install()`` runs. the files and directories after ``install()`` runs.
If any of the listed files or directories are missing, then the build will If any of the listed files or directories are missing, then the build will
fail and the install prefix will be removed. If they all exist, then Spack fail and the install prefix will be removed. If they all exist, then Spack
@ -4193,7 +4193,7 @@ need to use two decorators for each phase test method:
The first decorator tells Spack when in the installation process to The first decorator tells Spack when in the installation process to
run your test method installation process; namely *after* the provided run your test method installation process; namely *after* the provided
installation phase. The second decorator tells Spack to only run the installation phase. The second decorator tells Spack to only run the
checks when the ``--test`` option is provided on the command line. checks when the ``--test`` option is provided on the command line.
.. note:: .. note::
@ -4267,17 +4267,17 @@ tests can be performed days, even weeks, after the software is installed.
Stand-alone tests are checks that should run relatively quickly -- as Stand-alone tests are checks that should run relatively quickly -- as
in on the order of at most a few minutes -- and ideally execute all in on the order of at most a few minutes -- and ideally execute all
aspects of the installed software, or at least key functionality. aspects of the installed software, or at least key functionality.
.. note:: .. note::
Execution speed is important because these tests are intended Execution speed is important because these tests are intended
to quickly assess whether the installed software works on the to quickly assess whether the installed software works on the
system. system.
Failing stand-alone tests indicate that there is no reason to Failing stand-alone tests indicate that there is no reason to
proceed with more resource-intensive tests. proceed with more resource-intensive tests.
Passing stand-alone (or smoke) tests can lead to more thorough Passing stand-alone (or smoke) tests can lead to more thorough
testing, such as extensive unit or regression tests, or tests testing, such as extensive unit or regression tests, or tests
that run at scale. Spack support for more thorough testing is that run at scale. Spack support for more thorough testing is
@ -4307,7 +4307,7 @@ file such that:
test_stage: /path/to/stage test_stage: /path/to/stage
The package can access this path **during test processing** using The package can access this path **during test processing** using
``self.test_suite.stage``. ``self.test_suite.stage``.
.. note:: .. note::
@ -4388,7 +4388,7 @@ can be implemented as shown below.
@run_after('install') @run_after('install')
def copy_test_sources(self): def copy_test_sources(self):
srcs = ['tests', srcs = ['tests',
join_path('examples', 'foo.c'), join_path('examples', 'foo.c'),
join_path('examples', 'bar.c')] join_path('examples', 'bar.c')]
self.cache_extra_test_sources(srcs) self.cache_extra_test_sources(srcs)
@ -4446,7 +4446,7 @@ Examples include:
- expected test output - expected test output
These extra files should be added to the ``test`` subdirectory of the These extra files should be added to the ``test`` subdirectory of the
package in the Spack repository. package in the Spack repository.
Spack will **automatically copy** the contents of that directory to the Spack will **automatically copy** the contents of that directory to the
test staging directory for stand-alone testing. The ``test`` method can test staging directory for stand-alone testing. The ``test`` method can
@ -4471,7 +4471,7 @@ The signature for ``get_escaped_text_output`` is:
where ``filename`` is the path to the file containing the expected output. where ``filename`` is the path to the file containing the expected output.
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
accessed and used as illustrated by a simplified version of an ``sqlite`` accessed and used as illustrated by a simplified version of an ``sqlite``
package check: package check:
@ -4591,10 +4591,10 @@ where each argument has the following meaning:
Options are a list of strings to be passed to the executable when Options are a list of strings to be passed to the executable when
it runs. it runs.
The default is ``[]``, which means no options are provided to the The default is ``[]``, which means no options are provided to the
executable. executable.
* ``expected`` is an optional list of expected output strings. * ``expected`` is an optional list of expected output strings.
Spack requires every string in ``expected`` to be a regex matching Spack requires every string in ``expected`` to be a regex matching
@ -4605,31 +4605,31 @@ where each argument has the following meaning:
The expected output can be :ref:`read from a file The expected output can be :ref:`read from a file
<expected_test_output_from_file>`. <expected_test_output_from_file>`.
The default is ``expected=[]``, so Spack will not check the output. The default is ``expected=[]``, so Spack will not check the output.
* ``status`` is the optional expected return code(s). * ``status`` is the optional expected return code(s).
A list of return codes corresponding to successful execution can A list of return codes corresponding to successful execution can
be provided (e.g., ``status=[0,3,7]``). Support for non-zero return be provided (e.g., ``status=[0,3,7]``). Support for non-zero return
codes allows for basic **expected failure** tests as well as different codes allows for basic **expected failure** tests as well as different
return codes across versions of the software. return codes across versions of the software.
The default is ``status=[0]``, which corresponds to **successful** The default is ``status=[0]``, which corresponds to **successful**
execution in the sense that the executable does not exit with a execution in the sense that the executable does not exit with a
failure code or raise an exception. failure code or raise an exception.
* ``installed`` is used to require ``exe`` to be within the package * ``installed`` is used to require ``exe`` to be within the package
prefix. prefix.
If ``True``, then the path for ``exe`` is required to be within the If ``True``, then the path for ``exe`` is required to be within the
package prefix; otherwise, the path is not constrained. package prefix; otherwise, the path is not constrained.
The default is ``False``, so the fully qualified path for ``exe`` The default is ``False``, so the fully qualified path for ``exe``
does **not** need to be within the installation directory. does **not** need to be within the installation directory.
* ``purpose`` is an optional heading describing the test part. * ``purpose`` is an optional heading describing the test part.
Output from the test is written to a test log file so this argument Output from the test is written to a test log file so this argument
serves as a searchable heading in text logs to highlight the start serves as a searchable heading in text logs to highlight the start
of the test part. Having a description can be helpful when debugging of the test part. Having a description can be helpful when debugging
@ -4644,10 +4644,10 @@ where each argument has the following meaning:
The default is ``False``, which means the test executable must be The default is ``False``, which means the test executable must be
present for any installable version of the software. present for any installable version of the software.
* ``work_dir`` is the path to the directory from which the executable * ``work_dir`` is the path to the directory from which the executable
will run. will run.
The default of ``None`` corresponds to the current directory (``'.'``). The default of ``None`` corresponds to the current directory (``'.'``).
""""""""""""""""""""""""""""""""""""""""" """""""""""""""""""""""""""""""""""""""""
@ -4754,7 +4754,7 @@ where only the outputs for the first of each set are shown:
Copyright (C) 2018 Free Software Foundation, Inc. Copyright (C) 2018 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
PASSED PASSED
... ...
==> [2021-04-26-17:35:20.493921] test: checking mpirun output ==> [2021-04-26-17:35:20.493921] test: checking mpirun output
@ -4915,7 +4915,7 @@ This is already part of the boilerplate for packages created with
Filtering functions Filtering functions
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <spack.filter_file>` :py:func:`filter_file(regex, repl, *filenames, **kwargs) <llnl.util.filesystem.filter_file>`
Works like ``sed`` but with Python regular expression syntax. Takes Works like ``sed`` but with Python regular expression syntax. Takes
a regular expression, a replacement, and a set of files. ``repl`` a regular expression, a replacement, and a set of files. ``repl``
can be a raw string or a callable function. If it is a raw string, can be a raw string or a callable function. If it is a raw string,
@ -4953,7 +4953,7 @@ Filtering functions
filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx, filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx,
prefix.bin.mpicxx) prefix.bin.mpicxx)
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <spack.change_sed_delim>` :py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <llnl.util.filesystem.change_sed_delimiter>`
Some packages, like TAU, have a build system that can't install Some packages, like TAU, have a build system that can't install
into directories with, e.g. '@' in the name, because they use into directories with, e.g. '@' in the name, because they use
hard-coded ``sed`` commands in their build. hard-coded ``sed`` commands in their build.
@ -4975,14 +4975,14 @@ Filtering functions
File functions File functions
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
:py:func:`ancestor(dir, n=1) <spack.ancestor>` :py:func:`ancestor(dir, n=1) <llnl.util.filesystem.ancestor>`
Get the n\ :sup:`th` ancestor of the directory ``dir``. Get the n\ :sup:`th` ancestor of the directory ``dir``.
:py:func:`can_access(path) <spack.can_access>` :py:func:`can_access(path) <llnl.util.filesystem.can_access>`
True if we can read and write to the file at ``path``. Same as True if we can read and write to the file at ``path``. Same as
native python ``os.access(file_name, os.R_OK|os.W_OK)``. native python ``os.access(file_name, os.R_OK|os.W_OK)``.
:py:func:`install(src, dest) <spack.install>` :py:func:`install(src, dest) <llnl.util.filesystem.install>`
Install a file to a particular location. For example, install a Install a file to a particular location. For example, install a
header into the ``include`` directory under the install ``prefix``: header into the ``include`` directory under the install ``prefix``:
@ -4990,14 +4990,14 @@ File functions
install('my-header.h', prefix.include) install('my-header.h', prefix.include)
:py:func:`join_path(*paths) <spack.join_path>` :py:func:`join_path(*paths) <llnl.util.filesystem.join_path>`
An alias for ``os.path.join``. This joins paths using the OS path separator. An alias for ``os.path.join``. This joins paths using the OS path separator.
:py:func:`mkdirp(*paths) <spack.mkdirp>` :py:func:`mkdirp(*paths) <llnl.util.filesystem.mkdirp>`
Create each of the directories in ``paths``, creating any parent Create each of the directories in ``paths``, creating any parent
directories if they do not exist. directories if they do not exist.
:py:func:`working_dir(dirname, kwargs) <spack.working_dir>` :py:func:`working_dir(dirname, kwargs) <llnl.util.filesystem.working_dir>`
This is a Python `Context Manager This is a Python `Context Manager
<https://docs.python.org/2/library/contextlib.html>`_ that makes it <https://docs.python.org/2/library/contextlib.html>`_ that makes it
easier to work with subdirectories in builds. You use this with the easier to work with subdirectories in builds. You use this with the
@ -5039,7 +5039,7 @@ File functions
The ``create=True`` keyword argument causes the command to create The ``create=True`` keyword argument causes the command to create
the directory if it does not exist. the directory if it does not exist.
:py:func:`touch(path) <spack.touch>` :py:func:`touch(path) <llnl.util.filesystem.touch>`
Create an empty file at ``path``. Create an empty file at ``path``.
.. _make-package-findable: .. _make-package-findable:

View File

@ -326,7 +326,7 @@ def end_function(self, prog=None):
"""Returns the syntax needed to end a function definition. """Returns the syntax needed to end a function definition.
Parameters: Parameters:
prog (str, optional): the command name prog (str or None): the command name
Returns: Returns:
str: the function definition ending str: the function definition ending

View File

@ -444,7 +444,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
src (str): the directory to copy src (str): the directory to copy
dest (str): the destination directory dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only _permissions (bool): for internal use only
Raises: Raises:
@ -518,7 +518,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
src (str): the directory to install src (str): the directory to install
dest (str): the destination directory dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore ignore (typing.Callable): function indicating which files to ignore
Raises: Raises:
IOError: if *src* does not match any files or directories IOError: if *src* does not match any files or directories
@ -557,12 +557,12 @@ def mkdirp(*paths, **kwargs):
paths (str): paths to create with mkdirp paths (str): paths to create with mkdirp
Keyword Arguments: Keyword Arguments:
mode (permission bits or None, optional): optional permissions to set mode (permission bits or None): optional permissions to set
on the created directory -- use OS default if not provided on the created directory -- use OS default if not provided
group (group name or None, optional): optional group for permissions of group (group name or None): optional group for permissions of
final created directory -- use OS default if not provided. Only final created directory -- use OS default if not provided. Only
used if world write permissions are not set used if world write permissions are not set
default_perms ('parents' or 'args', optional): The default permissions default_perms (str or None): one of 'parents' or 'args'. The default permissions
that are set for directories that are not themselves an argument that are set for directories that are not themselves an argument
for mkdirp. 'parents' means intermediate directories get the for mkdirp. 'parents' means intermediate directories get the
permissions of their direct parent directory, 'args' means permissions of their direct parent directory, 'args' means
@ -866,7 +866,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
Keyword Arguments: Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post' values are 'pre' and 'post'
ignore (function): function indicating which files to ignore ignore (typing.Callable): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in follow_nonexisting (bool): Whether to descend into directories in
``src`` that do not exist in ``dest``. Default is True ``src`` that do not exist in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src`` follow_links (bool): Whether to descend into symlinks in ``src``
@ -1114,11 +1114,11 @@ def find(root, files, recursive=True):
Parameters: Parameters:
root (str): The root directory to start searching from root (str): The root directory to start searching from
files (str or Sequence): Library name(s) to search for files (str or Sequence): Library name(s) to search for
recurse (bool, optional): if False search only root folder, recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to True. if True descends top-down from the root. Defaults to True.
Returns: Returns:
list of strings: The files that have been found list: The files that have been found
""" """
if isinstance(files, six.string_types): if isinstance(files, six.string_types):
files = [files] files = [files]
@ -1200,7 +1200,7 @@ def directories(self):
['/dir1', '/dir2'] ['/dir1', '/dir2']
Returns: Returns:
list of strings: A list of directories list: A list of directories
""" """
return list(dedupe( return list(dedupe(
os.path.dirname(x) for x in self.files if os.path.dirname(x) os.path.dirname(x) for x in self.files if os.path.dirname(x)
@ -1218,7 +1218,7 @@ def basenames(self):
['a.h', 'b.h'] ['a.h', 'b.h']
Returns: Returns:
list of strings: A list of base-names list: A list of base-names
""" """
return list(dedupe(os.path.basename(x) for x in self.files)) return list(dedupe(os.path.basename(x) for x in self.files))
@ -1305,7 +1305,7 @@ def headers(self):
"""Stable de-duplication of the headers. """Stable de-duplication of the headers.
Returns: Returns:
list of strings: A list of header files list: A list of header files
""" """
return self.files return self.files
@ -1318,7 +1318,7 @@ def names(self):
['a', 'b'] ['a', 'b']
Returns: Returns:
list of strings: A list of files without extensions list: A list of files without extensions
""" """
names = [] names = []
@ -1409,9 +1409,9 @@ def find_headers(headers, root, recursive=False):
======= ==================================== ======= ====================================
Parameters: Parameters:
headers (str or list of str): Header name(s) to search for headers (str or list): Header name(s) to search for
root (str): The root directory to start searching from root (str): The root directory to start searching from
recursive (bool, optional): if False search only root folder, recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False. if True descends top-down from the root. Defaults to False.
Returns: Returns:
@ -1447,7 +1447,7 @@ def find_all_headers(root):
in the directory passed as argument. in the directory passed as argument.
Args: Args:
root (path): directory where to look recursively for header files root (str): directory where to look recursively for header files
Returns: Returns:
List of all headers found in ``root`` and subdirectories. List of all headers found in ``root`` and subdirectories.
@ -1467,7 +1467,7 @@ def libraries(self):
"""Stable de-duplication of library files. """Stable de-duplication of library files.
Returns: Returns:
list of strings: A list of library files list: A list of library files
""" """
return self.files return self.files
@ -1480,7 +1480,7 @@ def names(self):
['a', 'b'] ['a', 'b']
Returns: Returns:
list of strings: A list of library names list: A list of library names
""" """
names = [] names = []
@ -1565,8 +1565,8 @@ def find_system_libraries(libraries, shared=True):
======= ==================================== ======= ====================================
Parameters: Parameters:
libraries (str or list of str): Library name(s) to search for libraries (str or list): Library name(s) to search for
shared (bool, optional): if True searches for shared libraries, shared (bool): if True searches for shared libraries,
otherwise for static. Defaults to True. otherwise for static. Defaults to True.
Returns: Returns:
@ -1616,11 +1616,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
======= ==================================== ======= ====================================
Parameters: Parameters:
libraries (str or list of str): Library name(s) to search for libraries (str or list): Library name(s) to search for
root (str): The root directory to start searching from root (str): The root directory to start searching from
shared (bool, optional): if True searches for shared libraries, shared (bool): if True searches for shared libraries,
otherwise for static. Defaults to True. otherwise for static. Defaults to True.
recursive (bool, optional): if False search only root folder, recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False. if True descends top-down from the root. Defaults to False.
Returns: Returns:

View File

@ -573,8 +573,8 @@ def pretty_date(time, now=None):
"""Convert a datetime or timestamp to a pretty, relative date. """Convert a datetime or timestamp to a pretty, relative date.
Args: Args:
time (datetime or int): date to print prettily time (datetime.datetime or int): date to print prettily
now (datetime): dateimte for 'now', i.e. the date the pretty date now (datetime.datetime): datetime for 'now', i.e. the date the pretty date
is relative to (default is datetime.now()) is relative to (default is datetime.now())
Returns: Returns:
@ -648,7 +648,7 @@ def pretty_string_to_date(date_str, now=None):
or be a *pretty date* (like ``yesterday`` or ``two months ago``) or be a *pretty date* (like ``yesterday`` or ``two months ago``)
Returns: Returns:
(datetime): datetime object corresponding to ``date_str`` (datetime.datetime): datetime object corresponding to ``date_str``
""" """
pattern = {} pattern = {}

View File

@ -14,9 +14,19 @@
import spack.util.string import spack.util.string
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction', __all__ = [
'LockError', 'LockTimeoutError', 'Lock',
'LockPermissionError', 'LockROFileError', 'CantCreateLockError'] 'LockDowngradeError',
'LockUpgradeError',
'LockTransaction',
'WriteTransaction',
'ReadTransaction',
'LockError',
'LockTimeoutError',
'LockPermissionError',
'LockROFileError',
'CantCreateLockError'
]
#: Mapping of supported locks to description #: Mapping of supported locks to description
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'} lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
@ -401,7 +411,7 @@ def release_read(self, release_fn=None):
"""Releases a read lock. """Releases a read lock.
Arguments: Arguments:
release_fn (callable): function to call *before* the last recursive release_fn (typing.Callable): function to call *before* the last recursive
lock (read or write) is released. lock (read or write) is released.
If the last recursive lock will be released, then this will call If the last recursive lock will be released, then this will call
@ -437,7 +447,7 @@ def release_write(self, release_fn=None):
"""Releases a write lock. """Releases a write lock.
Arguments: Arguments:
release_fn (callable): function to call before the last recursive release_fn (typing.Callable): function to call before the last recursive
write is released. write is released.
If the last recursive *write* lock will be released, then this If the last recursive *write* lock will be released, then this
@ -533,10 +543,10 @@ class LockTransaction(object):
Arguments: Arguments:
lock (Lock): underlying lock for this transaction to be acquired on lock (Lock): underlying lock for this transaction to be acquired on
enter and released on exit enter and released on exit
acquire (callable or contextmanager): function to be called after lock acquire (typing.Callable or contextlib.contextmanager): function to be called
is acquired, or contextmanager to enter after acquire and leave after lock is acquired, or contextmanager to enter after acquire and leave
before release. before release.
release (callable): function to be called before release. If release (typing.Callable): function to be called before release. If
``acquire`` is a contextmanager, this will be called *after* ``acquire`` is a contextmanager, this will be called *after*
exiting the nested context and before the lock is released. exiting the nested context and before the lock is released.
timeout (float): number of seconds to set for the timeout when timeout (float): number of seconds to set for the timeout when

View File

@ -109,19 +109,17 @@ def colify(elts, **options):
using ``str()``. using ``str()``.
Keyword Arguments: Keyword Arguments:
output (stream): A file object to write to. Default is ``sys.stdout`` output (typing.IO): A file object to write to. Default is ``sys.stdout``
indent (int): Optionally indent all columns by some number of spaces indent (int): Optionally indent all columns by some number of spaces
padding (int): Spaces between columns. Default is 2 padding (int): Spaces between columns. Default is 2
width (int): Width of the output. Default is 80 if tty not detected width (int): Width of the output. Default is 80 if tty not detected
cols (int): Force number of columns. Default is to size to cols (int): Force number of columns. Default is to size to terminal, or
terminal, or single-column if no tty single-column if no tty
tty (bool): Whether to attempt to write to a tty. Default is to tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
autodetect a tty. Set to False to force single-column tty. Set to False to force single-column output
output method (str): Method to use to fit columns. Options are variable or uniform.
method (str): Method to use to fit columns. Options are variable or Variable-width columns are tighter, uniform columns are all the same width
uniform. Variable-width columns are tighter, uniform and fit less data on the screen
columns are all the same width and fit less data on
the screen
""" """
# Get keyword arguments or set defaults # Get keyword arguments or set defaults
cols = options.pop("cols", 0) cols = options.pop("cols", 0)

View File

@ -193,8 +193,8 @@ def optimization_flags(self, compiler):
the compiler passed as argument. the compiler passed as argument.
Args: Args:
compiler (CompilerSpec or Compiler): object that contains both the compiler (spack.spec.CompilerSpec or spack.compiler.Compiler): object that
name and the version of the compiler we want to use contains both the name and the version of the compiler we want to use
""" """
# Mixed toolchains are not supported yet # Mixed toolchains are not supported yet
import spack.compilers import spack.compilers

View File

@ -206,7 +206,7 @@ def find_built_spec(self, spec):
The cache can be updated by calling ``update()`` on the cache. The cache can be updated by calling ``update()`` on the cache.
Args: Args:
spec (Spec): Concrete spec to find spec (spack.spec.Spec): Concrete spec to find
Returns: Returns:
A list of objects containing the found specs and mirror url where A list of objects containing the found specs and mirror url where
@ -1079,14 +1079,14 @@ def download_tarball(spec, preferred_mirrors=None):
path to downloaded tarball if successful, None otherwise. path to downloaded tarball if successful, None otherwise.
Args: Args:
spec (Spec): Concrete spec spec (spack.spec.Spec): Concrete spec
preferred_mirrors (list): If provided, this is a list of preferred preferred_mirrors (list): If provided, this is a list of preferred
mirror urls. Other configured mirrors will only be used if the mirror urls. Other configured mirrors will only be used if the
tarball can't be retrieved from one of these. tarball can't be retrieved from one of these.
Returns: Returns:
Path to the downloaded tarball, or ``None`` if the tarball could not Path to the downloaded tarball, or ``None`` if the tarball could not
be downloaded from any configured mirrors. be downloaded from any configured mirrors.
""" """
if not spack.mirror.MirrorCollection(): if not spack.mirror.MirrorCollection():
tty.die("Please add a spack mirror to allow " + tty.die("Please add a spack mirror to allow " +
@ -1455,7 +1455,7 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
indicating the mirrors on which it can be found indicating the mirrors on which it can be found
Args: Args:
spec (Spec): The spec to look for in binary mirrors spec (spack.spec.Spec): The spec to look for in binary mirrors
full_hash_match (bool): If True, only includes mirrors where the spec full_hash_match (bool): If True, only includes mirrors where the spec
full hash matches the locally computed full hash of the ``spec`` full hash matches the locally computed full hash of the ``spec``
argument. If False, any mirror which has a matching DAG hash argument. If False, any mirror which has a matching DAG hash
@ -1732,11 +1732,11 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
Arguments: Arguments:
mirrors (dict): Mirrors to check against mirrors (dict): Mirrors to check against
specs (iterable): Specs to check against mirrors specs (typing.Iterable): Specs to check against mirrors
output_file (string): Path to output file to be written. If provided, output_file (str): Path to output file to be written. If provided,
mirrors with missing or out-of-date specs will be formatted as a mirrors with missing or out-of-date specs will be formatted as a
JSON object and written to this file. JSON object and written to this file.
rebuild_on_errors (boolean): Treat any errors encountered while rebuild_on_errors (bool): Treat any errors encountered while
checking specs as a signal to rebuild package. checking specs as a signal to rebuild package.
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise. Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.

View File

@ -134,7 +134,7 @@ def get_executable(exe, spec=None, install=False):
Args: Args:
exe (str): needed executable name exe (str): needed executable name
spec (Spec or str): spec to search for exe in (default exe) spec (spack.spec.Spec or str): spec to search for exe in (default exe)
install (bool): install spec if not available install (bool): install spec if not available
When ``install`` is True, Spack will use the python used to run Spack as an When ``install`` is True, Spack will use the python used to run Spack as an

View File

@ -455,11 +455,11 @@ def determine_number_of_jobs(
cap to the number of CPUs available to avoid oversubscription. cap to the number of CPUs available to avoid oversubscription.
Parameters: Parameters:
parallel (bool): true when package supports parallel builds parallel (bool or None): true when package supports parallel builds
command_line (int/None): command line override command_line (int or None): command line override
config_default (int/None): config default number of jobs config_default (int or None): config default number of jobs
max_cpus (int/None): maximum number of CPUs available. When None, this max_cpus (int or None): maximum number of CPUs available. When None, this
value is automatically determined. value is automatically determined.
""" """
if not parallel: if not parallel:
return 1 return 1
@ -685,14 +685,14 @@ def get_std_cmake_args(pkg):
"""List of standard arguments used if a package is a CMakePackage. """List of standard arguments used if a package is a CMakePackage.
Returns: Returns:
list of str: standard arguments that would be used if this list: standard arguments that would be used if this
package were a CMakePackage instance. package were a CMakePackage instance.
Args: Args:
pkg (PackageBase): package under consideration pkg (spack.package.PackageBase): package under consideration
Returns: Returns:
list of str: arguments for cmake list: arguments for cmake
""" """
return spack.build_systems.cmake.CMakePackage._std_args(pkg) return spack.build_systems.cmake.CMakePackage._std_args(pkg)
@ -701,14 +701,14 @@ def get_std_meson_args(pkg):
"""List of standard arguments used if a package is a MesonPackage. """List of standard arguments used if a package is a MesonPackage.
Returns: Returns:
list of str: standard arguments that would be used if this list: standard arguments that would be used if this
package were a MesonPackage instance. package were a MesonPackage instance.
Args: Args:
pkg (PackageBase): package under consideration pkg (spack.package.PackageBase): package under consideration
Returns: Returns:
list of str: arguments for meson list: arguments for meson
""" """
return spack.build_systems.meson.MesonPackage._std_args(pkg) return spack.build_systems.meson.MesonPackage._std_args(pkg)
@ -738,7 +738,7 @@ def load_external_modules(pkg):
associated with them. associated with them.
Args: Args:
pkg (PackageBase): package to load deps for pkg (spack.package.PackageBase): package to load deps for
""" """
for dep in list(pkg.spec.traverse()): for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or [] external_modules = dep.external_modules or []
@ -864,7 +864,7 @@ def modifications_from_dependencies(spec, context, custom_mods_only=True):
CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH). CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
Args: Args:
spec (Spec): spec for which we want the modifications spec (spack.spec.Spec): spec for which we want the modifications
context (str): either 'build' for build-time modifications or 'run' context (str): either 'build' for build-time modifications or 'run'
for run-time modifications for run-time modifications
""" """
@ -1062,9 +1062,9 @@ def start_build_process(pkg, function, kwargs):
Args: Args:
pkg (PackageBase): package whose environment we should set up the pkg (spack.package.PackageBase): package whose environment we should set up the
child process for. child process for.
function (callable): argless function to run in the child function (typing.Callable): argless function to run in the child
process. process.
Usage:: Usage::
@ -1149,7 +1149,7 @@ def get_package_context(traceback, context=3):
"""Return some context for an error message when the build fails. """Return some context for an error message when the build fails.
Args: Args:
traceback (traceback): A traceback from some exception raised during traceback: A traceback from some exception raised during
install install
context (int): Lines of context to show before and after the line context (int): Lines of context to show before and after the line

View File

@ -30,7 +30,7 @@ class AutotoolsPackage(PackageBase):
They all have sensible defaults and for many packages the only thing They all have sensible defaults and for many packages the only thing
necessary will be to override the helper method necessary will be to override the helper method
:py:meth:`~.AutotoolsPackage.configure_args`. :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
For a finer tuning you may also override: For a finer tuning you may also override:
+-----------------------------------------------+--------------------+ +-----------------------------------------------+--------------------+
@ -331,7 +331,7 @@ def flags_to_build_system_args(self, flags):
def configure(self, spec, prefix): def configure(self, spec, prefix):
"""Runs configure with the arguments specified in """Runs configure with the arguments specified in
:py:meth:`~.AutotoolsPackage.configure_args` :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
and an appropriately set prefix. and an appropriately set prefix.
""" """
options = getattr(self, 'configure_flag_args', []) options = getattr(self, 'configure_flag_args', [])
@ -376,8 +376,8 @@ def _activate_or_not(
activation_value=None activation_value=None
): ):
"""This function contains the current implementation details of """This function contains the current implementation details of
:py:meth:`~.AutotoolsPackage.with_or_without` and :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
:py:meth:`~.AutotoolsPackage.enable_or_disable`. :meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
Args: Args:
name (str): name of the variant that is being processed name (str): name of the variant that is being processed
@ -385,7 +385,7 @@ def _activate_or_not(
case of ``with_or_without``) case of ``with_or_without``)
deactivation_word (str): the default deactivation word ('without' deactivation_word (str): the default deactivation word ('without'
in the case of ``with_or_without``) in the case of ``with_or_without``)
activation_value (callable): callable that accepts a single activation_value (typing.Callable): callable that accepts a single
value. This value is either one of the allowed values for a value. This value is either one of the allowed values for a
multi-valued variant or the name of a bool-valued variant. multi-valued variant or the name of a bool-valued variant.
Returns the parameter to be used when the value is activated. Returns the parameter to be used when the value is activated.
@ -420,7 +420,7 @@ def _activate_or_not(
for ``<spec-name> foo=x +bar`` for ``<spec-name> foo=x +bar``
Returns: Returns:
list of strings that corresponds to the activation/deactivation list: list of strings that corresponds to the activation/deactivation
of the variant that has been processed of the variant that has been processed
Raises: Raises:
@ -501,7 +501,7 @@ def with_or_without(self, name, activation_value=None):
Args: Args:
name (str): name of a valid multi-valued variant name (str): name of a valid multi-valued variant
activation_value (callable): callable that accepts a single activation_value (typing.Callable): callable that accepts a single
value and returns the parameter to be used leading to an entry value and returns the parameter to be used leading to an entry
of the type ``--with-{name}={parameter}``. of the type ``--with-{name}={parameter}``.
@ -514,12 +514,13 @@ def with_or_without(self, name, activation_value=None):
return self._activate_or_not(name, 'with', 'without', activation_value) return self._activate_or_not(name, 'with', 'without', activation_value)
def enable_or_disable(self, name, activation_value=None): def enable_or_disable(self, name, activation_value=None):
"""Same as :py:meth:`~.AutotoolsPackage.with_or_without` but substitute """Same as
``with`` with ``enable`` and ``without`` with ``disable``. :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args: Args:
name (str): name of a valid multi-valued variant name (str): name of a valid multi-valued variant
activation_value (callable): if present accepts a single value activation_value (typing.Callable): if present accepts a single value
and returns the parameter to be used leading to an entry of the and returns the parameter to be used leading to an entry of the
type ``--enable-{name}={parameter}`` type ``--enable-{name}={parameter}``

View File

@ -236,7 +236,7 @@ def define_from_variant(self, cmake_var, variant=None):
of ``cmake_var``. of ``cmake_var``.
This utility function is similar to This utility function is similar to
:py:meth:`~.AutotoolsPackage.with_or_without`. :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`.
Examples: Examples:

View File

@ -368,7 +368,7 @@ def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
toplevel psxevars.sh or equivalent file to source (and thus by toplevel psxevars.sh or equivalent file to source (and thus by
the modulefiles that Spack produces). the modulefiles that Spack produces).
version_globs (list of str): Suffix glob patterns (most specific version_globs (list): Suffix glob patterns (most specific
first) expected to qualify suite_dir_name to its fully first) expected to qualify suite_dir_name to its fully
version-specific install directory (as opposed to a version-specific install directory (as opposed to a
compatibility directory or symlink). compatibility directory or symlink).

View File

@ -216,10 +216,10 @@ def disambiguate_spec(spec, env, local=False, installed=True, first=False):
spec (spack.spec.Spec): a spec to disambiguate spec (spack.spec.Spec): a spec to disambiguate
env (spack.environment.Environment): a spack environment, env (spack.environment.Environment): a spack environment,
if one is active, or None if no environment is active if one is active, or None if no environment is active
local (boolean, default False): do not search chained spack instances local (bool): do not search chained spack instances
installed (boolean or any, or spack.database.InstallStatus or iterable installed (bool or spack.database.InstallStatus or typing.Iterable):
of spack.database.InstallStatus): install status argument passed to install status argument passed to database query.
database query. See ``spack.database.Database._query`` for details. See ``spack.database.Database._query`` for details.
""" """
hashes = env.all_hashes() if env else None hashes = env.all_hashes() if env else None
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first) return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
@ -231,11 +231,11 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,
Arguments: Arguments:
spec (spack.spec.Spec): a spec to disambiguate spec (spack.spec.Spec): a spec to disambiguate
hashes (iterable): a set of hashes of specs among which to disambiguate hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
local (boolean, default False): do not search chained spack instances local (bool): do not search chained spack instances
installed (boolean or any, or spack.database.InstallStatus or iterable installed (bool or spack.database.InstallStatus or typing.Iterable):
of spack.database.InstallStatus): install status argument passed to install status argument passed to database query.
database query. See ``spack.database.Database._query`` for details. See ``spack.database.Database._query`` for details.
""" """
if local: if local:
matching_specs = spack.store.db.query_local(spec, hashes=hashes, matching_specs = spack.store.db.query_local(spec, hashes=hashes,
@ -333,9 +333,8 @@ def display_specs(specs, args=None, **kwargs):
namespace. namespace.
Args: Args:
specs (list of spack.spec.Spec): the specs to display specs (list): the specs to display
args (optional argparse.Namespace): namespace containing args (argparse.Namespace or None): namespace containing formatting arguments
formatting arguments
Keyword Args: Keyword Args:
paths (bool): Show paths with each displayed spec paths (bool): Show paths with each displayed spec
@ -348,9 +347,9 @@ def display_specs(specs, args=None, **kwargs):
indent (int): indent each line this much indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True) groups (bool): display specs grouped by arch/compiler (default True)
decorators (dict): dictionary mappng specs to decorators decorators (dict): dictionary mappng specs to decorators
header_callback (function): called at start of arch/compiler groups header_callback (typing.Callable): called at start of arch/compiler groups
all_headers (bool): show headers even when arch/compiler aren't defined all_headers (bool): show headers even when arch/compiler aren't defined
output (stream): A file object to write to. Default is ``sys.stdout`` output (typing.IO): A file object to write to. Default is ``sys.stdout``
""" """
def get_arg(name, default=None): def get_arg(name, default=None):

View File

@ -58,9 +58,9 @@ def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=Fals
analyze_spec(spec, args.analyzers, args.outdir, monitor) analyze_spec(spec, args.analyzers, args.outdir, monitor)
Args: Args:
spec (Spec): spec object of installed package spec (spack.spec.Spec): spec object of installed package
analyzers (list): list of analyzer (keys) to run analyzers (list): list of analyzer (keys) to run
monitor (monitor.SpackMonitorClient): a monitor client monitor (spack.monitor.SpackMonitorClient): a monitor client
overwrite (bool): overwrite result if already exists overwrite (bool): overwrite result if already exists
""" """
analyzers = analyzers or list(spack.analyzers.analyzer_types.keys()) analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())

View File

@ -239,12 +239,13 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
concretized specs given from cli concretized specs given from cli
Args: Args:
pkgs (string): spec to be matched against installed packages pkgs (str): spec to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted allow_multiple_matches (bool): if True multiple matches are admitted
env (Environment): active environment, or ``None`` if there is not one env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
Return: Return:
list of specs list: list of specs
""" """
hashes = env.all_hashes() if env else None hashes = env.all_hashes() if env else None

View File

@ -636,7 +636,7 @@ def get_name(args):
provided, extract the name from that. Otherwise, use a default. provided, extract the name from that. Otherwise, use a default.
Args: Args:
args (param argparse.Namespace): The arguments given to args (argparse.Namespace): The arguments given to
``spack create`` ``spack create``
Returns: Returns:
@ -709,8 +709,7 @@ def get_versions(args, name):
name (str): The name of the package name (str): The name of the package
Returns: Returns:
str and BuildSystemGuesser: Versions and hashes, and a tuple: versions and hashes, and a BuildSystemGuesser object
BuildSystemGuesser object
""" """
# Default version with hash # Default version with hash
@ -794,7 +793,8 @@ def get_repository(args, name):
name (str): The name of the package to create name (str): The name of the package to create
Returns: Returns:
Repo: A Repo object capable of determining the path to the package file spack.repo.Repo: A Repo object capable of determining the path to the
package file
""" """
spec = Spec(name) spec = Spec(name)
# Figure out namespace for spec # Figure out namespace for spec

View File

@ -59,7 +59,7 @@ def get_dependents(pkg_name, ideps, transitive=False, dependents=None):
Args: Args:
pkg_name (str): name of the package whose dependents should be returned pkg_name (str): name of the package whose dependents should be returned
ideps (dict): dictionary of dependents, from inverted_dependencies() ideps (dict): dictionary of dependents, from inverted_dependencies()
transitive (bool, optional): return transitive dependents when True transitive (bool or None): return transitive dependents when True
""" """
if dependents is None: if dependents is None:
dependents = set() dependents = set()

View File

@ -198,9 +198,9 @@ def install_specs(cli_args, kwargs, specs):
"""Do the actual installation. """Do the actual installation.
Args: Args:
cli_args (Namespace): argparse namespace with command arguments cli_args (argparse.Namespace): argparse namespace with command arguments
kwargs (dict): keyword arguments kwargs (dict): keyword arguments
specs (list of tuples): list of (abstract, concrete) spec tuples specs (list): list of (abstract, concrete) spec tuples
""" """
# handle active environment, if any # handle active environment, if any

View File

@ -69,12 +69,13 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
concretized specs given from cli concretized specs given from cli
Args: Args:
env (Environment): active environment, or ``None`` if there is not one env (spack.environment.Environment): active environment, or ``None``
if there is not one
specs (list): list of specs to be matched against installed packages specs (list): list of specs to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted allow_multiple_matches (bool): if True multiple matches are admitted
Return: Return:
list of specs list: list of specs
""" """
# constrain uninstall resolution to current environment if one is active # constrain uninstall resolution to current environment if one is active
hashes = env.all_hashes() if env else None hashes = env.all_hashes() if env else None
@ -118,15 +119,13 @@ def installed_dependents(specs, env):
Args: Args:
specs (list): list of Specs specs (list): list of Specs
env (Environment): the active environment, or None env (spack.environment.Environment or None): the active environment, or None
Returns: Returns:
(tuple of dicts): two mappings: one from specs to their dependent tuple: two mappings: one from specs to their dependent environments in the
environments in the active environment (or global scope if active environment (or global scope if there is no environment), and one from
there is no environment), and one from specs to their specs to their dependents in *inactive* environments (empty if there is no
dependents in *inactive* environments (empty if there is no environment
environment
""" """
active_dpts = {} active_dpts = {}
inactive_dpts = {} inactive_dpts = {}
@ -155,9 +154,9 @@ def dependent_environments(specs):
Args: Args:
specs (list): list of Specs specs (list): list of Specs
Returns:
(dict): mapping from spec to lists of dependent Environments
Returns:
dict: mapping from spec to lists of dependent Environments
""" """
dependents = {} dependents = {}
for env in ev.all_environments(): for env in ev.all_environments():
@ -176,9 +175,10 @@ def inactive_dependent_environments(spec_envs):
have no dependent environments. Return the result. have no dependent environments. Return the result.
Args: Args:
(dict): mapping from spec to lists of dependent Environments spec_envs (dict): mapping from spec to lists of dependent Environments
Returns: Returns:
(dict): mapping from spec to lists of *inactive* dependent Environments dict: mapping from spec to lists of *inactive* dependent Environments
""" """
spec_inactive_envs = {} spec_inactive_envs = {}
for spec, de_list in spec_envs.items(): for spec, de_list in spec_envs.items():
@ -203,7 +203,8 @@ def do_uninstall(env, specs, force):
"""Uninstalls all the specs in a list. """Uninstalls all the specs in a list.
Args: Args:
env (Environment): active environment, or ``None`` if there is not one env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
specs (list): list of specs to be uninstalled specs (list): list of specs to be uninstalled
force (bool): force uninstallation (boolean) force (bool): force uninstallation (boolean)
""" """

View File

@ -502,7 +502,7 @@ def remove_separators(version):
Unfortunately, this also means that 1.23 and 12.3 are equal. Unfortunately, this also means that 1.23 and 12.3 are equal.
Args: Args:
version (str or Version): A version version (str or spack.version.Version): A version
Returns: Returns:
str: The version with all separator characters removed str: The version with all separator characters removed

View File

@ -135,8 +135,8 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
"""Add compilers to the config for the specified architecture. """Add compilers to the config for the specified architecture.
Arguments: Arguments:
- compilers: a list of Compiler objects. compilers: a list of Compiler objects.
- scope: configuration scope to modify. scope: configuration scope to modify.
""" """
compiler_config = get_compiler_config(scope, init_config) compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers: for compiler in compilers:
@ -151,8 +151,8 @@ def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec. """Remove compilers from the config, by spec.
Arguments: Arguments:
- compiler_specs: a list of CompilerSpec objects. compiler_specs: a list of CompilerSpec objects.
- scope: configuration scope to modify. scope: configuration scope to modify.
""" """
# Need a better way for this # Need a better way for this
global _cache_config_file global _cache_config_file
@ -544,8 +544,8 @@ def arguments_to_detect_version_fn(operating_system, paths):
function by providing a method called with the same name. function by providing a method called with the same name.
Args: Args:
operating_system (OperatingSystem): the operating system on which operating_system (spack.architecture.OperatingSystem): the operating system
we are looking for compilers on which we are looking for compilers
paths: paths to search for compilers paths: paths to search for compilers
Returns: Returns:
@ -649,7 +649,7 @@ def make_compiler_list(detected_versions):
valid version valid version
Returns: Returns:
list of Compiler objects list: list of Compiler objects
""" """
group_fn = lambda x: (x.id, x.variation, x.language) group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn) sorted_compilers = sorted(detected_versions, key=group_fn)
@ -715,7 +715,7 @@ def is_mixed_toolchain(compiler):
False otherwise. False otherwise.
Args: Args:
compiler (Compiler): a valid compiler object compiler (spack.compiler.Compiler): a valid compiler object
""" """
cc = os.path.basename(compiler.cc or '') cc = os.path.basename(compiler.cc or '')
cxx = os.path.basename(compiler.cxx or '') cxx = os.path.basename(compiler.cxx or '')

View File

@ -17,8 +17,8 @@
And corresponding :ref:`per-platform scopes <platform-scopes>`. Important And corresponding :ref:`per-platform scopes <platform-scopes>`. Important
functions in this module are: functions in this module are:
* :py:func:`get_config` * :func:`~spack.config.Configuration.get_config`
* :py:func:`update_config` * :func:`~spack.config.Configuration.update_config`
``get_config`` reads in YAML data for a particular scope and returns ``get_config`` reads in YAML data for a particular scope and returns
it. Callers can then modify the data and write it back with it. Callers can then modify the data and write it back with
@ -722,7 +722,7 @@ def override(path_or_scope, value=None):
Arguments: Arguments:
path_or_scope (ConfigScope or str): scope or single option to override path_or_scope (ConfigScope or str): scope or single option to override
value (object, optional): value for the single option value (object or None): value for the single option
Temporarily push a scope on the current configuration, then remove it Temporarily push a scope on the current configuration, then remove it
after the context completes. If a single option is provided, create after the context completes. If a single option is provided, create
@ -1163,7 +1163,7 @@ def default_modify_scope(section='config'):
priority scope. priority scope.
Arguments: Arguments:
section (boolean): Section for which to get the default scope. section (bool): Section for which to get the default scope.
If this is not 'compilers', a general (non-platform) scope is used. If this is not 'compilers', a general (non-platform) scope is used.
""" """
if section == 'compilers': if section == 'compilers':

View File

@ -171,13 +171,13 @@ class InstallRecord(object):
dependents left. dependents left.
Args: Args:
spec (Spec): spec tracked by the install record spec (spack.spec.Spec): spec tracked by the install record
path (str): path where the spec has been installed path (str): path where the spec has been installed
installed (bool): whether or not the spec is currently installed installed (bool): whether or not the spec is currently installed
ref_count (int): number of specs that depend on this one ref_count (int): number of specs that depend on this one
explicit (bool, optional): whether or not this spec was explicitly explicit (bool or None): whether or not this spec was explicitly
installed, or pulled-in as a dependency of something else installed, or pulled-in as a dependency of something else
installation_time (time, optional): time of the installation installation_time (datetime.datetime or None): time of the installation
""" """
def __init__( def __init__(
@ -256,36 +256,36 @@ def __getattribute__(self, name):
database. If it is a spec, we'll evaluate database. If it is a spec, we'll evaluate
``spec.satisfies(query_spec)`` ``spec.satisfies(query_spec)``
known (bool or any, optional): Specs that are "known" are those known (bool or None): Specs that are "known" are those
for which Spack can locate a ``package.py`` file -- i.e., for which Spack can locate a ``package.py`` file -- i.e.,
Spack "knows" how to install them. Specs that are unknown may Spack "knows" how to install them. Specs that are unknown may
represent packages that existed in a previous version of represent packages that existed in a previous version of
Spack, but have since either changed their name or Spack, but have since either changed their name or
been removed been removed
installed (bool or any, or InstallStatus or iterable of installed (bool or InstallStatus or typing.Iterable or None):
InstallStatus, optional): if ``True``, includes only installed if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable ``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status of InstallStatus, returns specs whose install status
(installed, deprecated, or missing) matches (one of) the (installed, deprecated, or missing) matches (one of) the
InstallStatus. (default: True) InstallStatus. (default: True)
explicit (bool or any, optional): A spec that was installed explicit (bool or None): A spec that was installed
following a specific user request is marked as explicit. If following a specific user request is marked as explicit. If
instead it was pulled-in as a dependency of a user requested instead it was pulled-in as a dependency of a user requested
spec it's considered implicit. spec it's considered implicit.
start_date (datetime, optional): filters the query discarding start_date (datetime.datetime or None): filters the query
specs that have been installed before ``start_date``. discarding specs that have been installed before ``start_date``.
end_date (datetime, optional): filters the query discarding end_date (datetime.datetime or None): filters the query discarding
specs that have been installed after ``end_date``. specs that have been installed after ``end_date``.
hashes (container): list or set of hashes that we can use to hashes (typing.Container): list or set of hashes that we can use to
restrict the search restrict the search
in_buildcache (bool or any, optional): Specs that are marked in in_buildcache (bool or None): Specs that are marked in
this database as part of an associated binary cache are this database as part of an associated binary cache are
``in_buildcache``. All other specs are not. This field is used ``in_buildcache``. All other specs are not. This field is used
for querying mirror indices. Default is ``any``. for querying mirror indices. Default is ``any``.
@ -449,7 +449,7 @@ def clear_failure(self, spec, force=False):
see `mark_failed()`. see `mark_failed()`.
Args: Args:
spec (Spec): the spec whose failure indicators are being removed spec (spack.spec.Spec): the spec whose failure indicators are being removed
force (bool): True if the failure information should be cleared force (bool): True if the failure information should be cleared
when a prefix failure lock exists for the file or False if when a prefix failure lock exists for the file or False if
the failure should not be cleared (e.g., it may be the failure should not be cleared (e.g., it may be
@ -1391,10 +1391,10 @@ def get_by_hash_local(self, *args, **kwargs):
Arguments: Arguments:
dag_hash (str): hash (or hash prefix) to look up dag_hash (str): hash (or hash prefix) to look up
default (object, optional): default value to return if dag_hash is default (object or None): default value to return if dag_hash is
not in the DB (default: None) not in the DB (default: None)
installed (bool or any, or InstallStatus or iterable of installed (bool or InstallStatus or typing.Iterable or None):
InstallStatus, optional): if ``True``, includes only installed if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable ``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status of InstallStatus, returns specs whose install status
@ -1417,14 +1417,13 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
Arguments: Arguments:
dag_hash (str): hash (or hash prefix) to look up dag_hash (str): hash (or hash prefix) to look up
default (object, optional): default value to return if dag_hash is default (object or None): default value to return if dag_hash is
not in the DB (default: None) not in the DB (default: None)
installed (bool or any, or InstallStatus or iterable of installed (bool or InstallStatus or typing.Iterable or None):
InstallStatus, optional): if ``True``, includes only installed if ``True``, includes only installed specs in the search; if ``False``
specs in the search; if ``False`` only missing specs, and if only missing specs, and if ``any``, all specs in database. If an
``any``, all specs in database. If an InstallStatus or iterable InstallStatus or iterable of InstallStatus, returns specs whose install
of InstallStatus, returns specs whose install status status (installed, deprecated, or missing) matches (one of) the
(installed, deprecated, or missing) matches (one of) the
InstallStatus. (default: any) InstallStatus. (default: any)
``installed`` defaults to ``any`` so that we can refer to any ``installed`` defaults to ``any`` so that we can refer to any
@ -1596,7 +1595,7 @@ def update_explicit(self, spec, explicit):
Update the spec's explicit state in the database. Update the spec's explicit state in the database.
Args: Args:
spec (Spec): the spec whose install record is being updated spec (spack.spec.Spec): the spec whose install record is being updated
explicit (bool): ``True`` if the package was requested explicitly explicit (bool): ``True`` if the package was requested explicitly
by the user, ``False`` if it was pulled in as a dependency of by the user, ``False`` if it was pulled in as a dependency of
an explicit package. an explicit package.

View File

@ -54,7 +54,7 @@ class OpenMpi(Package):
from collections import Sequence from collections import Sequence
__all__ = [] __all__ = ['DirectiveError', 'DirectiveMeta']
#: These are variant names used by Spack internally; packages can't use them #: These are variant names used by Spack internally; packages can't use them
reserved_names = ['patches', 'dev_path'] reserved_names = ['patches', 'dev_path']
@ -85,7 +85,7 @@ def make_when_spec(value):
as part of concretization. as part of concretization.
Arguments: Arguments:
value (Spec or bool): a conditional Spec or a constant ``bool`` value (spack.spec.Spec or bool): a conditional Spec or a constant ``bool``
value indicating when a directive should be applied. value indicating when a directive should be applied.
""" """
@ -187,12 +187,16 @@ def directive(dicts=None):
Here's an example directive: Here's an example directive:
.. code-block:: python
@directive(dicts='versions') @directive(dicts='versions')
version(pkg, ...): version(pkg, ...):
... ...
This directive allows you write: This directive allows you write:
.. code-block:: python
class Foo(Package): class Foo(Package):
version(...) version(...)
@ -392,8 +396,8 @@ def conflicts(conflict_spec, when=None, msg=None):
conflicts('%intel', when='+foo') conflicts('%intel', when='+foo')
Args: Args:
conflict_spec (Spec): constraint defining the known conflict conflict_spec (spack.spec.Spec): constraint defining the known conflict
when (Spec): optional constraint that triggers the conflict when (spack.spec.Spec): optional constraint that triggers the conflict
msg (str): optional user defined message msg (str): optional user defined message
""" """
def _execute_conflicts(pkg): def _execute_conflicts(pkg):
@ -413,11 +417,11 @@ def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply. """Creates a dict of deps with specs defining when they apply.
Args: Args:
spec (Spec or str): the package and constraints depended on spec (spack.spec.Spec or str): the package and constraints depended on
when (Spec or str): when the dependent satisfies this, it has when (spack.spec.Spec or str): when the dependent satisfies this, it has
the dependency represented by ``spec`` the dependency represented by ``spec``
type (str or tuple of str): str or tuple of legal Spack deptypes type (str or tuple): str or tuple of legal Spack deptypes
patches (obj or list): single result of ``patch()`` directive, a patches (typing.Callable or list): single result of ``patch()`` directive, a
``str`` to be passed to ``patch``, or a list of these ``str`` to be passed to ``patch``, or a list of these
This directive is to be used inside a Package definition to declare This directive is to be used inside a Package definition to declare
@ -495,7 +499,7 @@ def patch(url_or_filename, level=1, when=None, working_dir=".", **kwargs):
Args: Args:
url_or_filename (str): url or relative filename of the patch url_or_filename (str): url or relative filename of the patch
level (int): patch level (as in the patch shell command) level (int): patch level (as in the patch shell command)
when (Spec): optional anonymous spec that specifies when to apply when (spack.spec.Spec): optional anonymous spec that specifies when to apply
the patch the patch
working_dir (str): dir to change to before applying working_dir (str): dir to change to before applying
@ -559,12 +563,12 @@ def variant(
specified otherwise the default will be False for a boolean specified otherwise the default will be False for a boolean
variant and 'nothing' for a multi-valued variant variant and 'nothing' for a multi-valued variant
description (str): description of the purpose of the variant description (str): description of the purpose of the variant
values (tuple or callable): either a tuple of strings containing the values (tuple or typing.Callable): either a tuple of strings containing the
allowed values, or a callable accepting one value and returning allowed values, or a callable accepting one value and returning
True if it is valid True if it is valid
multi (bool): if False only one value per spec is allowed for multi (bool): if False only one value per spec is allowed for
this variant this variant
validator (callable): optional group validator to enforce additional validator (typing.Callable): optional group validator to enforce additional
logic. It receives the package name, the variant name and a tuple logic. It receives the package name, the variant name and a tuple
of values and should raise an instance of SpackError if the group of values and should raise an instance of SpackError if the group
doesn't meet the additional constraints doesn't meet the additional constraints

View File

@ -116,11 +116,12 @@ def activate(
use_env_repo (bool): use the packages exactly as they appear in the use_env_repo (bool): use the packages exactly as they appear in the
environment's repository environment's repository
add_view (bool): generate commands to add view to path variables add_view (bool): generate commands to add view to path variables
shell (string): One of `sh`, `csh`, `fish`. shell (str): One of `sh`, `csh`, `fish`.
prompt (string): string to add to the users prompt, or None prompt (str): string to add to the users prompt, or None
Returns: Returns:
cmds: Shell commands to activate environment. str: Shell commands to activate environment.
TODO: environment to use the activated spack environment. TODO: environment to use the activated spack environment.
""" """
global _active_environment global _active_environment
@ -198,10 +199,10 @@ def deactivate(shell='sh'):
"""Undo any configuration or repo settings modified by ``activate()``. """Undo any configuration or repo settings modified by ``activate()``.
Arguments: Arguments:
shell (string): One of `sh`, `csh`, `fish`. Shell style to use. shell (str): One of `sh`, `csh`, `fish`. Shell style to use.
Returns: Returns:
(string): shell commands for `shell` to undo environment variables str: shell commands for `shell` to undo environment variables
""" """
global _active_environment global _active_environment
@ -272,7 +273,7 @@ def find_environment(args):
If an environment is found, read it in. If not, return None. If an environment is found, read it in. If not, return None.
Arguments: Arguments:
args (Namespace): argparse namespace with command arguments args (argparse.Namespace): argparse namespace with command arguments
Returns: Returns:
(Environment): a found environment, or ``None`` (Environment): a found environment, or ``None``
@ -322,7 +323,7 @@ def get_env(args, cmd_name, required=False):
message that says the calling command *needs* an active environment. message that says the calling command *needs* an active environment.
Arguments: Arguments:
args (Namespace): argparse namespace with command arguments args (argparse.Namespace): argparse namespace with command arguments
cmd_name (str): name of calling command cmd_name (str): name of calling command
required (bool): if ``True``, raise an exception when no environment required (bool): if ``True``, raise an exception when no environment
is found; if ``False``, just return ``None`` is found; if ``False``, just return ``None``
@ -550,7 +551,7 @@ def view(self, new=None):
Raise if new is None and there is no current view Raise if new is None and there is no current view
Arguments: Arguments:
new (string or None): If a string, create a FilesystemView new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs. regenerate the view, and cannot be used to access specs.
""" """
@ -851,7 +852,7 @@ def clear(self, re_read=False):
"""Clear the contents of the environment """Clear the contents of the environment
Arguments: Arguments:
re_read (boolean): If True, do not clear ``new_specs`` nor re_read (bool): If True, do not clear ``new_specs`` nor
``new_installs`` values. These values cannot be read from ``new_installs`` values. These values cannot be read from
yaml, and need to be maintained when re-reading an existing yaml, and need to be maintained when re-reading an existing
environment. environment.
@ -1119,11 +1120,11 @@ def develop(self, spec, path, clone=False):
"""Add dev-build info for package """Add dev-build info for package
Args: Args:
spec (Spec): Set constraints on development specs. Must include a spec (spack.spec.Spec): Set constraints on development specs. Must include a
concrete version. concrete version.
path (string): Path to find code for developer builds. Relative path (str): Path to find code for developer builds. Relative
paths will be resolved relative to the environment. paths will be resolved relative to the environment.
clone (bool, default False): Clone the package code to the path. clone (bool): Clone the package code to the path.
If clone is False Spack will assume the code is already present If clone is False Spack will assume the code is already present
at ``path``. at ``path``.
@ -1552,7 +1553,7 @@ def install_all(self, args=None, **install_args):
that needs to be done separately with a call to write(). that needs to be done separately with a call to write().
Args: Args:
args (Namespace): argparse namespace with command arguments args (argparse.Namespace): argparse namespace with command arguments
install_args (dict): keyword install arguments install_args (dict): keyword install arguments
""" """
self.install_specs(None, args=args, **install_args) self.install_specs(None, args=args, **install_args)

View File

@ -1254,8 +1254,9 @@ def __init__(self, **kwargs):
@property @property
def hg(self): def hg(self):
""":returns: The hg executable """
:rtype: Executable Returns:
Executable: the hg executable
""" """
if not self._hg: if not self._hg:
self._hg = which('hg', required=True) self._hg = which('hg', required=True)
@ -1405,7 +1406,7 @@ def from_kwargs(**kwargs):
``version()`` directive in a package. ``version()`` directive in a package.
Returns: Returns:
fetch_strategy: The fetch strategy that matches the args, based typing.Callable: The fetch strategy that matches the args, based
on attribute names (e.g., ``git``, ``hg``, etc.) on attribute names (e.g., ``git``, ``hg``, etc.)
Raises: Raises:

View File

@ -28,7 +28,7 @@ def get_escaped_text_output(filename):
filename (str): path to the file filename (str): path to the file
Returns: Returns:
(list of str): escaped text lines read from the file list: escaped text lines read from the file
""" """
with open(filename, 'r') as f: with open(filename, 'r') as f:
# Ensure special characters are escaped as needed # Ensure special characters are escaped as needed

View File

@ -93,7 +93,7 @@ def _check_last_phase(pkg):
package already. package already.
Args: Args:
pkg (PackageBase): the package being installed pkg (spack.package.PackageBase): the package being installed
Raises: Raises:
``BadInstallPhase`` if stop_before or last phase is invalid ``BadInstallPhase`` if stop_before or last phase is invalid
@ -115,10 +115,11 @@ def _handle_external_and_upstream(pkg, explicit):
database if it is external package. database if it is external package.
Args: Args:
pkg (Package): the package whose installation is under consideration pkg (spack.package.Package): the package whose installation is under
consideration
explicit (bool): the package was explicitly requested by the user explicit (bool): the package was explicitly requested by the user
Return: Return:
(bool): ``True`` if the package is external or upstream (so not to bool: ``True`` if the package is external or upstream (so not to
be installed locally), otherwise, ``False`` be installed locally), otherwise, ``False``
""" """
# For external packages the workflow is simplified, and basically # For external packages the workflow is simplified, and basically
@ -148,7 +149,7 @@ def _do_fake_install(pkg):
and libraries. and libraries.
Args: Args:
pkg (PackageBase): the package whose installation is to be faked pkg (spack.package.PackageBase): the package whose installation is to be faked
""" """
command = pkg.name command = pkg.name
@ -194,15 +195,14 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
compiler (CompilerSpec): the compiler to bootstrap compiler (CompilerSpec): the compiler to bootstrap
architecture (ArchSpec): the architecture for which to bootstrap the architecture (ArchSpec): the architecture for which to bootstrap the
compiler compiler
pkgs (list of PackageBase): the packages that may need their compiler pkgs (list): the packages that may need their compiler
installed installed
Return: Return:
(list) list of tuples, (PackageBase, bool), for concretized compiler- list: list of tuples, (PackageBase, bool), for concretized compiler-related
-related packages that need to be installed and bool values packages that need to be installed and bool values specify whether the
specify whether the package is the bootstrap compiler package is the bootstrap compiler (``True``) or one of its dependencies
(``True``) or one of its dependencies (``False``). The list (``False``). The list will be empty if there are no compilers.
will be empty if there are no compilers.
""" """
tty.debug('Bootstrapping {0} compiler'.format(compiler)) tty.debug('Bootstrapping {0} compiler'.format(compiler))
compilers = spack.compilers.compilers_for_spec( compilers = spack.compilers.compilers_for_spec(
@ -260,7 +260,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
Extract the package from binary cache Extract the package from binary cache
Args: Args:
pkg (PackageBase): the package to install from the binary cache pkg (spack.package.PackageBase): the package to install from the binary cache
cache_only (bool): only extract from binary cache cache_only (bool): only extract from binary cache
explicit (bool): ``True`` if installing the package was explicitly explicit (bool): ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False`` requested by the user, otherwise, ``False``
@ -268,7 +268,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
otherwise, ``False`` otherwise, ``False``
Return: Return:
(bool) ``True`` if the package was extract from binary cache, bool: ``True`` if the package was extract from binary cache,
``False`` otherwise ``False`` otherwise
""" """
installed_from_cache = _try_install_from_binary_cache( installed_from_cache = _try_install_from_binary_cache(
@ -350,8 +350,8 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
Process the binary cache tarball. Process the binary cache tarball.
Args: Args:
pkg (PackageBase): the package being installed pkg (spack.package.PackageBase): the package being installed
binary_spec (Spec): the spec whose cache has been confirmed binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
explicit (bool): the package was explicitly requested by the user explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked, unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False`` otherwise, ``False``
@ -359,7 +359,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
attempting to download the tarball attempting to download the tarball
Return: Return:
(bool) ``True`` if the package was extracted from binary cache, bool: ``True`` if the package was extracted from binary cache,
else ``False`` else ``False``
""" """
tarball = binary_distribution.download_tarball( tarball = binary_distribution.download_tarball(
@ -385,7 +385,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
Try to extract the package from binary cache. Try to extract the package from binary cache.
Args: Args:
pkg (PackageBase): the package to be extracted from binary cache pkg (spack.package.PackageBase): the package to be extracted from binary cache
explicit (bool): the package was explicitly requested by the user explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked, unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False`` otherwise, ``False``
@ -423,7 +423,7 @@ def combine_phase_logs(phase_log_files, log_path):
Args: Args:
phase_log_files (list): a list or iterator of logs to combine phase_log_files (list): a list or iterator of logs to combine
log_path (path): the path to combine them to log_path (str): the path to combine them to
""" """
with open(log_path, 'w') as log_file: with open(log_path, 'w') as log_file:
@ -441,7 +441,7 @@ def dump_packages(spec, path):
node in the DAG. node in the DAG.
Args: Args:
spec (Spec): the Spack spec whose package information is to be dumped spec (spack.spec.Spec): the Spack spec whose package information is to be dumped
path (str): the path to the build packages directory path (str): the path to the build packages directory
""" """
fs.mkdirp(path) fs.mkdirp(path)
@ -498,10 +498,10 @@ def get_dependent_ids(spec):
Return a list of package ids for the spec's dependents Return a list of package ids for the spec's dependents
Args: Args:
spec (Spec): Concretized spec spec (spack.spec.Spec): Concretized spec
Returns: Returns:
(list of str): list of package ids list: list of package ids
""" """
return [package_id(d.package) for d in spec.dependents()] return [package_id(d.package) for d in spec.dependents()]
@ -512,10 +512,10 @@ def install_msg(name, pid):
Args: Args:
name (str): Name/id of the package being installed name (str): Name/id of the package being installed
pid (id): id of the installer process pid (int): id of the installer process
Return: Return:
(str) Colorized installing message str: Colorized installing message
""" """
pre = '{0}: '.format(pid) if tty.show_pid() else '' pre = '{0}: '.format(pid) if tty.show_pid() else ''
return pre + colorize('@*{Installing} @*g{%s}' % name) return pre + colorize('@*{Installing} @*g{%s}' % name)
@ -526,7 +526,7 @@ def log(pkg):
Copy provenance into the install directory on success Copy provenance into the install directory on success
Args: Args:
pkg (Package): the package that was built and installed pkg (spack.package.Package): the package that was built and installed
""" """
packages_dir = spack.store.layout.build_packages_path(pkg.spec) packages_dir = spack.store.layout.build_packages_path(pkg.spec)
@ -608,7 +608,8 @@ def package_id(pkg):
and packages for combinatorial environments. and packages for combinatorial environments.
Args: Args:
pkg (PackageBase): the package from which the identifier is derived pkg (spack.package.PackageBase): the package from which the identifier is
derived
""" """
if not pkg.spec.concrete: if not pkg.spec.concrete:
raise ValueError("Cannot provide a unique, readable id when " raise ValueError("Cannot provide a unique, readable id when "
@ -631,11 +632,11 @@ def __init__(self, installs=[]):
""" Initialize the installer. """ Initialize the installer.
Args: Args:
installs (list of (pkg, install_args)): list of tuples, where each installs (list): list of tuples, where each
tuple consists of a package (PackageBase) and its associated tuple consists of a package (PackageBase) and its associated
install arguments (dict) install arguments (dict)
Return: Return:
(PackageInstaller) instance PackageInstaller: instance
""" """
# List of build requests # List of build requests
self.build_requests = [BuildRequest(pkg, install_args) self.build_requests = [BuildRequest(pkg, install_args)
@ -691,7 +692,8 @@ def _add_bootstrap_compilers(
Args: Args:
compiler: the compiler to bootstrap compiler: the compiler to bootstrap
architecture: the architecture for which to bootstrap the compiler architecture: the architecture for which to bootstrap the compiler
pkgs (PackageBase): the package with possible compiler dependencies pkgs (spack.package.PackageBase): the package with possible compiler
dependencies
request (BuildRequest): the associated install request request (BuildRequest): the associated install request
all_deps (defaultdict(set)): dictionary of all dependencies and all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents associated dependents
@ -707,7 +709,7 @@ def _add_init_task(self, pkg, request, is_compiler, all_deps):
Creates and queues the initial build task for the package. Creates and queues the initial build task for the package.
Args: Args:
pkg (Package): the package to be built and installed pkg (spack.package.Package): the package to be built and installed
request (BuildRequest or None): the associated install request request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was where ``None`` can be used to indicate the package was
explicitly requested by the user explicitly requested by the user
@ -726,7 +728,7 @@ def _check_db(self, spec):
"""Determine if the spec is flagged as installed in the database """Determine if the spec is flagged as installed in the database
Args: Args:
spec (Spec): spec whose database install status is being checked spec (spack.spec.Spec): spec whose database install status is being checked
Return: Return:
(rec, installed_in_db) tuple where rec is the database record, or (rec, installed_in_db) tuple where rec is the database record, or
@ -887,7 +889,7 @@ def _cleanup_task(self, pkg):
Cleanup the build task for the spec Cleanup the build task for the spec
Args: Args:
pkg (PackageBase): the package being installed pkg (spack.package.PackageBase): the package being installed
""" """
self._remove_task(package_id(pkg)) self._remove_task(package_id(pkg))
@ -901,7 +903,7 @@ def _ensure_install_ready(self, pkg):
already locked. already locked.
Args: Args:
pkg (PackageBase): the package being locally installed pkg (spack.package.PackageBase): the package being locally installed
""" """
pkg_id = package_id(pkg) pkg_id = package_id(pkg)
pre = "{0} cannot be installed locally:".format(pkg_id) pre = "{0} cannot be installed locally:".format(pkg_id)
@ -933,7 +935,7 @@ def _ensure_locked(self, lock_type, pkg):
Args: Args:
lock_type (str): 'read' for a read lock, 'write' for a write lock lock_type (str): 'read' for a read lock, 'write' for a write lock
pkg (PackageBase): the package whose spec is being installed pkg (spack.package.PackageBase): the package whose spec is being installed
Return: Return:
(lock_type, lock) tuple where lock will be None if it could not (lock_type, lock) tuple where lock will be None if it could not
@ -1294,7 +1296,7 @@ def _setup_install_dir(self, pkg):
Write a small metadata file with the current spack environment. Write a small metadata file with the current spack environment.
Args: Args:
pkg (Package): the package to be built and installed pkg (spack.package.Package): the package to be built and installed
""" """
if not os.path.exists(pkg.spec.prefix): if not os.path.exists(pkg.spec.prefix):
tty.verbose('Creating the installation directory {0}' tty.verbose('Creating the installation directory {0}'
@ -1369,9 +1371,9 @@ def _flag_installed(self, pkg, dependent_ids=None):
known dependents. known dependents.
Args: Args:
pkg (Package): Package that has been installed locally, externally pkg (spack.package.Package): Package that has been installed locally,
or upstream externally or upstream
dependent_ids (list of str or None): list of the package's dependent_ids (list or None): list of the package's
dependent ids, or None if the dependent ids are limited to dependent ids, or None if the dependent ids are limited to
those maintained in the package (dependency DAG) those maintained in the package (dependency DAG)
""" """
@ -1422,7 +1424,7 @@ def install(self):
Install the requested package(s) and or associated dependencies. Install the requested package(s) and or associated dependencies.
Args: Args:
pkg (Package): the package to be built and installed""" pkg (spack.package.Package): the package to be built and installed"""
self._init_queue() self._init_queue()
fail_fast_err = 'Terminating after first install failure' fail_fast_err = 'Terminating after first install failure'
@ -1833,7 +1835,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
Instantiate a build task for a package. Instantiate a build task for a package.
Args: Args:
pkg (Package): the package to be built and installed pkg (spack.package.Package): the package to be built and installed
request (BuildRequest or None): the associated install request request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was where ``None`` can be used to indicate the package was
explicitly requested by the user explicitly requested by the user
@ -1841,7 +1843,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
start (int): the initial start time for the package, in seconds start (int): the initial start time for the package, in seconds
attempts (int): the number of attempts to install the package attempts (int): the number of attempts to install the package
status (str): the installation status status (str): the installation status
installed (list of str): the identifiers of packages that have installed (list): the identifiers of packages that have
been installed so far been installed so far
""" """
@ -1983,7 +1985,7 @@ def flag_installed(self, installed):
Ensure the dependency is not considered to still be uninstalled. Ensure the dependency is not considered to still be uninstalled.
Args: Args:
installed (list of str): the identifiers of packages that have installed (list): the identifiers of packages that have
been installed so far been installed so far
""" """
now_installed = self.uninstalled_deps & set(installed) now_installed = self.uninstalled_deps & set(installed)
@ -2024,7 +2026,7 @@ def __init__(self, pkg, install_args):
Instantiate a build request for a package. Instantiate a build request for a package.
Args: Args:
pkg (Package): the package to be built and installed pkg (spack.package.Package): the package to be built and installed
install_args (dict): the install arguments associated with ``pkg`` install_args (dict): the install arguments associated with ``pkg``
""" """
# Ensure dealing with a package that has a concrete spec # Ensure dealing with a package that has a concrete spec
@ -2099,10 +2101,11 @@ def get_deptypes(self, pkg):
"""Determine the required dependency types for the associated package. """Determine the required dependency types for the associated package.
Args: Args:
pkg (PackageBase): explicit or implicit package being installed pkg (spack.package.PackageBase): explicit or implicit package being
installed
Returns: Returns:
(tuple) required dependency type(s) for the package tuple: required dependency type(s) for the package
""" """
deptypes = ['link', 'run'] deptypes = ['link', 'run']
include_build_deps = self.install_args.get('include_build_deps') include_build_deps = self.install_args.get('include_build_deps')
@ -2121,10 +2124,11 @@ def run_tests(self, pkg):
"""Determine if the tests should be run for the provided packages """Determine if the tests should be run for the provided packages
Args: Args:
pkg (PackageBase): explicit or implicit package being installed pkg (spack.package.PackageBase): explicit or implicit package being
installed
Returns: Returns:
(bool) ``True`` if they should be run; ``False`` otherwise bool: ``True`` if they should be run; ``False`` otherwise
""" """
tests = self.install_args.get('tests', False) tests = self.install_args.get('tests', False)
return tests is True or (tests and pkg.name in tests) return tests is True or (tests and pkg.name in tests)

View File

@ -530,7 +530,7 @@ def __call__(self, *argv, **kwargs):
"""Invoke this SpackCommand. """Invoke this SpackCommand.
Args: Args:
argv (list of str): command line arguments. argv (list): command line arguments.
Keyword Args: Keyword Args:
fail_on_error (optional bool): Don't raise an exception on error fail_on_error (optional bool): Don't raise an exception on error
@ -625,7 +625,7 @@ def print_setup_info(*info):
"""Print basic information needed by setup-env.[c]sh. """Print basic information needed by setup-env.[c]sh.
Args: Args:
info (list of str): list of things to print: comma-separated list info (list): list of things to print: comma-separated list
of 'csh', 'sh', or 'modules' of 'csh', 'sh', or 'modules'
This is in ``main.py`` to make it fast; the setup scripts need to This is in ``main.py`` to make it fast; the setup scripts need to
@ -689,7 +689,7 @@ def main(argv=None):
"""This is the entry point for the Spack command. """This is the entry point for the Spack command.
Args: Args:
argv (list of str or None): command line arguments, NOT including argv (list or None): command line arguments, NOT including
the executable name. If None, parses from sys.argv. the executable name. If None, parses from sys.argv.
""" """
# Create a parser with a simple positional argument first. We'll # Create a parser with a simple positional argument first. We'll

View File

@ -19,7 +19,8 @@
import llnl.util.filesystem import llnl.util.filesystem
__all__ = [ __all__ = [
'filter_compiler_wrappers' 'filter_compiler_wrappers',
'PackageMixinsMeta',
] ]

View File

@ -30,7 +30,7 @@ def configuration(module_set_name):
return config return config
#: Caches the configuration {spec_hash: configuration} # Caches the configuration {spec_hash: configuration}
configuration_registry = {} # type: Dict[str, Any] configuration_registry = {} # type: Dict[str, Any]

View File

@ -29,7 +29,7 @@ def configuration(module_set_name):
return config return config
#: Caches the configuration {spec_hash: configuration} # Caches the configuration {spec_hash: configuration}
configuration_registry = {} # type: Dict[str, Any] configuration_registry = {} # type: Dict[str, Any]

View File

@ -465,7 +465,7 @@ def test_log_pathname(test_stage, spec):
Args: Args:
test_stage (str): path to the test stage directory test_stage (str): path to the test stage directory
spec (Spec): instance of the spec under test spec (spack.spec.Spec): instance of the spec under test
Returns: Returns:
(str): the pathname of the test log file (str): the pathname of the test log file
@ -725,14 +725,14 @@ def possible_dependencies(
"""Return dict of possible dependencies of this package. """Return dict of possible dependencies of this package.
Args: Args:
transitive (bool, optional): return all transitive dependencies if transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True).. True, only direct dependencies if False (default True)..
expand_virtuals (bool, optional): expand virtual dependencies into expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True) all possible implementations (default True)
deptype (str or tuple, optional): dependency types to consider deptype (str or tuple or None): dependency types to consider
visited (dict, optional): dict of names of dependencies visited so visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names. far, mapped to their immediate dependencies' names.
missing (dict, optional): dict to populate with packages and their missing (dict or None): dict to populate with packages and their
*missing* dependencies. *missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far. virtuals (set): if provided, populate with virtuals seen so far.
@ -1756,7 +1756,7 @@ def cache_extra_test_sources(self, srcs):
during install testing. during install testing.
Args: Args:
srcs (str or list of str): relative path for files and or srcs (str or list): relative path for files and or
subdirectories located in the staged source path that are to subdirectories located in the staged source path that are to
be copied to the corresponding location(s) under the install be copied to the corresponding location(s) under the install
testing directory. testing directory.
@ -1803,10 +1803,10 @@ def run_test(self, exe, options=[], expected=[], status=0,
Args: Args:
exe (str): the name of the executable exe (str): the name of the executable
options (str or list of str): list of options to pass to the runner options (str or list): list of options to pass to the runner
expected (str or list of str): list of expected output strings. expected (str or list): list of expected output strings.
Each string is a regex expected to match part of the output. Each string is a regex expected to match part of the output.
status (int or list of int): possible passing status values status (int or list): possible passing status values
with 0 meaning the test is expected to succeed with 0 meaning the test is expected to succeed
installed (bool): if ``True``, the executable must be in the installed (bool): if ``True``, the executable must be in the
install prefix install prefix
@ -2010,9 +2010,9 @@ def setup_build_environment(self, env):
Spack's store. Spack's store.
Args: Args:
env (EnvironmentModifications): environment modifications to be env (spack.util.environment.EnvironmentModifications): environment
applied when the package is built. Package authors can call modifications to be applied when the package is built. Package authors
methods on it to alter the build environment. can call methods on it to alter the build environment.
""" """
legacy_fn = self._get_legacy_environment_method('setup_environment') legacy_fn = self._get_legacy_environment_method('setup_environment')
if legacy_fn: if legacy_fn:
@ -2023,9 +2023,9 @@ def setup_run_environment(self, env):
"""Sets up the run environment for a package. """Sets up the run environment for a package.
Args: Args:
env (EnvironmentModifications): environment modifications to be env (spack.util.environment.EnvironmentModifications): environment
applied when the package is run. Package authors can call modifications to be applied when the package is run. Package authors
methods on it to alter the run environment. can call methods on it to alter the run environment.
""" """
legacy_fn = self._get_legacy_environment_method('setup_environment') legacy_fn = self._get_legacy_environment_method('setup_environment')
if legacy_fn: if legacy_fn:
@ -2052,11 +2052,11 @@ def setup_dependent_build_environment(self, env, dependent_spec):
variable. variable.
Args: Args:
env (EnvironmentModifications): environment modifications to be env (spack.util.environment.EnvironmentModifications): environment
applied when the dependent package is built. Package authors modifications to be applied when the dependent package is built.
can call methods on it to alter the build environment. Package authors can call methods on it to alter the build environment.
dependent_spec (Spec): the spec of the dependent package dependent_spec (spack.spec.Spec): the spec of the dependent package
about to be built. This allows the extendee (self) to query about to be built. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is the dependent's state. Note that *this* package's spec is
available as ``self.spec`` available as ``self.spec``
@ -2079,11 +2079,11 @@ def setup_dependent_run_environment(self, env, dependent_spec):
for dependencies. for dependencies.
Args: Args:
env (EnvironmentModifications): environment modifications to be env (spack.util.environment.EnvironmentModifications): environment
applied when the dependent package is run. Package authors modifications to be applied when the dependent package is run.
can call methods on it to alter the build environment. Package authors can call methods on it to alter the build environment.
dependent_spec (Spec): The spec of the dependent package dependent_spec (spack.spec.Spec): The spec of the dependent package
about to be run. This allows the extendee (self) to query about to be run. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is the dependent's state. Note that *this* package's spec is
available as ``self.spec`` available as ``self.spec``
@ -2125,7 +2125,7 @@ def setup_dependent_package(self, module, dependent_spec):
object of the dependent package. Packages can use this to set object of the dependent package. Packages can use this to set
module-scope variables for the dependent to use. module-scope variables for the dependent to use.
dependent_spec (Spec): The spec of the dependent package dependent_spec (spack.spec.Spec): The spec of the dependent package
about to be built. This allows the extendee (self) to about to be built. This allows the extendee (self) to
query the dependent's state. Note that *this* query the dependent's state. Note that *this*
package's spec is available as ``self.spec``. package's spec is available as ``self.spec``.

View File

@ -28,7 +28,7 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
Args: Args:
stage (spack.stage.Stage): stage with code that will be patched stage (spack.stage.Stage): stage with code that will be patched
patch_path (str): filesystem location for the patch to apply patch_path (str): filesystem location for the patch to apply
level (int, optional): patch level (default 1) level (int or None): patch level (default 1)
working_dir (str): relative path *within* the stage to change to working_dir (str): relative path *within* the stage to change to
(default '.') (default '.')
""" """

View File

@ -869,7 +869,7 @@ def is_relocatable(spec):
"""Returns True if an installed spec is relocatable. """Returns True if an installed spec is relocatable.
Args: Args:
spec (Spec): spec to be analyzed spec (spack.spec.Spec): spec to be analyzed
Returns: Returns:
True if the binaries of an installed spec True if the binaries of an installed spec

View File

@ -679,14 +679,14 @@ def condition(self, required_spec, imposed_spec=None, name=None):
"""Generate facts for a dependency or virtual provider condition. """Generate facts for a dependency or virtual provider condition.
Arguments: Arguments:
required_spec (Spec): the spec that triggers this condition required_spec (spack.spec.Spec): the spec that triggers this condition
imposed_spec (optional, Spec): the sepc with constraints that imposed_spec (spack.spec.Spec or None): the sepc with constraints that
are imposed when this condition is triggered are imposed when this condition is triggered
name (optional, str): name for `required_spec` (required if name (str or None): name for `required_spec` (required if
required_spec is anonymous, ignored if not) required_spec is anonymous, ignored if not)
Returns: Returns:
(int): id of the condition created by this function int: id of the condition created by this function
""" """
named_cond = required_spec.copy() named_cond = required_spec.copy()
named_cond.name = named_cond.name or name named_cond.name = named_cond.name or name
@ -922,7 +922,7 @@ def spec_clauses(self, spec, body=False, transitive=True):
"""Return a list of clauses for a spec mandates are true. """Return a list of clauses for a spec mandates are true.
Arguments: Arguments:
spec (Spec): the spec to analyze spec (spack.spec.Spec): the spec to analyze
body (bool): if True, generate clauses to be used in rule bodies body (bool): if True, generate clauses to be used in rule bodies
(final values) instead of rule heads (setters). (final values) instead of rule heads (setters).
transitive (bool): if False, don't generate clauses from transitive (bool): if False, don't generate clauses from

View File

@ -122,7 +122,9 @@
__all__ = [ __all__ = [
'CompilerSpec',
'Spec', 'Spec',
'SpecParser',
'parse', 'parse',
'SpecParseError', 'SpecParseError',
'DuplicateDependencyError', 'DuplicateDependencyError',
@ -143,7 +145,9 @@
'AmbiguousHashError', 'AmbiguousHashError',
'InvalidHashError', 'InvalidHashError',
'NoSuchHashError', 'NoSuchHashError',
'RedundantSpecError'] 'RedundantSpecError',
'SpecDeprecatedError',
]
#: Valid pattern for an identifier in Spack #: Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*' identifier_re = r'\w[\w-]*'
@ -1495,7 +1499,7 @@ def _spec_hash(self, hash):
"""Utility method for computing different types of Spec hashes. """Utility method for computing different types of Spec hashes.
Arguments: Arguments:
hash (SpecHashDescriptor): type of hash to generate. hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
""" """
# TODO: curently we strip build dependencies by default. Rethink # TODO: curently we strip build dependencies by default. Rethink
# this when we move to using package hashing on all specs. # this when we move to using package hashing on all specs.
@ -1513,7 +1517,7 @@ def _cached_hash(self, hash, length=None):
in the supplied attribute on this spec. in the supplied attribute on this spec.
Arguments: Arguments:
hash (SpecHashDescriptor): type of hash to generate. hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
""" """
if not hash.attr: if not hash.attr:
return self._spec_hash(hash)[:length] return self._spec_hash(hash)[:length]
@ -1615,7 +1619,7 @@ def to_node_dict(self, hash=ht.dag_hash):
hashes). hashes).
Arguments: Arguments:
hash (SpecHashDescriptor) type of hash to generate. hash (spack.hash_types.SpecHashDescriptor) type of hash to generate.
""" """
d = syaml.syaml_dict() d = syaml.syaml_dict()
@ -2987,7 +2991,7 @@ def ensure_valid_variants(spec):
spec (Spec): spec to be analyzed spec (Spec): spec to be analyzed
Raises: Raises:
UnknownVariantError: on the first unknown variant found spack.variant.UnknownVariantError: on the first unknown variant found
""" """
pkg_cls = spec.package_class pkg_cls = spec.package_class
pkg_variants = pkg_cls.variants pkg_variants = pkg_cls.variants
@ -4437,6 +4441,7 @@ def __init__(self):
class SpecParser(spack.parse.Parser): class SpecParser(spack.parse.Parser):
"""Parses specs."""
def __init__(self, initial_spec=None): def __init__(self, initial_spec=None):
"""Construct a new SpecParser. """Construct a new SpecParser.

View File

@ -565,8 +565,9 @@ def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached """Perform a fetch if the resource is not already cached
Arguments: Arguments:
mirror (MirrorCache): the mirror to cache this Stage's resource in mirror (spack.caches.MirrorCache): the mirror to cache this Stage's
stats (MirrorStats): this is updated depending on whether the resource in
stats (spack.mirror.MirrorStats): this is updated depending on whether the
caching operation succeeded or failed caching operation succeeded or failed
""" """
if isinstance(self.default_fetcher, fs.BundleFetchStrategy): if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
@ -835,7 +836,7 @@ def get_checksums_for_versions(
Args: Args:
url_dict (dict): A dictionary of the form: version -> URL url_dict (dict): A dictionary of the form: version -> URL
name (str): The name of the package name (str): The name of the package
first_stage_function (callable): function that takes a Stage and a URL; first_stage_function (typing.Callable): function that takes a Stage and a URL;
this is run on the stage of the first URL downloaded this is run on the stage of the first URL downloaded
keep_stage (bool): whether to keep staging area when command completes keep_stage (bool): whether to keep staging area when command completes
batch (bool): whether to ask user how many versions to fetch (false) batch (bool): whether to ask user how many versions to fetch (false)

View File

@ -65,7 +65,7 @@ def create_build_task(pkg, install_args={}):
Create a built task for the given (concretized) package Create a built task for the given (concretized) package
Args: Args:
pkg (PackageBase): concretized package associated with the task pkg (spack.package.PackageBase): concretized package associated with the task
install_args (dict): dictionary of kwargs (or install args) install_args (dict): dictionary of kwargs (or install args)
Return: Return:
@ -80,10 +80,10 @@ def create_installer(installer_args):
Create an installer using the concretized spec for each arg Create an installer using the concretized spec for each arg
Args: Args:
installer_args (list of tuples): the list of (spec name, kwargs) tuples installer_args (list): the list of (spec name, kwargs) tuples
Return: Return:
installer (PackageInstaller): the associated package installer spack.installer.PackageInstaller: the associated package installer
""" """
const_arg = [(spec.package, kwargs) for spec, kwargs in installer_args] const_arg = [(spec.package, kwargs) for spec, kwargs in installer_args]
return inst.PackageInstaller(const_arg) return inst.PackageInstaller(const_arg)
@ -93,11 +93,11 @@ def installer_args(spec_names, kwargs={}):
"""Return a the installer argument with each spec paired with kwargs """Return a the installer argument with each spec paired with kwargs
Args: Args:
spec_names (list of str): list of spec names spec_names (list): list of spec names
kwargs (dict or None): install arguments to apply to all of the specs kwargs (dict or None): install arguments to apply to all of the specs
Returns: Returns:
list of (spec, kwargs): the installer constructor argument list: list of (spec, kwargs), the installer constructor argument
""" """
arg = [] arg = []
for name in spec_names: for name in spec_names:

View File

@ -417,11 +417,11 @@ def parse_version_offset(path):
path (str): The filename or URL for the package path (str): The filename or URL for the package
Returns: Returns:
tuple of (Version, int, int, int, str): A tuple containing: tuple: A tuple containing:
version of the package, version of the package,
first index of version, first index of version,
length of version string, length of version string,
the index of the matching regex the index of the matching regex,
the matching regex the matching regex
Raises: Raises:
@ -632,11 +632,11 @@ def parse_name_offset(path, v=None):
v (str): The version of the package v (str): The version of the package
Returns: Returns:
tuple of (str, int, int, int, str): A tuple containing: tuple: A tuple containing:
name of the package, name of the package,
first index of name, first index of name,
length of name, length of name,
the index of the matching regex the index of the matching regex,
the matching regex the matching regex
Raises: Raises:
@ -774,9 +774,7 @@ def parse_name_and_version(path):
path (str): The filename or URL for the package path (str): The filename or URL for the package
Returns: Returns:
tuple of (str, Version)A tuple containing: tuple: a tuple containing the package (name, version)
The name of the package
The version of the package
Raises: Raises:
UndetectableVersionError: If the URL does not match any regexes UndetectableVersionError: If the URL does not match any regexes

View File

@ -18,7 +18,7 @@ def prefix_inspections(platform):
"""Get list of prefix inspections for platform """Get list of prefix inspections for platform
Arguments: Arguments:
platform (string): the name of the platform to consider. The platform platform (str): the name of the platform to consider. The platform
determines what environment variables Spack will use for some determines what environment variables Spack will use for some
inspections. inspections.

View File

@ -64,7 +64,7 @@ def editor(*args, **kwargs):
searching the full list above, we'll raise an error. searching the full list above, we'll raise an error.
Arguments: Arguments:
args (list of str): args to pass to editor args (list): args to pass to editor
Optional Arguments: Optional Arguments:
_exec_func (function): invoke this function instead of ``os.execv()`` _exec_func (function): invoke this function instead of ``os.execv()``

View File

@ -623,7 +623,7 @@ def from_sourcing_file(filename, *arguments, **kwargs):
Args: Args:
filename (str): the file to be sourced filename (str): the file to be sourced
*arguments (list of str): arguments to pass on the command line *arguments (list): arguments to pass on the command line
Keyword Args: Keyword Args:
shell (str): the shell to use (default: ``bash``) shell (str): the shell to use (default: ``bash``)
@ -867,7 +867,7 @@ def inspect_path(root, inspections, exclude=None):
modifications are not performed immediately, but stored in a modifications are not performed immediately, but stored in a
command object that is returned to client command object that is returned to client
exclude (callable): optional callable. If present it must accept an exclude (typing.Callable): optional callable. If present it must accept an
absolute path and return True if it should be excluded from the absolute path and return True if it should be excluded from the
inspection inspection
@ -920,7 +920,7 @@ def preserve_environment(*variables):
explicitly unset on exit. explicitly unset on exit.
Args: Args:
variables (list of str): list of environment variables to be preserved variables (list): list of environment variables to be preserved
""" """
cache = {} cache = {}
for var in variables: for var in variables:
@ -1031,9 +1031,9 @@ def sanitize(environment, blacklist, whitelist):
Args: Args:
environment (dict): input dictionary environment (dict): input dictionary
blacklist (list of str): literals or regex patterns to be blacklist (list): literals or regex patterns to be
blacklisted blacklisted
whitelist (list of str): literals or regex patterns to be whitelist (list): literals or regex patterns to be
whitelisted whitelisted
""" """

View File

@ -297,7 +297,7 @@ def which(*args, **kwargs):
*args (str): One or more executables to search for *args (str): One or more executables to search for
Keyword Arguments: Keyword Arguments:
path (:func:`list` or str): The path to search. Defaults to ``PATH`` path (list or str): The path to search. Defaults to ``PATH``
required (bool): If set to True, raise an error if executable not found required (bool): If set to True, raise an error if executable not found
Returns: Returns:

View File

@ -28,11 +28,11 @@ def load_source(full_name, path, prepend=None):
Args: Args:
full_name (str): full name of the module to be loaded full_name (str): full name of the module to be loaded
path (str): path to the file that should be loaded path (str): path to the file that should be loaded
prepend (str, optional): some optional code to prepend to the prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements loaded module; e.g., can be used to inject import statements
Returns: Returns:
(ModuleType): the loaded module the loaded module
""" """
with import_lock(): with import_lock():
if prepend is None: if prepend is None:

View File

@ -37,11 +37,11 @@ def load_source(full_name, path, prepend=None):
Args: Args:
full_name (str): full name of the module to be loaded full_name (str): full name of the module to be loaded
path (str): path to the file that should be loaded path (str): path to the file that should be loaded
prepend (str, optional): some optional code to prepend to the prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements loaded module; e.g., can be used to inject import statements
Returns: Returns:
(ModuleType): the loaded module the loaded module
""" """
# use our custom loader # use our custom loader
loader = PrependFileLoader(full_name, path, prepend) loader = PrependFileLoader(full_name, path, prepend)

View File

@ -20,7 +20,7 @@ def parse_log_events(stream, context=6, jobs=None, profile=False):
"""Extract interesting events from a log file as a list of LogEvent. """Extract interesting events from a log file as a list of LogEvent.
Args: Args:
stream (str or fileobject): build log name or file object stream (str or typing.IO): build log name or file object
context (int): lines of context to extract around each log event context (int): lines of context to extract around each log event
jobs (int): number of jobs to parse with; default ncpus jobs (int): number of jobs to parse with; default ncpus
profile (bool): print out profile information for parsing profile (bool): print out profile information for parsing
@ -60,7 +60,7 @@ def make_log_context(log_events, width=None):
"""Get error context from a log file. """Get error context from a log file.
Args: Args:
log_events (list of LogEvent): list of events created by log_events (list): list of events created by
``ctest_log_parser.parse()`` ``ctest_log_parser.parse()``
width (int or None): wrap width; ``0`` for no limit; ``None`` to width (int or None): wrap width; ``0`` for no limit; ``None`` to
auto-size for terminal auto-size for terminal

View File

@ -49,10 +49,10 @@ def is_directive(self, node):
callbacks are sometimes represented). callbacks are sometimes represented).
Args: Args:
node (AST): the AST node being checked node (ast.AST): the AST node being checked
Returns: Returns:
(bool): ``True`` if the node represents a known directive, bool: ``True`` if the node represents a known directive,
``False`` otherwise ``False`` otherwise
""" """
return (isinstance(node, ast.Expr) and return (isinstance(node, ast.Expr) and

View File

@ -41,7 +41,7 @@ def composite(interface=None, method_list=None, container=list):
interface (type): class exposing the interface to which the interface (type): class exposing the interface to which the
composite object must conform. Only non-private and composite object must conform. Only non-private and
non-special methods will be taken into account non-special methods will be taken into account
method_list (list of str): names of methods that should be part method_list (list): names of methods that should be part
of the composite of the composite
container (MutableSequence): container for the composite object container (MutableSequence): container for the composite object
(default = list). Must fulfill the MutableSequence (default = list). Must fulfill the MutableSequence

View File

@ -41,7 +41,7 @@ def plural(n, singular, plural=None, show_n=True):
Arguments: Arguments:
n (int): number of things there are n (int): number of things there are
singular (str): singular form of word singular (str): singular form of word
plural (str, optional): optional plural form, for when it's not just plural (str or None): optional plural form, for when it's not just
singular + 's' singular + 's'
show_n (bool): whether to include n in the result string (default True) show_n (bool): whether to include n in the result string (default True)

View File

@ -367,7 +367,7 @@ def spider(root_urls, depth=0, concurrency=32):
up to <depth> levels of links from each root. up to <depth> levels of links from each root.
Args: Args:
root_urls (str or list of str): root urls used as a starting point root_urls (str or list): root urls used as a starting point
for spidering for spidering
depth (int): level of recursion into links depth (int): level of recursion into links
concurrency (int): number of simultaneous requests that can be sent concurrency (int): number of simultaneous requests that can be sent

View File

@ -94,8 +94,8 @@ def validate_or_raise(self, vspec, pkg=None):
exception if any error is found. exception if any error is found.
Args: Args:
vspec (VariantSpec): instance to be validated vspec (Variant): instance to be validated
pkg (Package): the package that required the validation, pkg (spack.package.Package): the package that required the validation,
if available if available
Raises: Raises:
@ -254,7 +254,7 @@ def value(self):
the variant. the variant.
Returns: Returns:
tuple of str: values stored in the variant tuple: values stored in the variant
""" """
return self._value return self._value
@ -296,7 +296,7 @@ def copy(self):
"""Returns an instance of a variant equivalent to self """Returns an instance of a variant equivalent to self
Returns: Returns:
any variant type: a copy of self AbstractVariant: a copy of self
>>> a = MultiValuedVariant('foo', True) >>> a = MultiValuedVariant('foo', True)
>>> b = a.copy() >>> b = a.copy()
@ -667,7 +667,7 @@ class DisjointSetsOfValues(Sequence):
and therefore no other set can contain the item ``'none'``. and therefore no other set can contain the item ``'none'``.
Args: Args:
*sets (list of tuples): mutually exclusive sets of values *sets (list): mutually exclusive sets of values
""" """
_empty_set = set(('none',)) _empty_set = set(('none',))