API Docs: fix broken reference targets

This commit is contained in:
Adam J. Stewart 2021-07-03 17:10:13 -05:00 committed by Todd Gamblin
parent c37df94932
commit b8afc0fd29
57 changed files with 510 additions and 471 deletions

View File

@ -2,7 +2,7 @@
#
# You can set these variables from the command line.
SPHINXOPTS = -W
SPHINXOPTS = -W --keep-going
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

View File

@ -130,8 +130,8 @@ Adding flags to cmake
To add additional flags to the ``cmake`` call, simply override the
``cmake_args`` function. The following example defines values for the flags
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
and without the :py:meth:`~.CMakePackage.define` and
:py:meth:`~.CMakePackage.define_from_variant` helper functions:
and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
.. code-block:: python

View File

@ -101,11 +101,14 @@ def setup(sphinx):
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.graphviz',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinxcontrib.programoutput']
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.graphviz',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinxcontrib.programoutput',
]
# Set default graphviz options
graphviz_dot_args = [
@ -164,6 +167,19 @@ def setup(sphinx):
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '_spack_root', '.spack-env']
nitpicky = True
nitpick_ignore = [
# Python classes that intersphinx is unable to resolve
('py:class', 'argparse.HelpFormatter'),
('py:class', 'contextlib.contextmanager'),
('py:class', 'module'),
('py:class', '_io.BufferedReader'),
('py:class', 'unittest.case.TestCase'),
('py:class', '_frozen_importlib_external.SourceFileLoader'),
# Spack classes that are private and we don't want to expose
('py:class', 'spack.provider_index._IndexBase'),
]
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
@ -358,3 +374,11 @@ class SpackStyle(DefaultStyle):
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Extension configuration -------------------------------------------------
# sphinx.ext.intersphinx
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
}

View File

@ -108,9 +108,9 @@ with a high level view of Spack's directory structure:
spack/ <- spack module; contains Python code
analyzers/ <- modules to run analysis on installed packages
build_systems/ <- modules for different build systems
build_systems/ <- modules for different build systems
cmd/ <- each file in here is a spack subcommand
compilers/ <- compiler description files
compilers/ <- compiler description files
container/ <- module for spack containerize
hooks/ <- hook modules to run at different points
modules/ <- modules for lmod, tcl, etc.
@ -151,24 +151,22 @@ Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.package`
Contains the :class:`Package <spack.package.Package>` class, which
Contains the :class:`~spack.package.Package` class, which
is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle
<package-lifecycle>` and manage the build process.
:mod:`spack.packages`
Contains all of the packages in Spack and methods for managing them.
Functions like :func:`packages.get <spack.packages.get>` and
:func:`class_name_for_package_name
<packages.class_name_for_package_name>` handle mapping package module
names to class names and dynamically instantiating packages by name
from module files.
:mod:`spack.util.naming`
Contains functions for mapping between Spack package names,
Python module names, and Python class names. Functions like
:func:`~spack.util.naming.mod_to_class` handle mapping package
module names to class names.
:mod:`spack.relations`
*Relations* are relationships between packages, like
:func:`depends_on <spack.relations.depends_on>` and :func:`provides
<spack.relations.provides>`. See :ref:`dependencies` and
:ref:`virtual-dependencies`.
:mod:`spack.directives`
*Directives* are functions that can be called inside a package definition
to modify the package, like :func:`~spack.directives.depends_on`
and :func:`~spack.directives.provides`. See :ref:`dependencies`
and :ref:`virtual-dependencies`.
:mod:`spack.multimethod`
Implementation of the :func:`@when <spack.multimethod.when>`
@ -180,31 +178,27 @@ Spec-related modules
^^^^^^^^^^^^^^^^^^^^
:mod:`spack.spec`
Contains :class:`Spec <spack.spec.Spec>` and :class:`SpecParser
<spack.spec.SpecParser>`. Also implements most of the logic for
normalization and concretization of specs.
Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
Also implements most of the logic for normalization and concretization
of specs.
:mod:`spack.parse`
Contains some base classes for implementing simple recursive descent
parsers: :class:`Parser <spack.parse.Parser>` and :class:`Lexer
<spack.parse.Lexer>`. Used by :class:`SpecParser
<spack.spec.SpecParser>`.
parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
Used by :class:`~spack.spec.SpecParser`.
:mod:`spack.concretize`
Contains :class:`DefaultConcretizer
<spack.concretize.DefaultConcretizer>` implementation, which allows
site administrators to change Spack's :ref:`concretization-policies`.
Contains :class:`~spack.concretize.Concretizer` implementation,
which allows site administrators to change Spack's :ref:`concretization-policies`.
:mod:`spack.version`
Implements a simple :class:`Version <spack.version.Version>` class
with simple comparison semantics. Also implements
:class:`VersionRange <spack.version.VersionRange>` and
:class:`VersionList <spack.version.VersionList>`. All three are
comparable with each other and offer union and intersection
operations. Spack uses these classes to compare versions and to
manage version constraints on specs. Comparison semantics are
similar to the ``LooseVersion`` class in ``distutils`` and to the
way RPM compares version strings.
Implements a simple :class:`~spack.version.Version` class with simple
comparison semantics. Also implements :class:`~spack.version.VersionRange`
and :class:`~spack.version.VersionList`. All three are comparable with each
other and offer union and intersection operations. Spack uses these classes
to compare versions and to manage version constraints on specs. Comparison
semantics are similar to the ``LooseVersion`` class in ``distutils`` and to
the way RPM compares version strings.
:mod:`spack.compilers`
Submodules contain descriptors for all valid compilers in Spack.
@ -232,7 +226,7 @@ Build environment
:mod:`spack.stage`
Handles creating temporary directories for builds.
:mod:`spack.compilation`
:mod:`spack.build_environment`
This contains utility functions used by the compiler wrapper script,
``cc``.
@ -257,22 +251,19 @@ Unit tests
Implements Spack's test suite. Add a module and put its name in
the test suite in ``__init__.py`` to add more unit tests.
:mod:`spack.test.mock_packages`
This is a fake package hierarchy used to mock up packages for
Spack's test suite.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Research and Monitoring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.monitor`
Contains :class:`SpackMonitor <spack.monitor.SpackMonitor>`. This is accessed
from the ``spack install`` and ``spack analyze`` commands to send build
and package metadada up to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
the ``spack install`` and ``spack analyze`` commands to send build and
package metadata up to a `Spack Monitor
<https://github.com/spack/spack-monitor>`_ server.
:mod:`spack.analyzers`
A module folder with a :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`
A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
that provides base functions to run, save, and (optionally) upload analysis
results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
@ -286,7 +277,7 @@ Other Modules
tarball URLs.
:mod:`spack.error`
:class:`SpackError <spack.error.SpackError>`, the base class for
:class:`~spack.error.SpackError`, the base class for
Spack's exception hierarchy.
:mod:`llnl.util.tty`
@ -335,8 +326,8 @@ Writing analyzers
To write an analyzer, you should add a new python file to the
analyzers module directory at ``lib/spack/spack/analyzers`` .
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
to add an analyzer class ``Myanalyzer`` you woul write to
``spack/analyzers/myanalyzer.py`` and import and
to add an analyzer class ``Myanalyzer`` you would write to
``spack/analyzers/myanalyzer.py`` and import and
use the base as follows:
.. code-block:: python
@ -347,7 +338,7 @@ use the base as follows:
Note that the class name is your module file name, all lowercase
except for the first capital letter. You can look at other analyzers in
except for the first capital letter. You can look at other analyzers in
that analyzer directory for examples. The guide here will tell you about the basic functions needed.
^^^^^^^^^^^^^^^^^^^^^^^^^
@ -356,13 +347,13 @@ Analyzer Output Directory
By default, when you run ``spack analyze run`` an analyzer output directory will
be created in your spack user directory in your ``$HOME``. The reason we output here
is because the install directory might not always be writable.
is because the install directory might not always be writable.
.. code-block:: console
~/.spack/
analyzers
Result files will be written here, organized in subfolders in the same structure
as the package, with each analyzer owning its own subfolder. For example:
@ -380,11 +371,11 @@ as the package, with each analyzer owning it's own subfolder. for example:
│   └── spack-analyzer-install-files.json
└── libabigail
└── lib
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
Notice that for the libabigail analyzer, since results are generated per object,
we honor the object's folder in case there are equivalently named files in
we honor the object's folder in case there are equivalently named files in
different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.
@ -426,7 +417,7 @@ and then return the object with a key as the analyzer name. The result data
should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
and one of ``value`` or ``binary_value``. The install file should be for a relative
path, and not the absolute path. For example, let's say we extract a metric called
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
We might have data that looks like this:
.. code-block:: python
@ -482,7 +473,7 @@ Saving Analyzer Results
The analyzer will have ``save_result`` called, with the result object generated
to save it to the filesystem, and if the user has added the ``--monitor`` flag
to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this
format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join
@ -548,7 +539,7 @@ each one (separately) to the monitor:
Notice that this function, if you define it, requires a result object (generated by
``run()``, a monitor (if you want to send), and a boolean ``overwrite`` to be used
to check if a result exists first, and not write to it if the result exists and
to check if a result exists first, and not write to it if the result exists and
overwrite is False. Also notice that since we already saved these files to the analyzer metadata folder, we return early if a monitor isn't defined, because this function serves to send results to the monitor. If you haven't saved anything to the analyzer metadata folder
yet, you might want to do that here. You should also use ``tty.info`` to give
the user a message of "Writing result to $DIRNAME."
@ -616,7 +607,7 @@ types of hooks in the ``__init__.py``, and then python files in that folder
can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
you can then write hook functions in that file that will be automatically detected,
and run whenever your hook is called. This section will cover the basic kind
and run whenever your hook is called. This section will cover the basic kind
of hooks, and how to write them.
^^^^^^^^^^^^^^
@ -624,7 +615,7 @@ Types of Hooks
^^^^^^^^^^^^^^
The following hooks are currently implemented to make it easy for you,
the developer, to add hooks at different stages of a spack install or similar.
the developer, to add hooks at different stages of a spack install or similar.
If there is a hook that you would like and is missing, you can propose to add a new one.
"""""""""""""""""""""
@ -632,9 +623,9 @@ If there is a hook that you would like and is missing, you can propose to add a
"""""""""""""""""""""
A ``pre_install`` hook is run within an install subprocess, directly before
the install starts. It expects a single argument of a spec, and is run in
the install starts. It expects a single argument of a spec, and is run in
a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages these are not hooks
as we have defined them here, but rather callback functions associated with
as we have defined them here, but rather callback functions associated with
a package install.
@ -657,7 +648,7 @@ here.
This hook is run at the beginning of ``lib/spack/spack/installer.py``,
in the install function of a ``PackageInstaller``,
and importantly is not part of a build process, but before it. This is when
we have just newly grabbed the task, and are preparing to install. If you
we have just newly grabbed the task, and are preparing to install. If you
write a hook of this type, you should provide the spec to it.
.. code-block:: python
@ -666,7 +657,7 @@ write a hook of this type, you should provide the spec to it.
"""On start of an install, we want to...
"""
print('on_install_start')
""""""""""""""""""""""""""""
``on_install_success(spec)``
@ -744,8 +735,8 @@ to trigger after anything is written to a logger. You would add it as follows:
post_install = HookRunner('post_install')
# hooks related to logging
post_log_write = HookRunner('post_log_write') # <- here is my new hook!
post_log_write = HookRunner('post_log_write') # <- here is my new hook!
You then need to decide what arguments your hook would expect. Since this is
related to logging, let's say that you want a message and level. That means
@ -775,7 +766,7 @@ In this example, we use it outside of a logger that is already defined:
This is not to say that this would be the best way to implement an integration
with the logger (you'd probably want to write a custom logger, or you could
have the hook defined within the logger) but serves as an example of writing a hook.
have the hook defined within the logger) but serves as an example of writing a hook.
----------
Unit tests
@ -905,7 +896,7 @@ just like you would with the normal ``python`` command.
^^^^^^^^^^^^^^^
Spack blame is a way to quickly see contributors to packages or files
in the spack repository. You should provide a target package name or
in the spack repository. You should provide a target package name or
file name to the command. Here is an example asking to see contributions
for the package "python":
@ -915,8 +906,8 @@ for the package "python":
LAST_COMMIT LINES % AUTHOR EMAIL
2 weeks ago 3 0.3 Mickey Mouse <cheddar@gmouse.org>
a month ago 927 99.7 Minnie Mouse <swiss@mouse.org>
2 weeks ago 930 100.0
2 weeks ago 930 100.0
By default, you will get a table view (shown above) sorted by date of contribution,
@ -1287,7 +1278,7 @@ Publishing a release on GitHub
#. Create the release in GitHub.
* Go to
* Go to
`github.com/spack/spack/releases <https://github.com/spack/spack/releases>`_
and click ``Draft a new release``.

View File

@ -2884,52 +2884,52 @@ The package base class, usually specialized for a given build system, determines
actual set of entities available for overriding.
The classes that are currently provided by Spack are:
+-------------------------------+----------------------------------+
| **Base Class** | **Purpose** |
+===============================+==================================+
| :py:class:`.Package` | General base class not |
| | specialized for any build system |
+-------------------------------+----------------------------------+
| :py:class:`.MakefilePackage` | Specialized class for packages |
| | built invoking |
| | hand-written Makefiles |
+-------------------------------+----------------------------------+
| :py:class:`.AutotoolsPackage` | Specialized class for packages |
| | built using GNU Autotools |
+-------------------------------+----------------------------------+
| :py:class:`.CMakePackage` | Specialized class for packages |
| | built using CMake |
+-------------------------------+----------------------------------+
| :py:class:`.CudaPackage` | A helper class for packages that |
| | use CUDA |
+-------------------------------+----------------------------------+
| :py:class:`.QMakePackage` | Specialized class for packages |
| | build using QMake |
+-------------------------------+----------------------------------+
| :py:class:`.ROCmPackage` | A helper class for packages that |
| | use ROCm |
+-------------------------------+----------------------------------+
| :py:class:`.SConsPackage` | Specialized class for packages |
| | built using SCons |
+-------------------------------+----------------------------------+
| :py:class:`.WafPackage` | Specialized class for packages |
| | built using Waf |
+-------------------------------+----------------------------------+
| :py:class:`.RPackage` | Specialized class for |
| | :py:class:`.R` extensions |
+-------------------------------+----------------------------------+
| :py:class:`.OctavePackage` | Specialized class for |
| | :py:class:`.Octave` packages |
+-------------------------------+----------------------------------+
| :py:class:`.PythonPackage` | Specialized class for |
| | :py:class:`.Python` extensions |
+-------------------------------+----------------------------------+
| :py:class:`.PerlPackage` | Specialized class for |
| | :py:class:`.Perl` extensions |
+-------------------------------+----------------------------------+
| :py:class:`.IntelPackage` | Specialized class for licensed |
| | Intel software |
+-------------------------------+----------------------------------+
+-------------------------=--------------------------------+----------------------------------+
| **Base Class** | **Purpose** |
+==========================================================+==================================+
| :class:`~spack.package.Package` | General base class not |
| | specialized for any build system |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages |
| | built invoking |
| | hand-written Makefiles |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages |
| | built using GNU Autotools |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages |
| | built using CMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
| | use CUDA |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.qmake.QMakePackage` | Specialized class for packages |
| | built using QMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
| | use ROCm |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.scons.SConsPackage` | Specialized class for packages |
| | built using SCons |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.waf.WafPackage` | Specialized class for packages |
| | built using Waf |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.r.RPackage` | Specialized class for |
| | R extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.octave.OctavePackage` | Specialized class for |
| | Octave packages |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python.PythonPackage` | Specialized class for |
| | Python extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.perl.PerlPackage` | Specialized class for |
| | Perl extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.intel.IntelPackage` | Specialized class for licensed |
| | Intel software |
+----------------------------------------------------------+----------------------------------+
.. note::
@ -2939,7 +2939,7 @@ The classes that are currently provided by Spack are:
rare cases where manual intervention is needed we need to stress that a
package base class depends on the *build system* being used, not the language of the package.
For example, a Python extension installed with CMake would ``extends('python')`` and
subclass from :py:class:`.CMakePackage`.
subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
^^^^^^^^^^^^^^^^^^^^^
Installation pipeline
@ -4079,7 +4079,7 @@ prefix **before** ``make install``. Builds like this can falsely report
success when an error occurs before the installation is complete. Simple
sanity checks can be used to identify files and or directories that are
required of a successful installation. Spack checks for the presence of
the files and directories after ``install()`` runs.
the files and directories after ``install()`` runs.
If any of the listed files or directories are missing, then the build will
fail and the install prefix will be removed. If they all exist, then Spack
@ -4193,7 +4193,7 @@ need to use two decorators for each phase test method:
The first decorator tells Spack when in the installation process to
run your test method installation process; namely *after* the provided
installation phase. The second decorator tells Spack to only run the
checks when the ``--test`` option is provided on the command line.
checks when the ``--test`` option is provided on the command line.
.. note::
@ -4267,17 +4267,17 @@ tests can be performed days, even weeks, after the software is installed.
Stand-alone tests are checks that should run relatively quickly -- as
in on the order of at most a few minutes -- and ideally execute all
aspects of the installed software, or at least key functionality.
aspects of the installed software, or at least key functionality.
.. note::
Execution speed is important because these tests are intended
to quickly assess whether the installed software works on the
system.
Failing stand-alone tests indicate that there is no reason to
proceed with more resource-intensive tests.
Passing stand-alone (or smoke) tests can lead to more thorough
testing, such as extensive unit or regression tests, or tests
that run at scale. Spack support for more thorough testing is
@ -4307,7 +4307,7 @@ file such that:
test_stage: /path/to/stage
The package can access this path **during test processing** using
`self.test_suite.stage`.
`self.test_suite.stage`.
.. note::
@ -4388,7 +4388,7 @@ can be implemented as shown below.
@run_after('install')
def copy_test_sources(self):
srcs = ['tests',
join_path('examples', 'foo.c'),
join_path('examples', 'foo.c'),
join_path('examples', 'bar.c')]
self.cache_extra_test_sources(srcs)
@ -4446,7 +4446,7 @@ Examples include:
- expected test output
These extra files should be added to the ``test`` subdirectory of the
package in the Spack repository.
package in the Spack repository.
Spack will **automatically copy** the contents of that directory to the
test staging directory for stand-alone testing. The ``test`` method can
@ -4471,7 +4471,7 @@ The signature for ``get_escaped_text_output`` is:
where ``filename`` is the path to the file containing the expected output.
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
accessed and used as illustrated by a simplified version of an ``sqlite``
package check:
@ -4591,10 +4591,10 @@ where each argument has the following meaning:
Options are a list of strings to be passed to the executable when
it runs.
The default is ``[]``, which means no options are provided to the
executable.
* ``expected`` is an optional list of expected output strings.
Spack requires every string in ``expected`` to be a regex matching
@ -4605,31 +4605,31 @@ where each argument has the following meaning:
The expected output can be :ref:`read from a file
<expected_test_output_from_file>`.
The default is ``expected=[]``, so Spack will not check the output.
* ``status`` is the optional expected return code(s).
A list of return codes corresponding to successful execution can
be provided (e.g., ``status=[0,3,7]``). Support for non-zero return
codes allows for basic **expected failure** tests as well as different
return codes across versions of the software.
The default is ``status=[0]``, which corresponds to **successful**
execution in the sense that the executable does not exit with a
failure code or raise an exception.
* ``installed`` is used to require ``exe`` to be within the package
prefix.
If ``True``, then the path for ``exe`` is required to be within the
package prefix; otherwise, the path is not constrained.
The default is ``False``, so the fully qualified path for ``exe``
does **not** need to be within the installation directory.
* ``purpose`` is an optional heading describing the test part.
Output from the test is written to a test log file so this argument
serves as a searchable heading in text logs to highlight the start
of the test part. Having a description can be helpful when debugging
@ -4644,10 +4644,10 @@ where each argument has the following meaning:
The default is ``False``, which means the test executable must be
present for any installable version of the software.
* ``work_dir`` is the path to the directory from which the executable
will run.
The default of ``None`` corresponds to the current directory (``'.'``).
"""""""""""""""""""""""""""""""""""""""""
@ -4754,7 +4754,7 @@ where only the outputs for the first of each set are shown:
Copyright (C) 2018 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
PASSED
...
==> [2021-04-26-17:35:20.493921] test: checking mpirun output
@ -4915,7 +4915,7 @@ This is already part of the boilerplate for packages created with
Filtering functions
^^^^^^^^^^^^^^^^^^^
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <spack.filter_file>`
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <llnl.util.filesystem.filter_file>`
Works like ``sed`` but with Python regular expression syntax. Takes
a regular expression, a replacement, and a set of files. ``repl``
can be a raw string or a callable function. If it is a raw string,
@ -4953,7 +4953,7 @@ Filtering functions
filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx,
prefix.bin.mpicxx)
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <spack.change_sed_delim>`
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <llnl.util.filesystem.change_sed_delimiter>`
Some packages, like TAU, have a build system that can't install
into directories with, e.g. '@' in the name, because they use
hard-coded ``sed`` commands in their build.
@ -4975,14 +4975,14 @@ Filtering functions
File functions
^^^^^^^^^^^^^^
:py:func:`ancestor(dir, n=1) <spack.ancestor>`
:py:func:`ancestor(dir, n=1) <llnl.util.filesystem.ancestor>`
Get the n\ :sup:`th` ancestor of the directory ``dir``.
:py:func:`can_access(path) <spack.can_access>`
:py:func:`can_access(path) <llnl.util.filesystem.can_access>`
True if we can read and write to the file at ``path``. Same as
native python ``os.access(file_name, os.R_OK|os.W_OK)``.
:py:func:`install(src, dest) <spack.install>`
:py:func:`install(src, dest) <llnl.util.filesystem.install>`
Install a file to a particular location. For example, install a
header into the ``include`` directory under the install ``prefix``:
@ -4990,14 +4990,14 @@ File functions
install('my-header.h', prefix.include)
:py:func:`join_path(*paths) <spack.join_path>`
:py:func:`join_path(*paths) <llnl.util.filesystem.join_path>`
An alias for ``os.path.join``. This joins paths using the OS path separator.
:py:func:`mkdirp(*paths) <spack.mkdirp>`
:py:func:`mkdirp(*paths) <llnl.util.filesystem.mkdirp>`
Create each of the directories in ``paths``, creating any parent
directories if they do not exist.
:py:func:`working_dir(dirname, kwargs) <spack.working_dir>`
:py:func:`working_dir(dirname, kwargs) <llnl.util.filesystem.working_dir>`
This is a Python `Context Manager
<https://docs.python.org/2/library/contextlib.html>`_ that makes it
easier to work with subdirectories in builds. You use this with the
@ -5039,7 +5039,7 @@ File functions
The ``create=True`` keyword argument causes the command to create
the directory if it does not exist.
:py:func:`touch(path) <spack.touch>`
:py:func:`touch(path) <llnl.util.filesystem.touch>`
Create an empty file at ``path``.
.. _make-package-findable:

View File

@ -326,7 +326,7 @@ def end_function(self, prog=None):
"""Returns the syntax needed to end a function definition.
Parameters:
prog (str, optional): the command name
prog (str or None): the command name
Returns:
str: the function definition ending

View File

@ -444,7 +444,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore
ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only
Raises:
@ -518,7 +518,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
src (str): the directory to install
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore
ignore (typing.Callable): function indicating which files to ignore
Raises:
IOError: if *src* does not match any files or directories
@ -557,12 +557,12 @@ def mkdirp(*paths, **kwargs):
paths (str): paths to create with mkdirp
    Keyword Arguments:
mode (permission bits or None, optional): optional permissions to set
mode (permission bits or None): optional permissions to set
on the created directory -- use OS default if not provided
group (group name or None, optional): optional group for permissions of
group (group name or None): optional group for permissions of
final created directory -- use OS default if not provided. Only
used if world write permissions are not set
default_perms ('parents' or 'args', optional): The default permissions
default_perms (str or None): one of 'parents' or 'args'. The default permissions
that are set for directories that are not themselves an argument
for mkdirp. 'parents' means intermediate directories get the
permissions of their direct parent directory, 'args' means
@ -866,7 +866,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post'
ignore (function): function indicating which files to ignore
ignore (typing.Callable): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in
``src`` that do not exit in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src``
@ -1114,11 +1114,11 @@ def find(root, files, recursive=True):
Parameters:
root (str): The root directory to start searching from
files (str or Sequence): Library name(s) to search for
recurse (bool, optional): if False search only root folder,
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to True.
Returns:
list of strings: The files that have been found
list: The files that have been found
"""
if isinstance(files, six.string_types):
files = [files]
@ -1200,7 +1200,7 @@ def directories(self):
['/dir1', '/dir2']
Returns:
list of strings: A list of directories
list: A list of directories
"""
return list(dedupe(
os.path.dirname(x) for x in self.files if os.path.dirname(x)
@ -1218,7 +1218,7 @@ def basenames(self):
['a.h', 'b.h']
Returns:
list of strings: A list of base-names
list: A list of base-names
"""
return list(dedupe(os.path.basename(x) for x in self.files))
@ -1305,7 +1305,7 @@ def headers(self):
"""Stable de-duplication of the headers.
Returns:
list of strings: A list of header files
list: A list of header files
"""
return self.files
@ -1318,7 +1318,7 @@ def names(self):
['a', 'b']
Returns:
list of strings: A list of files without extensions
list: A list of files without extensions
"""
names = []
@ -1409,9 +1409,9 @@ def find_headers(headers, root, recursive=False):
======= ====================================
Parameters:
headers (str or list of str): Header name(s) to search for
headers (str or list): Header name(s) to search for
root (str): The root directory to start searching from
recursive (bool, optional): if False search only root folder,
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False.
Returns:
@ -1447,7 +1447,7 @@ def find_all_headers(root):
in the directory passed as argument.
Args:
root (path): directory where to look recursively for header files
root (str): directory where to look recursively for header files
Returns:
List of all headers found in ``root`` and subdirectories.
@ -1467,7 +1467,7 @@ def libraries(self):
"""Stable de-duplication of library files.
Returns:
list of strings: A list of library files
list: A list of library files
"""
return self.files
@ -1480,7 +1480,7 @@ def names(self):
['a', 'b']
Returns:
list of strings: A list of library names
list: A list of library names
"""
names = []
@ -1565,8 +1565,8 @@ def find_system_libraries(libraries, shared=True):
======= ====================================
Parameters:
libraries (str or list of str): Library name(s) to search for
shared (bool, optional): if True searches for shared libraries,
libraries (str or list): Library name(s) to search for
shared (bool): if True searches for shared libraries,
otherwise for static. Defaults to True.
Returns:
@ -1616,11 +1616,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
======= ====================================
Parameters:
libraries (str or list of str): Library name(s) to search for
libraries (str or list): Library name(s) to search for
root (str): The root directory to start searching from
shared (bool, optional): if True searches for shared libraries,
shared (bool): if True searches for shared libraries,
otherwise for static. Defaults to True.
recursive (bool, optional): if False search only root folder,
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False.
Returns:

View File

@ -573,8 +573,8 @@ def pretty_date(time, now=None):
"""Convert a datetime or timestamp to a pretty, relative date.
Args:
time (datetime or int): date to print prettily
now (datetime): dateimte for 'now', i.e. the date the pretty date
time (datetime.datetime or int): date to print prettily
now (datetime.datetime): datetime for 'now', i.e. the date the pretty date
is relative to (default is datetime.now())
Returns:
@ -648,7 +648,7 @@ def pretty_string_to_date(date_str, now=None):
or be a *pretty date* (like ``yesterday`` or ``two months ago``)
Returns:
(datetime): datetime object corresponding to ``date_str``
(datetime.datetime): datetime object corresponding to ``date_str``
"""
pattern = {}

View File

@ -14,9 +14,19 @@
import spack.util.string
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
'LockError', 'LockTimeoutError',
'LockPermissionError', 'LockROFileError', 'CantCreateLockError']
__all__ = [
'Lock',
'LockDowngradeError',
'LockUpgradeError',
'LockTransaction',
'WriteTransaction',
'ReadTransaction',
'LockError',
'LockTimeoutError',
'LockPermissionError',
'LockROFileError',
'CantCreateLockError'
]
#: Mapping of supported locks to description
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
@ -401,7 +411,7 @@ def release_read(self, release_fn=None):
"""Releases a read lock.
Arguments:
release_fn (callable): function to call *before* the last recursive
release_fn (typing.Callable): function to call *before* the last recursive
lock (read or write) is released.
If the last recursive lock will be released, then this will call
@ -437,7 +447,7 @@ def release_write(self, release_fn=None):
"""Releases a write lock.
Arguments:
release_fn (callable): function to call before the last recursive
release_fn (typing.Callable): function to call before the last recursive
write is released.
If the last recursive *write* lock will be released, then this
@ -533,10 +543,10 @@ class LockTransaction(object):
Arguments:
    lock (Lock): underlying lock for this transaction to be acquired on
enter and released on exit
acquire (callable or contextmanager): function to be called after lock
is acquired, or contextmanager to enter after acquire and leave
acquire (typing.Callable or contextlib.contextmanager): function to be called
after lock is acquired, or contextmanager to enter after acquire and leave
before release.
release (callable): function to be called before release. If
release (typing.Callable): function to be called before release. If
``acquire`` is a contextmanager, this will be called *after*
    exiting the nested context and before the lock is released.
timeout (float): number of seconds to set for the timeout when

View File

@ -109,19 +109,17 @@ def colify(elts, **options):
using ``str()``.
Keyword Arguments:
output (stream): A file object to write to. Default is ``sys.stdout``
indent (int): Optionally indent all columns by some number of spaces
padding (int): Spaces between columns. Default is 2
width (int): Width of the output. Default is 80 if tty not detected
cols (int): Force number of columns. Default is to size to
terminal, or single-column if no tty
tty (bool): Whether to attempt to write to a tty. Default is to
autodetect a tty. Set to False to force single-column
output
method (str): Method to use to fit columns. Options are variable or
uniform. Variable-width columns are tighter, uniform
columns are all the same width and fit less data on
the screen
output (typing.IO): A file object to write to. Default is ``sys.stdout``
indent (int): Optionally indent all columns by some number of spaces
padding (int): Spaces between columns. Default is 2
width (int): Width of the output. Default is 80 if tty not detected
cols (int): Force number of columns. Default is to size to terminal, or
single-column if no tty
tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
tty. Set to False to force single-column output
method (str): Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the same width
and fit less data on the screen
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)

View File

@ -193,8 +193,8 @@ def optimization_flags(self, compiler):
the compiler passed as argument.
Args:
compiler (CompilerSpec or Compiler): object that contains both the
name and the version of the compiler we want to use
compiler (spack.spec.CompilerSpec or spack.compiler.Compiler): object that
contains both the name and the version of the compiler we want to use
"""
# Mixed toolchains are not supported yet
import spack.compilers

View File

@ -206,7 +206,7 @@ def find_built_spec(self, spec):
The cache can be updated by calling ``update()`` on the cache.
Args:
spec (Spec): Concrete spec to find
spec (spack.spec.Spec): Concrete spec to find
Returns:
    A list of objects containing the found specs and mirror url where
@ -1079,14 +1079,14 @@ def download_tarball(spec, preferred_mirrors=None):
path to downloaded tarball if successful, None otherwise.
Args:
spec (Spec): Concrete spec
spec (spack.spec.Spec): Concrete spec
preferred_mirrors (list): If provided, this is a list of preferred
mirror urls. Other configured mirrors will only be used if the
tarball can't be retrieved from one of these.
mirror urls. Other configured mirrors will only be used if the
tarball can't be retrieved from one of these.
Returns:
Path to the downloaded tarball, or ``None`` if the tarball could not
be downloaded from any configured mirrors.
be downloaded from any configured mirrors.
"""
if not spack.mirror.MirrorCollection():
tty.die("Please add a spack mirror to allow " +
@ -1455,7 +1455,7 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
indicating the mirrors on which it can be found
Args:
spec (Spec): The spec to look for in binary mirrors
spec (spack.spec.Spec): The spec to look for in binary mirrors
full_hash_match (bool): If True, only includes mirrors where the spec
full hash matches the locally computed full hash of the ``spec``
argument. If False, any mirror which has a matching DAG hash
@ -1732,11 +1732,11 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
Arguments:
mirrors (dict): Mirrors to check against
specs (iterable): Specs to check against mirrors
output_file (string): Path to output file to be written. If provided,
specs (typing.Iterable): Specs to check against mirrors
output_file (str): Path to output file to be written. If provided,
mirrors with missing or out-of-date specs will be formatted as a
JSON object and written to this file.
rebuild_on_errors (boolean): Treat any errors encountered while
rebuild_on_errors (bool): Treat any errors encountered while
checking specs as a signal to rebuild package.
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.

View File

@ -134,7 +134,7 @@ def get_executable(exe, spec=None, install=False):
Args:
exe (str): needed executable name
spec (Spec or str): spec to search for exe in (default exe)
spec (spack.spec.Spec or str): spec to search for exe in (default exe)
install (bool): install spec if not available
When ``install`` is True, Spack will use the python used to run Spack as an

View File

@ -455,11 +455,11 @@ def determine_number_of_jobs(
cap to the number of CPUs available to avoid oversubscription.
Parameters:
parallel (bool): true when package supports parallel builds
command_line (int/None): command line override
config_default (int/None): config default number of jobs
max_cpus (int/None): maximum number of CPUs available. When None, this
value is automatically determined.
parallel (bool or None): true when package supports parallel builds
command_line (int or None): command line override
config_default (int or None): config default number of jobs
max_cpus (int or None): maximum number of CPUs available. When None, this
value is automatically determined.
"""
if not parallel:
return 1
@ -685,14 +685,14 @@ def get_std_cmake_args(pkg):
"""List of standard arguments used if a package is a CMakePackage.
Returns:
list of str: standard arguments that would be used if this
list: standard arguments that would be used if this
package were a CMakePackage instance.
Args:
pkg (PackageBase): package under consideration
pkg (spack.package.PackageBase): package under consideration
Returns:
list of str: arguments for cmake
list: arguments for cmake
"""
return spack.build_systems.cmake.CMakePackage._std_args(pkg)
@ -701,14 +701,14 @@ def get_std_meson_args(pkg):
"""List of standard arguments used if a package is a MesonPackage.
Returns:
list of str: standard arguments that would be used if this
list: standard arguments that would be used if this
package were a MesonPackage instance.
Args:
pkg (PackageBase): package under consideration
pkg (spack.package.PackageBase): package under consideration
Returns:
list of str: arguments for meson
list: arguments for meson
"""
return spack.build_systems.meson.MesonPackage._std_args(pkg)
@ -738,7 +738,7 @@ def load_external_modules(pkg):
associated with them.
Args:
pkg (PackageBase): package to load deps for
pkg (spack.package.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
@ -864,7 +864,7 @@ def modifications_from_dependencies(spec, context, custom_mods_only=True):
CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
Args:
spec (Spec): spec for which we want the modifications
spec (spack.spec.Spec): spec for which we want the modifications
context (str): either 'build' for build-time modifications or 'run'
for run-time modifications
"""
@ -1062,9 +1062,9 @@ def start_build_process(pkg, function, kwargs):
Args:
pkg (PackageBase): package whose environment we should set up the
pkg (spack.package.PackageBase): package whose environment we should set up the
child process for.
function (callable): argless function to run in the child
function (typing.Callable): argless function to run in the child
process.
Usage::
@ -1149,7 +1149,7 @@ def get_package_context(traceback, context=3):
"""Return some context for an error message when the build fails.
Args:
traceback (traceback): A traceback from some exception raised during
traceback: A traceback from some exception raised during
install
context (int): Lines of context to show before and after the line

View File

@ -30,7 +30,7 @@ class AutotoolsPackage(PackageBase):
They all have sensible defaults and for many packages the only thing
necessary will be to override the helper method
:py:meth:`~.AutotoolsPackage.configure_args`.
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
For a finer tuning you may also override:
+-----------------------------------------------+--------------------+
@ -331,7 +331,7 @@ def flags_to_build_system_args(self, flags):
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
:py:meth:`~.AutotoolsPackage.configure_args`
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
and an appropriately set prefix.
"""
options = getattr(self, 'configure_flag_args', [])
@ -376,8 +376,8 @@ def _activate_or_not(
activation_value=None
):
"""This function contains the current implementation details of
:py:meth:`~.AutotoolsPackage.with_or_without` and
:py:meth:`~.AutotoolsPackage.enable_or_disable`.
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
:meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
Args:
name (str): name of the variant that is being processed
@ -385,7 +385,7 @@ def _activate_or_not(
case of ``with_or_without``)
deactivation_word (str): the default deactivation word ('without'
in the case of ``with_or_without``)
activation_value (callable): callable that accepts a single
activation_value (typing.Callable): callable that accepts a single
value. This value is either one of the allowed values for a
multi-valued variant or the name of a bool-valued variant.
Returns the parameter to be used when the value is activated.
@ -420,7 +420,7 @@ def _activate_or_not(
for ``<spec-name> foo=x +bar``
Returns:
list of strings that corresponds to the activation/deactivation
list: list of strings that corresponds to the activation/deactivation
of the variant that has been processed
Raises:
@ -501,7 +501,7 @@ def with_or_without(self, name, activation_value=None):
Args:
name (str): name of a valid multi-valued variant
activation_value (callable): callable that accepts a single
activation_value (typing.Callable): callable that accepts a single
value and returns the parameter to be used leading to an entry
of the type ``--with-{name}={parameter}``.
@ -514,12 +514,13 @@ def with_or_without(self, name, activation_value=None):
return self._activate_or_not(name, 'with', 'without', activation_value)
def enable_or_disable(self, name, activation_value=None):
"""Same as :py:meth:`~.AutotoolsPackage.with_or_without` but substitute
``with`` with ``enable`` and ``without`` with ``disable``.
"""Same as
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args:
name (str): name of a valid multi-valued variant
activation_value (callable): if present accepts a single value
activation_value (typing.Callable): if present accepts a single value
and returns the parameter to be used leading to an entry of the
type ``--enable-{name}={parameter}``

View File

@ -236,7 +236,7 @@ def define_from_variant(self, cmake_var, variant=None):
of ``cmake_var``.
This utility function is similar to
:py:meth:`~.AutotoolsPackage.with_or_without`.
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`.
Examples:

View File

@ -368,7 +368,7 @@ def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
toplevel psxevars.sh or equivalent file to source (and thus by
the modulefiles that Spack produces).
version_globs (list of str): Suffix glob patterns (most specific
version_globs (list): Suffix glob patterns (most specific
first) expected to qualify suite_dir_name to its fully
version-specific install directory (as opposed to a
compatibility directory or symlink).

View File

@ -216,10 +216,10 @@ def disambiguate_spec(spec, env, local=False, installed=True, first=False):
spec (spack.spec.Spec): a spec to disambiguate
env (spack.environment.Environment): a spack environment,
if one is active, or None if no environment is active
local (boolean, default False): do not search chained spack instances
installed (boolean or any, or spack.database.InstallStatus or iterable
of spack.database.InstallStatus): install status argument passed to
database query. See ``spack.database.Database._query`` for details.
local (bool): do not search chained spack instances
installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
"""
hashes = env.all_hashes() if env else None
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
@ -231,11 +231,11 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,
Arguments:
spec (spack.spec.Spec): a spec to disambiguate
hashes (iterable): a set of hashes of specs among which to disambiguate
local (boolean, default False): do not search chained spack instances
installed (boolean or any, or spack.database.InstallStatus or iterable
of spack.database.InstallStatus): install status argument passed to
database query. See ``spack.database.Database._query`` for details.
hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
local (bool): do not search chained spack instances
installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
"""
if local:
matching_specs = spack.store.db.query_local(spec, hashes=hashes,
@ -333,9 +333,8 @@ def display_specs(specs, args=None, **kwargs):
namespace.
Args:
specs (list of spack.spec.Spec): the specs to display
args (optional argparse.Namespace): namespace containing
formatting arguments
specs (list): the specs to display
args (argparse.Namespace or None): namespace containing formatting arguments
Keyword Args:
paths (bool): Show paths with each displayed spec
@ -348,9 +347,9 @@ def display_specs(specs, args=None, **kwargs):
indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True)
    decorators (dict): dictionary mapping specs to decorators
header_callback (function): called at start of arch/compiler groups
header_callback (typing.Callable): called at start of arch/compiler groups
all_headers (bool): show headers even when arch/compiler aren't defined
output (stream): A file object to write to. Default is ``sys.stdout``
output (typing.IO): A file object to write to. Default is ``sys.stdout``
"""
def get_arg(name, default=None):

View File

@ -58,9 +58,9 @@ def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=Fals
analyze_spec(spec, args.analyzers, args.outdir, monitor)
Args:
spec (Spec): spec object of installed package
spec (spack.spec.Spec): spec object of installed package
analyzers (list): list of analyzer (keys) to run
monitor (monitor.SpackMonitorClient): a monitor client
monitor (spack.monitor.SpackMonitorClient): a monitor client
overwrite (bool): overwrite result if already exists
"""
analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())

View File

@ -239,12 +239,13 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
concretized specs given from cli
Args:
pkgs (string): spec to be matched against installed packages
pkgs (str): spec to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted
env (Environment): active environment, or ``None`` if there is not one
env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
Return:
list of specs
list: list of specs
"""
hashes = env.all_hashes() if env else None

View File

@ -636,7 +636,7 @@ def get_name(args):
provided, extract the name from that. Otherwise, use a default.
Args:
args (param argparse.Namespace): The arguments given to
args (argparse.Namespace): The arguments given to
``spack create``
Returns:
@ -709,8 +709,7 @@ def get_versions(args, name):
name (str): The name of the package
Returns:
str and BuildSystemGuesser: Versions and hashes, and a
BuildSystemGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""
# Default version with hash
@ -794,7 +793,8 @@ def get_repository(args, name):
name (str): The name of the package to create
Returns:
Repo: A Repo object capable of determining the path to the package file
spack.repo.Repo: A Repo object capable of determining the path to the
package file
"""
spec = Spec(name)
# Figure out namespace for spec

View File

@ -59,7 +59,7 @@ def get_dependents(pkg_name, ideps, transitive=False, dependents=None):
Args:
pkg_name (str): name of the package whose dependents should be returned
ideps (dict): dictionary of dependents, from inverted_dependencies()
transitive (bool, optional): return transitive dependents when True
transitive (bool or None): return transitive dependents when True
"""
if dependents is None:
dependents = set()

View File

@ -198,9 +198,9 @@ def install_specs(cli_args, kwargs, specs):
"""Do the actual installation.
Args:
cli_args (Namespace): argparse namespace with command arguments
cli_args (argparse.Namespace): argparse namespace with command arguments
kwargs (dict): keyword arguments
specs (list of tuples): list of (abstract, concrete) spec tuples
specs (list): list of (abstract, concrete) spec tuples
"""
# handle active environment, if any

View File

@ -69,12 +69,13 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
concretized specs given from cli
Args:
env (Environment): active environment, or ``None`` if there is not one
env (spack.environment.Environment): active environment, or ``None``
if there is not one
specs (list): list of specs to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted
Return:
list of specs
list: list of specs
"""
# constrain uninstall resolution to current environment if one is active
hashes = env.all_hashes() if env else None
@ -118,15 +119,13 @@ def installed_dependents(specs, env):
Args:
specs (list): list of Specs
env (Environment): the active environment, or None
env (spack.environment.Environment or None): the active environment, or None
Returns:
(tuple of dicts): two mappings: one from specs to their dependent
environments in the active environment (or global scope if
there is no environment), and one from specs to their
dependents in *inactive* environments (empty if there is no
environment
tuple: two mappings: one from specs to their dependent environments in the
active environment (or global scope if there is no environment), and one from
specs to their dependents in *inactive* environments (empty if there is no
    environment)
"""
active_dpts = {}
inactive_dpts = {}
@ -155,9 +154,9 @@ def dependent_environments(specs):
Args:
specs (list): list of Specs
Returns:
(dict): mapping from spec to lists of dependent Environments
Returns:
dict: mapping from spec to lists of dependent Environments
"""
dependents = {}
for env in ev.all_environments():
@ -176,9 +175,10 @@ def inactive_dependent_environments(spec_envs):
have no dependent environments. Return the result.
Args:
(dict): mapping from spec to lists of dependent Environments
spec_envs (dict): mapping from spec to lists of dependent Environments
Returns:
(dict): mapping from spec to lists of *inactive* dependent Environments
dict: mapping from spec to lists of *inactive* dependent Environments
"""
spec_inactive_envs = {}
for spec, de_list in spec_envs.items():
@ -203,7 +203,8 @@ def do_uninstall(env, specs, force):
"""Uninstalls all the specs in a list.
Args:
env (Environment): active environment, or ``None`` if there is not one
env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
specs (list): list of specs to be uninstalled
force (bool): force uninstallation (boolean)
"""

View File

@ -502,7 +502,7 @@ def remove_separators(version):
Unfortunately, this also means that 1.23 and 12.3 are equal.
Args:
version (str or Version): A version
version (str or spack.version.Version): A version
Returns:
str: The version with all separator characters removed

View File

@ -135,8 +135,8 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
"""Add compilers to the config for the specified architecture.
Arguments:
- compilers: a list of Compiler objects.
- scope: configuration scope to modify.
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
@ -151,8 +151,8 @@ def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec.
Arguments:
- compiler_specs: a list of CompilerSpec objects.
- scope: configuration scope to modify.
compiler_specs: a list of CompilerSpec objects.
scope: configuration scope to modify.
"""
# Need a better way for this
global _cache_config_file
@ -544,8 +544,8 @@ def arguments_to_detect_version_fn(operating_system, paths):
function by providing a method called with the same name.
Args:
operating_system (OperatingSystem): the operating system on which
we are looking for compilers
operating_system (spack.architecture.OperatingSystem): the operating system
on which we are looking for compilers
paths: paths to search for compilers
Returns:
@ -649,7 +649,7 @@ def make_compiler_list(detected_versions):
valid version
Returns:
list of Compiler objects
list: list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
@ -715,7 +715,7 @@ def is_mixed_toolchain(compiler):
False otherwise.
Args:
compiler (Compiler): a valid compiler object
compiler (spack.compiler.Compiler): a valid compiler object
"""
cc = os.path.basename(compiler.cc or '')
cxx = os.path.basename(compiler.cxx or '')

View File

@ -17,8 +17,8 @@
And corresponding :ref:`per-platform scopes <platform-scopes>`. Important
functions in this module are:
* :py:func:`get_config`
* :py:func:`update_config`
* :func:`~spack.config.Configuration.get_config`
* :func:`~spack.config.Configuration.update_config`
``get_config`` reads in YAML data for a particular scope and returns
it. Callers can then modify the data and write it back with
@ -722,7 +722,7 @@ def override(path_or_scope, value=None):
Arguments:
path_or_scope (ConfigScope or str): scope or single option to override
value (object, optional): value for the single option
value (object or None): value for the single option
Temporarily push a scope on the current configuration, then remove it
after the context completes. If a single option is provided, create
@ -1163,7 +1163,7 @@ def default_modify_scope(section='config'):
priority scope.
Arguments:
section (boolean): Section for which to get the default scope.
section (bool): Section for which to get the default scope.
If this is not 'compilers', a general (non-platform) scope is used.
"""
if section == 'compilers':

View File

@ -171,13 +171,13 @@ class InstallRecord(object):
dependents left.
Args:
spec (Spec): spec tracked by the install record
spec (spack.spec.Spec): spec tracked by the install record
path (str): path where the spec has been installed
installed (bool): whether or not the spec is currently installed
ref_count (int): number of specs that depend on this one
explicit (bool, optional): whether or not this spec was explicitly
explicit (bool or None): whether or not this spec was explicitly
installed, or pulled-in as a dependency of something else
installation_time (time, optional): time of the installation
installation_time (datetime.datetime or None): time of the installation
"""
def __init__(
@ -256,36 +256,36 @@ def __getattribute__(self, name):
database. If it is a spec, we'll evaluate
``spec.satisfies(query_spec)``
known (bool or any, optional): Specs that are "known" are those
known (bool or None): Specs that are "known" are those
for which Spack can locate a ``package.py`` file -- i.e.,
Spack "knows" how to install them. Specs that are unknown may
represent packages that existed in a previous version of
Spack, but have since either changed their name or
been removed
installed (bool or any, or InstallStatus or iterable of
InstallStatus, optional): if ``True``, includes only installed
installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status
(installed, deprecated, or missing) matches (one of) the
InstallStatus. (default: True)
explicit (bool or any, optional): A spec that was installed
explicit (bool or None): A spec that was installed
following a specific user request is marked as explicit. If
instead it was pulled-in as a dependency of a user requested
spec it's considered implicit.
start_date (datetime, optional): filters the query discarding
specs that have been installed before ``start_date``.
start_date (datetime.datetime or None): filters the query
discarding specs that have been installed before ``start_date``.
end_date (datetime, optional): filters the query discarding
end_date (datetime.datetime or None): filters the query discarding
specs that have been installed after ``end_date``.
hashes (container): list or set of hashes that we can use to
hashes (typing.Container): list or set of hashes that we can use to
restrict the search
in_buildcache (bool or any, optional): Specs that are marked in
in_buildcache (bool or None): Specs that are marked in
this database as part of an associated binary cache are
``in_buildcache``. All other specs are not. This field is used
for querying mirror indices. Default is ``any``.
@ -449,7 +449,7 @@ def clear_failure(self, spec, force=False):
see `mark_failed()`.
Args:
spec (Spec): the spec whose failure indicators are being removed
spec (spack.spec.Spec): the spec whose failure indicators are being removed
force (bool): True if the failure information should be cleared
when a prefix failure lock exists for the file or False if
the failure should not be cleared (e.g., it may be
@ -1391,10 +1391,10 @@ def get_by_hash_local(self, *args, **kwargs):
Arguments:
dag_hash (str): hash (or hash prefix) to look up
default (object, optional): default value to return if dag_hash is
default (object or None): default value to return if dag_hash is
not in the DB (default: None)
installed (bool or any, or InstallStatus or iterable of
InstallStatus, optional): if ``True``, includes only installed
installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status
@ -1417,14 +1417,13 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
Arguments:
dag_hash (str): hash (or hash prefix) to look up
default (object, optional): default value to return if dag_hash is
default (object or None): default value to return if dag_hash is
not in the DB (default: None)
installed (bool or any, or InstallStatus or iterable of
InstallStatus, optional): if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status
(installed, deprecated, or missing) matches (one of) the
installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed specs in the search; if ``False``
only missing specs, and if ``any``, all specs in database. If an
InstallStatus or iterable of InstallStatus, returns specs whose install
status (installed, deprecated, or missing) matches (one of) the
InstallStatus. (default: any)
``installed`` defaults to ``any`` so that we can refer to any
@ -1596,7 +1595,7 @@ def update_explicit(self, spec, explicit):
Update the spec's explicit state in the database.
Args:
spec (Spec): the spec whose install record is being updated
spec (spack.spec.Spec): the spec whose install record is being updated
explicit (bool): ``True`` if the package was requested explicitly
by the user, ``False`` if it was pulled in as a dependency of
an explicit package.

View File

@ -54,7 +54,7 @@ class OpenMpi(Package):
from collections import Sequence
__all__ = []
__all__ = ['DirectiveError', 'DirectiveMeta']
#: These are variant names used by Spack internally; packages can't use them
reserved_names = ['patches', 'dev_path']
@ -85,7 +85,7 @@ def make_when_spec(value):
as part of concretization.
Arguments:
value (Spec or bool): a conditional Spec or a constant ``bool``
value (spack.spec.Spec or bool): a conditional Spec or a constant ``bool``
value indicating when a directive should be applied.
"""
@ -187,12 +187,16 @@ def directive(dicts=None):
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you write:
.. code-block:: python
class Foo(Package):
version(...)
@ -392,8 +396,8 @@ def conflicts(conflict_spec, when=None, msg=None):
conflicts('%intel', when='+foo')
Args:
conflict_spec (Spec): constraint defining the known conflict
when (Spec): optional constraint that triggers the conflict
conflict_spec (spack.spec.Spec): constraint defining the known conflict
when (spack.spec.Spec): optional constraint that triggers the conflict
msg (str): optional user defined message
"""
def _execute_conflicts(pkg):
@ -413,11 +417,11 @@ def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply.
Args:
spec (Spec or str): the package and constraints depended on
when (Spec or str): when the dependent satisfies this, it has
spec (spack.spec.Spec or str): the package and constraints depended on
when (spack.spec.Spec or str): when the dependent satisfies this, it has
the dependency represented by ``spec``
type (str or tuple of str): str or tuple of legal Spack deptypes
patches (obj or list): single result of ``patch()`` directive, a
type (str or tuple): str or tuple of legal Spack deptypes
patches (typing.Callable or list): single result of ``patch()`` directive, a
``str`` to be passed to ``patch``, or a list of these
This directive is to be used inside a Package definition to declare
@ -495,7 +499,7 @@ def patch(url_or_filename, level=1, when=None, working_dir=".", **kwargs):
Args:
url_or_filename (str): url or relative filename of the patch
level (int): patch level (as in the patch shell command)
when (Spec): optional anonymous spec that specifies when to apply
when (spack.spec.Spec): optional anonymous spec that specifies when to apply
the patch
working_dir (str): dir to change to before applying
@ -559,12 +563,12 @@ def variant(
specified otherwise the default will be False for a boolean
variant and 'nothing' for a multi-valued variant
description (str): description of the purpose of the variant
values (tuple or callable): either a tuple of strings containing the
values (tuple or typing.Callable): either a tuple of strings containing the
allowed values, or a callable accepting one value and returning
True if it is valid
multi (bool): if False only one value per spec is allowed for
this variant
validator (callable): optional group validator to enforce additional
validator (typing.Callable): optional group validator to enforce additional
logic. It receives the package name, the variant name and a tuple
of values and should raise an instance of SpackError if the group
doesn't meet the additional constraints

View File

@ -116,11 +116,12 @@ def activate(
use_env_repo (bool): use the packages exactly as they appear in the
environment's repository
add_view (bool): generate commands to add view to path variables
shell (string): One of `sh`, `csh`, `fish`.
prompt (string): string to add to the users prompt, or None
shell (str): One of `sh`, `csh`, `fish`.
prompt (str): string to add to the users prompt, or None
Returns:
cmds: Shell commands to activate environment.
str: Shell commands to activate environment.
TODO: environment to use the activated spack environment.
"""
global _active_environment
@ -198,10 +199,10 @@ def deactivate(shell='sh'):
"""Undo any configuration or repo settings modified by ``activate()``.
Arguments:
shell (string): One of `sh`, `csh`, `fish`. Shell style to use.
shell (str): One of `sh`, `csh`, `fish`. Shell style to use.
Returns:
(string): shell commands for `shell` to undo environment variables
str: shell commands for `shell` to undo environment variables
"""
global _active_environment
@ -272,7 +273,7 @@ def find_environment(args):
If an environment is found, read it in. If not, return None.
Arguments:
args (Namespace): argparse namespace with command arguments
args (argparse.Namespace): argparse namespace with command arguments
Returns:
(Environment): a found environment, or ``None``
@ -322,7 +323,7 @@ def get_env(args, cmd_name, required=False):
message that says the calling command *needs* an active environment.
Arguments:
args (Namespace): argparse namespace with command arguments
args (argparse.Namespace): argparse namespace with command arguments
cmd_name (str): name of calling command
required (bool): if ``True``, raise an exception when no environment
is found; if ``False``, just return ``None``
@ -550,7 +551,7 @@ def view(self, new=None):
Raise if new is None and there is no current view
Arguments:
new (string or None): If a string, create a FilesystemView
new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs.
"""
@ -851,7 +852,7 @@ def clear(self, re_read=False):
"""Clear the contents of the environment
Arguments:
re_read (boolean): If True, do not clear ``new_specs`` nor
re_read (bool): If True, do not clear ``new_specs`` nor
``new_installs`` values. These values cannot be read from
yaml, and need to be maintained when re-reading an existing
environment.
@ -1119,11 +1120,11 @@ def develop(self, spec, path, clone=False):
"""Add dev-build info for package
Args:
spec (Spec): Set constraints on development specs. Must include a
spec (spack.spec.Spec): Set constraints on development specs. Must include a
concrete version.
path (string): Path to find code for developer builds. Relative
path (str): Path to find code for developer builds. Relative
paths will be resolved relative to the environment.
clone (bool, default False): Clone the package code to the path.
clone (bool): Clone the package code to the path.
If clone is False Spack will assume the code is already present
at ``path``.
@ -1552,7 +1553,7 @@ def install_all(self, args=None, **install_args):
that needs to be done separately with a call to write().
Args:
args (Namespace): argparse namespace with command arguments
args (argparse.Namespace): argparse namespace with command arguments
install_args (dict): keyword install arguments
"""
self.install_specs(None, args=args, **install_args)

View File

@ -1254,8 +1254,9 @@ def __init__(self, **kwargs):
@property
def hg(self):
""":returns: The hg executable
:rtype: Executable
"""
Returns:
Executable: the hg executable
"""
if not self._hg:
self._hg = which('hg', required=True)
@ -1405,7 +1406,7 @@ def from_kwargs(**kwargs):
``version()`` directive in a package.
Returns:
fetch_strategy: The fetch strategy that matches the args, based
typing.Callable: The fetch strategy that matches the args, based
on attribute names (e.g., ``git``, ``hg``, etc.)
Raises:

View File

@ -28,7 +28,7 @@ def get_escaped_text_output(filename):
filename (str): path to the file
Returns:
(list of str): escaped text lines read from the file
list: escaped text lines read from the file
"""
with open(filename, 'r') as f:
# Ensure special characters are escaped as needed

View File

@ -93,7 +93,7 @@ def _check_last_phase(pkg):
package already.
Args:
pkg (PackageBase): the package being installed
pkg (spack.package.PackageBase): the package being installed
Raises:
``BadInstallPhase`` if stop_before or last phase is invalid
@ -115,10 +115,11 @@ def _handle_external_and_upstream(pkg, explicit):
database if it is external package.
Args:
pkg (Package): the package whose installation is under consideration
pkg (spack.package.Package): the package whose installation is under
consideration
explicit (bool): the package was explicitly requested by the user
Return:
(bool): ``True`` if the package is external or upstream (so not to
bool: ``True`` if the package is external or upstream (so not to
be installed locally), otherwise, ``False``
"""
# For external packages the workflow is simplified, and basically
@ -148,7 +149,7 @@ def _do_fake_install(pkg):
and libraries.
Args:
pkg (PackageBase): the package whose installation is to be faked
pkg (spack.package.PackageBase): the package whose installation is to be faked
"""
command = pkg.name
@ -194,15 +195,14 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
compiler (CompilerSpec): the compiler to bootstrap
architecture (ArchSpec): the architecture for which to bootstrap the
compiler
pkgs (list of PackageBase): the packages that may need their compiler
pkgs (list): the packages that may need their compiler
installed
Return:
(list) list of tuples, (PackageBase, bool), for concretized compiler-
-related packages that need to be installed and bool values
specify whether the package is the bootstrap compiler
(``True``) or one of its dependencies (``False``). The list
will be empty if there are no compilers.
list: list of tuples, (PackageBase, bool), for concretized compiler-related
packages that need to be installed and bool values specify whether the
package is the bootstrap compiler (``True``) or one of its dependencies
(``False``). The list will be empty if there are no compilers.
"""
tty.debug('Bootstrapping {0} compiler'.format(compiler))
compilers = spack.compilers.compilers_for_spec(
@ -260,7 +260,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
Extract the package from binary cache
Args:
pkg (PackageBase): the package to install from the binary cache
pkg (spack.package.PackageBase): the package to install from the binary cache
cache_only (bool): only extract from binary cache
explicit (bool): ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False``
@ -268,7 +268,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
otherwise, ``False``
Return:
(bool) ``True`` if the package was extracted from binary cache,
bool: ``True`` if the package was extracted from binary cache,
``False`` otherwise
"""
installed_from_cache = _try_install_from_binary_cache(
@ -350,8 +350,8 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
Process the binary cache tarball.
Args:
pkg (PackageBase): the package being installed
binary_spec (Spec): the spec whose cache has been confirmed
pkg (spack.package.PackageBase): the package being installed
binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@ -359,7 +359,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
attempting to download the tarball
Return:
(bool) ``True`` if the package was extracted from binary cache,
bool: ``True`` if the package was extracted from binary cache,
else ``False``
"""
tarball = binary_distribution.download_tarball(
@ -385,7 +385,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
Try to extract the package from binary cache.
Args:
pkg (PackageBase): the package to be extracted from binary cache
pkg (spack.package.PackageBase): the package to be extracted from binary cache
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@ -423,7 +423,7 @@ def combine_phase_logs(phase_log_files, log_path):
Args:
phase_log_files (list): a list or iterator of logs to combine
log_path (path): the path to combine them to
log_path (str): the path to combine them to
"""
with open(log_path, 'w') as log_file:
@ -441,7 +441,7 @@ def dump_packages(spec, path):
node in the DAG.
Args:
spec (Spec): the Spack spec whose package information is to be dumped
spec (spack.spec.Spec): the Spack spec whose package information is to be dumped
path (str): the path to the build packages directory
"""
fs.mkdirp(path)
@ -498,10 +498,10 @@ def get_dependent_ids(spec):
Return a list of package ids for the spec's dependents
Args:
spec (Spec): Concretized spec
spec (spack.spec.Spec): Concretized spec
Returns:
(list of str): list of package ids
list: list of package ids
"""
return [package_id(d.package) for d in spec.dependents()]
@ -512,10 +512,10 @@ def install_msg(name, pid):
Args:
name (str): Name/id of the package being installed
pid (id): id of the installer process
pid (int): id of the installer process
Return:
(str) Colorized installing message
str: Colorized installing message
"""
pre = '{0}: '.format(pid) if tty.show_pid() else ''
return pre + colorize('@*{Installing} @*g{%s}' % name)
@ -526,7 +526,7 @@ def log(pkg):
Copy provenance into the install directory on success
Args:
pkg (Package): the package that was built and installed
pkg (spack.package.Package): the package that was built and installed
"""
packages_dir = spack.store.layout.build_packages_path(pkg.spec)
@ -608,7 +608,8 @@ def package_id(pkg):
and packages for combinatorial environments.
Args:
pkg (PackageBase): the package from which the identifier is derived
pkg (spack.package.PackageBase): the package from which the identifier is
derived
"""
if not pkg.spec.concrete:
raise ValueError("Cannot provide a unique, readable id when "
@ -631,11 +632,11 @@ def __init__(self, installs=[]):
""" Initialize the installer.
Args:
installs (list of (pkg, install_args)): list of tuples, where each
installs (list): list of tuples, where each
tuple consists of a package (PackageBase) and its associated
install arguments (dict)
Return:
(PackageInstaller) instance
PackageInstaller: instance
"""
# List of build requests
self.build_requests = [BuildRequest(pkg, install_args)
@ -691,7 +692,8 @@ def _add_bootstrap_compilers(
Args:
compiler: the compiler to bootstrap
architecture: the architecture for which to bootstrap the compiler
pkgs (PackageBase): the package with possible compiler dependencies
pkgs (spack.package.PackageBase): the package with possible compiler
dependencies
request (BuildRequest): the associated install request
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
@ -707,7 +709,7 @@ def _add_init_task(self, pkg, request, is_compiler, all_deps):
Creates and queues the initial build task for the package.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
@ -726,7 +728,7 @@ def _check_db(self, spec):
"""Determine if the spec is flagged as installed in the database
Args:
spec (Spec): spec whose database install status is being checked
spec (spack.spec.Spec): spec whose database install status is being checked
Return:
(rec, installed_in_db) tuple where rec is the database record, or
@ -887,7 +889,7 @@ def _cleanup_task(self, pkg):
Cleanup the build task for the spec
Args:
pkg (PackageBase): the package being installed
pkg (spack.package.PackageBase): the package being installed
"""
self._remove_task(package_id(pkg))
@ -901,7 +903,7 @@ def _ensure_install_ready(self, pkg):
already locked.
Args:
pkg (PackageBase): the package being locally installed
pkg (spack.package.PackageBase): the package being locally installed
"""
pkg_id = package_id(pkg)
pre = "{0} cannot be installed locally:".format(pkg_id)
@ -933,7 +935,7 @@ def _ensure_locked(self, lock_type, pkg):
Args:
lock_type (str): 'read' for a read lock, 'write' for a write lock
pkg (PackageBase): the package whose spec is being installed
pkg (spack.package.PackageBase): the package whose spec is being installed
Return:
(lock_type, lock) tuple where lock will be None if it could not
@ -1294,7 +1296,7 @@ def _setup_install_dir(self, pkg):
Write a small metadata file with the current spack environment.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
"""
if not os.path.exists(pkg.spec.prefix):
tty.verbose('Creating the installation directory {0}'
@ -1369,9 +1371,9 @@ def _flag_installed(self, pkg, dependent_ids=None):
known dependents.
Args:
pkg (Package): Package that has been installed locally, externally
or upstream
dependent_ids (list of str or None): list of the package's
pkg (spack.package.Package): Package that has been installed locally,
externally or upstream
dependent_ids (list or None): list of the package's
dependent ids, or None if the dependent ids are limited to
those maintained in the package (dependency DAG)
"""
@ -1422,7 +1424,7 @@ def install(self):
Install the requested package(s) and or associated dependencies.
Args:
pkg (Package): the package to be built and installed"""
pkg (spack.package.Package): the package to be built and installed"""
self._init_queue()
fail_fast_err = 'Terminating after first install failure'
@ -1833,7 +1835,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
Instantiate a build task for a package.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
@ -1841,7 +1843,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
start (int): the initial start time for the package, in seconds
attempts (int): the number of attempts to install the package
status (str): the installation status
installed (list of str): the identifiers of packages that have
installed (list): the identifiers of packages that have
been installed so far
"""
@ -1983,7 +1985,7 @@ def flag_installed(self, installed):
Ensure the dependency is not considered to still be uninstalled.
Args:
installed (list of str): the identifiers of packages that have
installed (list): the identifiers of packages that have
been installed so far
"""
now_installed = self.uninstalled_deps & set(installed)
@ -2024,7 +2026,7 @@ def __init__(self, pkg, install_args):
Instantiate a build request for a package.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
install_args (dict): the install arguments associated with ``pkg``
"""
# Ensure dealing with a package that has a concrete spec
@ -2099,10 +2101,11 @@ def get_deptypes(self, pkg):
"""Determine the required dependency types for the associated package.
Args:
pkg (PackageBase): explicit or implicit package being installed
pkg (spack.package.PackageBase): explicit or implicit package being
installed
Returns:
(tuple) required dependency type(s) for the package
tuple: required dependency type(s) for the package
"""
deptypes = ['link', 'run']
include_build_deps = self.install_args.get('include_build_deps')
@ -2121,10 +2124,11 @@ def run_tests(self, pkg):
"""Determine if the tests should be run for the provided packages
Args:
pkg (PackageBase): explicit or implicit package being installed
pkg (spack.package.PackageBase): explicit or implicit package being
installed
Returns:
(bool) ``True`` if they should be run; ``False`` otherwise
bool: ``True`` if they should be run; ``False`` otherwise
"""
tests = self.install_args.get('tests', False)
return tests is True or (tests and pkg.name in tests)

View File

@ -530,7 +530,7 @@ def __call__(self, *argv, **kwargs):
"""Invoke this SpackCommand.
Args:
argv (list of str): command line arguments.
argv (list): command line arguments.
Keyword Args:
fail_on_error (optional bool): Don't raise an exception on error
@ -625,7 +625,7 @@ def print_setup_info(*info):
"""Print basic information needed by setup-env.[c]sh.
Args:
info (list of str): list of things to print: comma-separated list
info (list): list of things to print: comma-separated list
of 'csh', 'sh', or 'modules'
This is in ``main.py`` to make it fast; the setup scripts need to
@ -689,7 +689,7 @@ def main(argv=None):
"""This is the entry point for the Spack command.
Args:
argv (list of str or None): command line arguments, NOT including
argv (list or None): command line arguments, NOT including
the executable name. If None, parses from sys.argv.
"""
# Create a parser with a simple positional argument first. We'll

View File

@ -19,7 +19,8 @@
import llnl.util.filesystem
__all__ = [
'filter_compiler_wrappers'
'filter_compiler_wrappers',
'PackageMixinsMeta',
]

View File

@ -30,7 +30,7 @@ def configuration(module_set_name):
return config
#: Caches the configuration {spec_hash: configuration}
# Caches the configuration {spec_hash: configuration}
configuration_registry = {} # type: Dict[str, Any]

View File

@ -29,7 +29,7 @@ def configuration(module_set_name):
return config
#: Caches the configuration {spec_hash: configuration}
# Caches the configuration {spec_hash: configuration}
configuration_registry = {} # type: Dict[str, Any]

View File

@ -465,7 +465,7 @@ def test_log_pathname(test_stage, spec):
Args:
test_stage (str): path to the test stage directory
spec (Spec): instance of the spec under test
spec (spack.spec.Spec): instance of the spec under test
Returns:
(str): the pathname of the test log file
@ -725,14 +725,14 @@ def possible_dependencies(
"""Return dict of possible dependencies of this package.
Args:
transitive (bool, optional): return all transitive dependencies if
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True)..
expand_virtuals (bool, optional): expand virtual dependencies into
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
deptype (str or tuple, optional): dependency types to consider
visited (dicct, optional): dict of names of dependencies visited so
deptype (str or tuple or None): dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict, optional): dict to populate with packages and their
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
@ -1756,7 +1756,7 @@ def cache_extra_test_sources(self, srcs):
during install testing.
Args:
srcs (str or list of str): relative path for files and or
srcs (str or list): relative path for files and or
subdirectories located in the staged source path that are to
be copied to the corresponding location(s) under the install
testing directory.
@ -1803,10 +1803,10 @@ def run_test(self, exe, options=[], expected=[], status=0,
Args:
exe (str): the name of the executable
options (str or list of str): list of options to pass to the runner
expected (str or list of str): list of expected output strings.
options (str or list): list of options to pass to the runner
expected (str or list): list of expected output strings.
Each string is a regex expected to match part of the output.
status (int or list of int): possible passing status values
status (int or list): possible passing status values
with 0 meaning the test is expected to succeed
installed (bool): if ``True``, the executable must be in the
install prefix
@ -2010,9 +2010,9 @@ def setup_build_environment(self, env):
Spack's store.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the package is built. Package authors can call
methods on it to alter the build environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
legacy_fn = self._get_legacy_environment_method('setup_environment')
if legacy_fn:
@ -2023,9 +2023,9 @@ def setup_run_environment(self, env):
"""Sets up the run environment for a package.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the package is run. Package authors can call
methods on it to alter the run environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is run. Package authors
can call methods on it to alter the run environment.
"""
legacy_fn = self._get_legacy_environment_method('setup_environment')
if legacy_fn:
@ -2052,11 +2052,11 @@ def setup_dependent_build_environment(self, env, dependent_spec):
variable.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the dependent package is built. Package authors
can call methods on it to alter the build environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec (Spec): the spec of the dependent package
dependent_spec (spack.spec.Spec): the spec of the dependent package
about to be built. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
@ -2079,11 +2079,11 @@ def setup_dependent_run_environment(self, env, dependent_spec):
for dependencies.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the dependent package is run. Package authors
can call methods on it to alter the build environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is run.
Package authors can call methods on it to alter the build environment.
dependent_spec (Spec): The spec of the dependent package
dependent_spec (spack.spec.Spec): The spec of the dependent package
about to be run. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
@ -2125,7 +2125,7 @@ def setup_dependent_package(self, module, dependent_spec):
object of the dependent package. Packages can use this to set
module-scope variables for the dependent to use.
dependent_spec (Spec): The spec of the dependent package
dependent_spec (spack.spec.Spec): The spec of the dependent package
about to be built. This allows the extendee (self) to
query the dependent's state. Note that *this*
package's spec is available as ``self.spec``.

View File

@ -28,7 +28,7 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
Args:
stage (spack.stage.Stage): stage with code that will be patched
patch_path (str): filesystem location for the patch to apply
level (int, optional): patch level (default 1)
level (int or None): patch level (default 1)
working_dir (str): relative path *within* the stage to change to
(default '.')
"""

View File

@ -869,7 +869,7 @@ def is_relocatable(spec):
"""Returns True if an installed spec is relocatable.
Args:
spec (Spec): spec to be analyzed
spec (spack.spec.Spec): spec to be analyzed
Returns:
True if the binaries of an installed spec

View File

@ -679,14 +679,14 @@ def condition(self, required_spec, imposed_spec=None, name=None):
"""Generate facts for a dependency or virtual provider condition.
Arguments:
required_spec (Spec): the spec that triggers this condition
imposed_spec (optional, Spec): the spec with constraints that
required_spec (spack.spec.Spec): the spec that triggers this condition
imposed_spec (spack.spec.Spec or None): the spec with constraints that
are imposed when this condition is triggered
name (optional, str): name for `required_spec` (required if
name (str or None): name for `required_spec` (required if
required_spec is anonymous, ignored if not)
Returns:
(int): id of the condition created by this function
int: id of the condition created by this function
"""
named_cond = required_spec.copy()
named_cond.name = named_cond.name or name
@ -922,7 +922,7 @@ def spec_clauses(self, spec, body=False, transitive=True):
"""Return a list of clauses for a spec mandates are true.
Arguments:
spec (Spec): the spec to analyze
spec (spack.spec.Spec): the spec to analyze
body (bool): if True, generate clauses to be used in rule bodies
(final values) instead of rule heads (setters).
transitive (bool): if False, don't generate clauses from

View File

@ -122,7 +122,9 @@
__all__ = [
'CompilerSpec',
'Spec',
'SpecParser',
'parse',
'SpecParseError',
'DuplicateDependencyError',
@ -143,7 +145,9 @@
'AmbiguousHashError',
'InvalidHashError',
'NoSuchHashError',
'RedundantSpecError']
'RedundantSpecError',
'SpecDeprecatedError',
]
#: Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@ -1495,7 +1499,7 @@ def _spec_hash(self, hash):
"""Utility method for computing different types of Spec hashes.
Arguments:
hash (SpecHashDescriptor): type of hash to generate.
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
"""
# TODO: currently we strip build dependencies by default. Rethink
# this when we move to using package hashing on all specs.
@ -1513,7 +1517,7 @@ def _cached_hash(self, hash, length=None):
in the supplied attribute on this spec.
Arguments:
hash (SpecHashDescriptor): type of hash to generate.
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
"""
if not hash.attr:
return self._spec_hash(hash)[:length]
@ -1615,7 +1619,7 @@ def to_node_dict(self, hash=ht.dag_hash):
hashes).
Arguments:
hash (SpecHashDescriptor) type of hash to generate.
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
"""
d = syaml.syaml_dict()
@ -2987,7 +2991,7 @@ def ensure_valid_variants(spec):
spec (Spec): spec to be analyzed
Raises:
UnknownVariantError: on the first unknown variant found
spack.variant.UnknownVariantError: on the first unknown variant found
"""
pkg_cls = spec.package_class
pkg_variants = pkg_cls.variants
@ -4437,6 +4441,7 @@ def __init__(self):
class SpecParser(spack.parse.Parser):
"""Parses specs."""
def __init__(self, initial_spec=None):
"""Construct a new SpecParser.

View File

@ -565,8 +565,9 @@ def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached
Arguments:
mirror (MirrorCache): the mirror to cache this Stage's resource in
stats (MirrorStats): this is updated depending on whether the
mirror (spack.caches.MirrorCache): the mirror to cache this Stage's
resource in
stats (spack.mirror.MirrorStats): this is updated depending on whether the
caching operation succeeded or failed
"""
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
@ -835,7 +836,7 @@ def get_checksums_for_versions(
Args:
url_dict (dict): A dictionary of the form: version -> URL
name (str): The name of the package
first_stage_function (callable): function that takes a Stage and a URL;
first_stage_function (typing.Callable): function that takes a Stage and a URL;
this is run on the stage of the first URL downloaded
keep_stage (bool): whether to keep staging area when command completes
batch (bool): whether to ask user how many versions to fetch (false)

View File

@ -65,7 +65,7 @@ def create_build_task(pkg, install_args={}):
Create a build task for the given (concretized) package
Args:
pkg (PackageBase): concretized package associated with the task
pkg (spack.package.PackageBase): concretized package associated with the task
install_args (dict): dictionary of kwargs (or install args)
Return:
@ -80,10 +80,10 @@ def create_installer(installer_args):
Create an installer using the concretized spec for each arg
Args:
installer_args (list of tuples): the list of (spec name, kwargs) tuples
installer_args (list): the list of (spec name, kwargs) tuples
Return:
installer (PackageInstaller): the associated package installer
spack.installer.PackageInstaller: the associated package installer
"""
const_arg = [(spec.package, kwargs) for spec, kwargs in installer_args]
return inst.PackageInstaller(const_arg)
@ -93,11 +93,11 @@ def installer_args(spec_names, kwargs={}):
"""Return the installer argument with each spec paired with kwargs
Args:
spec_names (list of str): list of spec names
spec_names (list): list of spec names
kwargs (dict or None): install arguments to apply to all of the specs
Returns:
list of (spec, kwargs): the installer constructor argument
list: list of (spec, kwargs), the installer constructor argument
"""
arg = []
for name in spec_names:

View File

@ -417,11 +417,11 @@ def parse_version_offset(path):
path (str): The filename or URL for the package
Returns:
tuple of (Version, int, int, int, str): A tuple containing:
tuple: A tuple containing:
version of the package,
first index of version,
length of version string,
the index of the matching regex
the index of the matching regex,
the matching regex
Raises:
@ -632,11 +632,11 @@ def parse_name_offset(path, v=None):
v (str): The version of the package
Returns:
tuple of (str, int, int, int, str): A tuple containing:
tuple: A tuple containing:
name of the package,
first index of name,
length of name,
the index of the matching regex
the index of the matching regex,
the matching regex
Raises:
@ -774,9 +774,7 @@ def parse_name_and_version(path):
path (str): The filename or URL for the package
Returns:
tuple of (str, Version)A tuple containing:
The name of the package
The version of the package
tuple: a tuple containing the package (name, version)
Raises:
UndetectableVersionError: If the URL does not match any regexes

View File

@ -18,7 +18,7 @@ def prefix_inspections(platform):
"""Get list of prefix inspections for platform
Arguments:
platform (string): the name of the platform to consider. The platform
platform (str): the name of the platform to consider. The platform
determines what environment variables Spack will use for some
inspections.

View File

@ -64,7 +64,7 @@ def editor(*args, **kwargs):
searching the full list above, we'll raise an error.
Arguments:
args (list of str): args to pass to editor
args (list): args to pass to editor
Optional Arguments:
_exec_func (function): invoke this function instead of ``os.execv()``

View File

@ -623,7 +623,7 @@ def from_sourcing_file(filename, *arguments, **kwargs):
Args:
filename (str): the file to be sourced
*arguments (list of str): arguments to pass on the command line
*arguments (list): arguments to pass on the command line
Keyword Args:
shell (str): the shell to use (default: ``bash``)
@ -867,7 +867,7 @@ def inspect_path(root, inspections, exclude=None):
modifications are not performed immediately, but stored in a
command object that is returned to client
exclude (callable): optional callable. If present it must accept an
exclude (typing.Callable): optional callable. If present it must accept an
absolute path and return True if it should be excluded from the
inspection
@ -920,7 +920,7 @@ def preserve_environment(*variables):
explicitly unset on exit.
Args:
variables (list of str): list of environment variables to be preserved
variables (list): list of environment variables to be preserved
"""
cache = {}
for var in variables:
@ -1031,9 +1031,9 @@ def sanitize(environment, blacklist, whitelist):
Args:
environment (dict): input dictionary
blacklist (list of str): literals or regex patterns to be
blacklist (list): literals or regex patterns to be
blacklisted
whitelist (list of str): literals or regex patterns to be
whitelist (list): literals or regex patterns to be
whitelisted
"""

View File

@ -297,7 +297,7 @@ def which(*args, **kwargs):
*args (str): One or more executables to search for
Keyword Arguments:
path (:func:`list` or str): The path to search. Defaults to ``PATH``
path (list or str): The path to search. Defaults to ``PATH``
required (bool): If set to True, raise an error if executable not found
Returns:

View File

@ -28,11 +28,11 @@ def load_source(full_name, path, prepend=None):
Args:
full_name (str): full name of the module to be loaded
path (str): path to the file that should be loaded
prepend (str, optional): some optional code to prepend to the
prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements
Returns:
(ModuleType): the loaded module
the loaded module
"""
with import_lock():
if prepend is None:

View File

@ -37,11 +37,11 @@ def load_source(full_name, path, prepend=None):
Args:
full_name (str): full name of the module to be loaded
path (str): path to the file that should be loaded
prepend (str, optional): some optional code to prepend to the
prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements
Returns:
(ModuleType): the loaded module
the loaded module
"""
# use our custom loader
loader = PrependFileLoader(full_name, path, prepend)

View File

@ -20,7 +20,7 @@ def parse_log_events(stream, context=6, jobs=None, profile=False):
"""Extract interesting events from a log file as a list of LogEvent.
Args:
stream (str or fileobject): build log name or file object
stream (str or typing.IO): build log name or file object
context (int): lines of context to extract around each log event
jobs (int): number of jobs to parse with; default ncpus
profile (bool): print out profile information for parsing
@ -60,7 +60,7 @@ def make_log_context(log_events, width=None):
"""Get error context from a log file.
Args:
log_events (list of LogEvent): list of events created by
log_events (list): list of events created by
``ctest_log_parser.parse()``
width (int or None): wrap width; ``0`` for no limit; ``None`` to
auto-size for terminal

View File

@ -49,10 +49,10 @@ def is_directive(self, node):
callbacks are sometimes represented).
Args:
node (AST): the AST node being checked
node (ast.AST): the AST node being checked
Returns:
(bool): ``True`` if the node represents a known directive,
bool: ``True`` if the node represents a known directive,
``False`` otherwise
"""
return (isinstance(node, ast.Expr) and

View File

@ -41,7 +41,7 @@ def composite(interface=None, method_list=None, container=list):
interface (type): class exposing the interface to which the
composite object must conform. Only non-private and
non-special methods will be taken into account
method_list (list of str): names of methods that should be part
method_list (list): names of methods that should be part
of the composite
container (MutableSequence): container for the composite object
(default = list). Must fulfill the MutableSequence

View File

@ -41,7 +41,7 @@ def plural(n, singular, plural=None, show_n=True):
Arguments:
n (int): number of things there are
singular (str): singular form of word
plural (str, optional): optional plural form, for when it's not just
plural (str or None): optional plural form, for when it's not just
singular + 's'
show_n (bool): whether to include n in the result string (default True)

View File

@ -367,7 +367,7 @@ def spider(root_urls, depth=0, concurrency=32):
up to <depth> levels of links from each root.
Args:
root_urls (str or list of str): root urls used as a starting point
root_urls (str or list): root urls used as a starting point
for spidering
depth (int): level of recursion into links
concurrency (int): number of simultaneous requests that can be sent

View File

@ -94,8 +94,8 @@ def validate_or_raise(self, vspec, pkg=None):
exception if any error is found.
Args:
vspec (VariantSpec): instance to be validated
pkg (Package): the package that required the validation,
vspec (Variant): instance to be validated
pkg (spack.package.Package): the package that required the validation,
if available
Raises:
@ -254,7 +254,7 @@ def value(self):
the variant.
Returns:
tuple of str: values stored in the variant
tuple: values stored in the variant
"""
return self._value
@ -296,7 +296,7 @@ def copy(self):
"""Returns an instance of a variant equivalent to self
Returns:
any variant type: a copy of self
AbstractVariant: a copy of self
>>> a = MultiValuedVariant('foo', True)
>>> b = a.copy()
@ -667,7 +667,7 @@ class DisjointSetsOfValues(Sequence):
and therefore no other set can contain the item ``'none'``.
Args:
*sets (list of tuples): mutually exclusive sets of values
*sets (list): mutually exclusive sets of values
"""
_empty_set = set(('none',))