Merge remote-tracking branch 'upstream/develop' into develop

This commit is contained in:
Quellyn Snead 2021-07-28 15:04:49 -06:00
commit 5f7fdf148c
386 changed files with 6363 additions and 2067 deletions

View File

@ -1,38 +0,0 @@
# -*- conf -*-
# .coveragerc to control coverage.py
[run]
parallel = True
concurrency = multiprocessing
branch = True
source =
bin
lib
omit =
lib/spack/spack/test/*
lib/spack/docs/*
lib/spack/external/*
share/spack/qa/*
[report]
# Regexes for lines to exclude from consideration
exclude_lines =
# Have to re-enable the standard pragma
pragma: no cover
# Don't complain about missing debug-only code:
def __repr__
if self\.debug
# Don't complain if tests don't hit defensive assertion code:
raise AssertionError
raise NotImplementedError
# Don't complain if non-runnable code isn't run:
if 0:
if False:
if __name__ == .__main__.:
ignore_errors = True
[html]
directory = htmlcov

View File

@ -39,7 +39,7 @@ jobs:
python-version: 3.9
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools flake8 isort>=4.3.5 mypy>=0.800 black types-six
pip install --upgrade pip six setuptools types-six
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@ -130,14 +130,19 @@ jobs:
sudo apt-get -y update
# Needed for unit tests
sudo apt-get -y install \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
patchelf
# Needed for kcov
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools codecov coverage
pip install --upgrade pip six setuptools codecov coverage[toml]
# ensure style checks are not skipped in unit tests for python >= 3.6
# note that true/false (i.e., 1/0) are opposite in conditions in python and bash
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
pip install --upgrade flake8 isort>=4.3.5 mypy>=0.900 black
fi
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@ -180,7 +185,7 @@ jobs:
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@v1
- uses: codecov/codecov-action@v2.0.2
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,${{ matrix.concretizer }}
@ -205,7 +210,7 @@ jobs:
sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools codecov coverage
pip install --upgrade pip six setuptools codecov coverage[toml]
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@ -232,7 +237,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@v1
- uses: codecov/codecov-action@v2.0.2
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: shelltests,linux
@ -326,7 +331,7 @@ jobs:
make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools codecov coverage clingo
pip install --upgrade pip six setuptools codecov coverage[toml] clingo
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@ -348,7 +353,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@v1
- uses: codecov/codecov-action@v2.0.2
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
flags: unittests,linux,clingo
@ -369,8 +374,7 @@ jobs:
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools
pip install --upgrade codecov coverage
pip install --upgrade flake8 isort>=4.3.5 mypy>=0.800
pip install --upgrade codecov coverage[toml]
- name: Setup Homebrew packages
run: |
brew install dash fish gcc gnupg2 kcov
@ -384,12 +388,15 @@ jobs:
coverage run $(which spack) unit-test -x
coverage combine
coverage xml
# Delete the symlink going from ./lib/spack/docs/_spack_root back to
# the initial directory, since it causes ELOOP errors with codecov/actions@2
rm lib/spack/docs/_spack_root
else
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
fi
- uses: codecov/codecov-action@v1
- uses: codecov/codecov-action@v2.0.2
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
with:
file: ./coverage.xml
files: ./coverage.xml
flags: unittests,macos

View File

@ -1,35 +0,0 @@
[mypy]
python_version = 3.7
files=lib/spack/llnl/**/*.py,lib/spack/spack/**/*.py
mypy_path=bin,lib/spack,lib/spack/external,var/spack/repos/builtin
# This and a generated import file allows supporting packages
namespace_packages=True
# To avoid re-factoring all the externals, ignore errors and missing imports
# globally, then turn back on in spack and spack submodules
ignore_errors=True
ignore_missing_imports=True
[mypy-spack.*]
ignore_errors=False
ignore_missing_imports=False
[mypy-packages.*]
ignore_errors=False
ignore_missing_imports=False
[mypy-llnl.*]
ignore_errors=False
ignore_missing_imports=False
[mypy-spack.test.packages]
ignore_errors=True
# ignore errors in fake import path for packages
[mypy-spack.pkg.*]
ignore_errors=True
ignore_missing_imports=True
# jinja has syntax in it that requires python3 and causes a parse error
# skip importing it
[mypy-jinja2]
follow_imports=skip

View File

@ -36,6 +36,8 @@ Documentation
[**Full documentation**](https://spack.readthedocs.io/) is available, or
run `spack help` or `spack help --all`.
For a cheat sheet on Spack syntax, run `spack help --spec`.
Tutorial
----------------

View File

@ -0,0 +1,7 @@
bootstrap:
# If set to false Spack will not bootstrap missing software,
# but will instead raise an error.
enable: true
# Root directory for bootstrapping work. The software bootstrapped
# by Spack is installed in a "store" subfolder of this root directory
root: ~/.spack/bootstrap

View File

@ -43,6 +43,7 @@ packages:
opencl: [pocl]
onedal: [intel-oneapi-dal]
osmesa: [mesa+osmesa, mesa18+osmesa]
pbs: [openpbs, torque]
pil: [py-pillow]
pkgconfig: [pkgconf, pkg-config]
rpc: [libtirpc]

View File

@ -2,7 +2,7 @@
#
# You can set these variables from the command line.
SPHINXOPTS = -W
SPHINXOPTS = -W --keep-going
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

View File

@ -130,8 +130,8 @@ Adding flags to cmake
To add additional flags to the ``cmake`` call, simply override the
``cmake_args`` function. The following example defines values for the flags
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
and without the :py:meth:`~.CMakePackage.define` and
:py:meth:`~.CMakePackage.define_from_variant` helper functions:
and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
.. code-block:: python

View File

@ -97,15 +97,18 @@ def setup(sphinx):
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.8'
needs_sphinx = '3.4'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.graphviz',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinxcontrib.programoutput']
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.graphviz',
'sphinx.ext.intersphinx',
'sphinx.ext.napoleon',
'sphinx.ext.todo',
'sphinxcontrib.programoutput',
]
# Set default graphviz options
graphviz_dot_args = [
@ -164,6 +167,19 @@ def setup(sphinx):
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '_spack_root', '.spack-env']
nitpicky = True
nitpick_ignore = [
# Python classes that intersphinx is unable to resolve
('py:class', 'argparse.HelpFormatter'),
('py:class', 'contextlib.contextmanager'),
('py:class', 'module'),
('py:class', '_io.BufferedReader'),
('py:class', 'unittest.case.TestCase'),
('py:class', '_frozen_importlib_external.SourceFileLoader'),
# Spack classes that are private and we don't want to expose
('py:class', 'spack.provider_index._IndexBase'),
]
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
@ -358,3 +374,11 @@ class SpackStyle(DefaultStyle):
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Extension configuration -------------------------------------------------
# sphinx.ext.intersphinx
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
}

View File

@ -108,9 +108,9 @@ with a high level view of Spack's directory structure:
spack/ <- spack module; contains Python code
analyzers/ <- modules to run analysis on installed packages
build_systems/ <- modules for different build systems
build_systems/ <- modules for different build systems
cmd/ <- each file in here is a spack subcommand
compilers/ <- compiler description files
compilers/ <- compiler description files
container/ <- module for spack containerize
hooks/ <- hook modules to run at different points
modules/ <- modules for lmod, tcl, etc.
@ -151,24 +151,22 @@ Package-related modules
^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.package`
Contains the :class:`Package <spack.package.Package>` class, which
Contains the :class:`~spack.package.Package` class, which
is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle
<package-lifecycle>` and manage the build process.
:mod:`spack.packages`
Contains all of the packages in Spack and methods for managing them.
Functions like :func:`packages.get <spack.packages.get>` and
:func:`class_name_for_package_name
<packages.class_name_for_package_name>` handle mapping package module
names to class names and dynamically instantiating packages by name
from module files.
:mod:`spack.util.naming`
Contains functions for mapping between Spack package names,
Python module names, and Python class names. Functions like
:func:`~spack.util.naming.mod_to_class` handle mapping package
module names to class names.
:mod:`spack.relations`
*Relations* are relationships between packages, like
:func:`depends_on <spack.relations.depends_on>` and :func:`provides
<spack.relations.provides>`. See :ref:`dependencies` and
:ref:`virtual-dependencies`.
:mod:`spack.directives`
*Directives* are functions that can be called inside a package definition
to modify the package, like :func:`~spack.directives.depends_on`
and :func:`~spack.directives.provides`. See :ref:`dependencies`
and :ref:`virtual-dependencies`.
:mod:`spack.multimethod`
Implementation of the :func:`@when <spack.multimethod.when>`
@ -180,31 +178,27 @@ Spec-related modules
^^^^^^^^^^^^^^^^^^^^
:mod:`spack.spec`
Contains :class:`Spec <spack.spec.Spec>` and :class:`SpecParser
<spack.spec.SpecParser>`. Also implements most of the logic for
normalization and concretization of specs.
Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
Also implements most of the logic for normalization and concretization
of specs.
:mod:`spack.parse`
Contains some base classes for implementing simple recursive descent
parsers: :class:`Parser <spack.parse.Parser>` and :class:`Lexer
<spack.parse.Lexer>`. Used by :class:`SpecParser
<spack.spec.SpecParser>`.
parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
Used by :class:`~spack.spec.SpecParser`.
:mod:`spack.concretize`
Contains :class:`DefaultConcretizer
<spack.concretize.DefaultConcretizer>` implementation, which allows
site administrators to change Spack's :ref:`concretization-policies`.
Contains :class:`~spack.concretize.Concretizer` implementation,
which allows site administrators to change Spack's :ref:`concretization-policies`.
:mod:`spack.version`
Implements a simple :class:`Version <spack.version.Version>` class
with simple comparison semantics. Also implements
:class:`VersionRange <spack.version.VersionRange>` and
:class:`VersionList <spack.version.VersionList>`. All three are
comparable with each other and offer union and intersection
operations. Spack uses these classes to compare versions and to
manage version constraints on specs. Comparison semantics are
similar to the ``LooseVersion`` class in ``distutils`` and to the
way RPM compares version strings.
Implements a simple :class:`~spack.version.Version` class with simple
comparison semantics. Also implements :class:`~spack.version.VersionRange`
and :class:`~spack.version.VersionList`. All three are comparable with each
other and offer union and intersection operations. Spack uses these classes
to compare versions and to manage version constraints on specs. Comparison
semantics are similar to the ``LooseVersion`` class in ``distutils`` and to
the way RPM compares version strings.
:mod:`spack.compilers`
Submodules contains descriptors for all valid compilers in Spack.
@ -232,7 +226,7 @@ Build environment
:mod:`spack.stage`
Handles creating temporary directories for builds.
:mod:`spack.compilation`
:mod:`spack.build_environment`
This contains utility functions used by the compiler wrapper script,
``cc``.
@ -257,22 +251,19 @@ Unit tests
Implements Spack's test suite. Add a module and put its name in
the test suite in ``__init__.py`` to add more unit tests.
:mod:`spack.test.mock_packages`
This is a fake package hierarchy used to mock up packages for
Spack's test suite.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Research and Monitoring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.monitor`
Contains :class:`SpackMonitor <spack.monitor.SpackMonitor>`. This is accessed
from the ``spack install`` and ``spack analyze`` commands to send build
and package metadada up to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
the ``spack install`` and ``spack analyze`` commands to send build and
package metadata up to a `Spack Monitor
<https://github.com/spack/spack-monitor>`_ server.
:mod:`spack.analyzers`
A module folder with a :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`
A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
that provides base functions to run, save, and (optionally) upload analysis
results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
@ -286,7 +277,7 @@ Other Modules
tarball URLs.
:mod:`spack.error`
:class:`SpackError <spack.error.SpackError>`, the base class for
:class:`~spack.error.SpackError`, the base class for
Spack's exception hierarchy.
:mod:`llnl.util.tty`
@ -335,8 +326,8 @@ Writing analyzers
To write an analyzer, you should add a new python file to the
analyzers module directory at ``lib/spack/spack/analyzers`` .
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
to add an analyzer class ``Myanalyzer`` you woul write to
``spack/analyzers/myanalyzer.py`` and import and
to add an analyzer class ``Myanalyzer`` you would write to
``spack/analyzers/myanalyzer.py`` and import and
use the base as follows:
.. code-block:: python
@ -347,7 +338,7 @@ use the base as follows:
Note that the class name is your module file name, all lowercase
except for the first capital letter. You can look at other analyzers in
except for the first capital letter. You can look at other analyzers in
that analyzer directory for examples. The guide here will tell you about the basic functions needed.
^^^^^^^^^^^^^^^^^^^^^^^^^
@ -356,13 +347,13 @@ Analyzer Output Directory
By default, when you run ``spack analyze run`` an analyzer output directory will
be created in your spack user directory in your ``$HOME``. The reason we output here
is because the install directory might not always be writable.
is because the install directory might not always be writable.
.. code-block:: console
~/.spack/
analyzers
Result files will be written here, organized in subfolders in the same structure
as the package, with each analyzer owning its own subfolder. For example:
@ -380,11 +371,11 @@ as the package, with each analyzer owning it's own subfolder. for example:
│   └── spack-analyzer-install-files.json
└── libabigail
└── lib
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
└── spack-analyzer-libabigail-libz.so.1.2.11.xml
Notice that for the libabigail analyzer, since results are generated per object,
we honor the object's folder in case there are equivalently named files in
we honor the object's folder in case there are equivalently named files in
different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.
@ -426,7 +417,7 @@ and then return the object with a key as the analyzer name. The result data
should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
and one of ``value`` or ``binary_value``. The install file should be for a relative
path, and not the absolute path. For example, let's say we extract a metric called
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
We might have data that looks like this:
.. code-block:: python
@ -482,7 +473,7 @@ Saving Analyzer Results
The analyzer will have ``save_result`` called, with the result object generated
to save it to the filesystem, and if the user has added the ``--monitor`` flag
to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this
format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join
@ -548,7 +539,7 @@ each one (separately) to the monitor:
Notice that this function, if you define it, requires a result object (generated by
``run()``), a monitor (if you want to send), and a boolean ``overwrite`` to be used
to check if a result exists first, and not write to it if the result exists and
to check if a result exists first, and not write to it if the result exists and
overwrite is False. Also notice that since we already saved these files to the analyzer metadata folder, we return early if a monitor isn't defined, because this function serves to send results to the monitor. If you haven't saved anything to the analyzer metadata folder
yet, you might want to do that here. You should also use ``tty.info`` to give
the user a message of "Writing result to $DIRNAME."
@ -616,7 +607,7 @@ types of hooks in the ``__init__.py``, and then python files in that folder
can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
you can then write hook functions in that file that will be automatically detected,
and run whenever your hook is called. This section will cover the basic kind
and run whenever your hook is called. This section will cover the basic kind
of hooks, and how to write them.
^^^^^^^^^^^^^^
@ -624,7 +615,7 @@ Types of Hooks
^^^^^^^^^^^^^^
The following hooks are currently implemented to make it easy for you,
the developer, to add hooks at different stages of a spack install or similar.
the developer, to add hooks at different stages of a spack install or similar.
If there is a hook that you would like and is missing, you can propose to add a new one.
"""""""""""""""""""""
@ -632,9 +623,9 @@ If there is a hook that you would like and is missing, you can propose to add a
"""""""""""""""""""""
A ``pre_install`` hook is run within an install subprocess, directly before
the install starts. It expects a single argument of a spec, and is run in
the install starts. It expects a single argument of a spec, and is run in
a multiprocessing subprocess. Note that if you see ``pre_install`` functions associated with packages, these are not hooks
as we have defined them here, but rather callback functions associated with
as we have defined them here, but rather callback functions associated with
a package install.
@ -657,7 +648,7 @@ here.
This hook is run at the beginning of ``lib/spack/spack/installer.py``,
in the install function of a ``PackageInstaller``,
and importantly is not part of a build process, but before it. This is when
we have just newly grabbed the task, and are preparing to install. If you
we have just newly grabbed the task, and are preparing to install. If you
write a hook of this type, you should provide the spec to it.
.. code-block:: python
@ -666,7 +657,7 @@ write a hook of this type, you should provide the spec to it.
"""On start of an install, we want to...
"""
print('on_install_start')
""""""""""""""""""""""""""""
``on_install_success(spec)``
@ -744,8 +735,8 @@ to trigger after anything is written to a logger. You would add it as follows:
post_install = HookRunner('post_install')
# hooks related to logging
post_log_write = HookRunner('post_log_write') # <- here is my new hook!
post_log_write = HookRunner('post_log_write') # <- here is my new hook!
You then need to decide what arguments your hook would expect. Since this is
related to logging, let's say that you want a message and level. That means
@ -775,7 +766,7 @@ In this example, we use it outside of a logger that is already defined:
This is not to say that this would be the best way to implement an integration
with the logger (you'd probably want to write a custom logger, or you could
have the hook defined within the logger) but serves as an example of writing a hook.
have the hook defined within the logger) but serves as an example of writing a hook.
----------
Unit tests
@ -785,6 +776,38 @@ Unit tests
Unit testing
------------
---------------------
Developer environment
---------------------
.. warning::
This is an experimental feature. It is expected to change and you should
not use it in a production environment.
When installing a package, we currently have support to export environment
variables to specify adding debug flags to the build. By default, a package
install will build without any debug flag. However, if you want to add them,
you can export:
.. code-block:: console
export SPACK_ADD_DEBUG_FLAGS=true
spack install zlib
If you want to add custom flags, you should export an additional variable:
.. code-block:: console
export SPACK_ADD_DEBUG_FLAGS=true
export SPACK_DEBUG_FLAGS="-g"
spack install zlib
These environment variables will eventually be integrated into spack so
they are set from the command line.
------------------
Developer commands
------------------
@ -795,6 +818,29 @@ Developer commands
``spack doc``
^^^^^^^^^^^^^
.. _cmd-spack-style:
^^^^^^^^^^^^^^^
``spack style``
^^^^^^^^^^^^^^^
spack style exists to help the developer user to check imports and style with
spack style exists to help the developer check imports and style with
.. code-block:: console
$ spack style
To run automatic fixes for isort you can do:
.. code-block:: console
$ spack style --fix
You do not need any of these Python packages installed on your system for
the checks to work! Spack will bootstrap install them from packages for
your use.
^^^^^^^^^^^^^^^^^^^
``spack unit-test``
^^^^^^^^^^^^^^^^^^^
@ -873,7 +919,7 @@ just like you would with the normal ``python`` command.
^^^^^^^^^^^^^^^
Spack blame is a way to quickly see contributors to packages or files
in the spack repository. You should provide a target package name or
in the spack repository. You should provide a target package name or
file name to the command. Here is an example asking to see contributions
for the package "python":
@ -883,8 +929,8 @@ for the package "python":
LAST_COMMIT LINES % AUTHOR EMAIL
2 weeks ago 3 0.3 Mickey Mouse <cheddar@gmouse.org>
a month ago 927 99.7 Minnie Mouse <swiss@mouse.org>
2 weeks ago 930 100.0
2 weeks ago 930 100.0
By default, you will get a table view (shown above) sorted by date of contribution,
@ -1255,7 +1301,7 @@ Publishing a release on GitHub
#. Create the release in GitHub.
* Go to
* Go to
`github.com/spack/spack/releases <https://github.com/spack/spack/releases>`_
and click ``Draft a new release``.

View File

@ -777,7 +777,7 @@ an OpenMPI installed in /opt/local, one would use:
buildable: False
In general, Spack is easier to use and more reliable if it builds all of
its own dependencies. However, there are two packages for which one
its own dependencies. However, there are several packages for which one
commonly needs to use system versions:
^^^

View File

@ -2884,52 +2884,52 @@ The package base class, usually specialized for a given build system, determines
actual set of entities available for overriding.
The classes that are currently provided by Spack are:
+-------------------------------+----------------------------------+
| **Base Class** | **Purpose** |
+===============================+==================================+
| :py:class:`.Package` | General base class not |
| | specialized for any build system |
+-------------------------------+----------------------------------+
| :py:class:`.MakefilePackage` | Specialized class for packages |
| | built invoking |
| | hand-written Makefiles |
+-------------------------------+----------------------------------+
| :py:class:`.AutotoolsPackage` | Specialized class for packages |
| | built using GNU Autotools |
+-------------------------------+----------------------------------+
| :py:class:`.CMakePackage` | Specialized class for packages |
| | built using CMake |
+-------------------------------+----------------------------------+
| :py:class:`.CudaPackage` | A helper class for packages that |
| | use CUDA |
+-------------------------------+----------------------------------+
| :py:class:`.QMakePackage` | Specialized class for packages |
| | build using QMake |
+-------------------------------+----------------------------------+
| :py:class:`.ROCmPackage` | A helper class for packages that |
| | use ROCm |
+-------------------------------+----------------------------------+
| :py:class:`.SConsPackage` | Specialized class for packages |
| | built using SCons |
+-------------------------------+----------------------------------+
| :py:class:`.WafPackage` | Specialized class for packages |
| | built using Waf |
+-------------------------------+----------------------------------+
| :py:class:`.RPackage` | Specialized class for |
| | :py:class:`.R` extensions |
+-------------------------------+----------------------------------+
| :py:class:`.OctavePackage` | Specialized class for |
| | :py:class:`.Octave` packages |
+-------------------------------+----------------------------------+
| :py:class:`.PythonPackage` | Specialized class for |
| | :py:class:`.Python` extensions |
+-------------------------------+----------------------------------+
| :py:class:`.PerlPackage` | Specialized class for |
| | :py:class:`.Perl` extensions |
+-------------------------------+----------------------------------+
| :py:class:`.IntelPackage` | Specialized class for licensed |
| | Intel software |
+-------------------------------+----------------------------------+
+----------------------------------------------------------+----------------------------------+
| **Base Class** | **Purpose** |
+==========================================================+==================================+
| :class:`~spack.package.Package` | General base class not |
| | specialized for any build system |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages |
| | built invoking |
| | hand-written Makefiles |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages |
| | built using GNU Autotools |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages |
| | built using CMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
| | use CUDA |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.qmake.QMakePackage` | Specialized class for packages |
| | built using QMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
| | use ROCm |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.scons.SConsPackage` | Specialized class for packages |
| | built using SCons |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.waf.WafPackage` | Specialized class for packages |
| | built using Waf |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.r.RPackage` | Specialized class for |
| | R extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.octave.OctavePackage` | Specialized class for |
| | Octave packages |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python.PythonPackage` | Specialized class for |
| | Python extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.perl.PerlPackage` | Specialized class for |
| | Perl extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.intel.IntelPackage` | Specialized class for licensed |
| | Intel software |
+----------------------------------------------------------+----------------------------------+
.. note::
@ -2939,7 +2939,7 @@ The classes that are currently provided by Spack are:
rare cases where manual intervention is needed we need to stress that a
package base class depends on the *build system* being used, not the language of the package.
For example, a Python extension installed with CMake would ``extends('python')`` and
subclass from :py:class:`.CMakePackage`.
subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
^^^^^^^^^^^^^^^^^^^^^
Installation pipeline
@ -4079,7 +4079,7 @@ prefix **before** ``make install``. Builds like this can falsely report
success when an error occurs before the installation is complete. Simple
sanity checks can be used to identify files and or directories that are
required of a successful installation. Spack checks for the presence of
the files and directories after ``install()`` runs.
the files and directories after ``install()`` runs.
If any of the listed files or directories are missing, then the build will
fail and the install prefix will be removed. If they all exist, then Spack
@ -4193,7 +4193,7 @@ need to use two decorators for each phase test method:
The first decorator tells Spack when in the installation process to
run your test method installation process; namely *after* the provided
installation phase. The second decorator tells Spack to only run the
checks when the ``--test`` option is provided on the command line.
checks when the ``--test`` option is provided on the command line.
.. note::
@ -4267,17 +4267,17 @@ tests can be performed days, even weeks, after the software is installed.
Stand-alone tests are checks that should run relatively quickly -- as
in on the order of at most a few minutes -- and ideally execute all
aspects of the installed software, or at least key functionality.
aspects of the installed software, or at least key functionality.
.. note::
Execution speed is important because these tests are intended
to quickly assess whether the installed software works on the
system.
Failing stand-alone tests indicate that there is no reason to
proceed with more resource-intensive tests.
Passing stand-alone (or smoke) tests can lead to more thorough
testing, such as extensive unit or regression tests, or tests
that run at scale. Spack support for more thorough testing is
@ -4307,7 +4307,7 @@ file such that:
test_stage: /path/to/stage
The package can access this path **during test processing** using
`self.test_suite.stage`.
`self.test_suite.stage`.
.. note::
@ -4388,7 +4388,7 @@ can be implemented as shown below.
@run_after('install')
def copy_test_sources(self):
srcs = ['tests',
join_path('examples', 'foo.c'),
join_path('examples', 'foo.c'),
join_path('examples', 'bar.c')]
self.cache_extra_test_sources(srcs)
@ -4446,7 +4446,7 @@ Examples include:
- expected test output
These extra files should be added to the ``test`` subdirectory of the
package in the Spack repository.
package in the Spack repository.
Spack will **automatically copy** the contents of that directory to the
test staging directory for stand-alone testing. The ``test`` method can
@ -4471,7 +4471,7 @@ The signature for ``get_escaped_text_output`` is:
where ``filename`` is the path to the file containing the expected output.
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
accessed and used as illustrated by a simplified version of an ``sqlite``
package check:
@ -4591,10 +4591,10 @@ where each argument has the following meaning:
Options are a list of strings to be passed to the executable when
it runs.
The default is ``[]``, which means no options are provided to the
executable.
* ``expected`` is an optional list of expected output strings.
Spack requires every string in ``expected`` to be a regex matching
@ -4605,31 +4605,31 @@ where each argument has the following meaning:
The expected output can be :ref:`read from a file
<expected_test_output_from_file>`.
The default is ``expected=[]``, so Spack will not check the output.
* ``status`` is the optional expected return code(s).
A list of return codes corresponding to successful execution can
be provided (e.g., ``status=[0,3,7]``). Support for non-zero return
codes allows for basic **expected failure** tests as well as different
return codes across versions of the software.
The default is ``status=[0]``, which corresponds to **successful**
execution in the sense that the executable does not exit with a
failure code or raise an exception.
* ``installed`` is used to require ``exe`` to be within the package
prefix.
If ``True``, then the path for ``exe`` is required to be within the
package prefix; otherwise, the path is not constrained.
The default is ``False``, so the fully qualified path for ``exe``
does **not** need to be within the installation directory.
* ``purpose`` is an optional heading describing the test part.
Output from the test is written to a test log file so this argument
serves as a searchable heading in text logs to highlight the start
of the test part. Having a description can be helpful when debugging
@ -4644,10 +4644,10 @@ where each argument has the following meaning:
The default is ``False``, which means the test executable must be
present for any installable version of the software.
* ``work_dir`` is the path to the directory from which the executable
will run.
The default of ``None`` corresponds to the current directory (``'.'``).
"""""""""""""""""""""""""""""""""""""""""
@ -4754,7 +4754,7 @@ where only the outputs for the first of each set are shown:
Copyright (C) 2018 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
PASSED
...
==> [2021-04-26-17:35:20.493921] test: checking mpirun output
@ -4915,7 +4915,7 @@ This is already part of the boilerplate for packages created with
Filtering functions
^^^^^^^^^^^^^^^^^^^
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <spack.filter_file>`
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <llnl.util.filesystem.filter_file>`
Works like ``sed`` but with Python regular expression syntax. Takes
a regular expression, a replacement, and a set of files. ``repl``
can be a raw string or a callable function. If it is a raw string,
@ -4953,7 +4953,7 @@ Filtering functions
filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx,
prefix.bin.mpicxx)
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <spack.change_sed_delim>`
:py:func:`change_sed_delimiter(old_delim, new_delim, *filenames) <llnl.util.filesystem.change_sed_delimiter>`
Some packages, like TAU, have a build system that can't install
into directories with, e.g. '@' in the name, because they use
hard-coded ``sed`` commands in their build.
@ -4975,14 +4975,14 @@ Filtering functions
File functions
^^^^^^^^^^^^^^
:py:func:`ancestor(dir, n=1) <spack.ancestor>`
:py:func:`ancestor(dir, n=1) <llnl.util.filesystem.ancestor>`
Get the n\ :sup:`th` ancestor of the directory ``dir``.
:py:func:`can_access(path) <spack.can_access>`
:py:func:`can_access(path) <llnl.util.filesystem.can_access>`
True if we can read and write to the file at ``path``. Same as
native python ``os.access(file_name, os.R_OK|os.W_OK)``.
:py:func:`install(src, dest) <spack.install>`
:py:func:`install(src, dest) <llnl.util.filesystem.install>`
Install a file to a particular location. For example, install a
header into the ``include`` directory under the install ``prefix``:
@ -4990,14 +4990,14 @@ File functions
install('my-header.h', prefix.include)
:py:func:`join_path(*paths) <spack.join_path>`
:py:func:`join_path(*paths) <llnl.util.filesystem.join_path>`
An alias for ``os.path.join``. This joins paths using the OS path separator.
:py:func:`mkdirp(*paths) <spack.mkdirp>`
:py:func:`mkdirp(*paths) <llnl.util.filesystem.mkdirp>`
Create each of the directories in ``paths``, creating any parent
directories if they do not exist.
:py:func:`working_dir(dirname, kwargs) <spack.working_dir>`
:py:func:`working_dir(dirname, kwargs) <llnl.util.filesystem.working_dir>`
This is a Python `Context Manager
<https://docs.python.org/2/library/contextlib.html>`_ that makes it
easier to work with subdirectories in builds. You use this with the
@ -5039,7 +5039,7 @@ File functions
The ``create=True`` keyword argument causes the command to create
the directory if it does not exist.
:py:func:`touch(path) <spack.touch>`
:py:func:`touch(path) <llnl.util.filesystem.touch>`
Create an empty file at ``path``.
.. _make-package-findable:

View File

@ -1,7 +1,7 @@
# These dependencies should be installed using pip in order
# to build the documentation.
sphinx
sphinx>=3.4,!=4.1.2
sphinxcontrib-programoutput
sphinx-rtd-theme
python-levenshtein

View File

@ -8,12 +8,20 @@
# these commands in this directory to install Sphinx and its plugins,
# then build the docs:
#
# spack install
# spack env activate .
# spack install
# make
#
spack:
specs:
- py-sphinx
# Sphinx
- "py-sphinx@3.4:4.1.1,4.1.3:"
- py-sphinxcontrib-programoutput
- py-sphinx-rtd-theme
# VCS
- git
- mercurial
- subversion
# Plotting
- graphviz
concretization: together

42
lib/spack/env/cc vendored
View File

@ -40,6 +40,14 @@ parameters=(
SPACK_SYSTEM_DIRS
)
# Optional parameters that aren't required to be set
# Boolean (true/false/custom) if we want to add debug flags
# SPACK_ADD_DEBUG_FLAGS
# If a custom flag is requested, it will be defined
# SPACK_DEBUG_FLAGS
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
# The default compiler flags are passed from these variables:
@ -87,6 +95,25 @@ for param in "${parameters[@]}"; do
fi
done
# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [[ -z ${SPACK_ADD_DEBUG_FLAGS+x} ]]; then
SPACK_ADD_DEBUG_FLAGS="false"
fi
# SPACK_ADD_DEBUG_FLAGS must be true/false/custom
is_valid="false"
for param in "true" "false" "custom"; do
if [ "$param" == "$SPACK_ADD_DEBUG_FLAGS" ]; then
is_valid="true"
fi
done
# Exit with error if we are given an incorrect value
if [ "$is_valid" == "false" ]; then
die "SPACK_ADD_DEBUG_FLAGS, if defined, must be one of 'true' 'false' or 'custom'"
fi
# Figure out the type of compiler, the language, and the mode so that
# the compiler script knows what to do.
#
@ -106,30 +133,35 @@ comp="CC"
case "$command" in
cpp)
mode=cpp
debug_flags="-g"
;;
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc)
command="$SPACK_CC"
language="C"
comp="CC"
lang_flags=C
debug_flags="-g"
;;
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC)
command="$SPACK_CXX"
language="C++"
comp="CXX"
lang_flags=CXX
debug_flags="-g"
;;
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
lang_flags=F
debug_flags="-g"
;;
f77|xlf|xlf_r|pgf77)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
lang_flags=F
debug_flags="-g"
;;
ld)
mode=ld
@ -415,6 +447,16 @@ done
#
flags=()
# Add debug flags
if [ "${SPACK_ADD_DEBUG_FLAGS}" == "true" ]; then
flags=("${flags[@]}" "${debug_flags}")
# If a custom flag is requested, derive from environment
elif [ "$SPACK_ADD_DEBUG_FLAGS" == "custom" ]; then
IFS=' ' read -ra SPACK_DEBUG_FLAGS <<< "$SPACK_DEBUG_FLAGS"
flags=("${flags[@]}" "${SPACK_DEBUG_FLAGS[@]}")
fi
# Fortran flags come before CPPFLAGS
case "$mode" in
cc|ccld)

View File

@ -11,7 +11,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.2 (commit 26dec9d47e509daf8c970de4c89da200da52ad20)
* Version: 0.1.2 (commit 4dbf253daf37e4a008e4beb6489f347b4a35aed4)
argparse
--------

View File

@ -1942,6 +1942,12 @@
"versions": "5:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
}
],
"arm" : [
{
"versions": "20:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
}
]
}
},

View File

@ -326,7 +326,7 @@ def end_function(self, prog=None):
"""Returns the syntax needed to end a function definition.
Parameters:
prog (str, optional): the command name
prog (str or None): the command name
Returns:
str: the function definition ending

View File

@ -444,7 +444,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore
ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only
Raises:
@ -518,7 +518,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
src (str): the directory to install
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore
ignore (typing.Callable): function indicating which files to ignore
Raises:
IOError: if *src* does not match any files or directories
@ -557,12 +557,12 @@ def mkdirp(*paths, **kwargs):
paths (str): paths to create with mkdirp
Keyword Arguments:
mode (permission bits or None, optional): optional permissions to set
mode (permission bits or None): optional permissions to set
on the created directory -- use OS default if not provided
group (group name or None, optional): optional group for permissions of
group (group name or None): optional group for permissions of
final created directory -- use OS default if not provided. Only
used if world write permissions are not set
default_perms ('parents' or 'args', optional): The default permissions
default_perms (str or None): one of 'parents' or 'args'. The default permissions
that are set for directories that are not themselves an argument
for mkdirp. 'parents' means intermediate directories get the
permissions of their direct parent directory, 'args' means
@ -866,7 +866,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post'
ignore (function): function indicating which files to ignore
ignore (typing.Callable): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in
``src`` that do not exit in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src``
@ -1114,11 +1114,11 @@ def find(root, files, recursive=True):
Parameters:
root (str): The root directory to start searching from
files (str or Sequence): Library name(s) to search for
recurse (bool, optional): if False search only root folder,
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to True.
Returns:
list of strings: The files that have been found
list: The files that have been found
"""
if isinstance(files, six.string_types):
files = [files]
@ -1200,7 +1200,7 @@ def directories(self):
['/dir1', '/dir2']
Returns:
list of strings: A list of directories
list: A list of directories
"""
return list(dedupe(
os.path.dirname(x) for x in self.files if os.path.dirname(x)
@ -1218,7 +1218,7 @@ def basenames(self):
['a.h', 'b.h']
Returns:
list of strings: A list of base-names
list: A list of base-names
"""
return list(dedupe(os.path.basename(x) for x in self.files))
@ -1305,7 +1305,7 @@ def headers(self):
"""Stable de-duplication of the headers.
Returns:
list of strings: A list of header files
list: A list of header files
"""
return self.files
@ -1318,7 +1318,7 @@ def names(self):
['a', 'b']
Returns:
list of strings: A list of files without extensions
list: A list of files without extensions
"""
names = []
@ -1409,9 +1409,9 @@ def find_headers(headers, root, recursive=False):
======= ====================================
Parameters:
headers (str or list of str): Header name(s) to search for
headers (str or list): Header name(s) to search for
root (str): The root directory to start searching from
recursive (bool, optional): if False search only root folder,
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False.
Returns:
@ -1447,7 +1447,7 @@ def find_all_headers(root):
in the directory passed as argument.
Args:
root (path): directory where to look recursively for header files
root (str): directory where to look recursively for header files
Returns:
List of all headers found in ``root`` and subdirectories.
@ -1467,7 +1467,7 @@ def libraries(self):
"""Stable de-duplication of library files.
Returns:
list of strings: A list of library files
list: A list of library files
"""
return self.files
@ -1480,7 +1480,7 @@ def names(self):
['a', 'b']
Returns:
list of strings: A list of library names
list: A list of library names
"""
names = []
@ -1565,8 +1565,8 @@ def find_system_libraries(libraries, shared=True):
======= ====================================
Parameters:
libraries (str or list of str): Library name(s) to search for
shared (bool, optional): if True searches for shared libraries,
libraries (str or list): Library name(s) to search for
shared (bool): if True searches for shared libraries,
otherwise for static. Defaults to True.
Returns:
@ -1616,11 +1616,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
======= ====================================
Parameters:
libraries (str or list of str): Library name(s) to search for
libraries (str or list): Library name(s) to search for
root (str): The root directory to start searching from
shared (bool, optional): if True searches for shared libraries,
shared (bool): if True searches for shared libraries,
otherwise for static. Defaults to True.
recursive (bool, optional): if False search only root folder,
recursive (bool): if False search only root folder,
if True descends top-down from the root. Defaults to False.
Returns:

View File

@ -573,8 +573,8 @@ def pretty_date(time, now=None):
"""Convert a datetime or timestamp to a pretty, relative date.
Args:
time (datetime or int): date to print prettily
now (datetime): dateimte for 'now', i.e. the date the pretty date
time (datetime.datetime or int): date to print prettily
now (datetime.datetime): datetime for 'now', i.e. the date the pretty date
is relative to (default is datetime.now())
Returns:
@ -648,7 +648,7 @@ def pretty_string_to_date(date_str, now=None):
or be a *pretty date* (like ``yesterday`` or ``two months ago``)
Returns:
(datetime): datetime object corresponding to ``date_str``
(datetime.datetime): datetime object corresponding to ``date_str``
"""
pattern = {}

View File

@ -14,9 +14,19 @@
import spack.util.string
__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
'LockError', 'LockTimeoutError',
'LockPermissionError', 'LockROFileError', 'CantCreateLockError']
__all__ = [
'Lock',
'LockDowngradeError',
'LockUpgradeError',
'LockTransaction',
'WriteTransaction',
'ReadTransaction',
'LockError',
'LockTimeoutError',
'LockPermissionError',
'LockROFileError',
'CantCreateLockError'
]
#: Mapping of supported locks to description
lock_type = {fcntl.LOCK_SH: 'read', fcntl.LOCK_EX: 'write'}
@ -401,7 +411,7 @@ def release_read(self, release_fn=None):
"""Releases a read lock.
Arguments:
release_fn (callable): function to call *before* the last recursive
release_fn (typing.Callable): function to call *before* the last recursive
lock (read or write) is released.
If the last recursive lock will be released, then this will call
@ -437,7 +447,7 @@ def release_write(self, release_fn=None):
"""Releases a write lock.
Arguments:
release_fn (callable): function to call before the last recursive
release_fn (typing.Callable): function to call before the last recursive
write is released.
If the last recursive *write* lock will be released, then this
@ -533,10 +543,10 @@ class LockTransaction(object):
Arguments:
lock (Lock): underlying lock for this transaction to be accquired on
enter and released on exit
acquire (callable or contextmanager): function to be called after lock
is acquired, or contextmanager to enter after acquire and leave
acquire (typing.Callable or contextlib.contextmanager): function to be called
after lock is acquired, or contextmanager to enter after acquire and leave
before release.
release (callable): function to be called before release. If
release (typing.Callable): function to be called before release. If
``acquire`` is a contextmanager, this will be called *after*
exiting the nested context and before the lock is released.
timeout (float): number of seconds to set for the timeout when

View File

@ -109,19 +109,17 @@ def colify(elts, **options):
using ``str()``.
Keyword Arguments:
output (stream): A file object to write to. Default is ``sys.stdout``
indent (int): Optionally indent all columns by some number of spaces
padding (int): Spaces between columns. Default is 2
width (int): Width of the output. Default is 80 if tty not detected
cols (int): Force number of columns. Default is to size to
terminal, or single-column if no tty
tty (bool): Whether to attempt to write to a tty. Default is to
autodetect a tty. Set to False to force single-column
output
method (str): Method to use to fit columns. Options are variable or
uniform. Variable-width columns are tighter, uniform
columns are all the same width and fit less data on
the screen
output (typing.IO): A file object to write to. Default is ``sys.stdout``
indent (int): Optionally indent all columns by some number of spaces
padding (int): Spaces between columns. Default is 2
width (int): Width of the output. Default is 80 if tty not detected
cols (int): Force number of columns. Default is to size to terminal, or
single-column if no tty
tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
tty. Set to False to force single-column output
method (str): Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the same width
and fit less data on the screen
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)

View File

@ -436,7 +436,7 @@ class log_output(object):
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
env=None):
env=None, filter_fn=None):
"""Create a new output log context manager.
Args:
@ -446,6 +446,8 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
debug (int): positive to enable tty debug mode during logging
buffer (bool): pass buffer=True to skip unbuffering output; note
this doesn't set up any *new* buffering
filter_fn (callable, optional): Callable[str] -> str to filter each
line of output
log_output can take either a file object or a filename. If a
filename is passed, the file will be opened and closed entirely
@ -465,6 +467,7 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
self.debug = debug
self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn
self._active = False # used to prevent re-entry
@ -543,7 +546,7 @@ def __enter__(self):
target=_writer_daemon,
args=(
input_multiprocess_fd, read_multiprocess_fd, write_fd,
self.echo, self.log_file, child_pipe
self.echo, self.log_file, child_pipe, self.filter_fn
)
)
self.process.daemon = True # must set before start()
@ -667,7 +670,7 @@ def force_echo(self):
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
log_file_wrapper, control_pipe):
log_file_wrapper, control_pipe, filter_fn):
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.
The daemon receives output from the parent process and writes it both
@ -712,6 +715,7 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
log_file_wrapper (FileWrapper): file to log all output
control_pipe (Pipe): multiprocessing pipe on which to send control
information to the parent
filter_fn (callable, optional): function to filter each line of output
"""
# If this process was forked, then it will inherit file descriptors from
@ -784,7 +788,10 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
# Echo to stdout if requested or forced.
if echo or force_echo:
sys.stdout.write(clean_line)
output_line = clean_line
if filter_fn:
output_line = filter_fn(clean_line)
sys.stdout.write(output_line)
# Stripped output to log file.
log_file.write(_strip(clean_line))

View File

@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 16, 1)

View File

@ -193,8 +193,8 @@ def optimization_flags(self, compiler):
the compiler passed as argument.
Args:
compiler (CompilerSpec or Compiler): object that contains both the
name and the version of the compiler we want to use
compiler (spack.spec.CompilerSpec or spack.compiler.Compiler): object that
contains both the name and the version of the compiler we want to use
"""
# Mixed toolchains are not supported yet
import spack.compilers

View File

@ -13,6 +13,7 @@
import sys
import tarfile
import tempfile
import traceback
from contextlib import closing
import ruamel.yaml as yaml
@ -27,6 +28,7 @@
import spack.config as config
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.hash_types as ht
import spack.mirror
import spack.relocate as relocate
import spack.util.file_cache as file_cache
@ -204,7 +206,7 @@ def find_built_spec(self, spec):
The cache can be updated by calling ``update()`` on the cache.
Args:
spec (Spec): Concrete spec to find
spec (spack.spec.Spec): Concrete spec to find
Returns:
An list of objects containing the found specs and mirror url where
@ -581,7 +583,7 @@ def get_buildfile_manifest(spec):
added = True
if relocate.needs_binary_relocation(m_type, m_subtype):
if ((m_subtype in ('x-executable', 'x-sharedlib')
if ((m_subtype in ('x-executable', 'x-sharedlib', 'x-pie-executable')
and sys.platform != 'darwin') or
(m_subtype in ('x-mach-binary')
and sys.platform == 'darwin') or
@ -709,12 +711,6 @@ def generate_package_index(cache_prefix):
cache_prefix. This page contains a link for each binary package (.yaml)
under cache_prefix.
"""
tmpdir = tempfile.mkdtemp()
db_root_dir = os.path.join(tmpdir, 'db_root')
db = spack_db.Database(None, db_dir=db_root_dir,
enable_transaction_locking=False,
record_fields=['spec', 'ref_count', 'in_buildcache'])
try:
file_list = (
entry
@ -735,22 +731,90 @@ def generate_package_index(cache_prefix):
tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
cache_prefix))
all_mirror_specs = {}
for file_path in file_list:
try:
yaml_url = url_util.join(cache_prefix, file_path)
tty.debug('fetching {0}'.format(yaml_url))
_, _, yaml_file = web_util.read_from_url(yaml_url)
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
# yaml_obj = syaml.load(yaml_contents)
# s = Spec.from_yaml(yaml_obj)
spec_dict = syaml.load(yaml_contents)
s = Spec.from_yaml(yaml_contents)
db.add(s, None)
db.mark(s, 'in_buildcache', True)
all_mirror_specs[s.dag_hash()] = {
'yaml_url': yaml_url,
'spec': s,
'num_deps': len(list(s.traverse(root=False))),
'binary_cache_checksum': spec_dict['binary_cache_checksum'],
'buildinfo': spec_dict['buildinfo'],
}
except (URLError, web_util.SpackWebError) as url_err:
tty.error('Error reading spec.yaml: {0}'.format(file_path))
tty.error(url_err)
sorted_specs = sorted(all_mirror_specs.keys(),
key=lambda k: all_mirror_specs[k]['num_deps'])
tmpdir = tempfile.mkdtemp()
db_root_dir = os.path.join(tmpdir, 'db_root')
db = spack_db.Database(None, db_dir=db_root_dir,
enable_transaction_locking=False,
record_fields=['spec', 'ref_count', 'in_buildcache'])
try:
tty.debug('Specs sorted by number of dependencies:')
for dag_hash in sorted_specs:
spec_record = all_mirror_specs[dag_hash]
s = spec_record['spec']
num_deps = spec_record['num_deps']
tty.debug(' {0}/{1} -> {2}'.format(
s.name, dag_hash[:7], num_deps))
if num_deps > 0:
# Check each of this spec's dependencies (which we have already
# processed), as they are the source of truth for their own
# full hash. If the full hash we have for any deps does not
# match what those deps have themselves, then we need to splice
# this spec with those deps, and push this spliced spec
# (spec.yaml file) back to the mirror, as well as update the
# all_mirror_specs dictionary with this spliced spec.
to_splice = []
for dep in s.dependencies():
dep_dag_hash = dep.dag_hash()
if dep_dag_hash in all_mirror_specs:
true_dep = all_mirror_specs[dep_dag_hash]['spec']
if true_dep.full_hash() != dep.full_hash():
to_splice.append(true_dep)
if to_splice:
tty.debug(' needs the following deps spliced:')
for true_dep in to_splice:
tty.debug(' {0}/{1}'.format(
true_dep.name, true_dep.dag_hash()[:7]))
s = s.splice(true_dep, True)
# Push this spliced spec back to the mirror
spliced_yaml = s.to_dict(hash=ht.full_hash)
for key in ['binary_cache_checksum', 'buildinfo']:
spliced_yaml[key] = spec_record[key]
temp_yaml_path = os.path.join(tmpdir, 'spliced.spec.yaml')
with open(temp_yaml_path, 'w') as fd:
fd.write(syaml.dump(spliced_yaml))
spliced_yaml_url = spec_record['yaml_url']
web_util.push_to_url(
temp_yaml_path, spliced_yaml_url, keep_original=False)
tty.debug(' spliced and wrote {0}'.format(
spliced_yaml_url))
spec_record['spec'] = s
db.add(s, None)
db.mark(s, 'in_buildcache', True)
# Now that we have fixed any old spec yamls that might have had the wrong
# full hash for their dependencies, we can generate the index, compute
# the hash, and push those files to the mirror.
index_json_path = os.path.join(db_root_dir, 'index.json')
with open(index_json_path, 'w') as f:
db._write_to_file(f)
@ -782,6 +846,7 @@ def generate_package_index(cache_prefix):
msg = 'Encountered problem pushing package index to {0}: {1}'.format(
cache_prefix, err)
tty.warn(msg)
tty.debug('\n' + traceback.format_exc())
finally:
shutil.rmtree(tmpdir)
@ -1014,14 +1079,14 @@ def download_tarball(spec, preferred_mirrors=None):
path to downloaded tarball if successful, None otherwise.
Args:
spec (Spec): Concrete spec
spec (spack.spec.Spec): Concrete spec
preferred_mirrors (list): If provided, this is a list of preferred
mirror urls. Other configured mirrors will only be used if the
tarball can't be retrieved from one of these.
mirror urls. Other configured mirrors will only be used if the
tarball can't be retrieved from one of these.
Returns:
Path to the downloaded tarball, or ``None`` if the tarball could not
be downloaded from any configured mirrors.
be downloaded from any configured mirrors.
"""
if not spack.mirror.MirrorCollection():
tty.die("Please add a spack mirror to allow " +
@ -1390,7 +1455,7 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
indicating the mirrors on which it can be found
Args:
spec (Spec): The spec to look for in binary mirrors
spec (spack.spec.Spec): The spec to look for in binary mirrors
full_hash_match (bool): If True, only includes mirrors where the spec
full hash matches the locally computed full hash of the ``spec``
argument. If False, any mirror which has a matching DAG hash
@ -1667,11 +1732,11 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
Arguments:
mirrors (dict): Mirrors to check against
specs (iterable): Specs to check against mirrors
output_file (string): Path to output file to be written. If provided,
specs (typing.Iterable): Specs to check against mirrors
output_file (str): Path to output file to be written. If provided,
mirrors with missing or out-of-date specs will be formatted as a
JSON object and written to this file.
rebuild_on_errors (boolean): Treat any errors encountered while
rebuild_on_errors (bool): Treat any errors encountered while
checking specs as a signal to rebuild package.
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.

View File

@ -25,6 +25,7 @@
import spack.store
import spack.user_environment as uenv
import spack.util.executable
import spack.util.path
from spack.util.environment import EnvironmentModifications
@ -92,17 +93,14 @@ def make_module_available(module, spec=None, install=False):
for ispec in installed_specs:
# TODO: make sure run-environment is appropriate
module_path = os.path.join(ispec.prefix,
ispec['python'].package.site_packages_dir)
module_path_64 = module_path.replace('/lib/', '/lib64/')
module_path = ispec['python'].package.get_python_lib(prefix=ispec.prefix)
try:
sys.path.append(module_path)
sys.path.append(module_path_64)
__import__(module)
return
except ImportError:
tty.warn("Spec %s did not provide module %s" % (ispec, module))
sys.path = sys.path[:-2]
sys.path = sys.path[:-1]
def _raise_error(module_name, module_spec):
error_msg = 'cannot import module "{0}"'.format(module_name)
@ -119,16 +117,13 @@ def _raise_error(module_name, module_spec):
spec.concretize()
spec.package.do_install()
module_path = os.path.join(spec.prefix,
spec['python'].package.site_packages_dir)
module_path_64 = module_path.replace('/lib/', '/lib64/')
module_path = spec['python'].package.get_python_lib(prefix=spec.prefix)
try:
sys.path.append(module_path)
sys.path.append(module_path_64)
__import__(module)
return
except ImportError:
sys.path = sys.path[:-2]
sys.path = sys.path[:-1]
_raise_error(module, spec)
@ -137,7 +132,7 @@ def get_executable(exe, spec=None, install=False):
Args:
exe (str): needed executable name
spec (Spec or str): spec to search for exe in (default exe)
spec (spack.spec.Spec or str): spec to search for exe in (default exe)
install (bool): install spec if not available
When ``install`` is True, Spack will use the python used to run Spack as an
@ -216,9 +211,10 @@ def _bootstrap_config_scopes():
@contextlib.contextmanager
def ensure_bootstrap_configuration():
bootstrap_store_path = store_path()
with spack.architecture.use_platform(spack.architecture.real_platform()):
with spack.repo.use_repositories(spack.paths.packages_path):
with spack.store.use_store(spack.paths.user_bootstrap_store):
with spack.store.use_store(bootstrap_store_path):
# Default configuration scopes excluding command line
# and builtin but accounting for platform specific scopes
config_scopes = _bootstrap_config_scopes()
@ -227,6 +223,23 @@ def ensure_bootstrap_configuration():
yield
def store_path():
    """Return the canonicalized path to the store used for bootstrapped software.

    The root directory is taken from the ``bootstrap:root`` configuration
    entry (falling back to Spack's default user bootstrap path), and the
    store lives in a ``store`` subdirectory underneath it.

    Raises:
        RuntimeError: if bootstrapping is disabled in the configuration.
    """
    if not spack.config.get('bootstrap:enable', True):
        raise RuntimeError(
            'bootstrapping is currently disabled. '
            'Use "spack bootstrap enable" to enable it'
        )

    root = spack.config.get('bootstrap:root', spack.paths.user_bootstrap_path)
    return spack.util.path.canonicalize_path(os.path.join(root, 'store'))
def clingo_root_spec():
# Construct the root spec that will be used to bootstrap clingo
spec_str = 'clingo-bootstrap@spack+python'

View File

@ -455,11 +455,11 @@ def determine_number_of_jobs(
cap to the number of CPUs available to avoid oversubscription.
Parameters:
parallel (bool): true when package supports parallel builds
command_line (int/None): command line override
config_default (int/None): config default number of jobs
max_cpus (int/None): maximum number of CPUs available. When None, this
value is automatically determined.
parallel (bool or None): true when package supports parallel builds
command_line (int or None): command line override
config_default (int or None): config default number of jobs
max_cpus (int or None): maximum number of CPUs available. When None, this
value is automatically determined.
"""
if not parallel:
return 1
@ -685,14 +685,14 @@ def get_std_cmake_args(pkg):
"""List of standard arguments used if a package is a CMakePackage.
Returns:
list of str: standard arguments that would be used if this
list: standard arguments that would be used if this
package were a CMakePackage instance.
Args:
pkg (PackageBase): package under consideration
pkg (spack.package.PackageBase): package under consideration
Returns:
list of str: arguments for cmake
list: arguments for cmake
"""
return spack.build_systems.cmake.CMakePackage._std_args(pkg)
@ -701,14 +701,14 @@ def get_std_meson_args(pkg):
"""List of standard arguments used if a package is a MesonPackage.
Returns:
list of str: standard arguments that would be used if this
list: standard arguments that would be used if this
package were a MesonPackage instance.
Args:
pkg (PackageBase): package under consideration
pkg (spack.package.PackageBase): package under consideration
Returns:
list of str: arguments for meson
list: arguments for meson
"""
return spack.build_systems.meson.MesonPackage._std_args(pkg)
@ -738,7 +738,7 @@ def load_external_modules(pkg):
associated with them.
Args:
pkg (PackageBase): package to load deps for
pkg (spack.package.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
@ -864,7 +864,7 @@ def modifications_from_dependencies(spec, context, custom_mods_only=True):
CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
Args:
spec (Spec): spec for which we want the modifications
spec (spack.spec.Spec): spec for which we want the modifications
context (str): either 'build' for build-time modifications or 'run'
for run-time modifications
"""
@ -1062,9 +1062,9 @@ def start_build_process(pkg, function, kwargs):
Args:
pkg (PackageBase): package whose environment we should set up the
pkg (spack.package.PackageBase): package whose environment we should set up the
child process for.
function (callable): argless function to run in the child
function (typing.Callable): argless function to run in the child
process.
Usage::
@ -1149,7 +1149,7 @@ def get_package_context(traceback, context=3):
"""Return some context for an error message when the build fails.
Args:
traceback (traceback): A traceback from some exception raised during
traceback: A traceback from some exception raised during
install
context (int): Lines of context to show before and after the line

View File

@ -30,7 +30,7 @@ class AutotoolsPackage(PackageBase):
They all have sensible defaults and for many packages the only thing
necessary will be to override the helper method
:py:meth:`~.AutotoolsPackage.configure_args`.
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
For a finer tuning you may also override:
+-----------------------------------------------+--------------------+
@ -331,7 +331,7 @@ def flags_to_build_system_args(self, flags):
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
:py:meth:`~.AutotoolsPackage.configure_args`
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
and an appropriately set prefix.
"""
options = getattr(self, 'configure_flag_args', [])
@ -376,8 +376,8 @@ def _activate_or_not(
activation_value=None
):
"""This function contains the current implementation details of
:py:meth:`~.AutotoolsPackage.with_or_without` and
:py:meth:`~.AutotoolsPackage.enable_or_disable`.
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
:meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
Args:
name (str): name of the variant that is being processed
@ -385,7 +385,7 @@ def _activate_or_not(
case of ``with_or_without``)
deactivation_word (str): the default deactivation word ('without'
in the case of ``with_or_without``)
activation_value (callable): callable that accepts a single
activation_value (typing.Callable): callable that accepts a single
value. This value is either one of the allowed values for a
multi-valued variant or the name of a bool-valued variant.
Returns the parameter to be used when the value is activated.
@ -420,7 +420,7 @@ def _activate_or_not(
for ``<spec-name> foo=x +bar``
Returns:
list of strings that corresponds to the activation/deactivation
list: list of strings that corresponds to the activation/deactivation
of the variant that has been processed
Raises:
@ -501,7 +501,7 @@ def with_or_without(self, name, activation_value=None):
Args:
name (str): name of a valid multi-valued variant
activation_value (callable): callable that accepts a single
activation_value (typing.Callable): callable that accepts a single
value and returns the parameter to be used leading to an entry
of the type ``--with-{name}={parameter}``.
@ -514,12 +514,13 @@ def with_or_without(self, name, activation_value=None):
return self._activate_or_not(name, 'with', 'without', activation_value)
def enable_or_disable(self, name, activation_value=None):
"""Same as :py:meth:`~.AutotoolsPackage.with_or_without` but substitute
``with`` with ``enable`` and ``without`` with ``disable``.
"""Same as
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args:
name (str): name of a valid multi-valued variant
activation_value (callable): if present accepts a single value
activation_value (typing.Callable): if present accepts a single value
and returns the parameter to be used leading to an entry of the
type ``--enable-{name}={parameter}``

View File

@ -108,21 +108,6 @@ def initconfig_compiler_entries(self):
if fflags:
entries.append(cmake_cache_string("CMAKE_Fortran_FLAGS", fflags))
# Override XL compiler family
familymsg = ("Override to proper compiler family for XL")
if "xlf" in (self.compiler.fc or ''): # noqa: F821
entries.append(cmake_cache_string(
"CMAKE_Fortran_COMPILER_ID", "XL",
familymsg))
if "xlc" in self.compiler.cc: # noqa: F821
entries.append(cmake_cache_string(
"CMAKE_C_COMPILER_ID", "XL",
familymsg))
if "xlC" in self.compiler.cxx: # noqa: F821
entries.append(cmake_cache_string(
"CMAKE_CXX_COMPILER_ID", "XL",
familymsg))
return entries
def initconfig_mpi_entries(self):

View File

@ -236,7 +236,7 @@ def define_from_variant(self, cmake_var, variant=None):
of ``cmake_var``.
This utility function is similar to
:py:meth:`~.AutotoolsPackage.with_or_without`.
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`.
Examples:

View File

@ -368,7 +368,7 @@ def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
toplevel psxevars.sh or equivalent file to source (and thus by
the modulefiles that Spack produces).
version_globs (list of str): Suffix glob patterns (most specific
version_globs (list): Suffix glob patterns (most specific
first) expected to qualify suite_dir_name to its fully
version-specific install directory (as opposed to a
compatibility directory or symlink).

View File

@ -69,6 +69,9 @@ def install(self, spec, prefix, installer_path=None):
# Installer writes files in ~/intel set HOME so it goes to prefix
bash.add_default_env('HOME', prefix)
# Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
bash.add_default_env('XDG_RUNTIME_DIR',
join_path(self.stage.path, 'runtime'))
bash(installer_path,
'-s', '-a', '-s', '--action', 'install',

View File

@ -127,24 +127,22 @@ def import_modules(self):
list: list of strings of module names
"""
modules = []
root = self.spec['python'].package.get_python_lib(prefix=self.prefix)
# Python libraries may be installed in lib or lib64
# See issues #18520 and #17126
for lib in ['lib', 'lib64']:
root = os.path.join(self.prefix, lib, 'python{0}'.format(
self.spec['python'].version.up_to(2)), 'site-packages')
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files
for path in find(root, '__init__.py', recursive=True):
modules.append(path.replace(root + os.sep, '', 1).replace(
os.sep + '__init__.py', '').replace('/', '.'))
# Some Python libraries are modules: individual *.py files
# found in the site-packages directory
for path in find(root, '*.py', recursive=False):
modules.append(path.replace(root + os.sep, '', 1).replace(
'.py', '').replace('/', '.'))
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files
for path in find(root, '__init__.py', recursive=True):
modules.append(path.replace(root + os.sep, '', 1).replace(
os.sep + '__init__.py', '').replace('/', '.'))
# Some Python libraries are modules: individual *.py files
# found in the site-packages directory
for path in find(root, '*.py', recursive=False):
modules.append(path.replace(root + os.sep, '', 1).replace(
'.py', '').replace('/', '.'))
tty.debug('Detected the following modules: {0}'.format(modules))
return modules
def setup_file(self):
@ -254,15 +252,12 @@ def install_args(self, spec, prefix):
# Get all relative paths since we set the root to `prefix`
# We query the python with which these will be used for the lib and inc
# directories. This ensures we use `lib`/`lib64` as expected by python.
python = spec['python'].package.command
command_start = 'print(distutils.sysconfig.'
commands = ';'.join([
'import distutils.sysconfig',
command_start + 'get_python_lib(plat_specific=False, prefix=""))',
command_start + 'get_python_lib(plat_specific=True, prefix=""))',
command_start + 'get_python_inc(plat_specific=True, prefix=""))'])
pure_site_packages_dir, plat_site_packages_dir, inc_dir = python(
'-c', commands, output=str, error=str).strip().split('\n')
pure_site_packages_dir = spec['python'].package.get_python_lib(
plat_specific=False, prefix='')
plat_site_packages_dir = spec['python'].package.get_python_lib(
plat_specific=True, prefix='')
inc_dir = spec['python'].package.get_python_inc(
plat_specific=True, prefix='')
args += ['--root=%s' % prefix,
'--install-purelib=%s' % pure_site_packages_dir,

View File

@ -64,24 +64,22 @@ def import_modules(self):
list: list of strings of module names
"""
modules = []
root = self.spec['python'].package.get_python_lib(prefix=self.prefix)
# Python libraries may be installed in lib or lib64
# See issues #18520 and #17126
for lib in ['lib', 'lib64']:
root = os.path.join(self.prefix, lib, 'python{0}'.format(
self.spec['python'].version.up_to(2)), 'site-packages')
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files
for path in find(root, '__init__.py', recursive=True):
modules.append(path.replace(root + os.sep, '', 1).replace(
os.sep + '__init__.py', '').replace('/', '.'))
# Some Python libraries are modules: individual *.py files
# found in the site-packages directory
for path in find(root, '*.py', recursive=False):
modules.append(path.replace(root + os.sep, '', 1).replace(
'.py', '').replace('/', '.'))
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files
for path in find(root, '__init__.py', recursive=True):
modules.append(path.replace(root + os.sep, '', 1).replace(
os.sep + '__init__.py', '').replace('/', '.'))
# Some Python libraries are modules: individual *.py files
# found in the site-packages directory
for path in find(root, '*.py', recursive=False):
modules.append(path.replace(root + os.sep, '', 1).replace(
'.py', '').replace('/', '.'))
tty.debug('Detected the following modules: {0}'.format(modules))
return modules
def python(self, *args, **kwargs):

View File

@ -1395,7 +1395,7 @@ def push_mirror_contents(env, spec, yaml_path, mirror_url, sign_binaries):
# BaseException
# object
err_msg = 'Error msg: {0}'.format(inst)
if 'Access Denied' in err_msg:
if any(x in err_msg for x in ['Access Denied', 'InvalidAccessKeyId']):
tty.msg('Permission problem writing to {0}'.format(
mirror_url))
tty.msg(err_msg)

View File

@ -216,10 +216,10 @@ def disambiguate_spec(spec, env, local=False, installed=True, first=False):
spec (spack.spec.Spec): a spec to disambiguate
env (spack.environment.Environment): a spack environment,
if one is active, or None if no environment is active
local (boolean, default False): do not search chained spack instances
installed (boolean or any, or spack.database.InstallStatus or iterable
of spack.database.InstallStatus): install status argument passed to
database query. See ``spack.database.Database._query`` for details.
local (bool): do not search chained spack instances
installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
"""
hashes = env.all_hashes() if env else None
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
@ -231,11 +231,11 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,
Arguments:
spec (spack.spec.Spec): a spec to disambiguate
hashes (iterable): a set of hashes of specs among which to disambiguate
local (boolean, default False): do not search chained spack instances
installed (boolean or any, or spack.database.InstallStatus or iterable
of spack.database.InstallStatus): install status argument passed to
database query. See ``spack.database.Database._query`` for details.
hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
local (bool): do not search chained spack instances
installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
"""
if local:
matching_specs = spack.store.db.query_local(spec, hashes=hashes,
@ -333,9 +333,8 @@ def display_specs(specs, args=None, **kwargs):
namespace.
Args:
specs (list of spack.spec.Spec): the specs to display
args (optional argparse.Namespace): namespace containing
formatting arguments
specs (list): the specs to display
args (argparse.Namespace or None): namespace containing formatting arguments
Keyword Args:
paths (bool): Show paths with each displayed spec
@ -348,9 +347,9 @@ def display_specs(specs, args=None, **kwargs):
indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True)
decorators (dict): dictionary mappng specs to decorators
header_callback (function): called at start of arch/compiler groups
header_callback (typing.Callable): called at start of arch/compiler groups
all_headers (bool): show headers even when arch/compiler aren't defined
output (stream): A file object to write to. Default is ``sys.stdout``
output (typing.IO): A file object to write to. Default is ``sys.stdout``
"""
def get_arg(name, default=None):

View File

@ -58,9 +58,9 @@ def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=Fals
analyze_spec(spec, args.analyzers, args.outdir, monitor)
Args:
spec (Spec): spec object of installed package
spec (spack.spec.Spec): spec object of installed package
analyzers (list): list of analyzer (keys) to run
monitor (monitor.SpackMonitorClient): a monitor client
monitor (spack.monitor.SpackMonitorClient): a monitor client
overwrite (bool): overwrite result if already exists
"""
analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())

View File

@ -0,0 +1,110 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import shutil
import llnl.util.tty
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.util.path
# Metadata consumed by Spack's command framework: the one-line help text,
# the help section the command is listed under, and its verbosity level.
description = "manage bootstrap configuration"
section = "system"
level = "long"
def _add_scope_option(parser):
    """Attach the standard ``--scope`` option, restricted to known config scopes."""
    parser.add_argument(
        '--scope',
        choices=spack.config.scopes(),
        metavar=spack.config.scopes_metavar,
        help="configuration scope to read/modify"
    )
def setup_parser(subparser):
    """Register the ``enable``, ``disable``, ``reset`` and ``root`` subcommands."""
    subcommands = subparser.add_subparsers(dest='subcommand')

    enable_parser = subcommands.add_parser('enable', help='enable bootstrapping')
    _add_scope_option(enable_parser)

    disable_parser = subcommands.add_parser('disable', help='disable bootstrapping')
    _add_scope_option(disable_parser)

    reset_parser = subcommands.add_parser(
        'reset', help='reset bootstrapping configuration to Spack defaults'
    )
    # Only "reset" asks for confirmation, so it gets the --yes-to-all flag.
    spack.cmd.common.arguments.add_common_arguments(reset_parser, ['yes_to_all'])

    root_parser = subcommands.add_parser(
        'root', help='get/set the root bootstrap directory'
    )
    _add_scope_option(root_parser)
    root_parser.add_argument(
        'path', nargs='?', default=None,
        help='set the bootstrap directory to this value'
    )
def _enable_or_disable(args):
    """Set ``bootstrap:enable`` to True for "enable", False for "disable"."""
    spack.config.set(
        'bootstrap:enable', args.subcommand == 'enable', scope=args.scope
    )
def _reset(args):
    """Reset bootstrapping configuration to Spack's defaults.

    Prompts for confirmation unless ``--yes-to-all`` was given, then clears
    any ``bootstrap`` configuration from every writable configuration scope.
    """
    if not args.yes_to_all:
        msg = [
            "Bootstrapping configuration is being reset to Spack's defaults. "
            "Current configuration will be lost.\n",
            "Do you want to continue?"
        ]
        ok_to_continue = llnl.util.tty.get_yes_or_no(
            ''.join(msg), default=True
        )
        if not ok_to_continue:
            raise RuntimeError('Aborting')

    for scope in spack.config.config.file_scopes:
        # The default scope should stay untouched
        if scope.name == 'defaults':
            continue

        # If we are in an env scope we can't delete a file, but the best we
        # can do is nullify the corresponding configuration
        if (scope.name.startswith('env') and
                spack.config.get('bootstrap', scope=scope.name)):
            spack.config.set('bootstrap', {}, scope=scope.name)
            continue

        # If we are outside of an env scope delete the bootstrap.yaml file
        # (moved to a .bkp backup rather than removed outright, so the user
        # can recover the previous configuration by hand if needed)
        bootstrap_yaml = os.path.join(scope.path, 'bootstrap.yaml')
        backup_file = bootstrap_yaml + '.bkp'
        if os.path.exists(bootstrap_yaml):
            shutil.move(bootstrap_yaml, backup_file)
def _root(args):
    """Set the bootstrap root if a path was given, then print the current root."""
    if args.path:
        spack.config.set('bootstrap:root', args.path, scope=args.scope)

    current = spack.config.get('bootstrap:root', default=None, scope=args.scope)
    if current:
        print(spack.util.path.canonicalize_path(current))
def bootstrap(parser, args):
    """Entry point for ``spack bootstrap``: dispatch to the subcommand handler."""
    handlers = {
        'enable': _enable_or_disable,
        'disable': _enable_or_disable,
        'reset': _reset,
        'root': _root,
    }
    handlers[args.subcommand](args)

View File

@ -239,12 +239,13 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
concretized specs given from cli
Args:
pkgs (string): spec to be matched against installed packages
pkgs (str): spec to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted
env (Environment): active environment, or ``None`` if there is not one
env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
Return:
list of specs
list: list of specs
"""
hashes = env.all_hashes() if env else None

View File

@ -9,6 +9,7 @@
import llnl.util.tty as tty
import spack.bootstrap
import spack.caches
import spack.cmd.common.arguments as arguments
import spack.cmd.test
@ -102,7 +103,7 @@ def clean(parser, args):
if args.bootstrap:
msg = 'Removing software in "{0}"'
tty.msg(msg.format(spack.paths.user_bootstrap_store))
with spack.store.use_store(spack.paths.user_bootstrap_store):
tty.msg(msg.format(spack.bootstrap.store_path()))
with spack.store.use_store(spack.bootstrap.store_path()):
uninstall = spack.main.SpackCommand('uninstall')
uninstall('-a', '-y')

View File

@ -636,7 +636,7 @@ def get_name(args):
provided, extract the name from that. Otherwise, use a default.
Args:
args (param argparse.Namespace): The arguments given to
args (argparse.Namespace): The arguments given to
``spack create``
Returns:
@ -709,8 +709,7 @@ def get_versions(args, name):
name (str): The name of the package
Returns:
str and BuildSystemGuesser: Versions and hashes, and a
BuildSystemGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""
# Default version with hash
@ -794,7 +793,8 @@ def get_repository(args, name):
name (str): The name of the package to create
Returns:
Repo: A Repo object capable of determining the path to the package file
spack.repo.Repo: A Repo object capable of determining the path to the
package file
"""
spec = Spec(name)
# Figure out namespace for spec

View File

@ -59,7 +59,7 @@ def get_dependents(pkg_name, ideps, transitive=False, dependents=None):
Args:
pkg_name (str): name of the package whose dependents should be returned
ideps (dict): dictionary of dependents, from inverted_dependencies()
transitive (bool, optional): return transitive dependents when True
transitive (bool or None): return transitive dependents when True
"""
if dependents is None:
dependents = set()

View File

@ -13,6 +13,7 @@
import llnl.util.tty as tty
import llnl.util.tty.color as color
import spack.bootstrap
import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
@ -207,9 +208,10 @@ def find(parser, args):
q_args = query_arguments(args)
# Query the current store or the internal bootstrap store if required
if args.bootstrap:
bootstrap_store_path = spack.bootstrap.store_path()
msg = 'Showing internal bootstrap store at "{0}"'
tty.msg(msg.format(spack.paths.user_bootstrap_store))
with spack.store.use_store(spack.paths.user_bootstrap_store):
tty.msg(msg.format(bootstrap_store_path))
with spack.store.use_store(bootstrap_store_path):
results = args.specs(**q_args)
else:
results = args.specs(**q_args)

View File

@ -198,9 +198,9 @@ def install_specs(cli_args, kwargs, specs):
"""Do the actual installation.
Args:
cli_args (Namespace): argparse namespace with command arguments
cli_args (argparse.Namespace): argparse namespace with command arguments
kwargs (dict): keyword arguments
specs (list of tuples): list of (abstract, concrete) spec tuples
specs (list): list of (abstract, concrete) spec tuples
"""
# handle active environment, if any

View File

@ -14,6 +14,7 @@
import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir
import spack.bootstrap
import spack.paths
from spack.util.executable import which
@ -38,15 +39,20 @@ def grouper(iterable, n, fillvalue=None):
yield filter(None, group)
#: directory where spack style started, for relativizing paths
initial_working_dir = None
#: List of directories to exclude from checks -- relative to spack root
exclude_directories = [
os.path.relpath(spack.paths.external_path, spack.paths.prefix),
]
#: List of directories to exclude from checks.
exclude_directories = [spack.paths.external_path]
#: order in which tools should be run. flake8 is last so that it can
#: Order in which tools should be run. flake8 is last so that it can
#: double-check the results of other tools (if, e.g., --fix was provided)
tool_order = ["isort", "mypy", "black", "flake8"]
#: The list maps an executable name to a spack spec needed to install it.
tool_order = [
("isort", "py-isort@4.3.5:"),
("mypy", "py-mypy@0.900:"),
("black", "py-black"),
("flake8", "py-flake8"),
]
#: tools we run in spack style
tools = {}
@ -59,7 +65,7 @@ def is_package(f):
packages, since we allow `from spack import *` and poking globals
into packages.
"""
return f.startswith("var/spack/repos/") or "docs/tutorial/examples" in f
return f.startswith("var/spack/repos/")
#: decorator for adding tools to the list
@ -73,14 +79,29 @@ def __call__(self, fun):
return fun
def changed_files(base=None, untracked=True, all_files=False):
"""Get list of changed files in the Spack repository."""
def changed_files(base="develop", untracked=True, all_files=False, root=None):
"""Get list of changed files in the Spack repository.
Arguments:
base (str): name of base branch to evaluate differences with.
untracked (bool): include untracked files in the list.
all_files (bool): list all files in the repository.
root (str): use this directory instead of the Spack prefix.
"""
if root is None:
root = spack.paths.prefix
git = which("git", required=True)
# GITHUB_BASE_REF is set to the base branch for pull request actions
if base is None:
base = os.environ.get("GITHUB_BASE_REF", "develop")
# ensure base is in the repo
git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
fail_on_error=False)
if git.returncode != 0:
tty.die(
"This repository does not have a '%s' branch." % base,
"spack style needs this branch to determine which files changed.",
"Ensure that '%s' exists, or specify files to check explicitly." % base
)
range = "{0}...".format(base)
@ -101,7 +122,10 @@ def changed_files(base=None, untracked=True, all_files=False):
if all_files:
git_args.append(["ls-files", "--exclude-standard"])
excludes = [os.path.realpath(f) for f in exclude_directories]
excludes = [
os.path.realpath(os.path.join(root, f))
for f in exclude_directories
]
changed = set()
for arg_list in git_args:
@ -126,8 +150,8 @@ def setup_parser(subparser):
"-b",
"--base",
action="store",
default=None,
help="select base branch for collecting list of modified files",
default="develop",
help="branch to compare against to determine changed files (default: develop)",
)
subparser.add_argument(
"-a",
@ -181,14 +205,20 @@ def setup_parser(subparser):
action="store_true",
help="run black if available (default: skip black)",
)
subparser.add_argument(
"--root",
action="store",
default=None,
help="style check a different spack instance",
)
subparser.add_argument(
"files", nargs=argparse.REMAINDER, help="specific files to check"
)
def cwd_relative(path):
def cwd_relative(path, args):
"""Translate prefix-relative path to current working directory-relative."""
return os.path.relpath(os.path.join(spack.paths.prefix, path), initial_working_dir)
return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)
def rewrite_and_print_output(
@ -198,7 +228,7 @@ def rewrite_and_print_output(
# print results relative to current working directory
def translate(match):
return replacement.format(
cwd_relative(match.group(1)), *list(match.groups()[1:])
cwd_relative(match.group(1), args), *list(match.groups()[1:])
)
for line in output.split("\n"):
@ -210,15 +240,15 @@ def translate(match):
def print_style_header(file_list, args):
tools = [tool for tool in tool_order if getattr(args, tool)]
tty.msg("Running style checks on spack:", "selected: " + ", ".join(tools))
tools = [tool for tool, _ in tool_order if getattr(args, tool)]
tty.msg("Running style checks on spack", "selected: " + ", ".join(tools))
# translate modified paths to cwd_relative if needed
paths = [filename.strip() for filename in file_list]
if not args.root_relative:
paths = [cwd_relative(filename) for filename in paths]
paths = [cwd_relative(filename, args) for filename in paths]
tty.msg("Modified files:", *paths)
tty.msg("Modified files", *paths)
sys.stdout.flush()
@ -242,12 +272,9 @@ def run_flake8(flake8_cmd, file_list, args):
# run in chunks of 100 at a time to avoid line length limit
# filename parameter in config *does not work* for this reliably
for chunk in grouper(file_list, 100):
output = flake8_cmd(
# use .flake8 implicitly to work around bug in flake8 upstream
# append-config is ignored if `--config` is explicitly listed
# see: https://gitlab.com/pycqa/flake8/-/issues/455
# "--config=.flake8",
# always run with config from running spack prefix
"--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
*chunk,
fail_on_error=False,
output=str
@ -262,12 +289,18 @@ def run_flake8(flake8_cmd, file_list, args):
@tool("mypy")
def run_mypy(mypy_cmd, file_list, args):
mpy_args = ["--package", "spack", "--package", "llnl", "--show-error-codes"]
# always run with config from running spack prefix
mypy_args = [
"--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
"--package", "spack",
"--package", "llnl",
"--show-error-codes",
]
# not yet, need other updates to enable this
# if any([is_package(f) for f in file_list]):
# mpy_args.extend(["--package", "packages"])
# mypy_args.extend(["--package", "packages"])
output = mypy_cmd(*mpy_args, fail_on_error=False, output=str)
output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
returncode = mypy_cmd.returncode
rewrite_and_print_output(output, args)
@ -278,13 +311,16 @@ def run_mypy(mypy_cmd, file_list, args):
@tool("isort")
def run_isort(isort_cmd, file_list, args):
check_fix_args = () if args.fix else ("--check", "--diff")
# always run with config from running spack prefix
isort_args = ("--settings-file", os.path.join(spack.paths.prefix, "pyproject.toml"))
if not args.fix:
isort_args += ("--check", "--diff")
pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
replacement = "ERROR: {0} Imports are incorrectly sorted"
returncode = 0
for chunk in grouper(file_list, 100):
packed_args = check_fix_args + tuple(chunk)
packed_args = isort_args + tuple(chunk)
output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
returncode |= isort_cmd.returncode
@ -296,7 +332,12 @@ def run_isort(isort_cmd, file_list, args):
@tool("black")
def run_black(black_cmd, file_list, args):
check_fix_args = () if args.fix else ("--check", "--diff", "--color")
# always run with config from running spack prefix
black_args = ("--config", os.path.join(spack.paths.prefix, "pyproject.toml"))
if not args.fix:
black_args += ("--check", "--diff")
if color.get_color_when(): # only show color when spack would
black_args += ("--color",)
pat = re.compile("would reformat +(.*)")
replacement = "would reformat {0}"
@ -305,50 +346,77 @@ def run_black(black_cmd, file_list, args):
# run in chunks of 100 at a time to avoid line length limit
# filename parameter in config *does not work* for this reliably
for chunk in grouper(file_list, 100):
packed_args = check_fix_args + tuple(chunk)
packed_args = black_args + tuple(chunk)
output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
returncode |= black_cmd.returncode
rewrite_and_print_output(output, args, pat, replacement)
print_tool_result("black", returncode)
return returncode
def style(parser, args):
# ensure python version is new enough
if sys.version_info < (3, 6):
tty.die("spack style requires Python 3.6 or later.")
# save initial working directory for relativizing paths later
global initial_working_dir
initial_working_dir = os.getcwd()
args.initial_working_dir = os.getcwd()
# ensure that the config files we need actually exist in the spack prefix.
# assertions b/c users should not ever see these errors -- they're checked in CI.
assert os.path.isfile(os.path.join(spack.paths.prefix, "pyproject.toml"))
assert os.path.isfile(os.path.join(spack.paths.prefix, ".flake8"))
# validate spack root if the user provided one
args.root = os.path.realpath(args.root) if args.root else spack.paths.prefix
spack_script = os.path.join(args.root, "bin", "spack")
if not os.path.exists(spack_script):
tty.die(
"This does not look like a valid spack root.",
"No such file: '%s'" % spack_script
)
file_list = args.files
if file_list:
def prefix_relative(path):
return os.path.relpath(
os.path.abspath(os.path.realpath(path)), spack.paths.prefix
)
return os.path.relpath(os.path.abspath(os.path.realpath(path)), args.root)
file_list = [prefix_relative(p) for p in file_list]
returncode = 0
with working_dir(spack.paths.prefix):
with working_dir(args.root):
if not file_list:
file_list = changed_files(args.base, args.untracked, args.all)
print_style_header(file_list, args)
# run tools in order defined in tool_order
returncode = 0
for tool_name in tool_order:
for tool_name, tool_spec in tool_order:
if getattr(args, tool_name):
run_function, required = tools[tool_name]
print_tool_header(tool_name)
cmd = which(tool_name, required=required)
if not cmd:
color.cprint(" @y{%s not in PATH, skipped}" % tool_name)
continue
try:
# Bootstrap tools so we don't need to require install
with spack.bootstrap.ensure_bootstrap_configuration():
spec = spack.spec.Spec(tool_spec)
cmd = None
cmd = spack.bootstrap.get_executable(
tool_name, spec=spec, install=True
)
if not cmd:
color.cprint(" @y{%s not in PATH, skipped}" % tool_name)
continue
returncode |= run_function(cmd, file_list, args)
returncode |= run_function(cmd, file_list, args)
except Exception as e:
raise spack.error.SpackError(
"Couldn't bootstrap %s:" % tool_name, str(e)
)
if returncode == 0:
tty.msg(color.colorize("@*{spack style checks were clean}"))

View File

@ -69,12 +69,13 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
concretized specs given from cli
Args:
env (Environment): active environment, or ``None`` if there is not one
env (spack.environment.Environment): active environment, or ``None``
if there is not one
specs (list): list of specs to be matched against installed packages
allow_multiple_matches (bool): if True multiple matches are admitted
Return:
list of specs
list: list of specs
"""
# constrain uninstall resolution to current environment if one is active
hashes = env.all_hashes() if env else None
@ -118,15 +119,13 @@ def installed_dependents(specs, env):
Args:
specs (list): list of Specs
env (Environment): the active environment, or None
env (spack.environment.Environment or None): the active environment, or None
Returns:
(tuple of dicts): two mappings: one from specs to their dependent
environments in the active environment (or global scope if
there is no environment), and one from specs to their
dependents in *inactive* environments (empty if there is no
environment
tuple: two mappings: one from specs to their dependent environments in the
active environment (or global scope if there is no environment), and one from
specs to their dependents in *inactive* environments (empty if there is no
environment
"""
active_dpts = {}
inactive_dpts = {}
@ -155,9 +154,9 @@ def dependent_environments(specs):
Args:
specs (list): list of Specs
Returns:
(dict): mapping from spec to lists of dependent Environments
Returns:
dict: mapping from spec to lists of dependent Environments
"""
dependents = {}
for env in ev.all_environments():
@ -176,9 +175,10 @@ def inactive_dependent_environments(spec_envs):
have no dependent environments. Return the result.
Args:
(dict): mapping from spec to lists of dependent Environments
spec_envs (dict): mapping from spec to lists of dependent Environments
Returns:
(dict): mapping from spec to lists of *inactive* dependent Environments
dict: mapping from spec to lists of *inactive* dependent Environments
"""
spec_inactive_envs = {}
for spec, de_list in spec_envs.items():
@ -203,7 +203,8 @@ def do_uninstall(env, specs, force):
"""Uninstalls all the specs in a list.
Args:
env (Environment): active environment, or ``None`` if there is not one
env (spack.environment.Environment or None): active environment, or ``None``
if there is not one
specs (list): list of specs to be uninstalled
force (bool): force uninstallation (boolean)
"""

View File

@ -502,7 +502,7 @@ def remove_separators(version):
Unfortunately, this also means that 1.23 and 12.3 are equal.
Args:
version (str or Version): A version
version (str or spack.version.Version): A version
Returns:
str: The version with all separator characters removed

View File

@ -135,8 +135,8 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
"""Add compilers to the config for the specified architecture.
Arguments:
- compilers: a list of Compiler objects.
- scope: configuration scope to modify.
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
@ -151,8 +151,8 @@ def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec.
Arguments:
- compiler_specs: a list of CompilerSpec objects.
- scope: configuration scope to modify.
compiler_specs: a list of CompilerSpec objects.
scope: configuration scope to modify.
"""
# Need a better way for this
global _cache_config_file
@ -544,8 +544,8 @@ def arguments_to_detect_version_fn(operating_system, paths):
function by providing a method called with the same name.
Args:
operating_system (OperatingSystem): the operating system on which
we are looking for compilers
operating_system (spack.architecture.OperatingSystem): the operating system
on which we are looking for compilers
paths: paths to search for compilers
Returns:
@ -649,7 +649,7 @@ def make_compiler_list(detected_versions):
valid version
Returns:
list of Compiler objects
list: list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
@ -715,7 +715,7 @@ def is_mixed_toolchain(compiler):
False otherwise.
Args:
compiler (Compiler): a valid compiler object
compiler (spack.compiler.Compiler): a valid compiler object
"""
cc = os.path.basename(compiler.cc or '')
cxx = os.path.basename(compiler.cxx or '')

View File

@ -17,8 +17,8 @@
And corresponding :ref:`per-platform scopes <platform-scopes>`. Important
functions in this module are:
* :py:func:`get_config`
* :py:func:`update_config`
* :func:`~spack.config.Configuration.get_config`
* :func:`~spack.config.Configuration.update_config`
``get_config`` reads in YAML data for a particular scope and returns
it. Callers can then modify the data and write it back with
@ -51,6 +51,7 @@
import spack.compilers
import spack.paths
import spack.schema
import spack.schema.bootstrap
import spack.schema.compilers
import spack.schema.config
import spack.schema.env
@ -74,6 +75,7 @@
'modules': spack.schema.modules.schema,
'config': spack.schema.config.schema,
'upstreams': spack.schema.upstreams.schema,
'bootstrap': spack.schema.bootstrap.schema
}
# Same as above, but including keys for environments
@ -720,7 +722,7 @@ def override(path_or_scope, value=None):
Arguments:
path_or_scope (ConfigScope or str): scope or single option to override
value (object, optional): value for the single option
value (object or None): value for the single option
Temporarily push a scope on the current configuration, then remove it
after the context completes. If a single option is provided, create
@ -1161,7 +1163,7 @@ def default_modify_scope(section='config'):
priority scope.
Arguments:
section (boolean): Section for which to get the default scope.
section (bool): Section for which to get the default scope.
If this is not 'compilers', a general (non-platform) scope is used.
"""
if section == 'compilers':

View File

@ -171,13 +171,13 @@ class InstallRecord(object):
dependents left.
Args:
spec (Spec): spec tracked by the install record
spec (spack.spec.Spec): spec tracked by the install record
path (str): path where the spec has been installed
installed (bool): whether or not the spec is currently installed
ref_count (int): number of specs that depend on this one
explicit (bool, optional): whether or not this spec was explicitly
explicit (bool or None): whether or not this spec was explicitly
installed, or pulled-in as a dependency of something else
installation_time (time, optional): time of the installation
installation_time (datetime.datetime or None): time of the installation
"""
def __init__(
@ -256,36 +256,36 @@ def __getattribute__(self, name):
database. If it is a spec, we'll evaluate
``spec.satisfies(query_spec)``
known (bool or any, optional): Specs that are "known" are those
known (bool or None): Specs that are "known" are those
for which Spack can locate a ``package.py`` file -- i.e.,
Spack "knows" how to install them. Specs that are unknown may
represent packages that existed in a previous version of
Spack, but have since either changed their name or
been removed
installed (bool or any, or InstallStatus or iterable of
InstallStatus, optional): if ``True``, includes only installed
installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status
(installed, deprecated, or missing) matches (one of) the
InstallStatus. (default: True)
explicit (bool or any, optional): A spec that was installed
explicit (bool or None): A spec that was installed
following a specific user request is marked as explicit. If
instead it was pulled-in as a dependency of a user requested
spec it's considered implicit.
start_date (datetime, optional): filters the query discarding
specs that have been installed before ``start_date``.
start_date (datetime.datetime or None): filters the query
discarding specs that have been installed before ``start_date``.
end_date (datetime, optional): filters the query discarding
end_date (datetime.datetime or None): filters the query discarding
specs that have been installed after ``end_date``.
hashes (container): list or set of hashes that we can use to
hashes (typing.Container): list or set of hashes that we can use to
restrict the search
in_buildcache (bool or any, optional): Specs that are marked in
in_buildcache (bool or None): Specs that are marked in
this database as part of an associated binary cache are
``in_buildcache``. All other specs are not. This field is used
for querying mirror indices. Default is ``any``.
@ -449,7 +449,7 @@ def clear_failure(self, spec, force=False):
see `mark_failed()`.
Args:
spec (Spec): the spec whose failure indicators are being removed
spec (spack.spec.Spec): the spec whose failure indicators are being removed
force (bool): True if the failure information should be cleared
when a prefix failure lock exists for the file or False if
the failure should not be cleared (e.g., it may be
@ -1391,10 +1391,10 @@ def get_by_hash_local(self, *args, **kwargs):
Arguments:
dag_hash (str): hash (or hash prefix) to look up
default (object, optional): default value to return if dag_hash is
default (object or None): default value to return if dag_hash is
not in the DB (default: None)
installed (bool or any, or InstallStatus or iterable of
InstallStatus, optional): if ``True``, includes only installed
installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status
@ -1417,14 +1417,13 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
Arguments:
dag_hash (str): hash (or hash prefix) to look up
default (object, optional): default value to return if dag_hash is
default (object or None): default value to return if dag_hash is
not in the DB (default: None)
installed (bool or any, or InstallStatus or iterable of
InstallStatus, optional): if ``True``, includes only installed
specs in the search; if ``False`` only missing specs, and if
``any``, all specs in database. If an InstallStatus or iterable
of InstallStatus, returns specs whose install status
(installed, deprecated, or missing) matches (one of) the
installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed specs in the search; if ``False``
only missing specs, and if ``any``, all specs in database. If an
InstallStatus or iterable of InstallStatus, returns specs whose install
status (installed, deprecated, or missing) matches (one of) the
InstallStatus. (default: any)
``installed`` defaults to ``any`` so that we can refer to any
@ -1596,7 +1595,7 @@ def update_explicit(self, spec, explicit):
Update the spec's explicit state in the database.
Args:
spec (Spec): the spec whose install record is being updated
spec (spack.spec.Spec): the spec whose install record is being updated
explicit (bool): ``True`` if the package was requested explicitly
by the user, ``False`` if it was pulled in as a dependency of
an explicit package.

View File

@ -54,7 +54,7 @@ class OpenMpi(Package):
from collections import Sequence
__all__ = []
__all__ = ['DirectiveError', 'DirectiveMeta']
#: These are variant names used by Spack internally; packages can't use them
reserved_names = ['patches', 'dev_path']
@ -85,7 +85,7 @@ def make_when_spec(value):
as part of concretization.
Arguments:
value (Spec or bool): a conditional Spec or a constant ``bool``
value (spack.spec.Spec or bool): a conditional Spec or a constant ``bool``
value indicating when a directive should be applied.
"""
@ -187,12 +187,16 @@ def directive(dicts=None):
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you write:
.. code-block:: python
class Foo(Package):
version(...)
@ -392,8 +396,8 @@ def conflicts(conflict_spec, when=None, msg=None):
conflicts('%intel', when='+foo')
Args:
conflict_spec (Spec): constraint defining the known conflict
when (Spec): optional constraint that triggers the conflict
conflict_spec (spack.spec.Spec): constraint defining the known conflict
when (spack.spec.Spec): optional constraint that triggers the conflict
msg (str): optional user defined message
"""
def _execute_conflicts(pkg):
@ -413,11 +417,11 @@ def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply.
Args:
spec (Spec or str): the package and constraints depended on
when (Spec or str): when the dependent satisfies this, it has
spec (spack.spec.Spec or str): the package and constraints depended on
when (spack.spec.Spec or str): when the dependent satisfies this, it has
the dependency represented by ``spec``
type (str or tuple of str): str or tuple of legal Spack deptypes
patches (obj or list): single result of ``patch()`` directive, a
type (str or tuple): str or tuple of legal Spack deptypes
patches (typing.Callable or list): single result of ``patch()`` directive, a
``str`` to be passed to ``patch``, or a list of these
This directive is to be used inside a Package definition to declare
@ -495,7 +499,7 @@ def patch(url_or_filename, level=1, when=None, working_dir=".", **kwargs):
Args:
url_or_filename (str): url or relative filename of the patch
level (int): patch level (as in the patch shell command)
when (Spec): optional anonymous spec that specifies when to apply
when (spack.spec.Spec): optional anonymous spec that specifies when to apply
the patch
working_dir (str): dir to change to before applying
@ -559,12 +563,12 @@ def variant(
specified otherwise the default will be False for a boolean
variant and 'nothing' for a multi-valued variant
description (str): description of the purpose of the variant
values (tuple or callable): either a tuple of strings containing the
values (tuple or typing.Callable): either a tuple of strings containing the
allowed values, or a callable accepting one value and returning
True if it is valid
multi (bool): if False only one value per spec is allowed for
this variant
validator (callable): optional group validator to enforce additional
validator (typing.Callable): optional group validator to enforce additional
logic. It receives the package name, the variant name and a tuple
of values and should raise an instance of SpackError if the group
doesn't meet the additional constraints

View File

@ -116,11 +116,12 @@ def activate(
use_env_repo (bool): use the packages exactly as they appear in the
environment's repository
add_view (bool): generate commands to add view to path variables
shell (string): One of `sh`, `csh`, `fish`.
prompt (string): string to add to the users prompt, or None
shell (str): One of `sh`, `csh`, `fish`.
prompt (str): string to add to the users prompt, or None
Returns:
cmds: Shell commands to activate environment.
str: Shell commands to activate environment.
TODO: environment to use the activated spack environment.
"""
global _active_environment
@ -198,10 +199,10 @@ def deactivate(shell='sh'):
"""Undo any configuration or repo settings modified by ``activate()``.
Arguments:
shell (string): One of `sh`, `csh`, `fish`. Shell style to use.
shell (str): One of `sh`, `csh`, `fish`. Shell style to use.
Returns:
(string): shell commands for `shell` to undo environment variables
str: shell commands for `shell` to undo environment variables
"""
global _active_environment
@ -272,7 +273,7 @@ def find_environment(args):
If an environment is found, read it in. If not, return None.
Arguments:
args (Namespace): argparse namespace wtih command arguments
args (argparse.Namespace): argparse namespace wtih command arguments
Returns:
(Environment): a found environment, or ``None``
@ -322,7 +323,7 @@ def get_env(args, cmd_name, required=False):
message that says the calling command *needs* an active environment.
Arguments:
args (Namespace): argparse namespace wtih command arguments
args (argparse.Namespace): argparse namespace wtih command arguments
cmd_name (str): name of calling command
required (bool): if ``True``, raise an exception when no environment
is found; if ``False``, just return ``None``
@ -550,7 +551,7 @@ def view(self, new=None):
Raise if new is None and there is no current view
Arguments:
new (string or None): If a string, create a FilesystemView
new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs.
"""
@ -851,7 +852,7 @@ def clear(self, re_read=False):
"""Clear the contents of the environment
Arguments:
re_read (boolean): If True, do not clear ``new_specs`` nor
re_read (bool): If True, do not clear ``new_specs`` nor
``new_installs`` values. These values cannot be read from
yaml, and need to be maintained when re-reading an existing
environment.
@ -1119,11 +1120,11 @@ def develop(self, spec, path, clone=False):
"""Add dev-build info for package
Args:
spec (Spec): Set constraints on development specs. Must include a
spec (spack.spec.Spec): Set constraints on development specs. Must include a
concrete version.
path (string): Path to find code for developer builds. Relative
path (str): Path to find code for developer builds. Relative
paths will be resolved relative to the environment.
clone (bool, default False): Clone the package code to the path.
clone (bool): Clone the package code to the path.
If clone is False Spack will assume the code is already present
at ``path``.
@ -1552,7 +1553,7 @@ def install_all(self, args=None, **install_args):
that needs to be done separately with a call to write().
Args:
args (Namespace): argparse namespace with command arguments
args (argparse.Namespace): argparse namespace with command arguments
install_args (dict): keyword install arguments
"""
self.install_specs(None, args=args, **install_args)

View File

@ -1254,8 +1254,9 @@ def __init__(self, **kwargs):
@property
def hg(self):
""":returns: The hg executable
:rtype: Executable
"""
Returns:
Executable: the hg executable
"""
if not self._hg:
self._hg = which('hg', required=True)
@ -1405,7 +1406,7 @@ def from_kwargs(**kwargs):
``version()`` directive in a package.
Returns:
fetch_strategy: The fetch strategy that matches the args, based
typing.Callable: The fetch strategy that matches the args, based
on attribute names (e.g., ``git``, ``hg``, etc.)
Raises:

View File

@ -19,12 +19,15 @@ class SpecHashDescriptor(object):
We currently use different hashes for different use cases.
"""
hash_types = ('_dag_hash', '_build_hash', '_full_hash')
hash_types = ('_dag_hash', '_build_hash', '_full_hash', '_package_hash')
def __init__(self, deptype=('link', 'run'), package_hash=False, attr=None):
def __init__(self, deptype=('link', 'run'), package_hash=False, attr=None,
override=None):
self.deptype = dp.canonical_deptype(deptype)
self.package_hash = package_hash
self.attr = attr
# Allow spec hashes to have an alternate computation method
self.override = override
#: Default Hash descriptor, used by Spec.dag_hash() and stored in the DB.
@ -40,3 +43,9 @@ def __init__(self, deptype=('link', 'run'), package_hash=False, attr=None):
#: Full hash used in build pipelines to determine when to rebuild packages.
full_hash = SpecHashDescriptor(
deptype=('build', 'link', 'run'), package_hash=True, attr='_full_hash')
#: Package hash used as part of full hash
package_hash = SpecHashDescriptor(
deptype=(), package_hash=True, attr='_package_hash',
override=lambda s: s.package.content_hash())

View File

@ -28,7 +28,7 @@ def get_escaped_text_output(filename):
filename (str): path to the file
Returns:
(list of str): escaped text lines read from the file
list: escaped text lines read from the file
"""
with open(filename, 'r') as f:
# Ensure special characters are escaped as needed

View File

@ -93,7 +93,7 @@ def _check_last_phase(pkg):
package already.
Args:
pkg (PackageBase): the package being installed
pkg (spack.package.PackageBase): the package being installed
Raises:
``BadInstallPhase`` if stop_before or last phase is invalid
@ -115,10 +115,11 @@ def _handle_external_and_upstream(pkg, explicit):
database if it is external package.
Args:
pkg (Package): the package whose installation is under consideration
pkg (spack.package.Package): the package whose installation is under
consideration
explicit (bool): the package was explicitly requested by the user
Return:
(bool): ``True`` if the package is external or upstream (so not to
bool: ``True`` if the package is external or upstream (so not to
be installed locally), otherwise, ``True``
"""
# For external packages the workflow is simplified, and basically
@ -148,7 +149,7 @@ def _do_fake_install(pkg):
and libraries.
Args:
pkg (PackageBase): the package whose installation is to be faked
pkg (spack.package.PackageBase): the package whose installation is to be faked
"""
command = pkg.name
@ -194,15 +195,14 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
compiler (CompilerSpec): the compiler to bootstrap
architecture (ArchSpec): the architecture for which to boostrap the
compiler
pkgs (list of PackageBase): the packages that may need their compiler
pkgs (list): the packages that may need their compiler
installed
Return:
(list) list of tuples, (PackageBase, bool), for concretized compiler-
-related packages that need to be installed and bool values
specify whether the package is the bootstrap compiler
(``True``) or one of its dependencies (``False``). The list
will be empty if there are no compilers.
list: list of tuples, (PackageBase, bool), for concretized compiler-related
packages that need to be installed and bool values specify whether the
package is the bootstrap compiler (``True``) or one of its dependencies
(``False``). The list will be empty if there are no compilers.
"""
tty.debug('Bootstrapping {0} compiler'.format(compiler))
compilers = spack.compilers.compilers_for_spec(
@ -260,7 +260,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
Extract the package from binary cache
Args:
pkg (PackageBase): the package to install from the binary cache
pkg (spack.package.PackageBase): the package to install from the binary cache
cache_only (bool): only extract from binary cache
explicit (bool): ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False``
@ -268,7 +268,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
otherwise, ``False``
Return:
(bool) ``True`` if the package was extract from binary cache,
bool: ``True`` if the package was extract from binary cache,
``False`` otherwise
"""
installed_from_cache = _try_install_from_binary_cache(
@ -350,8 +350,8 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
Process the binary cache tarball.
Args:
pkg (PackageBase): the package being installed
binary_spec (Spec): the spec whose cache has been confirmed
pkg (spack.package.PackageBase): the package being installed
binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@ -359,7 +359,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
attempting to download the tarball
Return:
(bool) ``True`` if the package was extracted from binary cache,
bool: ``True`` if the package was extracted from binary cache,
else ``False``
"""
tarball = binary_distribution.download_tarball(
@ -385,7 +385,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
Try to extract the package from binary cache.
Args:
pkg (PackageBase): the package to be extracted from binary cache
pkg (spack.package.PackageBase): the package to be extracted from binary cache
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@ -423,7 +423,7 @@ def combine_phase_logs(phase_log_files, log_path):
Args:
phase_log_files (list): a list or iterator of logs to combine
log_path (path): the path to combine them to
log_path (str): the path to combine them to
"""
with open(log_path, 'w') as log_file:
@ -441,7 +441,7 @@ def dump_packages(spec, path):
node in the DAG.
Args:
spec (Spec): the Spack spec whose package information is to be dumped
spec (spack.spec.Spec): the Spack spec whose package information is to be dumped
path (str): the path to the build packages directory
"""
fs.mkdirp(path)
@ -498,10 +498,10 @@ def get_dependent_ids(spec):
Return a list of package ids for the spec's dependents
Args:
spec (Spec): Concretized spec
spec (spack.spec.Spec): Concretized spec
Returns:
(list of str): list of package ids
list: list of package ids
"""
return [package_id(d.package) for d in spec.dependents()]
@ -512,10 +512,10 @@ def install_msg(name, pid):
Args:
name (str): Name/id of the package being installed
pid (id): id of the installer process
pid (int): id of the installer process
Return:
(str) Colorized installing message
str: Colorized installing message
"""
pre = '{0}: '.format(pid) if tty.show_pid() else ''
return pre + colorize('@*{Installing} @*g{%s}' % name)
@ -526,7 +526,7 @@ def log(pkg):
Copy provenance into the install directory on success
Args:
pkg (Package): the package that was built and installed
pkg (spack.package.Package): the package that was built and installed
"""
packages_dir = spack.store.layout.build_packages_path(pkg.spec)
@ -608,7 +608,8 @@ def package_id(pkg):
and packages for combinatorial environments.
Args:
pkg (PackageBase): the package from which the identifier is derived
pkg (spack.package.PackageBase): the package from which the identifier is
derived
"""
if not pkg.spec.concrete:
raise ValueError("Cannot provide a unique, readable id when "
@ -631,11 +632,11 @@ def __init__(self, installs=[]):
""" Initialize the installer.
Args:
installs (list of (pkg, install_args)): list of tuples, where each
installs (list): list of tuples, where each
tuple consists of a package (PackageBase) and its associated
install arguments (dict)
Return:
(PackageInstaller) instance
PackageInstaller: instance
"""
# List of build requests
self.build_requests = [BuildRequest(pkg, install_args)
@ -691,7 +692,8 @@ def _add_bootstrap_compilers(
Args:
compiler: the compiler to boostrap
architecture: the architecture for which to bootstrap the compiler
pkgs (PackageBase): the package with possible compiler dependencies
pkgs (spack.package.PackageBase): the package with possible compiler
dependencies
request (BuildRequest): the associated install request
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
@ -707,7 +709,7 @@ def _add_init_task(self, pkg, request, is_compiler, all_deps):
Creates and queus the initial build task for the package.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
@ -726,7 +728,7 @@ def _check_db(self, spec):
"""Determine if the spec is flagged as installed in the database
Args:
spec (Spec): spec whose database install status is being checked
spec (spack.spec.Spec): spec whose database install status is being checked
Return:
(rec, installed_in_db) tuple where rec is the database record, or
@ -887,7 +889,7 @@ def _cleanup_task(self, pkg):
Cleanup the build task for the spec
Args:
pkg (PackageBase): the package being installed
pkg (spack.package.PackageBase): the package being installed
"""
self._remove_task(package_id(pkg))
@ -901,7 +903,7 @@ def _ensure_install_ready(self, pkg):
already locked.
Args:
pkg (PackageBase): the package being locally installed
pkg (spack.package.PackageBase): the package being locally installed
"""
pkg_id = package_id(pkg)
pre = "{0} cannot be installed locally:".format(pkg_id)
@ -933,7 +935,7 @@ def _ensure_locked(self, lock_type, pkg):
Args:
lock_type (str): 'read' for a read lock, 'write' for a write lock
pkg (PackageBase): the package whose spec is being installed
pkg (spack.package.PackageBase): the package whose spec is being installed
Return:
(lock_type, lock) tuple where lock will be None if it could not
@ -1294,7 +1296,7 @@ def _setup_install_dir(self, pkg):
Write a small metadata file with the current spack environment.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
"""
if not os.path.exists(pkg.spec.prefix):
tty.verbose('Creating the installation directory {0}'
@ -1369,9 +1371,9 @@ def _flag_installed(self, pkg, dependent_ids=None):
known dependents.
Args:
pkg (Package): Package that has been installed locally, externally
or upstream
dependent_ids (list of str or None): list of the package's
pkg (spack.package.Package): Package that has been installed locally,
externally or upstream
dependent_ids (list or None): list of the package's
dependent ids, or None if the dependent ids are limited to
those maintained in the package (dependency DAG)
"""
@ -1422,7 +1424,7 @@ def install(self):
Install the requested package(s) and or associated dependencies.
Args:
pkg (Package): the package to be built and installed"""
pkg (spack.package.Package): the package to be built and installed"""
self._init_queue()
fail_fast_err = 'Terminating after first install failure'
@ -1692,6 +1694,12 @@ def build_process(pkg, kwargs):
verbose = kwargs.get('verbose', False)
timer = Timer()
# If we are using a padded path, filter the output to compress padded paths
# The real log still has full-length paths.
filter_padding = spack.config.get("config:install_tree:padded_length", None)
filter_fn = spack.util.path.padding_filter if filter_padding else None
if not fake:
if not skip_patch:
pkg.do_patch()
@ -1764,8 +1772,10 @@ def build_process(pkg, kwargs):
try:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
with log_output(log_file, echo, True,
env=unmodified_env) as logger:
with log_output(
log_file, echo, True, env=unmodified_env,
filter_fn=filter_fn
) as logger:
with logger.force_echo():
inner_debug_level = tty.debug_level()
@ -1825,7 +1835,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
Instantiate a build task for a package.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
request (BuildRequest or None): the associated install request
where ``None`` can be used to indicate the package was
explicitly requested by the user
@ -1833,7 +1843,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
start (int): the initial start time for the package, in seconds
attempts (int): the number of attempts to install the package
status (str): the installation status
installed (list of str): the identifiers of packages that have
installed (list): the identifiers of packages that have
been installed so far
"""
@ -1975,7 +1985,7 @@ def flag_installed(self, installed):
Ensure the dependency is not considered to still be uninstalled.
Args:
installed (list of str): the identifiers of packages that have
installed (list): the identifiers of packages that have
been installed so far
"""
now_installed = self.uninstalled_deps & set(installed)
@ -2016,7 +2026,7 @@ def __init__(self, pkg, install_args):
Instantiate a build request for a package.
Args:
pkg (Package): the package to be built and installed
pkg (spack.package.Package): the package to be built and installed
install_args (dict): the install arguments associated with ``pkg``
"""
# Ensure dealing with a package that has a concrete spec
@ -2091,10 +2101,11 @@ def get_deptypes(self, pkg):
"""Determine the required dependency types for the associated package.
Args:
pkg (PackageBase): explicit or implicit package being installed
pkg (spack.package.PackageBase): explicit or implicit package being
installed
Returns:
(tuple) required dependency type(s) for the package
tuple: required dependency type(s) for the package
"""
deptypes = ['link', 'run']
include_build_deps = self.install_args.get('include_build_deps')
@ -2113,10 +2124,11 @@ def run_tests(self, pkg):
"""Determine if the tests should be run for the provided packages
Args:
pkg (PackageBase): explicit or implicit package being installed
pkg (spack.package.PackageBase): explicit or implicit package being
installed
Returns:
(bool) ``True`` if they should be run; ``False`` otherwise
bool: ``True`` if they should be run; ``False`` otherwise
"""
tests = self.install_args.get('tests', False)
return tests is True or (tests and pkg.name in tests)

View File

@ -530,7 +530,7 @@ def __call__(self, *argv, **kwargs):
"""Invoke this SpackCommand.
Args:
argv (list of str): command line arguments.
argv (list): command line arguments.
Keyword Args:
fail_on_error (optional bool): Don't raise an exception on error
@ -625,7 +625,7 @@ def print_setup_info(*info):
"""Print basic information needed by setup-env.[c]sh.
Args:
info (list of str): list of things to print: comma-separated list
info (list): list of things to print: comma-separated list
of 'csh', 'sh', or 'modules'
This is in ``main.py`` to make it fast; the setup scripts need to
@ -689,7 +689,7 @@ def main(argv=None):
"""This is the entry point for the Spack command.
Args:
argv (list of str or None): command line arguments, NOT including
argv (list or None): command line arguments, NOT including
the executable name. If None, parses from sys.argv.
"""
# Create a parser with a simple positional argument first. We'll
@ -782,7 +782,10 @@ def main(argv=None):
raise
sys.stderr.write('\n')
tty.error("Keyboard interrupt.")
return signal.SIGINT.value
if sys.version_info >= (3, 5):
return signal.SIGINT.value
else:
return signal.SIGINT
except SystemExit as e:
if spack.config.get('config:debug'):

View File

@ -19,7 +19,8 @@
import llnl.util.filesystem
__all__ = [
'filter_compiler_wrappers'
'filter_compiler_wrappers',
'PackageMixinsMeta',
]

View File

@ -30,7 +30,7 @@ def configuration(module_set_name):
return config
#: Caches the configuration {spec_hash: configuration}
# Caches the configuration {spec_hash: configuration}
configuration_registry = {} # type: Dict[str, Any]

View File

@ -29,7 +29,7 @@ def configuration(module_set_name):
return config
#: Caches the configuration {spec_hash: configuration}
# Caches the configuration {spec_hash: configuration}
configuration_registry = {} # type: Dict[str, Any]

View File

@ -465,7 +465,7 @@ def test_log_pathname(test_stage, spec):
Args:
test_stage (str): path to the test stage directory
spec (Spec): instance of the spec under test
spec (spack.spec.Spec): instance of the spec under test
Returns:
(str): the pathname of the test log file
@ -725,14 +725,14 @@ def possible_dependencies(
"""Return dict of possible dependencies of this package.
Args:
transitive (bool, optional): return all transitive dependencies if
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True)..
expand_virtuals (bool, optional): expand virtual dependencies into
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
deptype (str or tuple, optional): dependency types to consider
visited (dicct, optional): dict of names of dependencies visited so
deptype (str or tuple or None): dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict, optional): dict to populate with packages and their
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
@ -1756,7 +1756,7 @@ def cache_extra_test_sources(self, srcs):
during install testing.
Args:
srcs (str or list of str): relative path for files and or
srcs (str or list): relative path for files and or
subdirectories located in the staged source path that are to
be copied to the corresponding location(s) under the install
testing directory.
@ -1803,10 +1803,10 @@ def run_test(self, exe, options=[], expected=[], status=0,
Args:
exe (str): the name of the executable
options (str or list of str): list of options to pass to the runner
expected (str or list of str): list of expected output strings.
options (str or list): list of options to pass to the runner
expected (str or list): list of expected output strings.
Each string is a regex expected to match part of the output.
status (int or list of int): possible passing status values
status (int or list): possible passing status values
with 0 meaning the test is expected to succeed
installed (bool): if ``True``, the executable must be in the
install prefix
@ -2010,9 +2010,9 @@ def setup_build_environment(self, env):
Spack's store.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the package is built. Package authors can call
methods on it to alter the build environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
legacy_fn = self._get_legacy_environment_method('setup_environment')
if legacy_fn:
@ -2023,9 +2023,9 @@ def setup_run_environment(self, env):
"""Sets up the run environment for a package.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the package is run. Package authors can call
methods on it to alter the run environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is run. Package authors
can call methods on it to alter the run environment.
"""
legacy_fn = self._get_legacy_environment_method('setup_environment')
if legacy_fn:
@ -2052,11 +2052,11 @@ def setup_dependent_build_environment(self, env, dependent_spec):
variable.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the dependent package is built. Package authors
can call methods on it to alter the build environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec (Spec): the spec of the dependent package
dependent_spec (spack.spec.Spec): the spec of the dependent package
about to be built. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
@ -2079,11 +2079,11 @@ def setup_dependent_run_environment(self, env, dependent_spec):
for dependencies.
Args:
env (EnvironmentModifications): environment modifications to be
applied when the dependent package is run. Package authors
can call methods on it to alter the build environment.
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is run.
Package authors can call methods on it to alter the build environment.
dependent_spec (Spec): The spec of the dependent package
dependent_spec (spack.spec.Spec): The spec of the dependent package
about to be run. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
@ -2125,7 +2125,7 @@ def setup_dependent_package(self, module, dependent_spec):
object of the dependent package. Packages can use this to set
module-scope variables for the dependent to use.
dependent_spec (Spec): The spec of the dependent package
dependent_spec (spack.spec.Spec): The spec of the dependent package
about to be built. This allows the extendee (self) to
query the dependent's state. Note that *this*
package's spec is available as ``self.spec``.

View File

@ -28,7 +28,7 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
Args:
stage (spack.stage.Stage): stage with code that will be patched
patch_path (str): filesystem location for the patch to apply
level (int, optional): patch level (default 1)
level (int or None): patch level (default 1)
working_dir (str): relative path *within* the stage to change to
(default '.')
"""

View File

@ -11,10 +11,10 @@
"""
import os
from llnl.util.filesystem import ancestor
import llnl.util.filesystem
#: This file lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
prefix = llnl.util.filesystem.ancestor(__file__, 4)
#: synonym for prefix
spack_root = prefix
@ -53,7 +53,6 @@
#: User configuration location
user_config_path = os.path.expanduser('~/.spack')
user_bootstrap_path = os.path.join(user_config_path, 'bootstrap')
user_bootstrap_store = os.path.join(user_bootstrap_path, 'store')
reports_path = os.path.join(user_config_path, "reports")
monitor_path = os.path.join(reports_path, "monitor")

View File

@ -869,7 +869,7 @@ def is_relocatable(spec):
"""Returns True if an installed spec is relocatable.
Args:
spec (Spec): spec to be analyzed
spec (spack.spec.Spec): spec to be analyzed
Returns:
True if the binaries of an installed spec

View File

@ -662,7 +662,7 @@ def repo_for_pkg(self, spec):
if namespace:
fullspace = get_full_namespace(namespace)
if fullspace not in self.by_namespace:
raise UnknownNamespaceError(spec.namespace)
raise UnknownNamespaceError(namespace)
return self.by_namespace[fullspace]
# If there's no namespace, search in the RepoPath.

View File

@ -0,0 +1,26 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for bootstrap.yaml configuration file."""

#: Properties for inclusion in other schemas.
#: ``enable`` toggles bootstrapping; ``root`` sets where the
#: bootstrap store lives on disk.
properties = {
    'bootstrap': {
        'type': 'object',
        'properties': {
            'enable': {'type': 'boolean'},
            'root': {'type': 'string'},
        },
    },
}

#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    'title': 'Spack bootstrap configuration file schema',
    'type': 'object',
    'additionalProperties': False,
    'properties': properties,
}

View File

@ -10,6 +10,7 @@
"""
from llnl.util.lang import union_dicts
import spack.schema.bootstrap
import spack.schema.cdash
import spack.schema.compilers
import spack.schema.config
@ -23,6 +24,7 @@
#: Properties for inclusion in other schemas
properties = union_dicts(
spack.schema.bootstrap.properties,
spack.schema.cdash.properties,
spack.schema.compilers.properties,
spack.schema.config.properties,

View File

@ -679,14 +679,14 @@ def condition(self, required_spec, imposed_spec=None, name=None):
"""Generate facts for a dependency or virtual provider condition.
Arguments:
required_spec (Spec): the spec that triggers this condition
imposed_spec (optional, Spec): the sepc with constraints that
required_spec (spack.spec.Spec): the spec that triggers this condition
imposed_spec (spack.spec.Spec or None): the spec with constraints that
are imposed when this condition is triggered
name (optional, str): name for `required_spec` (required if
name (str or None): name for `required_spec` (required if
required_spec is anonymous, ignored if not)
Returns:
(int): id of the condition created by this function
int: id of the condition created by this function
"""
named_cond = required_spec.copy()
named_cond.name = named_cond.name or name
@ -922,7 +922,7 @@ def spec_clauses(self, spec, body=False, transitive=True):
"""Return a list of clauses for a spec mandates are true.
Arguments:
spec (Spec): the spec to analyze
spec (spack.spec.Spec): the spec to analyze
body (bool): if True, generate clauses to be used in rule bodies
(final values) instead of rule heads (setters).
transitive (bool): if False, don't generate clauses from

View File

@ -122,7 +122,9 @@
__all__ = [
'CompilerSpec',
'Spec',
'SpecParser',
'parse',
'SpecParseError',
'DuplicateDependencyError',
@ -143,7 +145,9 @@
'AmbiguousHashError',
'InvalidHashError',
'NoSuchHashError',
'RedundantSpecError']
'RedundantSpecError',
'SpecDeprecatedError',
]
#: Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@ -1030,9 +1034,8 @@ class Spec(object):
#: Cache for spec's prefix, computed lazily in the corresponding property
_prefix = None
def __init__(self, spec_like=None,
normal=False, concrete=False, external_path=None,
external_modules=None, full_hash=None):
def __init__(self, spec_like=None, normal=False,
concrete=False, external_path=None, external_modules=None):
"""Create a new Spec.
Arguments:
@ -1046,8 +1049,6 @@ def __init__(self, spec_like=None,
self._concrete = concrete
self.external_path = external_path
self.external_module = external_module
self._full_hash = full_hash
"""
# Copy if spec_like is a Spec.
@ -1068,6 +1069,8 @@ def __init__(self, spec_like=None,
self._hash = None
self._build_hash = None
self._full_hash = None
self._package_hash = None
self._dunder_hash = None
self._package = None
@ -1082,7 +1085,6 @@ def __init__(self, spec_like=None,
self._concrete = concrete
self.external_path = external_path
self.external_modules = Spec._format_module_list(external_modules)
self._full_hash = full_hash
# Older spack versions did not compute full_hash or build_hash,
# and we may not have the necessary information to recompute them
@ -1497,10 +1499,12 @@ def _spec_hash(self, hash):
"""Utility method for computing different types of Spec hashes.
Arguments:
hash (SpecHashDescriptor): type of hash to generate.
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
"""
# TODO: currently we strip build dependencies by default. Rethink
# this when we move to using package hashing on all specs.
if hash.override is not None:
return hash.override(self)
node_dict = self.to_node_dict(hash=hash)
yaml_text = syaml.dump(node_dict, default_flow_style=True)
return spack.util.hash.b32_hash(yaml_text)
@ -1513,7 +1517,7 @@ def _cached_hash(self, hash, length=None):
in the supplied attribute on this spec.
Arguments:
hash (SpecHashDescriptor): type of hash to generate.
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
"""
if not hash.attr:
return self._spec_hash(hash)[:length]
@ -1528,6 +1532,10 @@ def _cached_hash(self, hash, length=None):
return hash_string[:length]
def package_hash(self):
"""Compute the hash of the contents of the package for this node"""
return self._cached_hash(ht.package_hash)
def dag_hash(self, length=None):
"""This is Spack's default hash, used to identify installations.
@ -1611,7 +1619,7 @@ def to_node_dict(self, hash=ht.dag_hash):
hashes).
Arguments:
hash (SpecHashDescriptor) type of hash to generate.
hash (spack.hash_types.SpecHashDescriptor) type of hash to generate.
"""
d = syaml.syaml_dict()
@ -1653,7 +1661,7 @@ def to_node_dict(self, hash=ht.dag_hash):
d['patches'] = variant._patches_in_order_of_appearance
if hash.package_hash:
package_hash = self.package.content_hash()
package_hash = self.package_hash()
# Full hashes are in bytes
if (not isinstance(package_hash, six.text_type)
@ -1822,6 +1830,7 @@ def from_node_dict(node):
spec._hash = node.get('hash', None)
spec._build_hash = node.get('build_hash', None)
spec._full_hash = node.get('full_hash', None)
spec._package_hash = node.get('package_hash', None)
if 'version' in node or 'versions' in node:
spec.versions = vn.VersionList.from_dict(node)
@ -2982,7 +2991,7 @@ def ensure_valid_variants(spec):
spec (Spec): spec to be analyzed
Raises:
UnknownVariantError: on the first unknown variant found
spack.variant.UnknownVariantError: on the first unknown variant found
"""
pkg_cls = spec.package_class
pkg_variants = pkg_cls.variants
@ -3441,12 +3450,14 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
self._dunder_hash = other._dunder_hash
self._normal = other._normal
self._full_hash = other._full_hash
self._package_hash = other._package_hash
else:
self._hash = None
self._build_hash = None
self._dunder_hash = None
self._normal = False
self._full_hash = None
self._package_hash = None
return changed
@ -4281,19 +4292,22 @@ def splice(self, other, transitive):
# _dependents of these specs should not be trusted.
# Variants may also be ignored here for now...
# Keep all cached hashes because we will invalidate the ones that need
# invalidating later, and we don't want to invalidate unnecessarily
if transitive:
self_nodes = dict((s.name, s.copy(deps=False))
self_nodes = dict((s.name, s.copy(deps=False, caches=True))
for s in self.traverse(root=True)
if s.name not in other)
other_nodes = dict((s.name, s.copy(deps=False))
other_nodes = dict((s.name, s.copy(deps=False, caches=True))
for s in other.traverse(root=True))
else:
# If we're not doing a transitive splice, then we only want the
# root of other.
self_nodes = dict((s.name, s.copy(deps=False))
self_nodes = dict((s.name, s.copy(deps=False, caches=True))
for s in self.traverse(root=True)
if s.name != other.name)
other_nodes = {other.name: other.copy(deps=False)}
other_nodes = {other.name: other.copy(deps=False, caches=True)}
nodes = other_nodes.copy()
nodes.update(self_nodes)
@ -4314,17 +4328,41 @@ def splice(self, other, transitive):
if any(dep not in other_nodes for dep in dependencies):
nodes[name].build_spec = other[name].build_spec
# Clear cached hashes
nodes[self.name].clear_cached_hashes()
ret = nodes[self.name]
# Clear cached hashes for all affected nodes
# Do not touch unaffected nodes
for dep in ret.traverse(root=True, order='post'):
opposite = other_nodes if dep.name in self_nodes else self_nodes
if any(name in dep for name in opposite.keys()):
# Record whether hashes are already cached
# So we don't try to compute a hash from insufficient
# provenance later
has_build_hash = getattr(dep, ht.build_hash.attr, None)
has_full_hash = getattr(dep, ht.full_hash.attr, None)
# package hash cannot be affected by splice
dep.clear_cached_hashes(ignore=['_package_hash'])
# Since this is a concrete spec, we want to make sure hashes
# are cached writing specs only writes cached hashes in case
# the spec is too old to have full provenance for these hashes,
# so we can't rely on doing it at write time.
if has_build_hash:
_ = dep.build_hash()
if has_full_hash:
_ = dep.full_hash()
return nodes[self.name]
def clear_cached_hashes(self):
def clear_cached_hashes(self, ignore=()):
"""
Clears all cached hashes in a Spec, while preserving other properties.
"""
for attr in ht.SpecHashDescriptor.hash_types:
if hasattr(self, attr):
setattr(self, attr, None)
if attr not in ignore:
if hasattr(self, attr):
setattr(self, attr, None)
def __hash__(self):
# If the spec is concrete, we leverage the DAG hash and just use
@ -4403,6 +4441,7 @@ def __init__(self):
class SpecParser(spack.parse.Parser):
"""Parses specs."""
def __init__(self, initial_spec=None):
"""Construct a new SpecParser.

View File

@ -565,8 +565,9 @@ def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached
Arguments:
mirror (MirrorCache): the mirror to cache this Stage's resource in
stats (MirrorStats): this is updated depending on whether the
mirror (spack.caches.MirrorCache): the mirror to cache this Stage's
resource in
stats (spack.mirror.MirrorStats): this is updated depending on whether the
caching operation succeeded or failed
"""
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
@ -835,7 +836,7 @@ def get_checksums_for_versions(
Args:
url_dict (dict): A dictionary of the form: version -> URL
name (str): The name of the package
first_stage_function (callable): function that takes a Stage and a URL;
first_stage_function (typing.Callable): function that takes a Stage and a URL;
this is run on the stage of the first URL downloaded
keep_stage (bool): whether to keep staging area when command completes
batch (bool): whether to ask user how many versions to fetch (false)

View File

@ -193,6 +193,7 @@ def deserialize(token):
def _store():
"""Get the singleton store instance."""
import spack.bootstrap
config_dict = spack.config.get('config')
root, unpadded_root, projections = parse_install_tree(config_dict)
hash_length = spack.config.get('config:install_hash_length')
@ -201,7 +202,8 @@ def _store():
# reserved by Spack to bootstrap its own dependencies, since this would
# lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
# user installed software)
if spack.paths.user_bootstrap_store == root:
enable_bootstrap = spack.config.get('bootstrap:enable', True)
if enable_bootstrap and spack.bootstrap.store_path() == root:
msg = ('please change the install tree root "{0}" in your '
'configuration [path reserved for Spack internal use]')
raise ValueError(msg.format(root))

View File

@ -10,12 +10,15 @@
import py
import pytest
import llnl.util.filesystem as fs
import spack.binary_distribution as bindist
import spack.config
import spack.hooks.sbang as sbang
import spack.main
import spack.mirror
import spack.repo
import spack.spec as spec
import spack.store
import spack.util.gpg
import spack.util.web as web_util
@ -589,3 +592,55 @@ def test_update_sbang(tmpdir, test_mirror):
open(str(installed_script_style_2_path)).read()
uninstall_cmd('-y', '/%s' % new_spec.dag_hash())
@pytest.mark.usefixtures(
    'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
)
def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
    """Ensure spack buildcache update-index properly fixes up spec.yaml
    files on the mirror when updating the buildcache index."""
    # Create a temp mirror directory for buildcache usage
    mirror_dir = tmpdir.join('mirror_dir')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
    spack.config.set('mirrors', {'test': mirror_url})

    # 'a' depends on 'b' in the mock package repository
    a = Spec('a').concretized()
    b = Spec('b').concretized()
    new_b_full_hash = 'abcdef'

    # Install package a with dep b
    install_cmd('--no-cache', a.name)

    # Create a buildcache for a and its dep b, and update index
    buildcache_cmd('create', '-uad', mirror_dir.strpath, a.name)
    buildcache_cmd('update-index', '-d', mirror_dir.strpath)

    # Simulate an update to b that only affects full hash by simply overwriting
    # the full hash in the spec.yaml file on the mirror
    b_spec_yaml_name = bindist.tarball_name(b, '.spec.yaml')
    b_spec_yaml_path = os.path.join(mirror_dir.strpath,
                                    bindist.build_cache_relative_path(),
                                    b_spec_yaml_name)
    fs.filter_file(r"full_hash:\s[^\s]+$",
                   "full_hash: {0}".format(new_b_full_hash),
                   b_spec_yaml_path)

    # When we update the index, spack should notice that a's notion of the
    # full hash of b doesn't match b's notion of its own full hash, and as
    # a result, spack should fix the spec.yaml for a
    buildcache_cmd('update-index', '-d', mirror_dir.strpath)

    # Read in the concrete spec yaml of a
    a_spec_yaml_name = bindist.tarball_name(a, '.spec.yaml')
    a_spec_yaml_path = os.path.join(mirror_dir.strpath,
                                    bindist.build_cache_relative_path(),
                                    a_spec_yaml_name)

    # Turn concrete spec yaml into a concrete spec (a)
    with open(a_spec_yaml_path) as fd:
        a_prime = spec.Spec.from_yaml(fd.read())

    # Make sure the full hash of b in a's spec yaml matches the new value
    assert(a_prime[b.name].full_hash() == new_b_full_hash)

View File

@ -6,6 +6,7 @@
import spack.bootstrap
import spack.store
import spack.util.path
@pytest.mark.regression('22294')
@ -22,5 +23,29 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
# Test that within the context manager we use the bootstrap store
# and that outside we restore the correct location
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.store.root == spack.paths.user_bootstrap_store
assert spack.store.root == spack.bootstrap.store_path()
assert spack.store.root == user_path
@pytest.mark.parametrize('config_value,expected', [
    # Absolute path without expansion
    ('/opt/spack/bootstrap', '/opt/spack/bootstrap/store'),
    # Path with placeholder
    ('$spack/opt/bootstrap', '$spack/opt/bootstrap/store'),
])
def test_store_path_customization(config_value, expected, mutable_config):
    """The bootstrap store path must honor the configured bootstrap root."""
    # Point the bootstrap root at a custom location
    spack.config.set('bootstrap:root', config_value)

    # The store path is the canonicalized '<root>/store'
    computed_store = spack.bootstrap.store_path()
    assert computed_store == spack.util.path.canonicalize_path(expected)
def test_raising_exception_if_bootstrap_disabled(mutable_config):
    """Querying the bootstrap store path must fail when bootstrap is off."""
    # Turn bootstrapping off in the configuration
    spack.config.set('bootstrap:enable', False)

    # Asking for the store path must now raise with a clear message
    with pytest.raises(RuntimeError, match='bootstrapping is currently disabled'):
        spack.bootstrap.store_path()

View File

@ -0,0 +1,101 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import pytest
import spack.config
import spack.environment
import spack.main
_bootstrap = spack.main.SpackCommand('bootstrap')
@pytest.mark.parametrize('scope', [
    None, 'site', 'system', 'user'
])
def test_enable_and_disable(mutable_config, scope):
    """'spack bootstrap enable/disable' must persist in the requested scope."""
    extra_args = ['--scope={0}'.format(scope)] if scope else []

    _bootstrap('enable', *extra_args)
    assert spack.config.get('bootstrap:enable', scope=scope) is True

    _bootstrap('disable', *extra_args)
    assert spack.config.get('bootstrap:enable', scope=scope) is False
@pytest.mark.parametrize('scope', [
    None, 'site', 'system', 'user'
])
def test_root_get_and_set(mutable_config, scope):
    """A root set via 'spack bootstrap root' must be read back unchanged."""
    path = '/scratch/spack/bootstrap'
    extra_args = ['--scope={0}'.format(scope)] if scope else []

    # Write the new root, then query it back through the same command
    _bootstrap('root', path, *extra_args)
    reported = _bootstrap('root', *extra_args, output=str)
    assert reported.strip() == path
@pytest.mark.parametrize('scopes', [
    ('site',),
    ('system', 'user')
])
def test_reset_in_file_scopes(mutable_config, scopes):
    """'spack bootstrap reset' must remove bootstrap.yaml from every scope."""
    # Disabling bootstrap creates a bootstrap.yaml in each requested scope
    created_files = []
    for current_scope in scopes:
        _bootstrap('disable', '--scope={0}'.format(current_scope))
        scope_dir = spack.config.config.scopes[current_scope].path
        yaml_file = os.path.join(scope_dir, 'bootstrap.yaml')
        assert os.path.exists(yaml_file)
        created_files.append(yaml_file)

    # Resetting must delete all of them
    _bootstrap('reset', '-y')
    for yaml_file in created_files:
        assert not os.path.exists(yaml_file)
def test_reset_in_environment(mutable_mock_env_path, mutable_config):
    """Resetting inside an environment clears bootstrap config but must not
    delete the environment's spack.yaml file."""
    env = spack.main.SpackCommand('env')
    env('create', 'bootstrap-test')
    current_environment = spack.environment.read('bootstrap-test')

    with current_environment:
        _bootstrap('disable')
        assert spack.config.get('bootstrap:enable') is False

        _bootstrap('reset', '-y')
        # We have no default settings in tests
        assert spack.config.get('bootstrap:enable') is None

    # Check that reset didn't delete the entire file
    spack_yaml = os.path.join(current_environment.path, 'spack.yaml')
    assert os.path.exists(spack_yaml)
def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
    """A second reset must overwrite (not refuse to touch) existing .bkp files."""
    # Create a bootstrap.yaml with some config in the site scope
    _bootstrap('disable', '--scope=site')
    site_scope_dir = spack.config.config.scopes['site'].path
    yaml_file = os.path.join(site_scope_dir, 'bootstrap.yaml')
    assert os.path.exists(yaml_file)

    # Resetting moves the file aside to a backup
    _bootstrap('reset', '-y')
    backup_file = yaml_file + '.bkp'
    assert not os.path.exists(yaml_file)
    assert os.path.exists(backup_file)

    # Do the same cycle again: the pre-existing backup must be overwritten
    _bootstrap('disable', '--scope=site')
    assert os.path.exists(yaml_file)
    assert os.path.exists(backup_file)

    _bootstrap('reset', '-y')
    assert not os.path.exists(yaml_file)
    assert os.path.exists(backup_file)

View File

@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import filecmp
import os
import shutil
import sys
@ -17,8 +18,20 @@
from spack.cmd.style import changed_files
from spack.util.executable import which
#: directory with sample style files
style_data = os.path.join(spack.paths.test_path, 'data', 'style')
style = spack.main.SpackCommand("style")
# spack style requires git to run -- skip the tests if it's not there
pytestmark = pytest.mark.skipif(not which('git'), reason='requires git')
# The style tools have requirements to use newer Python versions. We simplify by
# requiring Python 3.6 or higher to run spack style.
skip_old_python = pytest.mark.skipif(
sys.version_info < (3, 6), reason='requires Python 3.6 or higher')
@pytest.fixture(scope="function")
def flake8_package():
@ -50,6 +63,9 @@ def flake8_package_with_errors(scope="function"):
shutil.copy(filename, tmp)
package = FileFilter(filename)
package.filter("state = 'unmodified'", "state = 'modified'", string=True)
package.filter(
"from spack import *", "from spack import *\nimport os", string=True
)
yield filename
finally:
shutil.move(tmp, filename)
@ -65,6 +81,24 @@ def test_changed_files(flake8_package):
assert flake8_package in files
def test_changed_no_base(tmpdir, capfd):
    """Ensure that we fail gracefully with no base branch."""
    # Minimal spack-looking layout so the command accepts the directory
    tmpdir.join("bin").ensure("spack")

    git = which("git", required=True)
    with tmpdir.as_cwd():
        # Build a one-commit repository with no 'foobar' branch
        git("init")
        git("config", "user.name", "test user")
        git("config", "user.email", "test@user.com")
        git("add", ".")
        git("commit", "-m", "initial commit")

        # Asking for changes against a missing base must exit with an error
        with pytest.raises(SystemExit):
            changed_files(base="foobar")

    _, err = capfd.readouterr()
    assert "This repository does not have a 'foobar' branch." in err
def test_changed_files_all_files(flake8_package):
# it's hard to guarantee "all files", so do some sanity checks.
files = set([
@ -92,12 +126,134 @@ def test_changed_files_all_files(flake8_package):
assert not any(f.startswith(spack.paths.external_path) for f in files)
# As of flake8 3.0.0, Python 2.6 and 3.3 are no longer supported
# http://flake8.pycqa.org/en/latest/release-notes/3.0.0.html
# NOTE(review): this rebinds the module-level ``skip_old_python`` marker
# defined earlier (which skips for Python < 3.6) with a looser condition;
# tests defined after this point see this definition -- confirm which
# condition is intended to win.
skip_old_python = pytest.mark.skipif(
    sys.version_info[:2] <= (2, 6) or (3, 0) <= sys.version_info[:2] <= (3, 3),
    reason="flake8 no longer supports Python 2.6 or 3.3 and older",
)
@pytest.mark.skipif(sys.version_info >= (3, 6), reason="doesn't apply to newer python")
def test_fail_on_old_python():
    """On end-of-life Pythons, ``spack style`` should fail rather than crash."""
    # fail_on_error=False lets us inspect the return code instead of raising
    style(fail_on_error=False)
    assert style.returncode != 0
@skip_old_python
def test_bad_root(tmpdir):
    """``spack style`` must refuse to run on a directory that is not a spack
    prefix (an empty tmpdir here)."""
    style("--root", str(tmpdir), fail_on_error=False)
    assert style.returncode != 0
def test_style_is_package(tmpdir):
    """``is_package()`` should accept builtin package files and reject
    core spack code and vendored externals."""
    package_paths = (
        "var/spack/repos/builtin/packages/hdf5/package.py",
        "var/spack/repos/builtin/packages/zlib/package.py",
    )
    non_package_paths = (
        "lib/spack/spack/spec.py",
        "lib/spack/external/pytest.py",
    )
    for path in package_paths:
        assert spack.cmd.style.is_package(path)
    for path in non_package_paths:
        assert not spack.cmd.style.is_package(path)
@skip_old_python
def test_bad_bootstrap(monkeypatch):
    """``spack style`` should fail cleanly when its tools can't be bootstrapped."""
    # an unknown package name forces concretization (and thus bootstrap) to fail
    bogus_tools = [("foobartool", "foobartool")]
    monkeypatch.setattr(spack.cmd.style, "tool_order", bogus_tools)

    style(fail_on_error=False)
    assert style.returncode != 0
@pytest.fixture
def external_style_root(flake8_package_with_errors, tmpdir):
    """Build a mock spack-looking git repository for ``spack style`` runs.

    Yields a ``(root, buggy_file)`` pair: ``root`` is the fake prefix, and
    ``buggy_file`` is a python file with style errors committed on a
    ``feature`` branch on top of a ``develop`` base branch.
    """
    git = which("git", required=True)

    # sketch out just enough of a spack prefix to pass the root checks
    (tmpdir / "bin" / "spack").ensure()
    spack_dir = tmpdir / "lib" / "spack" / "spack"
    spack_dir.ensure("__init__.py")
    (tmpdir / "lib" / "spack" / "llnl").ensure("__init__.py")

    py_file = spack_dir / "dummy.py"

    with tmpdir.as_cwd():
        # commit the skeleton on a 'develop' base branch
        git("init")
        git("config", "user.name", "test user")
        git("config", "user.email", "test@user.com")
        git("add", ".")
        git("commit", "-m", "initial commit")
        git("branch", "-m", "develop")

        # the buggy file lands only on a 'feature' branch
        git("checkout", "-b", "feature")
        py_file.ensure()
        shutil.copy(flake8_package_with_errors, str(py_file))
        git("add", str(py_file))
        git("commit", "-m", "add new file")

    yield tmpdir, py_file
@skip_old_python
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_fix_style(external_style_root):
    """``spack style --fix`` (black only) should rewrite a broken file so it
    matches the known-fixed reference copy."""
    tmpdir, py_file = external_style_root

    # plant a known-broken file next to the committed sources
    broken_py = str(tmpdir / "lib" / "spack" / "spack" / "broken.py")
    fixed_py = os.path.join(style_data, "fixed.py")
    shutil.copy(os.path.join(style_data, "broken.dummy"), broken_py)
    assert not filecmp.cmp(broken_py, fixed_py)

    # mypy and flake8 only report, they can't fix -- run the fixers alone
    output = style(
        "--root", str(tmpdir),
        "--no-mypy",
        "--no-flake8",
        "--black",
        "--fix",
    )
    print(output)

    # after fixing, the broken file must be byte-identical to the reference
    assert filecmp.cmp(broken_py, fixed_py)
@skip_old_python
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_external_root(external_style_root):
    """Running with ``--root`` should report issues from the external tree,
    not from the real spack prefix, without any configuration files there."""
    tmpdir, py_file = external_style_root

    output = style(
        "--root-relative", "--black", "--root", str(tmpdir),
        fail_on_error=False
    )
    assert style.returncode != 0

    # one expected complaint per tool, all pointing into the external root
    expected_fragments = [
        # isort
        "%s Imports are incorrectly sorted" % str(py_file),
        # mypy
        'lib/spack/spack/dummy.py:10: error: Name "Package" is not defined',
        # black emits a unified diff for the file it would reformat
        "--- lib/spack/spack/dummy.py",
        "+++ lib/spack/spack/dummy.py",
        # flake8
        "lib/spack/spack/dummy.py:7: [F401] 'os' imported but unused",
    ]
    for fragment in expected_fragments:
        assert fragment in output
@skip_old_python
@ -138,7 +294,7 @@ def test_style_with_errors(flake8_package_with_errors):
root_relative = os.path.relpath(flake8_package_with_errors, spack.paths.prefix)
output = style("--root-relative", flake8_package_with_errors, fail_on_error=False)
assert root_relative in output
assert style.returncode == 1
assert style.returncode != 0
assert "spack style found errors" in output
@ -148,5 +304,5 @@ def test_style_with_errors(flake8_package_with_errors):
def test_style_with_black(flake8_package_with_errors):
output = style("--black", flake8_package_with_errors, fail_on_error=False)
assert "black found errors" in output
assert style.returncode == 1
assert style.returncode != 0
assert "spack style found errors" in output

View File

@ -0,0 +1,12 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import os
def this_is_a_function():
"""This is a docstring."""
def this_should_be_offset():
sys.stdout.write(os.name)

View File

@ -0,0 +1,14 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import sys
def this_is_a_function():
"""This is a docstring."""
def this_should_be_offset():
sys.stdout.write(os.name)

View File

@ -65,7 +65,7 @@ def create_build_task(pkg, install_args={}):
Create a built task for the given (concretized) package
Args:
pkg (PackageBase): concretized package associated with the task
pkg (spack.package.PackageBase): concretized package associated with the task
install_args (dict): dictionary of kwargs (or install args)
Return:
@ -80,10 +80,10 @@ def create_installer(installer_args):
Create an installer using the concretized spec for each arg
Args:
installer_args (list of tuples): the list of (spec name, kwargs) tuples
installer_args (list): the list of (spec name, kwargs) tuples
Return:
installer (PackageInstaller): the associated package installer
spack.installer.PackageInstaller: the associated package installer
"""
const_arg = [(spec.package, kwargs) for spec, kwargs in installer_args]
return inst.PackageInstaller(const_arg)
@ -93,11 +93,11 @@ def installer_args(spec_names, kwargs={}):
"""Return a the installer argument with each spec paired with kwargs
Args:
spec_names (list of str): list of spec names
spec_names (list): list of spec names
kwargs (dict or None): install arguments to apply to all of the specs
Returns:
list of (spec, kwargs): the installer constructor argument
list: list of (spec, kwargs), the installer constructor argument
"""
arg = []
for name in spec_names:

View File

@ -78,6 +78,39 @@ def test_log_python_output_and_echo_output(capfd, tmpdir):
assert capfd.readouterr()[0] == 'force echo\n'
def _log_filter_fn(string):
return string.replace("foo", "bar")
def test_log_output_with_filter(capfd, tmpdir):
    """``filter_fn`` rewrites only the echoed output; the log file on disk
    always keeps the raw, unfiltered text."""
    lines = ['foo blah', 'blah foo', 'foo foo']
    raw = 'foo blah\nblah foo\nfoo foo\n'

    # without echo: file is unfiltered and nothing reaches the terminal
    with tmpdir.as_cwd():
        with log_output('foo.txt', filter_fn=_log_filter_fn):
            for line in lines:
                print(line)

        with open('foo.txt') as f:
            assert f.read() == raw
    assert capfd.readouterr()[0] == ''

    # with echo: file still unfiltered, but the echoed text is filtered
    with tmpdir.as_cwd():
        with log_output('foo.txt', echo=True, filter_fn=_log_filter_fn):
            for line in lines:
                print(line)

        with open('foo.txt') as f:
            assert f.read() == raw
    assert capfd.readouterr()[0] == 'bar blah\nblah bar\nbar bar\n'
@pytest.mark.skipif(not which('echo'), reason="needs echo command")
def test_log_subproc_and_echo_output_no_capfd(capfd, tmpdir):
echo = which('echo')

View File

@ -1027,6 +1027,36 @@ def test_splice(self, transitive):
# Finally, the spec should know it's been spliced:
assert out.spliced
@pytest.mark.parametrize('transitive', [True, False])
def test_splice_with_cached_hashes(self, transitive):
    """Splicing must recompute full/build hashes instead of reusing any
    stale cached values on the input specs."""
    spec = Spec('splice-t')
    dep = Spec('splice-h+foo')
    spec.concretize()
    dep.concretize()

    # plant fake cached hashes so we can tell if splice() reuses them
    fake_hashes = [
        (spec, 'aaaaaa'),
        (dep, 'bbbbbb'),
        (spec['splice-h'], 'cccccc'),
        (spec['splice-z'], 'dddddd'),
        (dep['splice-z'], 'eeeeee'),
    ]
    for node, fake in fake_hashes:
        node._full_hash = fake
        node._build_hash = fake

    out = spec.splice(dep, transitive=transitive)
    # a transitive splice takes splice-z from the dependency, otherwise
    # it is kept from the original spec
    out_z_expected = (dep if transitive else spec)['splice-z']

    assert out.full_hash() != spec.full_hash()
    assert (out['splice-h'].full_hash() == dep.full_hash()) == transitive
    assert out['splice-z'].full_hash() == out_z_expected.full_hash()

    assert out.build_hash() != spec.build_hash()
    assert (out['splice-h'].build_hash() == dep.build_hash()) == transitive
    assert out['splice-z'].build_hash() == out_z_expected.build_hash()
@pytest.mark.parametrize('transitive', [True, False])
def test_splice_input_unchanged(self, transitive):
spec = Spec('splice-t').concretized()

View File

@ -0,0 +1,59 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import spack.util.path as sup
#: Some lines with lots of placeholders
padded_lines = [
"==> [2021-06-23-15:59:05.020387] './configure' '--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga", # noqa: E501
"/Users/gamblin2/Workspace/spack/lib/spack/env/clang/clang -dynamiclib -install_name /Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga/lib/libz.1.dylib -compatibility_version 1 -current_version 1.2.11 -fPIC -O2 -fPIC -DHAVE_HIDDEN -o libz.1.2.11.dylib adler32.lo crc32.lo deflate.lo infback.lo inffast.lo inflate.lo inftrees.lo trees.lo zutil.lo compress.lo uncompr.lo gzclose.lo gzlib.lo gzread.lo gzwrite.lo -lc", # noqa: E501
"rm -f /Users/gamblin2/padding-log-test/opt/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_placeholder__/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga/lib/libz.a", # noqa: E501
]
#: unpadded versions of padded_lines, with [padded-to-X-chars] replacing the padding
fixed_lines = [
"==> [2021-06-23-15:59:05.020387] './configure' '--prefix=/Users/gamblin2/padding-log-test/opt/[padded-to-512-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga", # noqa: E501
"/Users/gamblin2/Workspace/spack/lib/spack/env/clang/clang -dynamiclib -install_name /Users/gamblin2/padding-log-test/opt/[padded-to-512-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga/lib/libz.1.dylib -compatibility_version 1 -current_version 1.2.11 -fPIC -O2 -fPIC -DHAVE_HIDDEN -o libz.1.2.11.dylib adler32.lo crc32.lo deflate.lo infback.lo inffast.lo inflate.lo inftrees.lo trees.lo zutil.lo compress.lo uncompr.lo gzclose.lo gzlib.lo gzread.lo gzwrite.lo -lc", # noqa: E501
"rm -f /Users/gamblin2/padding-log-test/opt/[padded-to-512-chars]/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga/lib/libz.a", # noqa: E501
]
@pytest.mark.parametrize("padded,fixed", zip(padded_lines, fixed_lines))
def test_padding_substitution(padded, fixed):
    """Each fully padded sample line collapses to its fixed counterpart."""
    result = sup.padding_filter(padded)
    assert result == fixed
def test_no_substitution():
    """Lines lacking one full placeholder path component pass through unchanged."""
    partial = (
        "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_pla/"
        "darwin-bigsur-skylake/apple-clang-12.0.5/"
        "zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"
    )
    filtered = sup.padding_filter(partial)
    assert filtered == partial
def test_short_substitution():
    """A single full placeholder path component is replaced with the marker."""
    tail = (
        "darwin-bigsur-skylake/apple-clang-12.0.5/"
        "zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"
    )
    prefix = "--prefix=/Users/gamblin2/padding-log-test/opt/"
    short = prefix + "__spack_path_placeholder__/" + tail
    expected = prefix + "[padded-to-63-chars]/" + tail
    assert sup.padding_filter(short) == expected
def test_partial_substitution():
    """A full placeholder followed by a truncated one is replaced as a unit."""
    tail = (
        "darwin-bigsur-skylake/apple-clang-12.0.5/"
        "zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'"
    )
    prefix = "--prefix=/Users/gamblin2/padding-log-test/opt/"
    short = prefix + "__spack_path_placeholder__/__spack_p/" + tail
    expected = prefix + "[padded-to-73-chars]/" + tail
    assert sup.padding_filter(short) == expected
def test_longest_prefix_re():
    """``longest_prefix_re`` should emit capturing or non-capturing regexes
    matching the expected nested-optional structure."""
    cases = [
        (True, "(s(?:t(?:r(?:i(?:ng?)?)?)?)?)"),
        (False, "(?:s(?:t(?:r(?:i(?:ng?)?)?)?)?)"),
    ]
    for capture, expected in cases:
        assert sup.longest_prefix_re("string", capture=capture) == expected

View File

@ -279,3 +279,8 @@ def mock_create_s3_session(url):
fake_s3_url_does_not_exist = 's3://my-bucket/subdirectory/my-notfound-file'
assert(not spack.util.web.url_exists(fake_s3_url_does_not_exist))
def test_s3_url_parsing():
    """Bare endpoints get an https scheme; explicit schemes are preserved."""
    parse = spack.util.s3._parse_s3_endpoint_url
    assert parse("example.com") == 'https://example.com'
    assert parse("http://example.com") == 'http://example.com'

View File

@ -417,11 +417,11 @@ def parse_version_offset(path):
path (str): The filename or URL for the package
Returns:
tuple of (Version, int, int, int, str): A tuple containing:
tuple: A tuple containing:
version of the package,
first index of version,
length of version string,
the index of the matching regex
the index of the matching regex,
the matching regex
Raises:
@ -632,11 +632,11 @@ def parse_name_offset(path, v=None):
v (str): The version of the package
Returns:
tuple of (str, int, int, int, str): A tuple containing:
tuple: A tuple containing:
name of the package,
first index of name,
length of name,
the index of the matching regex
the index of the matching regex,
the matching regex
Raises:
@ -774,9 +774,7 @@ def parse_name_and_version(path):
path (str): The filename or URL for the package
Returns:
tuple of (str, Version)A tuple containing:
The name of the package
The version of the package
tuple: a tuple containing the package (name, version)
Raises:
UndetectableVersionError: If the URL does not match any regexes

View File

@ -18,7 +18,7 @@ def prefix_inspections(platform):
"""Get list of prefix inspections for platform
Arguments:
platform (string): the name of the platform to consider. The platform
platform (str): the name of the platform to consider. The platform
determines what environment variables Spack will use for some
inspections.

View File

@ -64,7 +64,7 @@ def editor(*args, **kwargs):
searching the full list above, we'll raise an error.
Arguments:
args (list of str): args to pass to editor
args (list): args to pass to editor
Optional Arguments:
_exec_func (function): invoke this function instead of ``os.execv()``

View File

@ -623,7 +623,7 @@ def from_sourcing_file(filename, *arguments, **kwargs):
Args:
filename (str): the file to be sourced
*arguments (list of str): arguments to pass on the command line
*arguments (list): arguments to pass on the command line
Keyword Args:
shell (str): the shell to use (default: ``bash``)
@ -867,7 +867,7 @@ def inspect_path(root, inspections, exclude=None):
modifications are not performed immediately, but stored in a
command object that is returned to client
exclude (callable): optional callable. If present it must accept an
exclude (typing.Callable): optional callable. If present it must accept an
absolute path and return True if it should be excluded from the
inspection
@ -920,7 +920,7 @@ def preserve_environment(*variables):
explicitly unset on exit.
Args:
variables (list of str): list of environment variables to be preserved
variables (list): list of environment variables to be preserved
"""
cache = {}
for var in variables:
@ -1031,9 +1031,9 @@ def sanitize(environment, blacklist, whitelist):
Args:
environment (dict): input dictionary
blacklist (list of str): literals or regex patterns to be
blacklist (list): literals or regex patterns to be
blacklisted
whitelist (list of str): literals or regex patterns to be
whitelist (list): literals or regex patterns to be
whitelisted
"""

View File

@ -297,7 +297,7 @@ def which(*args, **kwargs):
*args (str): One or more executables to search for
Keyword Arguments:
path (:func:`list` or str): The path to search. Defaults to ``PATH``
path (list or str): The path to search. Defaults to ``PATH``
required (bool): If set to True, raise an error if executable not found
Returns:

View File

@ -28,11 +28,11 @@ def load_source(full_name, path, prepend=None):
Args:
full_name (str): full name of the module to be loaded
path (str): path to the file that should be loaded
prepend (str, optional): some optional code to prepend to the
prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements
Returns:
(ModuleType): the loaded module
the loaded module
"""
with import_lock():
if prepend is None:

View File

@ -37,11 +37,11 @@ def load_source(full_name, path, prepend=None):
Args:
full_name (str): full name of the module to be loaded
path (str): path to the file that should be loaded
prepend (str, optional): some optional code to prepend to the
prepend (str or None): some optional code to prepend to the
loaded module; e.g., can be used to inject import statements
Returns:
(ModuleType): the loaded module
the loaded module
"""
# use our custom loader
loader = PrependFileLoader(full_name, path, prepend)

View File

@ -20,7 +20,7 @@ def parse_log_events(stream, context=6, jobs=None, profile=False):
"""Extract interesting events from a log file as a list of LogEvent.
Args:
stream (str or fileobject): build log name or file object
stream (str or typing.IO): build log name or file object
context (int): lines of context to extract around each log event
jobs (int): number of jobs to parse with; default ncpus
profile (bool): print out profile information for parsing
@ -60,7 +60,7 @@ def make_log_context(log_events, width=None):
"""Get error context from a log file.
Args:
log_events (list of LogEvent): list of events created by
log_events (list): list of events created by
``ctest_log_parser.parse()``
width (int or None): wrap width; ``0`` for no limit; ``None`` to
auto-size for terminal

View File

@ -49,10 +49,10 @@ def is_directive(self, node):
callbacks are sometimes represented).
Args:
node (AST): the AST node being checked
node (ast.AST): the AST node being checked
Returns:
(bool): ``True`` if the node represents a known directive,
bool: ``True`` if the node represents a known directive,
``False`` otherwise
"""
return (isinstance(node, ast.Expr) and

View File

@ -44,7 +44,11 @@
# ---------------------
# total -> 300
SPACK_MAX_INSTALL_PATH_LENGTH = 300
SPACK_PATH_PADDING_CHARS = 'spack_path_placeholder'
#: Padded paths comprise directories with this name (or some prefix of it).
#: It starts with two underscores to make it unlikely that prefix matches would
#: include some other component of the installation path.
SPACK_PATH_PADDING_CHARS = '__spack_path_placeholder__'
@memoized
@ -158,3 +162,73 @@ def canonicalize_path(path):
tty.debug("Using current working directory as base for abspath")
return os.path.normpath(path)
def longest_prefix_re(string, capture=True):
    """Return a regular expression that matches the longest possible prefix
    of ``string``.

    i.e., if the input string is ``the_quick_brown_fox``, then::

        m = re.compile(longest_prefix_re('the_quick_brown_fox'))
        m.match('the_').group(1) == 'the_'
        m.match('the_quick').group(1) == 'the_quick'
        m.match('the_quick_brown_fox').group(1) == 'the_quick_brown_fox'
        m.match('the_xquick_brown_fox').group(1) == 'the_'
        m.match('the_quickx_brown_fox').group(1) == 'the_quick'

    Args:
        string (str): string whose prefixes the regex should match
        capture (bool): whether the outermost group captures (default True)

    Returns:
        str: source text of the regular expression
    """
    # strings shorter than two characters are returned verbatim (no group
    # is added in that case)
    if len(string) < 2:
        return string

    # build the nested optional groups from the innermost character outward
    inner = string[-1]
    for char in reversed(string[1:-1]):
        inner = "(?:%s%s?)" % (char, inner)

    opener = "(" if capture else "(?:"
    return "%s%s%s?)" % (opener, string[0], inner)
#: lazily-built regex cache used by padding_filter()
_filter_re = None


def padding_filter(string):
    """Filter used to collapse path-padding placeholders in log output.

    This turns paths like this:

        /foo/bar/__spack_path_placeholder__/__spack_path_placeholder__/...

    Into paths like this:

        /foo/bar/[padded-to-512-chars]/...

    where ``padded-to-512-chars`` indicates that the prefix was padded with
    placeholders until it hit 512 characters. The actual number depends on
    what the ``install_tree``'s ``padded_length`` is configured to.

    For a path to match and be filtered, the placeholder must appear in its
    entirety at least one time. e.g., ``"/spack/"`` would not be filtered,
    but ``"/__spack_path_placeholder__/spack/"`` would be.
    """
    global _filter_re

    pad = spack.util.path.SPACK_PATH_PADDING_CHARS
    if _filter_re is None:
        # build and cache the (fairly large) pattern on first use
        prefix = longest_prefix_re(pad)
        parts = [
            r"((?:/[^/\s]*)*?)",           # lazy run of non-whitespace path components
            r"(/{pad})+",                  # one or more full placeholder components
            r"(/{longest_prefix})?(?=/)",  # optional trailing prefix of the placeholder
        ]
        pattern = "".join(parts)
        pattern = pattern.replace("/", os.sep)
        pattern = pattern.format(pad=pad, longest_prefix=prefix)
        _filter_re = re.compile(pattern)

    def _replace(match):
        # group(1) is everything before the padding; group(0) spans the
        # whole padded run, whose length we report
        return "%s%s[padded-to-%d-chars]" % (
            match.group(1),
            os.sep,
            len(match.group(0))
        )

    return _filter_re.sub(_replace, string)

View File

@ -41,7 +41,7 @@ def composite(interface=None, method_list=None, container=list):
interface (type): class exposing the interface to which the
composite object must conform. Only non-private and
non-special methods will be taken into account
method_list (list of str): names of methods that should be part
method_list (list): names of methods that should be part
of the composite
container (MutableSequence): container for the composite object
(default = list). Must fulfill the MutableSequence

View File

@ -11,6 +11,13 @@
import spack.util.url as url_util
def _parse_s3_endpoint_url(endpoint_url):
if not urllib_parse.urlparse(endpoint_url, scheme='').scheme:
endpoint_url = '://'.join(('https', endpoint_url))
return endpoint_url
def create_s3_session(url):
url = url_util.parse(url)
if url.scheme != 's3':
@ -30,10 +37,7 @@ def create_s3_session(url):
endpoint_url = os.environ.get('S3_ENDPOINT_URL')
if endpoint_url:
if urllib_parse.urlparse(endpoint_url, scheme=None).scheme is None:
endpoint_url = '://'.join(('https', endpoint_url))
s3_client_args['endpoint_url'] = endpoint_url
s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url)
# if no access credentials provided above, then access anonymously
if not session.get_credentials():

View File

@ -41,7 +41,7 @@ def plural(n, singular, plural=None, show_n=True):
Arguments:
n (int): number of things there are
singular (str): singular form of word
plural (str, optional): optional plural form, for when it's not just
plural (str or None): optional plural form, for when it's not just
singular + 's'
show_n (bool): whether to include n in the result string (default True)

View File

@ -367,7 +367,7 @@ def spider(root_urls, depth=0, concurrency=32):
up to <depth> levels of links from each root.
Args:
root_urls (str or list of str): root urls used as a starting point
root_urls (str or list): root urls used as a starting point
for spidering
depth (int): level of recursion into links
concurrency (int): number of simultaneous requests that can be sent

View File

@ -94,8 +94,8 @@ def validate_or_raise(self, vspec, pkg=None):
exception if any error is found.
Args:
vspec (VariantSpec): instance to be validated
pkg (Package): the package that required the validation,
vspec (Variant): instance to be validated
pkg (spack.package.Package): the package that required the validation,
if available
Raises:
@ -254,7 +254,7 @@ def value(self):
the variant.
Returns:
tuple of str: values stored in the variant
tuple: values stored in the variant
"""
return self._value
@ -296,7 +296,7 @@ def copy(self):
"""Returns an instance of a variant equivalent to self
Returns:
any variant type: a copy of self
AbstractVariant: a copy of self
>>> a = MultiValuedVariant('foo', True)
>>> b = a.copy()
@ -667,7 +667,7 @@ class DisjointSetsOfValues(Sequence):
and therefore no other set can contain the item ``'none'``.
Args:
*sets (list of tuples): mutually exclusive sets of values
*sets (list): mutually exclusive sets of values
"""
_empty_set = set(('none',))

View File

@ -12,3 +12,83 @@ known_archspec = "archspec"
known_llnl = "llnl"
src_paths = "lib"
honor_noqa = true
# Static type checking (mypy >= 0.900 reads its config from pyproject.toml).
[tool.mypy]
# Keep the version quoted: mypy documents python_version as a string, and a
# bare TOML float would mangle versions like 3.10 into 3.1.
python_version = "3.7"
files = ['lib/spack/llnl/**/*.py', 'lib/spack/spack/**/*.py']
mypy_path = ['bin', 'lib/spack', 'lib/spack/external', 'var/spack/repos/builtin']

# This and a generated import file allows supporting packages
namespace_packages = true

# To avoid re-factoring all the externals, ignore errors and missing imports
# globally, then turn back on in spack and spack submodules
ignore_errors = true
ignore_missing_imports = true

[[tool.mypy.overrides]]
module = 'spack.*'
ignore_errors = false
ignore_missing_imports = false

[[tool.mypy.overrides]]
module = 'packages.*'
ignore_errors = false
ignore_missing_imports = false

[[tool.mypy.overrides]]
module = 'llnl.*'
ignore_errors = false
ignore_missing_imports = false

[[tool.mypy.overrides]]
module = 'spack.test.packages'
ignore_errors = true

# ignore errors in fake import path for packages
[[tool.mypy.overrides]]
module = 'spack.pkg.*'
ignore_errors = true
ignore_missing_imports = true

# jinja has syntax in it that requires python3 and causes a parse error
# skip importing it
[[tool.mypy.overrides]]
module = 'jinja2'
follow_imports = 'skip'

# Coverage measurement (requires coverage[toml] so coverage.py can read this).
[tool.coverage.run]
parallel = true
concurrency = ["multiprocessing"]
branch = true
source = ["bin", "lib"]
omit = [
    'lib/spack/spack/test/*',
    'lib/spack/docs/*',
    'lib/spack/external/*',
    'share/spack/qa/*',
]

[tool.coverage.report]
# Regexes for lines to exclude from consideration
exclude_lines = [
    # Have to re-enable the standard pragma
    'pragma: no cover',
    # Don't complain about missing debug-only code:
    'def __repr__',
    'if self\.debug',
    # Don't complain if tests don't hit defensive assertion code:
    'raise AssertionError',
    'raise NotImplementedError',
    # Don't complain if non-runnable code isn't run:
    'if 0:',
    'if False:',
    'if __name__ == .__main__.:',
]
ignore_errors = true

[tool.coverage.html]
directory = "htmlcov"

View File

@ -9,6 +9,9 @@ ENV DOCKERFILE_BASE=centos \
CURRENTLY_BUILDING_DOCKER_IMAGE=1 \
container=docker
# Make yum usable again with CentOS 6
RUN curl https://www.getpagespeed.com/files/centos6-eol.repo --output /etc/yum.repos.d/CentOS-Base.repo
RUN yum update -y \
&& yum install -y epel-release \
&& yum update -y \
@ -32,7 +35,6 @@ RUN yum update -y \
tcl \
unzip \
which \
&& pip install boto3 \
&& rm -rf /var/cache/yum \
&& yum clean all

Some files were not shown because too many files have changed in this diff Show More