Compare commits
2 Commits
cws/simmod ... bugfix/bin

| Author | SHA1 | Date |
|---|---|---|
|  | d82b537339 |  |
|  | 3d8d1fe924 |  |
4  .github/workflows/bootstrap.yml  vendored

@@ -192,7 +192,7 @@ jobs:
          brew install tree
      - name: Checkout
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+     - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: ${{ matrix.python-version }}
      - name: Bootstrap clingo
@@ -211,7 +211,7 @@ jobs:
    steps:
      - name: Checkout
        uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-     - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+     - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup repo
6  .github/workflows/macos_python.yml  vendored

@@ -30,7 +30,7 @@ jobs:
    runs-on: macos-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: 3.9
    - name: spack install
@@ -46,7 +46,7 @@ jobs:
    timeout-minutes: 700
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: 3.9
    - name: spack install
@@ -60,7 +60,7 @@ jobs:
    runs-on: macos-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: 3.9
    - name: spack install
14  .github/workflows/unit_tests.yaml  vendored

@@ -21,7 +21,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: '3.10'
    - name: Install Python Packages
@@ -39,7 +39,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: '3.10'
    - name: Install Python packages
@@ -114,7 +114,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -179,7 +179,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: '3.10'
    - name: Install System packages
@@ -245,7 +245,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: '3.10'
    - name: Install System packages
@@ -294,7 +294,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -337,7 +337,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
      with:
        python-version: '3.10'
    - name: Install Python packages
14  .github/workflows/windows_python.yml  vendored

@@ -23,7 +23,7 @@ jobs:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python Packages
@@ -41,7 +41,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -63,7 +63,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -83,7 +83,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -103,7 +103,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -128,7 +128,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -159,7 +159,7 @@ jobs:
      run:
        shell: pwsh
    steps:
-   - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+   - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -62,12 +62,11 @@ on these ideas for each distinct build system that Spack supports:

    build_systems/bundlepackage
    build_systems/cudapackage
-   build_systems/custompackage
    build_systems/inteloneapipackage
    build_systems/intelpackage
-   build_systems/multiplepackage
    build_systems/rocmpackage
-   build_systems/sourceforgepackage
+   build_systems/custompackage
+   build_systems/multiplepackage

For reference, the :py:mod:`Build System API docs <spack.build_systems>`
provide a list of build systems and methods/attributes that can be
@@ -48,9 +48,8 @@ important to understand.
**build backend**
   Libraries used to define how to build a wheel. Examples
   include `setuptools <https://setuptools.pypa.io/>`__,
-  `flit <https://flit.readthedocs.io/>`_,
-  `poetry <https://python-poetry.org/>`_, and
-  `hatchling <https://hatch.pypa.io/latest/>`_.
+  `flit <https://flit.readthedocs.io/>`_, and
+  `poetry <https://python-poetry.org/>`_.

^^^^^^^^^^^
Downloading
@@ -327,33 +326,6 @@ for specifying the version requirements. Note that ``~=`` works
differently in poetry than in setuptools and flit for versions that
start with a zero.

-"""""""""
-hatchling
-"""""""""
-
-If the ``pyproject.toml`` lists ``hatchling.build`` as the
-``build-backend``, it uses the hatchling build system. Look for
-dependencies under the following keys:
-
-* ``requires-python``
-
-  This specifies the version of Python that is required
-
-* ``project.dependencies``
-
-  These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
-
-* ``project.optional-dependencies``
-
-  This section includes keys with lists of optional dependencies
-  needed to enable those features. You should add a variant that
-  optionally adds these dependencies. This variant should be ``False``
-  by default.
-
-See https://hatch.pypa.io/latest/config/dependency/ for more
-information.
-
""""""
wheels
""""""
@@ -694,4 +666,3 @@ For more information on build backend tools, see:
* setuptools: https://setuptools.pypa.io/
* flit: https://flit.readthedocs.io/
* poetry: https://python-poetry.org/
-* hatchling: https://hatch.pypa.io/latest/
@@ -1,55 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _sourceforgepackage:

------------------
SourceforgePackage
------------------

``SourceforgePackage`` is a
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
sets the URL based on a list of Sourceforge mirrors listed in
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
Refer to the package source
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.


^^^^^^^
Methods
^^^^^^^

This package provides a method for populating mirror URLs.

**urls**

   This method returns a list of possible URLs for package source.
   It is decorated with `property` so its results are treated as
   a package attribute.

   Refer to
   `<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
   for information on how Spack uses the `urls` attribute during
   fetching.

^^^^^
Usage
^^^^^

This helper package can be added to your package by adding it as a base
class of your package and defining the relative location of an archive
file for one version of your software.

.. code-block:: python
   :emphasize-lines: 1,3

   class MyPackage(AutotoolsPackage, SourceforgePackage):
       ...
       sourceforge_mirror_path = "my-package/mypackage.1.0.0.tar.gz"
       ...

Over 40 packages use the ``SourceforgePackage`` mix-in as of
July 2022, so there are multiple packages to choose from if you want
to see a real example.
@@ -107,6 +107,7 @@ with a high level view of Spack's directory structure:
        llnl/                  <- some general-use libraries

        spack/                 <- spack module; contains Python code
+         analyzers/           <- modules to run analysis on installed packages
          build_systems/       <- modules for different build systems
          cmd/                 <- each file in here is a spack subcommand
          compilers/           <- compiler description files
@@ -241,6 +242,22 @@ Unit tests
  Implements Spack's test suite. Add a module and put its name in
  the test suite in ``__init__.py`` to add more unit tests.

+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Research and Monitoring Modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+:mod:`spack.monitor`
+  Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
+  the ``spack install`` and ``spack analyze`` commands to send build and
+  package metadata up to a `Spack Monitor
+  <https://github.com/spack/spack-monitor>`_ server.
+
+
+:mod:`spack.analyzers`
+  A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
+  that provides base functions to run, save, and (optionally) upload analysis
+  results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
+
^^^^^^^^^^^^^
Other Modules
@@ -284,6 +301,240 @@ Most spack commands look something like this:

The information in Package files is used at all stages in this
process.

Conceptually, packages are overloaded. They contain:

-------------
Stage objects
-------------


.. _writing-analyzers:

-----------------
Writing analyzers
-----------------

To write an analyzer, you should add a new python file to the
analyzers module directory at ``lib/spack/spack/analyzers``.
Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
to add an analyzer class ``Myanalyzer`` you would write it to
``spack/analyzers/myanalyzer.py`` and import and
use the base as follows:

.. code-block:: python

    from .analyzer_base import AnalyzerBase

    class Myanalyzer(AnalyzerBase):


Note that the class name is your module file name, all lowercase
except for the first capital letter. You can look at other analyzers in
that directory for examples. The guide here covers the basic functions needed.

^^^^^^^^^^^^^^^^^^^^^^^^^
Analyzer Output Directory
^^^^^^^^^^^^^^^^^^^^^^^^^

By default, when you run ``spack analyze run``, an analyzer output directory will
be created in your spack user directory in your ``$HOME``. We write output here
because the install directory might not always be writable.

.. code-block:: console

    ~/.spack/
        analyzers

Result files will be written here, organized in subfolders with the same structure
as the package, with each analyzer owning its own subfolder. For example:


.. code-block:: console

    $ tree ~/.spack/analyzers/
    /home/spackuser/.spack/analyzers/
    └── linux-ubuntu20.04-skylake
        └── gcc-9.3.0
            └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
                ├── environment_variables
                │   └── spack-analyzer-environment-variables.json
                ├── install_files
                │   └── spack-analyzer-install-files.json
                └── libabigail
                    └── lib
                        └── spack-analyzer-libabigail-libz.so.1.2.11.xml


Notice that for the libabigail analyzer, since results are generated per object,
we honor the object's folder in case there are equivalently named files in
different folders. The result files are typically written as json so they can be
easily read and uploaded in a future interaction with a monitor.
^^^^^^^^^^^^^^^^^
Analyzer Metadata
^^^^^^^^^^^^^^^^^

Your analyzer is required to have the class attributes ``name``, ``outfile``,
and ``description``. These are printed to the user when they use the subcommand
``spack analyze list-analyzers``. Here is an example.
As mentioned above, note that this analyzer would live in a module named
``libabigail.py`` in the analyzers folder so that the class can be discovered.


.. code-block:: python

    class Libabigail(AnalyzerBase):

        name = "libabigail"
        outfile = "spack-analyzer-libabigail.json"
        description = "Application Binary Interface (ABI) features for objects"


This means that the name and output file should be unique for your analyzer.
Note that "all" cannot be the name of an analyzer, as this key is used to indicate
that the user wants to run all analyzers.

.. _analyzer_run_function:


^^^^^^^^^^^^^^^^^^^^^^^^
An analyzer run Function
^^^^^^^^^^^^^^^^^^^^^^^^

The core of an analyzer is its ``run()`` function, which should accept no
arguments. You can assume your analyzer has the package spec of interest at
``self.spec``, and it's up to the run function to generate whatever analysis
data you need and then return the object with a key as the analyzer name. The
result data should be a list of objects, each with a ``name``, ``analyzer_name``,
``install_file``, and one of ``value`` or ``binary_value``. The install file
should be a relative path, not an absolute path. For example, let's say we
extract a metric called ``metric`` for ``bin/wget`` using our analyzer
``thebest-analyzer``. We might have data that looks like this:

.. code-block:: python

    result = {"name": "metric", "analyzer_name": "thebest-analyzer", "value": "1", "install_file": "bin/wget"}


We'd then return it as follows - note that the key is the analyzer name at ``self.name``.

.. code-block:: python

    return {self.name: result}


This will save the complete result to the analyzer metadata folder, as described
previously. If you want support for adding a different kind of metadata (e.g.,
not associated with an install file) then the monitor server would need to be updated
to support this first.
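For illustration, a minimal sketch of how the two snippets above combine into a
complete ``run()`` (the analyzer name and metric are hypothetical, as above):

    def run(self):
        # One entry per analyzed install file; install_file is prefix-relative
        datum = {"name": "metric", "analyzer_name": self.name,
                 "value": "1", "install_file": "bin/wget"}
        # Key the result by the analyzer name, as save_result() expects
        return {self.name: [datum]}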
^^^^^^^^^^^^^^^^^^^^^^^^^
An analyzer init Function
^^^^^^^^^^^^^^^^^^^^^^^^^

If you don't need any extra dependencies or checks, you can skip defining an analyzer
init function, as the base class will handle it. Typically, it will accept
a spec and an optional output directory (if the user does not want the default
metadata folder for analyzer results). The analyzer init function should call
its parent init, and then do any extra checks or validation that are required to
work. For example:

.. code-block:: python

    def __init__(self, spec, dirname=None):
        super(Myanalyzer, self).__init__(spec, dirname)

        # install extra dependencies, do extra preparation and checks here


At the end of the init, you will have available to you:

- **self.spec**: the spec object
- **self.dirname**: an optional directory name the user has provided at init to save to
- **self.output_dir**: the analyzer metadata directory, where we save by default
- **self.meta_dir**: the path to the package metadata directory (.spack) if you need it

And you can proceed to write your analyzer.
^^^^^^^^^^^^^^^^^^^^^^^
Saving Analyzer Results
^^^^^^^^^^^^^^^^^^^^^^^

The analyzer will have ``save_result`` called with the generated result object
to save it to the filesystem and, if the user has added the ``--monitor`` flag,
to upload it to a monitor server. If your result follows an accepted result
format and you don't need to parse it further, you don't need to add this
function to your class. However, if your result data is large or otherwise
needs additional parsing, you can define it. If you define the function, it
is useful to know about the ``output_dir`` property, which you can join
with your output file relative path of choice:

.. code-block:: python

    outfile = os.path.join(self.output_dir, "my-output-file.txt")


The directory will be provided by the ``output_dir`` property but it won't exist,
so you should create it:


.. code-block:: python

    # Create the output directory
    if not os.path.exists(self._output_dir):
        os.makedirs(self._output_dir)


If you are generating results that match specific files in the package
install directory, you should try to maintain those paths, in case
there are equivalently named files in different directories that would
overwrite one another. As an example of an analyzer with a custom save,
the Libabigail analyzer saves ``*.xml`` files to the analyzer metadata
folder in ``run()``, as they are either binaries, or as xml (text) would
usually be too big to pass in one request. For this reason, the files
are saved during ``run()`` and the filenames added to the result object,
and then when the result object is passed back into ``save_result()``,
we skip saving to the filesystem, and instead read the file and send
each one (separately) to the monitor:


.. code-block:: python

    def save_result(self, result, monitor=None, overwrite=False):
        """ABI results are saved to individual files, so each one needs to be
        read and uploaded. Result here should be the lookup generated in run(),
        the key is the analyzer name, and each value is the result file.
        We currently upload the entire xml as text because libabigail can't
        easily read gzipped xml, but this will be updated when it can.
        """
        if not monitor:
            return

        name = self.spec.package.name

        for obj, filename in result.get(self.name, {}).items():

            # Don't include the prefix
            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

            # We've already saved the results to file during run
            content = spack.monitor.read_file(filename)

            # A result needs an analyzer, value or binary_value, and name
            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
            monitor.send_analyze_metadata(self.spec.package, {"libabigail": [data]})



Notice that this function, if you define it, requires a result object (generated by
``run()``), a monitor (if you want to send), and a boolean ``overwrite`` to be used
to check if a result exists first, and not write to it if the result exists and
overwrite is False. Also notice that since we already saved these files to the
analyzer metadata folder, we return early if a monitor isn't defined, because this
function serves to send results to the monitor. If you haven't saved anything to the
analyzer metadata folder yet, you might want to do that here. You should also use
``tty.info`` to give the user a message of "Writing result to $DIRNAME."
.. _writing-commands:

@@ -448,6 +699,23 @@ with a hook, and this is the purpose of this particular hook. Akin to
``on_phase_success`` we require the same variables - the package that failed,
the name of the phase, and the log file where we might find errors.

"""""""""""""""""""""""""""""""""
``on_analyzer_save(pkg, result)``
"""""""""""""""""""""""""""""""""

After an analyzer has saved some result for a package, this hook is called,
and it provides the package that we just ran the analysis for, along with
the loaded result. Typically, a result is structured to have the name
of the analyzer as key, and the result object that is defined in detail in
:ref:`analyzer_run_function`.

.. code-block:: python

    def on_analyzer_save(pkg, result):
        """given a package and a result...
        """
        print('Do something extra with a package analysis result here')


^^^^^^^^^^^^^^^^^^^^^^
Adding a New Hook Type
@@ -1013,7 +1013,7 @@ The following advanced example shows how generated targets can be used in a

    SPACK ?= spack

-   .PHONY: all clean env
+   .PHONY: all clean fetch env

    all: env

@@ -1022,6 +1022,9 @@ The following advanced example shows how generated targets can be used in a

    env.mk: spack.lock
           $(SPACK) -e . env depfile -o $@ --make-target-prefix spack

+   fetch: spack/fetch
+          $(info Environment fetched!)
+
    env: spack/env
           $(info Environment installed!)

@@ -1034,10 +1037,10 @@ The following advanced example shows how generated targets can be used in a
    endif

When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
-from its rule, which triggers concretization. When done, the generated target
-``spack/env`` is available. In the above example, the ``env`` target uses this generated
-target as a prerequisite, meaning that it can make use of the installed packages in
-its commands.
+from its rule, which triggers concretization. When done, the generated targets
+``spack/fetch`` and ``spack/env`` are available. In the above
+example, the ``env`` target uses the latter as a prerequisite, meaning
+that it can make use of the installed packages in its commands.

As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
the include is conditional.

@@ -1045,6 +1048,7 @@ the include is conditional.

.. note::

-   When including generated ``Makefile``\s, it is important to use
-   the ``--make-target-prefix`` flag and use the non-phony target
-   ``<target-prefix>/env`` as prerequisite, instead of the phony target
-   ``<target-prefix>/all``.
+   When including generated ``Makefile``\s, it is important to use
+   the ``--make-target-prefix`` flag and use the non-phony targets
+   ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
+   prerequisites, instead of the phony targets ``<target-prefix>/all``
+   and ``<target-prefix>/fetch-all`` respectively.
@@ -308,7 +308,7 @@ the variable ``FOOBAR`` will be unset.
spec constraints are instead evaluated top to bottom.

""""""""""""""""""""""""""""""""""""""""""""
-Exclude or include specific module files
+Blacklist or whitelist specific module files
""""""""""""""""""""""""""""""""""""""""""""

You can use anonymous specs also to prevent module files from being written or
@@ -322,8 +322,8 @@ your system. If you write a configuration file like:
   modules:
     default:
       tcl:
-        include: ['gcc', 'llvm'] # include will have precedence over exclude
-        exclude: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
+        whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
+        blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler

you will prevent the generation of module files for any package that
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
@@ -490,7 +490,7 @@ satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.

-.. warning::
+.. warning::
   If Spack is configured to generate multiple default packages in the
   same directory, the last modulefile to be generated will be the
   default module.
@@ -589,7 +589,7 @@ Filter out environment modifications
Modifications to certain environment variables in module files are there by
default, for instance because they are generated by prefix inspections.
If you want to prevent modifications to some environment variables, you can
-do so by using the ``exclude_env_vars``:
+do so by using the environment blacklist:

.. code-block:: yaml

@@ -599,7 +599,7 @@ do so by using the ``exclude_env_vars``:
   all:
     filter:
       # Exclude changes to any of these variables
-      exclude_env_vars: ['CPATH', 'LIBRARY_PATH']
+      environment_blacklist: ['CPATH', 'LIBRARY_PATH']

The configuration above will generate module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``.
@@ -1072,15 +1072,3 @@ def __exit__(self, exc_type, exc_value, tb):
        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True
-
-
-class classproperty(object):
-    """Non-data descriptor to evaluate a class-level property. The function that performs
-    the evaluation is injected at creation time and take an instance (could be None) and
-    an owner (i.e. the class that originated the instance)
-    """
-    def __init__(self, callback):
-        self.callback = callback
-
-    def __get__(self, instance, owner):
-        return self.callback(owner)
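For context, ``classproperty`` (removed above) is a non-data descriptor whose
callback receives the owning class; a minimal usage sketch (hypothetical class,
not from this diff; the import only works on the side that still defines it):

    from llnl.util.lang import classproperty


    class Example(object):
        _name = "example"

        @classproperty
        def fullname(cls):
            # The callback receives the owner class, so no instance is needed
            return "prefix-" + cls._name


    print(Example.fullname)  # -> "prefix-example"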
42  lib/spack/spack/analyzers/__init__.py  Normal file

@@ -0,0 +1,42 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This package contains code for creating analyzers to extract Application
Binary Interface (ABI) information, along with simple analyses that just load
existing metadata.
"""

from __future__ import absolute_import

import llnl.util.tty as tty

import spack.paths
import spack.util.classes

mod_path = spack.paths.analyzers_path
analyzers = spack.util.classes.list_classes("spack.analyzers", mod_path)

# The base analyzer does not have a name, and cannot do dict comprehension
analyzer_types = {}
for a in analyzers:
    if not hasattr(a, "name"):
        continue
    analyzer_types[a.name] = a


def list_all():
    """A helper function to list all analyzers and their descriptions
    """
    for name, analyzer in analyzer_types.items():
        print("%-25s: %-35s" % (name, analyzer.description))


def get_analyzer(name):
    """Courtesy function to retrieve an analyzer, and exit on error if it
    does not exist.
    """
    if name in analyzer_types:
        return analyzer_types[name]
    tty.die("Analyzer %s does not exist" % name)
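A sketch of how this registry might be used from other spack code (the package
name is hypothetical, and the spec is assumed to be installed):

    import spack.analyzers
    import spack.spec

    # Look up an analyzer class by name and run it on an installed spec
    spec = spack.spec.Spec("zlib").concretized()   # assumes zlib is installed
    analyzer = spack.analyzers.get_analyzer("install_files")(spec)
    result = analyzer.run()        # {"install_files": <manifest contents>}
    analyzer.save_result(result)   # writes under ~/.spack/analyzers by default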
116  lib/spack/spack/analyzers/analyzer_base.py  Normal file

@@ -0,0 +1,116 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An analyzer base provides basic functions to run the analysis, save results,
and (optionally) interact with a Spack Monitor
"""

import os

import llnl.util.tty as tty

import spack.config
import spack.hooks
import spack.monitor
import spack.util.path


def get_analyzer_dir(spec, analyzer_dir=None):
    """
    Given a spec, return the directory to save analyzer results.

    We create the directory if it does not exist. We also check that the
    spec has an associated package. An analyzer cannot be run if the spec isn't
    associated with a package. If the user provides a custom analyzer_dir,
    we use it over checking the config and the default at ~/.spack/analyzers
    """
    # An analyzer cannot be run if the spec isn't associated with a package
    if not hasattr(spec, "package") or not spec.package:
        tty.die("A spec can only be analyzed with an associated package.")

    # The top level directory is in the user home, or a custom location
    if not analyzer_dir:
        analyzer_dir = spack.util.path.canonicalize_path(
            spack.config.get('config:analyzers_dir', '~/.spack/analyzers'))

    # We follow the same convention as the spec install (this could be better)
    package_prefix = os.sep.join(spec.package.prefix.split('/')[-3:])
    meta_dir = os.path.join(analyzer_dir, package_prefix)
    return meta_dir


class AnalyzerBase(object):

    def __init__(self, spec, dirname=None):
        """
        Verify that the analyzer has correct metadata.

        An Analyzer is intended to run on one spec install, so the spec
        with its associated package is required on init. The child analyzer
        class should define an init function that super's the init here, and
        also check that the analyzer has all dependencies that it
        needs. If an analyzer subclass does not have dependencies, it does not
        need to define an init. An Analyzer should not be allowed to proceed
        if one or more dependencies are missing. The dirname, if defined,
        is an optional directory name to save to (instead of the default meta
        spack directory).
        """
        self.spec = spec
        self.dirname = dirname
        self.meta_dir = os.path.dirname(spec.package.install_log_path)

        for required in ["name", "outfile", "description"]:
            if not hasattr(self, required):
                tty.die("Please add a %s attribute on the analyzer." % required)

    def run(self):
        """
        Given a spec with an installed package, run the analyzer on it.
        """
        raise NotImplementedError

    @property
    def output_dir(self):
        """
        The full path to the output directory.

        This includes the nested analyzer directory structure. This function
        does not create anything.
        """
        if not hasattr(self, "_output_dir"):
            output_dir = get_analyzer_dir(self.spec, self.dirname)
            self._output_dir = os.path.join(output_dir, self.name)

        return self._output_dir

    def save_result(self, result, overwrite=False):
        """
        Save a result to the associated spack monitor, if defined.

        This function is on the level of the analyzer because it might be
        the case that the result is large (appropriate for a single request)
        or that the data is organized differently (e.g., more than one
        request per result). If an analyzer subclass needs to over-write
        this function with a custom save, that is appropriate to do (see abi).
        """
        # We maintain the structure in json with the analyzer as key so
        # that in the future, we could upload to a monitor server
        if result[self.name]:

            outfile = os.path.join(self.output_dir, self.outfile)

            # Only try to create the results directory if we have a result
            if not os.path.exists(self._output_dir):
                os.makedirs(self._output_dir)

            # Don't overwrite an existing result if overwrite is False
            if os.path.exists(outfile) and not overwrite:
                tty.info("%s exists and overwrite is False, skipping." % outfile)
            else:
                tty.info("Writing result to %s" % outfile)
                spack.monitor.write_json(result[self.name], outfile)

        # This hook runs after a save result
        spack.hooks.on_analyzer_save(self.spec.package, result)
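Given the base class above, a minimal subclass only needs the three required
attributes and a ``run()`` that keys its data by the analyzer name; a
hypothetical example, placed alongside the other analyzer modules:

    from .analyzer_base import AnalyzerBase


    class Hello(AnalyzerBase):
        """Toy analyzer, for illustration only."""

        name = "hello"
        outfile = "spack-analyzer-hello.json"
        description = "says hello for an installed spec"

        def run(self):
            # save_result() looks the data up under self.name
            return {self.name: {"greeting": "hello %s" % self.spec.name}}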
33  lib/spack/spack/analyzers/config_args.py  Normal file

@@ -0,0 +1,33 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""A configargs analyzer is a class of analyzer that typically just uploads
already existing metadata about config args from a package spec install
directory."""


import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class ConfigArgs(AnalyzerBase):

    name = "config_args"
    outfile = "spack-analyzer-config-args.json"
    description = "config args loaded from spack-configure-args.txt"

    def run(self):
        """
        Load the configure-args.txt and save in json.

        The run function will find the spack-config-args.txt file in the
        package install directory, and read it into a json structure that has
        the name of the analyzer as the key.
        """
        config_file = os.path.join(self.meta_dir, "spack-configure-args.txt")
        return {self.name: spack.monitor.read_file(config_file)}
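A sketch of the value a caller would get back (the configure line shown is
hypothetical; ``spec`` as in the registry example above):

    from spack.analyzers.config_args import ConfigArgs

    result = ConfigArgs(spec).run()
    # e.g. {"config_args": "--prefix=/opt/spack/... --enable-shared"}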
54  lib/spack/spack/analyzers/environment_variables.py  Normal file

@@ -0,0 +1,54 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An environment analyzer will read and parse the environment variables
file in the installed package directory, generating a json file that has
an index of key, value pairs for environment variables."""


import os

import llnl.util.tty as tty

from spack.util.environment import EnvironmentModifications

from .analyzer_base import AnalyzerBase


class EnvironmentVariables(AnalyzerBase):

    name = "environment_variables"
    outfile = "spack-analyzer-environment-variables.json"
    description = "environment variables parsed from spack-build-env.txt"

    def run(self):
        """
        Load, parse, and save spack-build-env.txt to analyzers.

        Read in the spack-build-env.txt file from the package install
        directory and parse the environment variables into key value pairs.
        The result should have the key for the analyzer, the name.
        """
        env_file = os.path.join(self.meta_dir, "spack-build-env.txt")
        return {self.name: self._read_environment_file(env_file)}

    def _read_environment_file(self, filename):
        """
        Read and parse the environment file.

        Given an environment file, we want to read it, split by semicolons
        and new lines, and then parse down to the subset of SPACK_* variables.
        We assume that all spack prefix variables are not secrets, and unlike
        the install_manifest.json, we don't (at least to start) parse the values
        to remove path prefixes specific to user systems.
        """
        if not os.path.exists(filename):
            tty.warn("No environment file available")
            return

        mods = EnvironmentModifications.from_sourcing_file(filename)
        env = {}
        mods.apply_modifications(env)
        return env
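The parsed result is a flat variable-to-value index; a sketch with hypothetical
contents (``spec`` as in the registry example above):

    from spack.analyzers.environment_variables import EnvironmentVariables

    result = EnvironmentVariables(spec).run()
    env = result["environment_variables"]
    # e.g. env["SPACK_CC"] == "/usr/bin/gcc"  (parsed from spack-build-env.txt)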
31  lib/spack/spack/analyzers/install_files.py  Normal file

@@ -0,0 +1,31 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""The install files json file (install_manifest.json) already exists in
the package install folder, so this analyzer simply moves it to the user
analyzer folder for further processing."""


import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class InstallFiles(AnalyzerBase):

    name = "install_files"
    outfile = "spack-analyzer-install-files.json"
    description = "install file listing read from install_manifest.json"

    def run(self):
        """
        Load in the install_manifest.json and save to analyzers.

        We write it out to the analyzers folder, with key as the analyzer name.
        """
        manifest_file = os.path.join(self.meta_dir, "install_manifest.json")
        return {self.name: spack.monitor.read_json(manifest_file)}
114  lib/spack/spack/analyzers/libabigail.py  Normal file

@@ -0,0 +1,114 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.tty as tty

import spack
import spack.binary_distribution
import spack.bootstrap
import spack.error
import spack.hooks
import spack.monitor
import spack.package_base
import spack.repo
import spack.util.executable

from .analyzer_base import AnalyzerBase


class Libabigail(AnalyzerBase):

    name = "libabigail"
    outfile = "spack-analyzer-libabigail.json"
    description = "Application Binary Interface (ABI) features for objects"

    def __init__(self, spec, dirname=None):
        """
        init for an analyzer ensures we have all needed dependencies.

        For the libabigail analyzer, this means Libabigail.
        Since the output for libabigail is one file per object, we communicate
        with the monitor multiple times.
        """
        super(Libabigail, self).__init__(spec, dirname)

        # This doesn't seem to work to import on the module level
        tty.debug("Preparing to use Libabigail, will install if missing.")

        with spack.bootstrap.ensure_bootstrap_configuration():
            # libabigail won't install lib/bin/share without docs
            spec = spack.spec.Spec("libabigail+docs")
            spack.bootstrap.ensure_executables_in_path_or_raise(
                ["abidw"], abstract_spec=spec
            )
            self.abidw = spack.util.executable.which('abidw')

    def run(self):
        """
        Run libabigail, and save results to filename.

        This run function differs in that we write as we generate and then
        return a dict with the analyzer name as the key, and the value of a
        dict of results, where the key is the object name, and the value is
        the output file written to.
        """
        manifest = spack.binary_distribution.get_buildfile_manifest(self.spec)

        # This result will store a path to each file
        result = {}

        # Generate an output file for each binary or object
        for obj in manifest.get("binary_to_relocate_fullpath", []):

            # We want to preserve the path in the install directory in case
            # a library has an equivalently named lib or executable, for example
            outdir = os.path.dirname(obj.replace(self.spec.package.prefix,
                                                 '').strip(os.path.sep))
            outfile = "spack-analyzer-libabigail-%s.xml" % os.path.basename(obj)
            outfile = os.path.join(self.output_dir, outdir, outfile)
            outdir = os.path.dirname(outfile)

            # Create the output directory
            if not os.path.exists(outdir):
                os.makedirs(outdir)

            # Sometimes libabigail segfaults and dumps
            try:
                self.abidw(obj, "--out-file", outfile)
                result[obj] = outfile
                tty.info("Writing result to %s" % outfile)
            except spack.error.SpackError:
                tty.warn("Issue running abidw for %s" % obj)

        return {self.name: result}

    def save_result(self, result, overwrite=False):
        """
        Read saved ABI results and upload to monitor server.

        ABI results are saved to individual files, so each one needs to be
        read and uploaded. Result here should be the lookup generated in run(),
        the key is the analyzer name, and each value is the result file.
        We currently upload the entire xml as text because libabigail can't
        easily read gzipped xml, but this will be updated when it can.
        """
        if not spack.monitor.cli:
            return

        name = self.spec.package.name

        for obj, filename in result.get(self.name, {}).items():

            # Don't include the prefix
            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

            # We've already saved the results to file during run
            content = spack.monitor.read_file(filename)

            # A result needs an analyzer, value or binary_value, and name
            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
            spack.hooks.on_analyzer_save(self.spec.package, {"libabigail": [data]})
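Unlike the JSON-per-analyzer cases above, ``run()`` here maps each analyzed
object to the XML file written for it, which ``save_result`` later re-reads and
uploads; a sketch of that shape with hypothetical paths:

    # Hypothetical shape of Libabigail.run()'s return value
    result = {
        "libabigail": {
            "/opt/spack/.../zlib/lib/libz.so.1.2.11":
                "/home/user/.spack/analyzers/.../libabigail/lib/"
                "spack-analyzer-libabigail-libz.so.1.2.11.xml",
        }
    }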
@@ -281,15 +281,15 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
-       pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-       test_callbacks = pkg_cls.build_time_test_callbacks
+       pkg = spack.repo.get(pkg_name)
+       test_callbacks = pkg.build_time_test_callbacks

        if test_callbacks and 'test' in test_callbacks:
            msg = ('{0} package contains "test" method in '
                   'build_time_test_callbacks')
            instr = ('Remove "test" from: [{0}]'
                     .format(', '.join(test_callbacks)))
-           errors.append(error_cls(msg.format(pkg_name), [instr]))
+           errors.append(error_cls(msg.format(pkg.name), [instr]))

    return errors

@@ -304,8 +304,8 @@ def _check_patch_urls(pkgs, error_cls):

    errors = []
    for pkg_name in pkgs:
-       pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-       for condition, patches in pkg_cls.patches.items():
+       pkg = spack.repo.get(pkg_name)
+       for condition, patches in pkg.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
                    continue
@@ -317,7 +317,7 @@ def _check_patch_urls(pkgs, error_cls):
                if not patch.url.endswith(full_index_arg):
                    errors.append(error_cls(
                        "patch URL in package {0} must end with {1}".format(
-                           pkg_cls.name, full_index_arg,
+                           pkg.name, full_index_arg,
                        ),
                        [patch.url],
                    ))
@@ -331,21 +331,21 @@ def _linting_package_file(pkgs, error_cls):
    """
    errors = []
    for pkg_name in pkgs:
-       pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+       pkg = spack.repo.get(pkg_name)

        # Does the homepage have http, and if so, does https work?
-       if pkg_cls.homepage.startswith('http://'):
-           https = re.sub("http", "https", pkg_cls.homepage, 1)
+       if pkg.homepage.startswith('http://'):
+           https = re.sub("http", "https", pkg.homepage, 1)
            try:
                response = urlopen(https)
            except Exception as e:
                msg = 'Error with attempting https for "{0}": '
-               errors.append(error_cls(msg.format(pkg_cls.name), [str(e)]))
+               errors.append(error_cls(msg.format(pkg.name), [str(e)]))
                continue

            if response.getcode() == 200:
                msg = 'Package "{0}" uses http but has a valid https endpoint.'
-               errors.append(msg.format(pkg_cls.name))
+               errors.append(msg.format(pkg.name))

    return llnl.util.lang.dedupe(errors)

@@ -355,10 +355,10 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
-       pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+       pkg = spack.repo.get(pkg_name)

        # Check "conflicts" directive
-       for conflict, triggers in pkg_cls.conflicts.items():
+       for conflict, triggers in pkg.conflicts.items():
            for trigger, _ in triggers:
                vrn = spack.spec.Spec(conflict)
                try:
@@ -371,34 +371,34 @@ def _unknown_variants_in_directives(pkgs, error_cls):
                    # When os and target constraints can be created independently of
                    # the platform, TODO change this back to add an error.
                    errors.extend(_analyze_variants_in_directive(
-                       pkg_cls, spack.spec.Spec(trigger),
+                       pkg, spack.spec.Spec(trigger),
                        directive='conflicts', error_cls=error_cls
                    ))
            errors.extend(_analyze_variants_in_directive(
-               pkg_cls, vrn, directive='conflicts', error_cls=error_cls
+               pkg, vrn, directive='conflicts', error_cls=error_cls
            ))

        # Check "depends_on" directive
-       for _, triggers in pkg_cls.dependencies.items():
+       for _, triggers in pkg.dependencies.items():
            triggers = list(triggers)
            for trigger in list(triggers):
                vrn = spack.spec.Spec(trigger)
                errors.extend(_analyze_variants_in_directive(
-                   pkg_cls, vrn, directive='depends_on', error_cls=error_cls
+                   pkg, vrn, directive='depends_on', error_cls=error_cls
                ))

        # Check "patch" directive
-       for _, triggers in pkg_cls.provided.items():
+       for _, triggers in pkg.provided.items():
            triggers = [spack.spec.Spec(x) for x in triggers]
            for vrn in triggers:
                errors.extend(_analyze_variants_in_directive(
-                   pkg_cls, vrn, directive='patch', error_cls=error_cls
+                   pkg, vrn, directive='patch', error_cls=error_cls
                ))

        # Check "resource" directive
-       for vrn in pkg_cls.resources:
+       for vrn in pkg.resources:
            errors.extend(_analyze_variants_in_directive(
-               pkg_cls, vrn, directive='resource', error_cls=error_cls
+               pkg, vrn, directive='resource', error_cls=error_cls
            ))

    return llnl.util.lang.dedupe(errors)

@@ -409,15 +409,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
-       pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+       pkg = spack.repo.get(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
-       for dependency_name, dependency_data in pkg_cls.dependencies.items():
+       for dependency_name, dependency_data in pkg.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.path.is_virtual(dependency_name):
                continue

            try:
-               dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
+               dependency_pkg = spack.repo.get(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -433,8 +433,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
            dependency_variants = dependency_edge.spec.variants
            for name, value in dependency_variants.items():
                try:
-                   v, _ = dependency_pkg_cls.variants[name]
-                   v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
+                   v, _ = dependency_pkg.variants[name]
+                   v.validate_or_raise(value, pkg=dependency_pkg)
                except Exception as e:
                    summary = (pkg_name + ": wrong variant used for a "
                               "dependency in a 'depends_on' directive")
@@ -456,10 +456,10 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
-       pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+       pkg = spack.repo.get(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        dependencies_to_check = []
-       for dependency_name, dependency_data in pkg_cls.dependencies.items():
+       for dependency_name, dependency_data in pkg.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.path.is_virtual(dependency_name):
@@ -470,19 +470,19 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
        )

    for s in dependencies_to_check:
-       dependency_pkg_cls = None
+       dependency_pkg = None
        try:
-           dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
+           dependency_pkg = spack.repo.get(s.name)
            assert any(
-               v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions)
+               v.satisfies(s.versions) for v in list(dependency_pkg.versions)
            )
        except Exception:
            summary = ("{0}: dependency on {1} cannot be satisfied "
                       "by known versions of {1.name}").format(pkg_name, s)
            details = ['happening in ' + filename]
-           if dependency_pkg_cls is not None:
+           if dependency_pkg is not None:
                details.append('known versions of {0.name} are {1}'.format(
-                   s, ', '.join([str(x) for x in dependency_pkg_cls.versions])
+                   s, ', '.join([str(x) for x in dependency_pkg.versions])
                ))
            errors.append(error_cls(summary=summary, details=details))

@@ -500,7 +500,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    for name, v in constraint.variants.items():
        try:
            variant, _ = pkg.variants[name]
-           variant.validate_or_raise(v, pkg_cls=pkg)
+           variant.validate_or_raise(v, pkg=pkg)
        except variant_exceptions as e:
            summary = pkg.name + ': wrong variant in "{0}" directive'
            summary = summary.format(directive)
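The substitution running through this file swaps a package-class lookup for a
repo package-object lookup; a sketch of the two APIs side by side (package name
hypothetical):

    import spack.repo

    # One side of the diff: fetch the package *class*, no instantiation
    pkg_cls = spack.repo.path.get_pkg_class("zlib")
    versions = list(pkg_cls.versions)

    # Other side of the diff: fetch a package *object* from the repo
    pkg = spack.repo.get("zlib")
    versions = list(pkg.versions)   # same directive data, via the instance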
@@ -618,7 +618,7 @@ def get_buildfile_manifest(spec):
    Return a data structure with information about a build, including
    text_to_relocate, binary_to_relocate, binary_to_relocate_fullpath
    link_to_relocate, and other, which means it doesn't fit any of previous
-   checks (and should not be relocated). We exclude docs (man) and
+   checks (and should not be relocated). We blacklist docs (man) and
    metadata (.spack). This can be used to find a particular kind of file
    in spack, or to generate the build metadata.
    """
@@ -626,12 +626,12 @@ def get_buildfile_manifest(spec):
                "link_to_relocate": [], "other": [],
                "binary_to_relocate_fullpath": []}

-   exclude_list = (".spack", "man")
+   blacklist = (".spack", "man")

    # Do this at during tarball creation to save time when tarball unpacked.
    # Used by make_package_relative to determine binaries to change.
    for root, dirs, files in os.walk(spec.prefix, topdown=True):
-       dirs[:] = [d for d in dirs if d not in exclude_list]
+       dirs[:] = [d for d in dirs if d not in blacklist]

        # Directories may need to be relocated too.
        for directory in dirs:

@@ -652,10 +652,10 @@ def _add_compilers_if_missing():
def _add_externals_if_missing():
    search_list = [
        # clingo
-       spack.repo.path.get_pkg_class('cmake'),
-       spack.repo.path.get_pkg_class('bison'),
+       spack.repo.path.get('cmake'),
+       spack.repo.path.get('bison'),
        # GnuPG
-       spack.repo.path.get_pkg_class('gawk')
+       spack.repo.path.get('gawk')
    ]
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope='bootstrap')
@@ -45,16 +45,18 @@ def component_dir(self):
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def component_prefix(self):
|
||||
def component_path(self):
|
||||
"""Path to component <prefix>/<component>/<version>."""
|
||||
return self.prefix.join(join_path(self.component_dir, self.spec.version))
|
||||
return join_path(self.prefix, self.component_dir, str(self.spec.version))
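A hedged sketch of what the new `component_path` computes, with hypothetical prefix, component and version values:

from llnl.util.filesystem import join_path

prefix = '/opt/spack/intel-oneapi-mkl'   # hypothetical install prefix
component_dir = 'mkl'                    # hypothetical component
version = '2022.0.2'                     # hypothetical version
print(join_path(prefix, component_dir, version))
# -> /opt/spack/intel-oneapi-mkl/mkl/2022.0.2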

    def install(self, spec, prefix):
        self.install_component(basename(self.url_for_version(spec.version)))

    def install_component(self, installer_path):
    def install(self, spec, prefix, installer_path=None):
        """Shared install method for all oneapi packages."""

        # intel-oneapi-compilers overrides the installer_path when
        # installing fortran, which comes from a spack resource
        if installer_path is None:
            installer_path = basename(self.url_for_version(spec.version))

        if platform.system() == 'Linux':
            # Intel installer assumes and enforces that all components
            # are installed into a single prefix. Spack wants to
@@ -75,7 +77,7 @@ def install_component(self, installer_path):
            bash = Executable('bash')

            # Installer writes files in ~/intel; set HOME so it goes to prefix
            bash.add_default_env('HOME', self.prefix)
            bash.add_default_env('HOME', prefix)
            # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
            bash.add_default_env('XDG_RUNTIME_DIR',
                                 join_path(self.stage.path, 'runtime'))
@@ -83,13 +85,13 @@ def install_component(self, installer_path):
            bash(installer_path,
                 '-s', '-a', '-s', '--action', 'install',
                 '--eula', 'accept',
                 '--install-dir', self.prefix)
                 '--install-dir', prefix)

        if getpass.getuser() == 'root':
            shutil.rmtree('/var/intel/installercache', ignore_errors=True)

        # Some installers have a bug and do not return an error code when failing
        if not isdir(join_path(self.prefix, self.component_dir)):
        if not isdir(join_path(prefix, self.component_dir)):
            raise RuntimeError('install failed')

    def setup_run_environment(self, env):
@@ -102,7 +104,7 @@ def setup_run_environment(self, env):
            $ source {prefix}/{component}/{version}/env/vars.sh
        """
        env.extend(EnvironmentModifications.from_sourcing_file(
            join_path(self.component_prefix, 'env', 'vars.sh')))
            join_path(self.component_path, 'env', 'vars.sh')))


class IntelOneApiLibraryPackage(IntelOneApiPackage):
@@ -116,12 +118,12 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):

    @property
    def headers(self):
        include_path = join_path(self.component_prefix, 'include')
        include_path = join_path(self.component_path, 'include')
        return find_headers('*', include_path, recursive=True)

    @property
    def libs(self):
        lib_path = join_path(self.component_prefix, 'lib', 'intel64')
        lib_path = join_path(self.component_path, 'lib', 'intel64')
        lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
        return find_libraries('*', root=lib_path, shared=True, recursive=True)


@@ -12,17 +12,14 @@
from llnl.util.filesystem import (
    filter_file,
    find,
    find_all_headers,
    find_libraries,
    is_nonsymlink_exe_with_shebang,
    path_contains_subdirectory,
    same_path,
    working_dir,
)
from llnl.util.lang import classproperty, match_predicate
from llnl.util.lang import match_predicate

from spack.directives import depends_on, extends
from spack.error import NoHeadersError, NoLibrariesError
from spack.package_base import PackageBase, run_after


@@ -77,21 +74,24 @@ def _std_args(cls):
            '--no-index',
        ]

    @classproperty
    def homepage(cls):
        if cls.pypi:
            name = cls.pypi.split('/')[0]
    @property
    def homepage(self):
        if self.pypi:
            name = self.pypi.split('/')[0]
            return 'https://pypi.org/project/' + name + '/'

    @classproperty
    def url(cls):
        if cls.pypi:
            return 'https://files.pythonhosted.org/packages/source/' + cls.pypi[0] + '/' + cls.pypi
    @property
    def url(self):
        if self.pypi:
            return (
                'https://files.pythonhosted.org/packages/source/'
                + self.pypi[0] + '/' + self.pypi
            )

    @classproperty
    def list_url(cls):
        if cls.pypi:
            name = cls.pypi.split('/')[0]
    @property
    def list_url(self):
        if self.pypi:
            name = self.pypi.split('/')[0]
            return 'https://pypi.org/simple/' + name + '/'
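A hedged sketch of the URLs these properties derive from a `pypi` attribute (the package value is hypothetical):

pypi = 'flake8/flake8-3.8.2.tar.gz'  # hypothetical pypi attribute

homepage = 'https://pypi.org/project/' + pypi.split('/')[0] + '/'
url = ('https://files.pythonhosted.org/packages/source/'
       + pypi[0] + '/' + pypi)
list_url = 'https://pypi.org/simple/' + pypi.split('/')[0] + '/'
# homepage -> https://pypi.org/project/flake8/
# url      -> https://files.pythonhosted.org/packages/source/f/flake8/flake8-3.8.2.tar.gz
# list_url -> https://pypi.org/simple/flake8/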

    @property
@@ -178,37 +178,6 @@ def install(self, spec, prefix):
        with working_dir(self.build_directory):
            pip(*args)

    @property
    def headers(self):
        """Discover header files in platlib."""

        # Headers may be in either location
        include = inspect.getmodule(self).include
        platlib = inspect.getmodule(self).platlib
        headers = find_all_headers(include) + find_all_headers(platlib)

        if headers:
            return headers

        msg = 'Unable to locate {} headers in {} or {}'
        raise NoHeadersError(msg.format(self.spec.name, include, platlib))

    @property
    def libs(self):
        """Discover libraries in platlib."""

        # Remove py- prefix in package name
        library = 'lib' + self.spec.name[3:].replace('-', '?')
        root = inspect.getmodule(self).platlib

        for shared in [True, False]:
            libs = find_libraries(library, root, shared=shared, recursive=True)
            if libs:
                return libs

        msg = 'Unable to recursively locate {} libraries in {}'
        raise NoLibrariesError(msg.format(self.spec.name, root))

    # Testing

    def test(self):

@@ -2,11 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import inspect
from typing import Optional

import llnl.util.lang as lang

from spack.directives import extends
from spack.package_base import PackageBase, run_after

@@ -42,27 +42,27 @@ class RPackage(PackageBase):

    extends('r')

    @lang.classproperty
    def homepage(cls):
        if cls.cran:
            return 'https://cloud.r-project.org/package=' + cls.cran
        elif cls.bioc:
            return 'https://bioconductor.org/packages/' + cls.bioc
    @property
    def homepage(self):
        if self.cran:
            return 'https://cloud.r-project.org/package=' + self.cran
        elif self.bioc:
            return 'https://bioconductor.org/packages/' + self.bioc

    @lang.classproperty
    def url(cls):
        if cls.cran:
    @property
    def url(self):
        if self.cran:
            return (
                'https://cloud.r-project.org/src/contrib/'
                + cls.cran + '_' + str(list(cls.versions)[0]) + '.tar.gz'
                + self.cran + '_' + str(list(self.versions)[0]) + '.tar.gz'
            )

    @lang.classproperty
    def list_url(cls):
        if cls.cran:
    @property
    def list_url(self):
        if self.cran:
            return (
                'https://cloud.r-project.org/src/contrib/Archive/'
                + cls.cran + '/'
                + self.cran + '/'
            )

    @property

@@ -5,7 +5,6 @@
import os
from typing import Optional

import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

@@ -42,10 +41,10 @@ class RacketPackage(PackageBase):
    name = None  # type: Optional[str]
    parallel = True

    @lang.classproperty
    def homepage(cls):
        if cls.pkgs:
            return 'https://pkgs.racket-lang.org/package/{0}'.format(cls.name)
    @property
    def homepage(self):
        if self.pkgs:
            return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)

    @property
    def build_directory(self):

@@ -90,10 +90,9 @@ class ROCmPackage(PackageBase):
    # https://llvm.org/docs/AMDGPUUsage.html
    # Possible architectures
    amdgpu_targets = (
        'gfx701', 'gfx801', 'gfx802', 'gfx803', 'gfx900', 'gfx900:xnack-',
        'gfx906', 'gfx908', 'gfx90a',
        'gfx906:xnack-', 'gfx908:xnack-', 'gfx90a:xnack-', 'gfx90a:xnack+',
        'gfx1010', 'gfx1011', 'gfx1012', 'gfx1030', 'gfx1031',
        'gfx701', 'gfx801', 'gfx802', 'gfx803',
        'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
        'gfx1011', 'gfx1012'
    )

    variant('rocm', default=False, description='Enable ROCm support')

@@ -771,13 +771,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        mirrors_to_check = {
            'override': remote_mirror_override
        }

        # If we have a remote override and we want to generate the pipeline
        # using --check-index-only, then the override mirror needs to be added
        # to the configured mirrors when bindist.update() is run, or else we
        # won't fetch its index and include it in our local cache.
        spack.mirror.add(
            'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
    else:
        spack.mirror.add(
            'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
@@ -823,7 +819,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
        user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the override mirror we may have just added above).
    # (including the per-PR mirror we may have just added above).
    try:
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
@@ -857,7 +853,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
    finally:
        # Clean up remote mirror override if enabled
        if remote_mirror_override:
            spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
            if spack_pipeline_type != 'spack_protected_branch':
                spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

    all_job_names = []
    output_object = {}
@@ -1628,9 +1625,8 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
        job_log_dir (str): Path into which build log should be copied
    """
    try:
        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
        job_pkg = pkg_cls(job_spec)
        tty.debug('job package: {0.fullname}'.format(job_pkg))
        job_pkg = spack.repo.get(job_spec)
        tty.debug('job package: {0}'.format(job_pkg))
        stage_dir = job_pkg.stage.path
        tty.debug('stage dir: {0}'.format(stage_dir))
        build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')

116
lib/spack/spack/cmd/analyze.py
Normal file
@@ -0,0 +1,116 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import llnl.util.tty as tty

import spack.analyzers
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.fetch_strategy
import spack.monitor
import spack.paths
import spack.report

description = "run analyzers on installed packages"
section = "analysis"
level = "long"


def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='analyze_command')

    sp.add_parser('list-analyzers',
                  description="list available analyzers",
                  help="show list of analyzers that are available to run.")

    # This adds the monitor group to the subparser
    spack.monitor.get_monitor_group(subparser)

    # Run Parser
    run_parser = sp.add_parser('run', description="run an analyzer",
                               help="provide the name of the analyzer to run.")

    run_parser.add_argument(
        '--overwrite', action='store_true',
        help="re-analyze even if the output file already exists.")
    run_parser.add_argument(
        '-p', '--path', default=None,
        dest='path',
        help="write output to a different directory than ~/.spack/analyzers")
    run_parser.add_argument(
        '-a', '--analyzers', default=None,
        dest="analyzers", action="append",
        help="add an analyzer (defaults to all available)")
    arguments.add_common_arguments(run_parser, ['spec'])


def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=False):
    """
    Do an analysis for a spec, optionally adding monitoring.

    We also allow the user to specify a custom output directory.
    analyze_spec(spec, args.analyzers, args.outdir, monitor)

    Args:
        spec (spack.spec.Spec): spec object of installed package
        analyzers (list): list of analyzer (keys) to run
        monitor (spack.monitor.SpackMonitorClient): a monitor client
        overwrite (bool): overwrite result if already exists
    """
    analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())

    # Load the build environment from the spec install directory, and send
    # the spec to the monitor if it's not known
    if monitor:
        monitor.load_build_environment(spec)
        monitor.new_configuration([spec])

    for name in analyzers:

        # Instantiate the analyzer with the spec and outdir
        analyzer = spack.analyzers.get_analyzer(name)(spec, outdir)

        # Run the analyzer to get a json result - results are returned as
        # a dictionary with a key corresponding to the analyzer type, so
        # we can just update the data
        result = analyzer.run()

        # Send the results. We do them separately because:
        # 1. each analyzer might have differently organized output
        # 2. the size of a result can be large
        analyzer.save_result(result, overwrite)


def analyze(parser, args, **kwargs):

    # If the user wants to list analyzers, do so and exit
    if args.analyze_command == "list-analyzers":
        spack.analyzers.list_all()
        sys.exit(0)

    # handle active environment, if any
    env = ev.active_environment()

    # Get and disambiguate the spec (we should only have one)
    specs = spack.cmd.parse_specs(args.spec)
    if not specs:
        tty.die("You must provide one or more specs to analyze.")
    spec = spack.cmd.disambiguate_spec(specs[0], env)

    # The user wants to monitor builds using github.com/spack/spack-monitor.
    # It is instantiated once here, and then available at spack.monitor.cli
    monitor = None
    if args.use_monitor:
        monitor = spack.monitor.get_client(
            host=args.monitor_host,
            prefix=args.monitor_prefix,
        )

    # Run the analysis
    analyze_spec(spec, args.analyzers, args.path, monitor, args.overwrite)
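A hedged usage sketch for the new command; the analyzer name and output path are illustrative, not an exhaustive list:

$ spack analyze list-analyzers
$ spack analyze run hdf5
$ spack analyze run --overwrite -a install_files -p /tmp/analyzers hdf5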
@@ -99,8 +99,8 @@ def blame(parser, args):
        blame_file = path

    if not blame_file:
        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
        blame_file = pkg_cls.module.__file__.rstrip('c')  # .pyc -> .py
        pkg = spack.repo.get(args.package_or_file)
        blame_file = pkg.module.__file__.rstrip('c')  # .pyc -> .py

    # get git blame for the package
    with working_dir(spack.paths.prefix):

@@ -12,12 +12,11 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
import spack.stage
import spack.util.crypto
from spack.package_base import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import VersionBase, ver
from spack.version import Version, ver

description = "checksum available versions of a package"
section = "packaging"
@@ -55,8 +54,7 @@ def checksum(parser, args):
        tty.die("`spack checksum` accepts package names, not URLs.")

    # Get the package we're going to generate checksums for
    pkg_cls = spack.repo.path.get_pkg_class(args.package)
    pkg = pkg_cls(spack.spec.Spec(args.package))
    pkg = spack.repo.get(args.package)

    url_dict = {}
    versions = args.versions
@@ -67,7 +65,7 @@ def checksum(parser, args):
    remote_versions = None
    for version in versions:
        version = ver(version)
        if not isinstance(version, VersionBase):
        if not isinstance(version, Version):
            tty.die("Cannot generate checksums for version lists or "
                    "version ranges. Use unambiguous versions.")
        url = pkg.find_valid_url_for_version(version)
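The isinstance guard above works because ver() parses both exact versions and ranges; a minimal sketch of the distinction:

from spack.version import Version, ver

print(isinstance(ver('1.2.3'), Version))    # True: a concrete, checksummable version
print(isinstance(ver('1.2:1.4'), Version))  # False: a range, rejected above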

@@ -58,21 +58,6 @@ def setup_parser(subparser):
    arguments.add_common_arguments(subparser, ['specs'])


def remove_python_cache():
    for directory in [lib_path, var_path]:
        for root, dirs, files in os.walk(directory):
            for f in files:
                if f.endswith('.pyc') or f.endswith('.pyo'):
                    fname = os.path.join(root, f)
                    tty.debug('Removing {0}'.format(fname))
                    os.remove(fname)
            for d in dirs:
                if d == '__pycache__':
                    dname = os.path.join(root, d)
                    tty.debug('Removing {0}'.format(dname))
                    shutil.rmtree(dname)


def clean(parser, args):
    # If nothing was set, activate the default
    if not any([args.specs, args.stage, args.downloads, args.failures,
@@ -85,7 +70,8 @@ def clean(parser, args):
        for spec in specs:
            msg = 'Cleaning build stage [{0}]'
            tty.msg(msg.format(spec.short_spec))
            spec.package.do_clean()
            package = spack.repo.get(spec)
            package.do_clean()

    if args.stage:
        tty.msg('Removing all temporary build stages')
@@ -109,7 +95,18 @@ def clean(parser, args):

    if args.python_cache:
        tty.msg('Removing python cache files')
        remove_python_cache()
        for directory in [lib_path, var_path]:
            for root, dirs, files in os.walk(directory):
                for f in files:
                    if f.endswith('.pyc') or f.endswith('.pyo'):
                        fname = os.path.join(root, f)
                        tty.debug('Removing {0}'.format(fname))
                        os.remove(fname)
                for d in dirs:
                    if d == '__pycache__':
                        dname = os.path.join(root, d)
                        tty.debug('Removing {0}'.format(dname))
                        shutil.rmtree(dname)

    if args.bootstrap:
        bootstrap_prefix = spack.util.path.canonicalize_path(

@@ -403,4 +403,4 @@ def add_s3_connection_args(subparser, add_help):
        default=None)
    subparser.add_argument(
        '--s3-endpoint-url',
        help="Endpoint URL to use to connect to this S3 mirror")
        help="Access Token to use to connect to this S3 mirror")

@@ -9,6 +9,7 @@

import spack.container
import spack.container.images
import spack.monitor

description = ("creates recipes to build images for different"
               " container runtimes")
@@ -17,6 +18,7 @@

def setup_parser(subparser):
    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
    subparser.add_argument(
        '--list-os', action='store_true', default=False,
        help='list all the OS that can be used in the bootstrap phase and exit'
@@ -44,5 +46,14 @@ def containerize(parser, args):
        raise ValueError(msg.format(config_file))

    config = spack.container.validate(config_file)

    # If we have a monitor request, add monitor metadata to config
    if args.use_monitor:
        config['spack']['monitor'] = {
            "host": args.monitor_host,
            "keep_going": args.monitor_keep_going,
            "prefix": args.monitor_prefix,
            "tags": args.monitor_tags
        }
    recipe = spack.container.recipe(config, last_phase=args.last_stage)
    print(recipe)
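A hedged sketch of the monitor block this adds to the validated configuration before the recipe is rendered (all values hypothetical):

config['spack']['monitor'] = {
    "host": "http://127.0.0.1",  # hypothetical monitor host
    "keep_going": True,
    "prefix": "ms1",             # hypothetical API prefix
    "tags": None,
}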

@@ -39,9 +39,9 @@ def inverted_dependencies():
    actual dependents.
    """
    dag = {}
    for pkg_cls in spack.repo.path.all_package_classes():
        dag.setdefault(pkg_cls.name, set())
        for dep in pkg_cls.dependencies:
    for pkg in spack.repo.path.all_packages():
        dag.setdefault(pkg.name, set())
        for dep in pkg.dependencies:
            deps = [dep]

            # expand virtuals if necessary
@@ -49,7 +49,7 @@ def inverted_dependencies():
            deps += [s.name for s in spack.repo.path.providers_for(dep)]

            for d in deps:
                dag.setdefault(d, set()).add(pkg_cls.name)
                dag.setdefault(d, set()).add(pkg.name)
    return dag
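A self-contained sketch of the inversion, with hypothetical package names: if both 'a' and 'b' depend on 'c', the resulting dag maps 'c' to its dependents:

deps_by_pkg = {'a': ['c'], 'b': ['c'], 'c': []}  # hypothetical dependencies

dag = {}
for pkg_name, deps in deps_by_pkg.items():
    dag.setdefault(pkg_name, set())
    for d in deps:
        dag.setdefault(d, set()).add(pkg_name)

print(dag)  # {'a': set(), 'b': set(), 'c': {'a', 'b'}}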



@@ -87,7 +87,9 @@ def dev_build(self, args):

    # Forces the build to run out of the source directory.
    spec.constrain('dev_path=%s' % source_path)

    spec.concretize()
    package = spack.repo.get(spec)

    if spec.installed:
        tty.error("Already installed in %s" % spec.prefix)
@@ -107,7 +109,7 @@ def dev_build(self, args):
    elif args.test == 'root':
        tests = [spec.name for spec in specs]

    spec.package.do_install(
    package.do_install(
        tests=tests,
        make_jobs=args.jobs,
        keep_prefix=args.keep_prefix,
@@ -120,5 +122,5 @@ def dev_build(self, args):

    # drop into the build environment of the package?
    if args.shell is not None:
        spack.build_environment.setup_package(spec.package, dirty=False)
        spack.build_environment.setup_package(package, dirty=False)
        os.execvp(args.shell, [args.shell])

@@ -54,9 +54,8 @@ def develop(parser, args):
            tty.msg(msg)
            continue

        spec = spack.spec.Spec(entry['spec'])
        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
        pkg_cls(spec).stage.steal_source(abspath)
        stage = spack.spec.Spec(entry['spec']).package.stage
        stage.steal_source(abspath)

    if not env.dev_specs:
        tty.warn("No develop specs to download")

@@ -104,9 +104,9 @@ def edit(parser, args):
        path = os.path.join(path, name)
        if not os.path.exists(path):
            files = glob.glob(path + '*')
            exclude_list = ['.pyc', '~']  # exclude binaries and backups
            blacklist = ['.pyc', '~']  # blacklist binaries and backups
            files = list(filter(
                lambda x: all(s not in x for s in exclude_list), files))
                lambda x: all(s not in x for s in blacklist), files))
            if len(files) > 1:
                m = 'Multiple files exist with the name {0}.'.format(name)
                m += ' Please specify a suffix. Files are:\n\n'

@@ -559,11 +559,11 @@ def env_depfile(args):
    target_prefix = args.make_target_prefix

    def get_target(name):
        # The `all` and `clean` targets are phony. It doesn't make sense to
        # The `all`, `fetch` and `clean` targets are phony. It doesn't make sense to
        # have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
        # sense to have a prefix like `env/all`, `env/clean` when they are
        # sense to have a prefix like `env/all`, `env/fetch`, `env/clean` when they are
        # supposed to be included
        if name in ('all', 'clean') and os.path.isabs(target_prefix):
        if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
            return name
        else:
            return os.path.join(target_prefix, name)
@@ -571,6 +571,9 @@ def get_target(name):
    def get_install_target(name):
        return os.path.join(target_prefix, '.install', name)

    def get_fetch_target(name):
        return os.path.join(target_prefix, '.fetch', name)

    for _, spec in env.concretized_specs():
        for s in spec.traverse(root=True):
            hash_to_spec[s.dag_hash()] = s
@@ -585,30 +588,46 @@ def get_install_target(name):
    # All package install targets, not just roots.
    all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]

    # Fetch targets for all packages in the environment, not just roots.
    all_fetch_targets = [get_fetch_target(h) for h in hash_to_spec.keys()]

    buf = six.StringIO()

    buf.write("""SPACK ?= spack

.PHONY: {} {}
.PHONY: {} {} {}

{}: {}

{}: {}

{}: {}
\t@touch $@

{}: {}
\t@touch $@

{}:
\t@mkdir -p {}
\t@mkdir -p {} {}

{}: | {}
\t$(info Fetching $(SPEC))
\t$(SPACK) -e '{}' fetch $(SPACK_FETCH_FLAGS) /$(notdir $@) && touch $@

{}: {}
\t$(info Installing $(SPEC))
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
--no-add /$(notdir $@) && touch $@

""".format(get_target('all'), get_target('clean'),
""".format(get_target('all'), get_target('fetch-all'), get_target('clean'),
           get_target('all'), get_target('env'),
           get_target('fetch-all'), get_target('fetch'),
           get_target('env'), ' '.join(root_install_targets),
           get_target('dirs'), get_target('.install'),
           get_target('.install/%'), get_target('dirs'),
           get_target('fetch'), ' '.join(all_fetch_targets),
           get_target('dirs'), get_target('.fetch'), get_target('.install'),
           get_target('.fetch/%'), get_target('dirs'),
           env.path,
           get_target('.install/%'), get_target('.fetch/%'),
           '+' if args.jobserver else '', env.path))

    # Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
@@ -638,9 +657,11 @@ def get_install_target(name):
    # --make-target-prefix can be any existing directory we do not control,
    # including empty string (which means deleting the containing folder
    # would delete the folder with the Makefile)
    buf.write("{}:\n\trm -f -- {} {}\n".format(
    buf.write("{}:\n\trm -f -- {} {} {} {}\n".format(
        get_target('clean'),
        get_target('env'),
        get_target('fetch'),
        ' '.join(all_fetch_targets),
        ' '.join(all_install_targets)))

    makefile = buf.getvalue()
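A hedged sketch of the target names the helpers above produce, assuming a hypothetical --make-target-prefix of 'env' and a hypothetical hash:

import os

target_prefix = 'env'  # hypothetical --make-target-prefix

def get_target(name):
    return os.path.join(target_prefix, name)

def get_fetch_target(name):
    return os.path.join(target_prefix, '.fetch', name)

def get_install_target(name):
    return os.path.join(target_prefix, '.install', name)

print(get_target('fetch-all'))        # env/fetch-all
print(get_fetch_target('abc1234'))    # env/.fetch/abc1234
print(get_install_target('abc1234'))  # env/.install/abc1234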

@@ -52,8 +52,8 @@ def extensions(parser, args):

    extendable_pkgs = []
    for name in spack.repo.all_package_names():
        pkg_cls = spack.repo.path.get_pkg_class(name)
        if pkg_cls.extendable:
        pkg = spack.repo.get(name)
        if pkg.extendable:
            extendable_pkgs.append(name)

    colify(extendable_pkgs, indent=4)
@@ -64,12 +64,12 @@ def extensions(parser, args):
    if len(spec) > 1:
        tty.die("Can only list extensions for one package.")

    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    env = ev.active_environment()
    spec = cmd.disambiguate_spec(spec[0], env)

    if not spec.package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    if not spec.package.extendable:
        tty.die("%s does not have extensions." % spec.short_spec)


@@ -5,7 +5,6 @@
from __future__ import print_function

import argparse
import errno
import os
import sys

@@ -94,21 +93,6 @@ def external_find(args):
        # It's fine to not find any manifest file if we are doing the
        # search implicitly (i.e. as part of 'spack external find')
        pass
    except Exception as e:
        # For most exceptions, just print a warning and continue.
        # Note that KeyboardInterrupt does not subclass Exception
        # (so CTRL-C will terminate the program as expected).
        skip_msg = ("Skipping manifest and continuing with other external "
                    "checks")
        if ((isinstance(e, IOError) or isinstance(e, OSError)) and
                e.errno in [errno.EPERM, errno.EACCES]):
            # The manifest file does not have sufficient permissions enabled:
            # print a warning and keep going
            tty.warn("Unable to read manifest due to insufficient "
                     "permissions.", skip_msg)
        else:
            tty.warn("Unable to read manifest, unexpected error: {0}"
                     .format(str(e)), skip_msg)

    # If the user didn't specify anything, search for build tools by default
    if not args.tags and not args.all and not args.packages:
@@ -119,37 +103,34 @@ def external_find(args):
        args.tags = []

    # Construct the list of possible packages to be detected
    pkg_cls_to_check = []
    packages_to_check = []

    # Add the packages that have been required explicitly
    if args.packages:
        pkg_cls_to_check = [
            spack.repo.path.get_pkg_class(pkg) for pkg in args.packages
        ]
        packages_to_check = list(spack.repo.get(pkg) for pkg in args.packages)
        if args.tags:
            allowed = set(spack.repo.path.packages_with_tags(*args.tags))
            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
            packages_to_check = [x for x in packages_to_check if x in allowed]

    if args.tags and not pkg_cls_to_check:
    if args.tags and not packages_to_check:
        # If we arrived here we didn't have any explicit package passed
        # as argument, which means to search all packages.
        # Since tags are cached it's much faster to construct what we need
        # to search directly, rather than filtering after the fact
        pkg_cls_to_check = [
            spack.repo.path.get_pkg_class(pkg_name)
            for tag in args.tags
            for pkg_name in spack.repo.path.packages_with_tags(tag)
        packages_to_check = [
            spack.repo.get(pkg) for tag in args.tags for pkg in
            spack.repo.path.packages_with_tags(tag)
        ]
        pkg_cls_to_check = list(set(pkg_cls_to_check))
        packages_to_check = list(set(packages_to_check))

    # If the list of packages is empty, search for every possible package
    if not args.tags and not pkg_cls_to_check:
        pkg_cls_to_check = list(spack.repo.path.all_package_classes())
    if not args.tags and not packages_to_check:
        packages_to_check = list(spack.repo.path.all_packages())

    detected_packages = spack.detection.by_executable(
        pkg_cls_to_check, path_hints=args.path)
        packages_to_check, path_hints=args.path)
    detected_packages.update(spack.detection.by_library(
        pkg_cls_to_check, path_hints=args.path))
        packages_to_check, path_hints=args.path))

    new_entries = spack.detection.update_configuration(
        detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -220,7 +201,7 @@ def _collect_and_consume_cray_manifest_files(

def external_list(args):
    # Trigger a read of all packages, might take a long time.
    list(spack.repo.path.all_package_classes())
    list(spack.repo.path.all_packages())
    # Print all the detectable packages
    tty.msg("Detectable packages per repository")
    for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):

@@ -292,9 +292,10 @@ def print_tests(pkg):
    v_specs = [spack.spec.Spec(v_name) for v_name in v_names]
    for v_spec in v_specs:
        try:
            pkg_cls = spack.repo.path.get_pkg_class(v_spec.name)
            pkg = v_spec.package
            pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
            if has_test_method(pkg_cls):
                names.append('{0}.test'.format(pkg_cls.name.lower()))
                names.append('{0}.test'.format(pkg.name.lower()))
        except spack.repo.UnknownPackageError:
            pass

@@ -385,9 +386,7 @@ def print_virtuals(pkg):


def info(parser, args):
    spec = spack.spec.Spec(args.package)
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)
    pkg = spack.repo.get(args.package)

    # Output core package information
    header = section_title(

@@ -17,6 +17,7 @@
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.fetch_strategy
import spack.monitor
import spack.paths
import spack.report
from spack.error import SpackError
@@ -104,6 +105,8 @@ def setup_parser(subparser):
        '--cache-only', action='store_true', dest='cache_only', default=False,
        help="only install package from binary mirrors")

    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa

    subparser.add_argument(
        '--include-build-deps', action='store_true', dest='include_build_deps',
        default=False, help="""include build deps when installing from cache,
@@ -289,6 +292,15 @@ def install(parser, args, **kwargs):
        parser.print_help()
        return

    # The user wants to monitor builds using github.com/spack/spack-monitor
    if args.use_monitor:
        monitor = spack.monitor.get_client(
            host=args.monitor_host,
            prefix=args.monitor_prefix,
            tags=args.monitor_tags,
            save_local=args.monitor_save_local,
        )

    reporter = spack.report.collect_info(
        spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
    if args.log_file:
@@ -329,6 +341,10 @@ def get_tests(specs):
            reporter.filename = default_log_file(specs[0])
        reporter.specs = specs

        # Tell the monitor about the specs
        if args.use_monitor and specs:
            monitor.new_configuration(specs)

        tty.msg("Installing environment {0}".format(env.name))
        with reporter('build'):
            env.install_all(**kwargs)
@@ -374,6 +390,10 @@ def get_tests(specs):
    except SpackError as e:
        tty.debug(e)
        reporter.concretization_report(e.message)

        # Tell spack monitor about it
        if args.use_monitor and abstract_specs:
            monitor.failed_concretization(abstract_specs)
        raise

    # 2. Concrete specs from yaml files
@@ -434,4 +454,17 @@ def get_tests(specs):

    # overwrite all concrete explicit specs from this build
    kwargs['overwrite'] = [spec.dag_hash() for spec in specs]

    # Update install_args with the monitor args, needed for the build task
    kwargs.update({
        "monitor_keep_going": args.monitor_keep_going,
        "monitor_host": args.monitor_host,
        "use_monitor": args.use_monitor,
        "monitor_prefix": args.monitor_prefix,
    })

    # If we are using the monitor, we send configs and create builds.
    # The dag_hash is the main package id
    if args.use_monitor and specs:
        monitor.new_configuration(specs)
    install_specs(args, kwargs, zip(abstract_specs, specs))

@@ -84,9 +84,9 @@ def match(p, f):
            if f.match(p):
                return True

            pkg_cls = spack.repo.path.get_pkg_class(p)
            if pkg_cls.__doc__:
                return f.match(pkg_cls.__doc__)
            pkg = spack.repo.get(p)
            if pkg.__doc__:
                return f.match(pkg.__doc__)
            return False
    else:
        def match(p, f):
@@ -133,7 +133,7 @@ def get_dependencies(pkg):
@formatter
def version_json(pkg_names, out):
    """Print all packages with their latest versions."""
    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
    pkgs = [spack.repo.get(name) for name in pkg_names]

    out.write('[\n')

@@ -147,14 +147,14 @@ def version_json(pkg_names, out):
        '  "maintainers": {5},\n'
        '  "dependencies": {6}'
        '}}'.format(
            pkg_cls.name,
            VersionList(pkg_cls.versions).preferred(),
            json.dumps([str(v) for v in reversed(sorted(pkg_cls.versions))]),
            pkg_cls.homepage,
            github_url(pkg_cls),
            json.dumps(pkg_cls.maintainers),
            json.dumps(get_dependencies(pkg_cls))
        ) for pkg_cls in pkg_classes
            pkg.name,
            VersionList(pkg.versions).preferred(),
            json.dumps([str(v) for v in reversed(sorted(pkg.versions))]),
            pkg.homepage,
            github_url(pkg),
            json.dumps(pkg.maintainers),
            json.dumps(get_dependencies(pkg))
        ) for pkg in pkgs
    ])
    out.write(pkg_latest)
    # important: no trailing comma in JSON arrays
@@ -172,7 +172,7 @@ def html(pkg_names, out):
    """

    # Read in all packages
    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
    pkgs = [spack.repo.get(name) for name in pkg_names]

    # Start at 2 because the title of the page from Sphinx is id1.
    span_id = 2
@@ -189,7 +189,7 @@ def head(n, span_id, title, anchor=None):
    # Start with the number of packages, skipping the title and intro
    # blurb, which we maintain in the RST file.
    out.write('<p>\n')
    out.write('Spack currently has %d mainline packages:\n' % len(pkg_classes))
    out.write('Spack currently has %d mainline packages:\n' % len(pkgs))
    out.write('</p>\n')

    # Table of links to all packages
@@ -209,9 +209,9 @@ def head(n, span_id, title, anchor=None):
    out.write('<hr class="docutils"/>\n')

    # Output some text for each package.
    for pkg_cls in pkg_classes:
        out.write('<div class="section" id="%s">\n' % pkg_cls.name)
        head(2, span_id, pkg_cls.name)
    for pkg in pkgs:
        out.write('<div class="section" id="%s">\n' % pkg.name)
        head(2, span_id, pkg.name)
        span_id += 1

        out.write('<dl class="docutils">\n')
@@ -219,10 +219,10 @@ def head(n, span_id, title, anchor=None):
        out.write('<dt>Homepage:</dt>\n')
        out.write('<dd><ul class="first last simple">\n')

        if pkg_cls.homepage:
        if pkg.homepage:
            out.write(('<li>'
                       '<a class="reference external" href="%s">%s</a>'
                       '</li>\n') % (pkg_cls.homepage, escape(pkg_cls.homepage, True)))
                       '</li>\n') % (pkg.homepage, escape(pkg.homepage, True)))
        else:
            out.write('No homepage\n')
        out.write('</ul></dd>\n')
@@ -231,19 +231,19 @@ def head(n, span_id, title, anchor=None):
        out.write('<dd><ul class="first last simple">\n')
        out.write(('<li>'
                   '<a class="reference external" href="%s">%s/package.py</a>'
                   '</li>\n') % (github_url(pkg_cls), pkg_cls.name))
                   '</li>\n') % (github_url(pkg), pkg.name))
        out.write('</ul></dd>\n')

        if pkg_cls.versions:
        if pkg.versions:
            out.write('<dt>Versions:</dt>\n')
            out.write('<dd>\n')
            out.write(', '.join(
                str(v) for v in reversed(sorted(pkg_cls.versions))))
                str(v) for v in reversed(sorted(pkg.versions))))
            out.write('\n')
            out.write('</dd>\n')

        for deptype in spack.dependency.all_deptypes:
            deps = pkg_cls.dependencies_of_type(deptype)
            deps = pkg.dependencies_of_type(deptype)
            if deps:
                out.write('<dt>%s Dependencies:</dt>\n' % deptype.capitalize())
                out.write('<dd>\n')
@@ -256,7 +256,7 @@ def head(n, span_id, title, anchor=None):

        out.write('<dt>Description:</dt>\n')
        out.write('<dd>\n')
        out.write(escape(pkg_cls.format_doc(indent=2), True))
        out.write(escape(pkg.format_doc(indent=2), True))
        out.write('\n')
        out.write('</dd>\n')
        out.write('</dl>\n')

@@ -221,7 +221,7 @@ def _read_specs_from_file(filename):
    for i, string in enumerate(stream):
        try:
            s = Spec(string)
            spack.repo.path.get_pkg_class(s.name)
            s.package
            specs.append(s)
        except SpackError as e:
            tty.debug(e)

@@ -131,7 +131,7 @@ def check_module_set_name(name):

_missing_modules_warning = (
    "Modules have been omitted for one or more specs, either"
    " because they were excluded or because the spec is"
    " because they were blacklisted or because the spec is"
    " associated with a package that is installed upstream and"
    " that installation has not generated a module file. Rerun"
    " this command with debug output enabled for more details.")
@@ -180,7 +180,7 @@ def loads(module_type, specs, args, out=None):
    for spec, mod in modules:
        if not mod:
            module_output_for_spec = (
                '## excluded or missing from upstream: {0}'.format(
                '## blacklisted or missing from upstream: {0}'.format(
                    spec.format()))
        else:
            d['exclude'] = '## ' if spec.name in exclude_set else ''
@@ -293,8 +293,8 @@ def refresh(module_type, specs, args):
        cls(spec, args.module_set_name) for spec in specs
        if spack.repo.path.exists(spec.name)]

    # Filter excluded packages early
    writers = [x for x in writers if not x.conf.excluded]
    # Filter blacklisted packages early
    writers = [x for x in writers if not x.conf.blacklisted]

    # Detect name clashes in module files
    file2writer = collections.defaultdict(list)

33
lib/spack/spack/cmd/monitor.py
Normal file
@@ -0,0 +1,33 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.monitor

description = "interact with a monitor server"
section = "analysis"
level = "long"


def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='monitor_command')

    # This adds the monitor group to the subparser
    spack.monitor.get_monitor_group(subparser)

    # Spack Monitor Uploads
    monitor_parser = sp.add_parser('upload', description="upload to spack monitor")
    monitor_parser.add_argument("upload_dir", help="directory root to upload")


def monitor(parser, args, **kwargs):

    if args.monitor_command == "upload":
        monitor = spack.monitor.get_client(
            host=args.monitor_host,
            prefix=args.monitor_prefix,
        )

        # Upload the directory
        monitor.upload_local_save(args.upload_dir)
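A hedged usage sketch; the upload directory is hypothetical (it would be a root of locally saved monitor results):

$ spack monitor upload ~/.spack/reports/monitor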
@@ -31,4 +31,5 @@ def patch(parser, args):

    specs = spack.cmd.parse_specs(args.specs, concretize=True)
    for spec in specs:
        spec.package.do_patch()
        package = spack.repo.get(spec)
        package.do_patch()

@@ -50,7 +50,7 @@ def _show_patch(sha256):
        owner = rec['owner']

        if 'relative_path' in rec:
            pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
            pkg_dir = spack.repo.get(owner).package_dir
            path = os.path.join(pkg_dir, rec['relative_path'])
            print("  path: %s" % path)
        else:

@@ -24,4 +24,5 @@ def restage(parser, args):

    specs = spack.cmd.parse_specs(args.specs, concretize=True)
    for spec in specs:
        spec.package.do_restage()
        package = spack.repo.get(spec)
        package.do_restage()

@@ -24,7 +24,6 @@ def setup_parser(subparser):
    subparser.add_argument(
        '-p', '--path', dest='path',
        help="path to stage package, does not add to spack tree")
    arguments.add_concretizer_args(subparser)


def stage(parser, args):
@@ -59,7 +58,8 @@ def stage(parser, args):

    for spec in specs:
        spec = spack.cmd.matching_spec_from_env(spec)
        package = spack.repo.get(spec)
        if custom_path:
            spec.package.path = custom_path
        spec.package.do_stage()
        tty.msg("Staged {0} in {1}".format(spec.package.name, spec.package.stage.path))
            package.path = custom_path
        package.do_stage()
        tty.msg("Staged {0} in {1}".format(package.name, package.stage.path))

@@ -14,7 +14,6 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
import spack.util.crypto as crypto
from spack.url import (
    UndetectableNameError,
@@ -148,13 +147,13 @@ def url_list(args):
    urls = set()

    # Gather set of URLs from all packages
    for pkg_cls in spack.repo.path.all_package_classes():
        url = getattr(pkg_cls, 'url', None)
        urls = url_list_parsing(args, urls, url, pkg_cls)
    for pkg in spack.repo.path.all_packages():
        url = getattr(pkg, 'url', None)
        urls = url_list_parsing(args, urls, url, pkg)

        for params in pkg_cls.versions.values():
        for params in pkg.versions.values():
            url = params.get('url', None)
            urls = url_list_parsing(args, urls, url, pkg_cls)
            urls = url_list_parsing(args, urls, url, pkg)

    # Print URLs
    for url in sorted(urls):
@@ -185,9 +184,8 @@ def url_summary(args):
    tty.msg('Generating a summary of URL parsing in Spack...')

    # Loop through all packages
    for pkg_cls in spack.repo.path.all_package_classes():
    for pkg in spack.repo.path.all_packages():
        urls = set()
        pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))

        url = getattr(pkg, 'url', None)
        if url:
@@ -320,20 +318,19 @@ def add(self, pkg_name, fetcher):
    version_stats = UrlStats()
    resource_stats = UrlStats()

    for pkg_cls in spack.repo.path.all_package_classes():
    for pkg in spack.repo.path.all_packages():
        npkgs += 1

        for v in pkg_cls.versions:
        for v in pkg.versions:
            try:
                pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
                fetcher = fs.for_package_version(pkg, v)
            except (fs.InvalidArgsError, fs.FetcherConflict):
                continue
            version_stats.add(pkg_cls.name, fetcher)
            version_stats.add(pkg.name, fetcher)

        for _, resources in pkg_cls.resources.items():
        for _, resources in pkg.resources.items():
            for resource in resources:
                resource_stats.add(pkg_cls.name, resource.fetcher)
                resource_stats.add(pkg.name, resource.fetcher)

    # print a nice summary table
    tty.msg("URL stats for %d packages:" % npkgs)
@@ -393,8 +390,8 @@ def print_stat(indent, name, stat_name=None):
    tty.msg("Found %d issues." % total_issues)
    for issue_type, pkgs in issues.items():
        tty.msg("Package URLs with %s" % issue_type)
        for pkg_cls, pkg_issues in pkgs.items():
            color.cprint("    @*C{%s}" % pkg_cls)
        for pkg, pkg_issues in pkgs.items():
            color.cprint("    @*C{%s}" % pkg)
            for issue in pkg_issues:
                print("      %s" % issue)


@@ -12,7 +12,6 @@
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
from spack.version import infinity_versions, ver

description = "list available versions of a package"
@@ -40,9 +39,7 @@ def setup_parser(subparser):


def versions(parser, args):
    spec = spack.spec.Spec(args.package)
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)
    pkg = spack.repo.get(args.package)

    safe_versions = pkg.versions


@@ -252,13 +252,6 @@ def find_new_compilers(path_hints=None, scope=None):
    merged configuration.
    """
    compilers = find_compilers(path_hints)
    return select_new_compilers(compilers, scope)


def select_new_compilers(compilers, scope=None):
    """Given a list of compilers, remove those that are already defined in
    the configuration.
    """
    compilers_not_in_config = []
    for c in compilers:
        arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))

@@ -81,14 +81,6 @@ def cxx11_flag(self):
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++17"

    @property
    def cxx20_flag(self):
        return "-std=c++20"

    @property
    def c99_flag(self):
        return "-std=c99"

@@ -171,15 +171,34 @@ def strip(self):
    def paths(self):
        """Important paths in the image"""
        Paths = collections.namedtuple('Paths', [
            'environment', 'store', 'hidden_view', 'view'
            'environment', 'store', 'view'
        ])
        return Paths(
            environment='/opt/spack-environment',
            store='/opt/software',
            hidden_view='/opt/._view',
            view='/opt/view'
        )

    @tengine.context_property
    def monitor(self):
        """Enable using spack monitor during build."""
        Monitor = collections.namedtuple('Monitor', [
            'enabled', 'host', 'prefix', 'keep_going', 'tags'
        ])
        monitor = self.config.get("monitor")

        # If we don't have a monitor group, cut out early.
        if not monitor:
            return Monitor(False, None, None, None, None)

        return Monitor(
            enabled=True,
            host=monitor.get('host'),
            prefix=monitor.get('prefix'),
            keep_going=monitor.get("keep_going"),
            tags=monitor.get('tags')
        )

    @tengine.context_property
    def manifest(self):
        """The spack.yaml file that should be used in the image"""
@@ -188,6 +207,8 @@ def manifest(self):
        # Copy in the part of spack.yaml prescribed in the configuration file
        manifest = copy.deepcopy(self.config)
        manifest.pop('container')
        if "monitor" in manifest:
            manifest.pop("monitor")

        # Ensure that a few paths are where they need to be
        manifest.setdefault('config', syaml.syaml_dict())

@@ -39,6 +39,10 @@ def translated_compiler_name(manifest_compiler_name):
    elif manifest_compiler_name in spack.compilers.supported_compilers():
        return manifest_compiler_name
    else:
        # Try to fail quickly. This can occur in two cases: (1) the compiler
        # definition is invalid, or (2) a spec specifies a compiler that
        # doesn't exist; the first is caught when creating the compiler
        # definition, the second results in Specs with undefined compilers.
        raise spack.compilers.UnknownCompilerError(
            "Manifest parsing - unknown compiler: {0}"
            .format(manifest_compiler_name))
@@ -86,13 +90,13 @@ def spec_from_entry(entry):
        arch=arch_str
    )

    pkg_cls = spack.repo.path.get_pkg_class(entry['name'])
    package = spack.repo.get(entry['name'])

    if 'parameters' in entry:
        variant_strs = list()
        for name, value in entry['parameters'].items():
            # TODO: also ensure that the variant value is valid?
            if not (name in pkg_cls.variants):
            if not (name in package.variants):
                tty.debug("Omitting variant {0} for entry {1}/{2}"
                          .format(name, entry['name'], entry['hash'][:7]))
                continue
@@ -182,8 +186,6 @@ def read(path, apply_updates):
    tty.debug("{0}: {1} compilers read from manifest".format(
        path,
        str(len(compilers))))
    # Filter out the compilers that already appear in the configuration
    compilers = spack.compilers.select_new_compilers(compilers)
    if apply_updates and compilers:
        spack.compilers.add_compilers_to_config(
            compilers, init_config=False)

@@ -220,7 +220,7 @@ def by_executable(packages_to_check, path_hints=None):
    searching by path.

    Args:
        packages_to_check (list): list of package classes to be detected
        packages_to_check (list): list of packages to be detected
        path_hints (list): list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable.
    """
@@ -228,7 +228,7 @@ def by_executable(packages_to_check, path_hints=None):
    exe_pattern_to_pkgs = collections.defaultdict(list)
    for pkg in packages_to_check:
        if hasattr(pkg, 'executables'):
            for exe in pkg.platform_executables():
            for exe in pkg.platform_executables:
                exe_pattern_to_pkgs[exe].append(pkg)
            # Add Windows specific, package related paths to the search paths
            path_hints.extend(compute_windows_program_path_for_package(pkg))

@@ -46,7 +46,7 @@ class OpenMpi(Package):
from spack.dependency import Dependency, canonical_deptype, default_deptype
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import GitVersion, Version, VersionChecksumError, VersionLookupError
from spack.version import Version, VersionChecksumError

__all__ = ['DirectiveError', 'DirectiveMeta', 'version', 'conflicts', 'depends_on',
           'extends', 'provides', 'patch', 'variant', 'resource']
@@ -330,17 +330,7 @@ def _execute_version(pkg):
            kwargs['checksum'] = checksum

        # Store kwargs for the package to use later with a fetch_strategy.
        version = Version(ver)
        if isinstance(version, GitVersion):
            if not hasattr(pkg, 'git') and 'git' not in kwargs:
                msg = "Spack version directives cannot include git hashes fetched from"
                msg += " URLs. Error in package '%s'\n" % pkg.name
                msg += "    version('%s', " % version.string
                msg += ', '.join("%s='%s'" % (argname, value)
                                 for argname, value in kwargs.items())
                msg += ")"
                raise VersionLookupError(msg)
            pkg.versions[version] = kwargs
        pkg.versions[Version(ver)] = kwargs
    return _execute_version
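For context, the removed guard relied on Version() acting as a factory: per the check above, commit-like strings parse into GitVersion objects. A minimal sketch (the hash is hypothetical):

from spack.version import GitVersion, Version

v = Version('1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b')  # hypothetical 40-char sha
print(isinstance(v, GitVersion))  # True for commit-like version strings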
|
||||
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
from llnl.util.lang import dedupe
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.bootstrap
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -1113,13 +1114,8 @@ def develop(self, spec, path, clone=False):
|
||||
# "steal" the source code via staging API
|
||||
abspath = os.path.normpath(os.path.join(self.path, path))
|
||||
|
||||
# Stage, at the moment, requires a concrete Spec, since it needs the
|
||||
# dag_hash for the stage dir name. Below though we ask for a stage
|
||||
# to be created, to copy it afterwards somewhere else. It would be
|
||||
# better if we can create the `source_path` directly into its final
|
||||
# destination.
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls(spec).stage.steal_source(abspath)
|
||||
stage = spec.package.stage
|
||||
stage.steal_source(abspath)
|
||||
|
||||
# If it wasn't already in the list, append it
|
||||
self.dev_specs[spec.name] = {'path': path, 'spec': str(spec)}
|
||||
@@ -1287,6 +1283,10 @@ def _concretize_separately(self, tests=False):
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.get_compiler_config()
|
||||
|
||||
# Ensure that buildcache index is updated if reuse is on
|
||||
if spack.config.get('config:reuse', False):
|
||||
spack.binary_distribution.binary_index.update()
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(arguments) == 0:
|
||||
return []
|
||||
@@ -1617,10 +1617,9 @@ def install_specs(self, specs=None, **install_args):
        # ensure specs already installed are marked explicit
        all_specs = specs or [cs for _, cs in self.concretized_specs()]
        specs_installed = [s for s in all_specs if s.installed]
        if specs_installed:
            with spack.store.db.write_transaction():  # do all in one transaction
                for spec in specs_installed:
                    spack.store.db.update_explicit(spec, True)
        with spack.store.db.write_transaction():  # do all in one transaction
            for spec in specs_installed:
                spack.store.db.update_explicit(spec, True)

        if not specs_to_install:
            tty.msg('All of the packages are already installed')

@@ -337,7 +337,9 @@ def fetch(self):
                continue

            try:
                self._fetch_from_url(url)
                partial_file, save_file = self._fetch_from_url(url)
                if save_file and (partial_file is not None):
                    llnl.util.filesystem.rename(partial_file, save_file)
                break
            except FailedDownloadError as e:
                errors.append(str(e))
@@ -387,7 +389,9 @@ def _check_headers(self, headers):

    @_needs_stage
    def _fetch_urllib(self, url):
        save_file = self.stage.save_filename
        save_file = None
        if self.stage.save_filename:
            save_file = self.stage.save_filename
        tty.msg('Fetching {0}'.format(url))

        # Run urllib but grab the mime type from the http headers
@@ -397,18 +401,16 @@ def _fetch_urllib(self, url):
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(save_file):
            if save_file and os.path.exists(save_file):
                os.remove(save_file)
            msg = 'urllib failed to fetch with error {0}'.format(e)
            raise FailedDownloadError(url, msg)

        if os.path.lexists(save_file):
            os.remove(save_file)

        with open(save_file, 'wb') as _open_file:
            shutil.copyfileobj(response, _open_file)

        self._check_headers(str(headers))
        return None, save_file

    @_needs_stage
    def _fetch_curl(self, url):
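The pattern behind these fetch hunks is download-to-a-partial-file-then-rename: the transfer writes under a temporary name and the caller moves the file into place only once the download succeeded, so an interrupted transfer never leaves a truncated archive under the final name. A minimal, self-contained sketch of the idea (the function name and URL handling are illustrative, not Spack's API):

import os
import shutil
import urllib.request

def fetch_atomically(url, save_file):
    # Download url into save_file via a '.part' file renamed on success.
    partial_file = save_file + '.part'
    try:
        with urllib.request.urlopen(url) as response, \
                open(partial_file, 'wb') as f:
            shutil.copyfileobj(response, f)
    except Exception:
        # A failed transfer leaves nothing under the final name.
        if os.path.exists(partial_file):
            os.remove(partial_file)
        raise
    # os.replace is atomic for same-filesystem moves on POSIX.
    os.replace(partial_file, save_file)
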
@@ -469,7 +471,7 @@ def _fetch_curl(self, url):
            if self.archive_file:
                os.remove(self.archive_file)

            if partial_file and os.path.lexists(partial_file):
            if partial_file and os.path.exists(partial_file):
                os.remove(partial_file)

            if curl.returncode == 22:
@@ -496,9 +498,7 @@ def _fetch_curl(self, url):
                "Curl failed with error %d" % curl.returncode)

        self._check_headers(headers)

        if save_file and (partial_file is not None):
            fs.rename(partial_file, save_file)
        return partial_file, save_file

    @property  # type: ignore # decorated properties unsupported in mypy
    @_needs_stage
@@ -613,7 +613,7 @@ def fetch(self):

        # remove old symlink if one is there.
        filename = self.stage.save_filename
        if os.path.lexists(filename):
        if os.path.exists(filename):
            os.remove(filename)

        # Symlink to local cached archive.
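The lexists/exists swap in this hunk matters precisely because the file being removed is a symlink: `os.path.exists` follows the link and reports False when the target is gone, so a dangling link would never be cleaned up, while `os.path.lexists` checks the link itself. A small demonstration:

import os

os.symlink('/nonexistent/target', 'stale-link')  # create a dangling symlink
print(os.path.exists('stale-link'))    # False: follows the link to a missing target
print(os.path.lexists('stale-link'))   # True: the link itself is present
os.remove('stale-link')                # removal works either way once detected
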
@@ -1575,30 +1575,16 @@ def for_package_version(pkg, version):

    check_pkg_attributes(pkg)

    if not isinstance(version, spack.version.VersionBase):
    if not isinstance(version, spack.version.Version):
        version = spack.version.Version(version)

    # if it's a commit, we must use a GitFetchStrategy
    if isinstance(version, spack.version.GitVersion):
        if not hasattr(pkg, "git"):
            raise FetchError(
                "Cannot fetch git version for %s. Package has no 'git' attribute" %
                pkg.name
            )
    if version.is_commit and hasattr(pkg, "git"):
        # Populate the version with comparisons to other commits
        version.generate_git_lookup(pkg.name)

        # For GitVersion, we have no way to determine whether a ref is a branch or tag
        # Fortunately, we handle branches and tags identically, except tags are
        # handled slightly more conservatively for older versions of git.
        # We call all non-commit refs tags in this context, at the cost of a slight
        # performance hit for branches on older versions of git.
        # Branches cannot be cached, so we tell the fetcher not to cache tags/branches
        ref_type = 'commit' if version.is_commit else 'tag'
        version.generate_commit_lookup(pkg.name)
        kwargs = {
            'git': pkg.git,
            ref_type: version.ref,
            'no_cache': True,
            'commit': str(version)
        }
        kwargs['submodules'] = getattr(pkg, 'submodules', False)
        fetcher = GitFetchStrategy(**kwargs)

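As a rough illustration of the kwargs assembled here for the commit case: for a hypothetical package whose class sets git = "https://example.com/repo.git" and a commit-style version (hash invented), the fetcher would be built approximately as:

kwargs = {
    'git': 'https://example.com/repo.git',  # from the package's `git` attribute
    'commit': 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',  # ref_type is 'commit'
    'no_cache': True,       # commits and branches are not cached
    'submodules': False,    # default unless the package sets `submodules`
}
fetcher = GitFetchStrategy(**kwargs)
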
@@ -535,10 +535,9 @@ def graph_dot(specs, deptype='all', static=False, out=None):
    deptype = spack.dependency.canonical_deptype(deptype)

    def static_graph(spec, deptype):
        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
        possible = pkg_cls.possible_dependencies(
            expand_virtuals=True, deptype=deptype
        )
        pkg = spec.package
        possible = pkg.possible_dependencies(
            expand_virtuals=True, deptype=deptype)

        nodes = set()  # elements are (node name, node label)
        edges = set()  # elements are (src key, dest key)

@@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Definitions that control how Spack creates Spec hashes."""

import spack.dependency as dp
import spack.repo

hashes = []

@@ -51,16 +51,10 @@ def __call__(self, spec):
        )


def _content_hash_override(spec):
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)
    return pkg.content_hash()


#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
    deptype=(), package_hash=True, name='package_hash',
    override=_content_hash_override)
    override=lambda s: s.package.content_hash())


# Deprecated hash types, no longer used, but needed to understand old serialized

@@ -21,6 +21,7 @@
    * on_phase_success(pkg, phase_name, log_file)
    * on_phase_error(pkg, phase_name, log_file)
    * on_analyzer_save(pkg, result)
    * post_env_write(env)

This can be used to implement support for things like module
@@ -91,5 +92,8 @@ def __call__(self, *args, **kwargs):
on_install_failure = _HookRunner('on_install_failure')
on_install_cancel = _HookRunner('on_install_cancel')

# Analyzer hooks
on_analyzer_save = _HookRunner('on_analyzer_save')

# Environment hooks
post_env_write = _HookRunner('post_env_write')

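Each of these names is an instance of `_HookRunner`, whose `__call__` (referenced in the hunk header above) fans an invocation out to every hook module that defines a function of that name. Roughly, the dispatch works like this sketch, where `hook_modules` stands in for the real list of submodules the actual class discovers and orders:

class _HookRunner:
    # Call `hook_name(*args, **kwargs)` in every module that defines it.

    def __init__(self, hook_name):
        self.hook_name = hook_name

    def __call__(self, *args, **kwargs):
        for module in hook_modules:  # e.g. spack.hooks.monitor, ...
            if hasattr(module, self.hook_name):
                getattr(module, self.hook_name)(*args, **kwargs)
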
85
lib/spack/spack/hooks/monitor.py
Normal file
@@ -0,0 +1,85 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty

import spack.monitor


def on_install_start(spec):
    """On start of an install, we want to ping the server if it exists
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_start for %s" % spec)
    build_id = spack.monitor.cli.new_build(spec)
    tty.verbose("Build created with id %s" % build_id)


def on_install_success(spec):
    """On the success of an install (after everything is complete)
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_success for %s" % spec)
    result = spack.monitor.cli.update_build(spec, status="SUCCESS")
    tty.verbose(result.get('message'))


def on_install_failure(spec):
    """Triggered on failure of an install
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_failure for %s" % spec)
    result = spack.monitor.cli.fail_task(spec)
    tty.verbose(result.get('message'))


def on_install_cancel(spec):
    """Triggered on cancel of an install
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_cancel for %s" % spec)
    result = spack.monitor.cli.cancel_task(spec)
    tty.verbose(result.get('message'))


def on_phase_success(pkg, phase_name, log_file):
    """Triggered on a phase success
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_phase_success %s, phase %s" % (pkg.name, phase_name))
    result = spack.monitor.cli.send_phase(pkg, phase_name, log_file, "SUCCESS")
    tty.verbose(result.get('message'))


def on_phase_error(pkg, phase_name, log_file):
    """Triggered on a phase error
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_phase_error %s, phase %s" % (pkg.name, phase_name))
    result = spack.monitor.cli.send_phase(pkg, phase_name, log_file, "ERROR")
    tty.verbose(result.get('message'))


def on_analyzer_save(pkg, result):
    """given a package and a result, if we have a spack monitor, upload
    the result to it.
    """
    if not spack.monitor.cli:
        return

    # This hook runs after a save result
    spack.monitor.cli.send_analyze_metadata(pkg, result)
@@ -49,6 +49,7 @@
import spack.compilers
import spack.error
import spack.hooks
import spack.monitor
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
@@ -232,7 +233,6 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
    )
    packages = [(s.package, False) for
                s in dep.traverse(order='post', root=False)]

    packages.append((dep.package, True))
    return packages

@@ -2212,8 +2212,7 @@ def flag_installed(self, installed):
    @property
    def explicit(self):
        """The package was explicitly requested by the user."""
        return self.pkg == self.request.pkg and \
            self.request.install_args.get('explicit', True)
        return self.pkg == self.request.pkg

    @property
    def key(self):

@@ -391,8 +391,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
    storage path of the resource associated with the specified ``fetcher``."""
    ext = None
    if spec:
        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
        versions = pkg_cls.versions.get(spec.version, {})
        versions = spec.package.versions.get(spec.package.version, {})
        ext = versions.get('extension', None)
    # If the spec does not explicitly specify an extension (the default case),
    # then try to determine it automatically. An extension can only be

@@ -54,34 +54,6 @@
import spack.util.spack_yaml as syaml


def get_deprecated(dictionary, name, old_name, default):
    """Get a deprecated property from a ``dict``.

    Arguments:
        dictionary (dict): dictionary to get a value from.
        name (str): New name for the property. If present, supersedes ``old_name``.
        old_name (str): Deprecated name for the property. If present, a warning
            is printed.
        default (object): value to return if neither name is found.
    """
    value = default

    # always warn if old name is present
    if old_name in dictionary:
        value = dictionary.get(old_name, value)
        main_msg = "`{}:` is deprecated in module config and will be removed in v0.20."
        details = (
            "Use `{}:` instead. You can run `spack config update` to translate your "
            "configuration files automatically."
        )
        tty.warn(main_msg.format(old_name), details.format(name))

    # name overrides old name if present
    value = dictionary.get(name, value)

    return value


#: config section for this file
def configuration(module_set_name):
    config_path = 'modules:%s' % module_set_name
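A quick, hypothetical illustration of the lookup precedence in the removed helper: the new key wins when both are present, and the old key still works but warns (the dictionaries below are invented):

conf = {'whitelist': ['gcc'], 'include': ['gcc', 'llvm']}

# Warns that `whitelist:` is deprecated, then returns the value under the
# new `include:` key, since the new name supersedes the old one.
rules = get_deprecated(conf, 'include', 'whitelist', [])
assert rules == ['gcc', 'llvm']

# With only the old key present, its value is returned (with a warning).
rules = get_deprecated({'whitelist': ['gcc']}, 'include', 'whitelist', [])
assert rules == ['gcc']
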
@@ -379,14 +351,14 @@ def get_module(

    Retrieve the module file for the given spec if it is available. If the
    module is not available, this will raise an exception unless the module
    is excluded or if the spec is installed upstream.
    is blacklisted or if the spec is installed upstream.

    Args:
        module_type: the type of module we want to retrieve (e.g. lmod)
        spec: refers to the installed package that we want to retrieve a module
            for
        required: if the module is required but excluded, this function will
            print a debug message. If a module is missing but not excluded,
        required: if the module is required but blacklisted, this function will
            print a debug message. If a module is missing but not blacklisted,
            then an exception is raised (regardless of whether it is required)
        get_full_path: if ``True``, this returns the full path to the module.
            Otherwise, this returns the module name.
@@ -414,13 +386,13 @@ def get_module(
    else:
        writer = spack.modules.module_types[module_type](spec, module_set_name)
        if not os.path.isfile(writer.layout.filename):
            if not writer.conf.excluded:
            if not writer.conf.blacklisted:
                err_msg = "No module available for package {0} at {1}".format(
                    spec, writer.layout.filename
                )
                raise ModuleNotFoundError(err_msg)
            elif required:
                tty.debug("The module configuration has excluded {0}: "
                tty.debug("The module configuration has blacklisted {0}: "
                          "omitting it".format(spec))
            else:
                return None
@@ -511,30 +483,26 @@ def hash(self):
        return None

    @property
    def excluded(self):
        """Returns True if the module has been excluded, False otherwise."""

    def blacklisted(self):
        """Returns True if the module has been blacklisted,
        False otherwise.
        """
        # A few variables for convenience of writing the method
        spec = self.spec
        conf = self.module.configuration(self.name)

        # Compute the list of include rules that match
        # DEPRECATED: remove 'whitelist' in v0.20
        include_rules = get_deprecated(conf, "include", "whitelist", [])
        include_matches = [x for x in include_rules if spec.satisfies(x)]
        # Compute the list of whitelist rules that match
        wlrules = conf.get('whitelist', [])
        whitelist_matches = [x for x in wlrules if spec.satisfies(x)]

        # Compute the list of exclude rules that match
        # DEPRECATED: remove 'blacklist' in v0.20
        exclude_rules = get_deprecated(conf, "exclude", "blacklist", [])
        exclude_matches = [x for x in exclude_rules if spec.satisfies(x)]
        # Compute the list of blacklist rules that match
        blrules = conf.get('blacklist', [])
        blacklist_matches = [x for x in blrules if spec.satisfies(x)]

        # Should I exclude the module because it's implicit?
        # DEPRECATED: remove 'blacklist_implicits' in v0.20
        exclude_implicits = get_deprecated(
            conf, "exclude_implicits", "blacklist_implicits", None
        )
        # Should I blacklist the module because it's implicit?
        blacklist_implicits = conf.get('blacklist_implicits')
        installed_implicitly = not spec._installed_explicitly()
        excluded_as_implicit = exclude_implicits and installed_implicitly
        blacklisted_as_implicit = blacklist_implicits and installed_implicitly

        def debug_info(line_header, match_list):
            if match_list:
@@ -543,15 +511,15 @@ def debug_info(line_header, match_list):
                for rule in match_list:
                    tty.debug('\t\tmatches rule: {0}'.format(rule))

        debug_info('INCLUDE', include_matches)
        debug_info('EXCLUDE', exclude_matches)
        debug_info('WHITELIST', whitelist_matches)
        debug_info('BLACKLIST', blacklist_matches)

        if excluded_as_implicit:
            msg = '\tEXCLUDED_AS_IMPLICIT : {0}'.format(spec.cshort_spec)
        if blacklisted_as_implicit:
            msg = '\tBLACKLISTED_AS_IMPLICIT : {0}'.format(spec.cshort_spec)
            tty.debug(msg)

        is_excluded = exclude_matches or excluded_as_implicit
        if not include_matches and is_excluded:
        is_blacklisted = blacklist_matches or blacklisted_as_implicit
        if not whitelist_matches and is_blacklisted:
            return True

        return False
@@ -576,22 +544,17 @@ def specs_to_prereq(self):
        return self._create_list_for('prerequisites')

    @property
    def exclude_env_vars(self):
    def environment_blacklist(self):
        """List of variables that should be left unmodified."""
        filter = self.conf.get('filter', {})

        # DEPRECATED: remove in v0.20
        return get_deprecated(
            filter, "exclude_env_vars", "environment_blacklist", {}
        )
        return self.conf.get('filter', {}).get('environment_blacklist', {})

    def _create_list_for(self, what):
        include = []
        whitelist = []
        for item in self.conf[what]:
            conf = type(self)(item, self.name)
            if not conf.excluded:
                include.append(item)
        return include
            if not conf.blacklisted:
                whitelist.append(item)
        return whitelist

    @property
    def verbose(self):
@@ -770,8 +733,8 @@ def environment_modifications(self):
        # Modifications required from modules.yaml
        env.extend(self.conf.env)

        # List of variables that are excluded in modules.yaml
        exclude = self.conf.exclude_env_vars
        # List of variables that are blacklisted in modules.yaml
        blacklist = self.conf.environment_blacklist

        # We may have tokens to substitute in environment commands

@@ -795,7 +758,7 @@ def environment_modifications(self):
            pass
        x.name = str(x.name).replace('-', '_')

        return [(type(x).__name__, x) for x in env if x.name not in exclude]
        return [(type(x).__name__, x) for x in env if x.name not in blacklist]

    @tengine.context_property
    def autoload(self):
@@ -868,9 +831,9 @@ def write(self, overwrite=False):
            existing file. If False the operation is skipped and we print
            a warning to the user.
        """
        # Return immediately if the module is excluded
        if self.conf.excluded:
            msg = '\tNOT WRITING: {0} [EXCLUDED]'
        # Return immediately if the module is blacklisted
        if self.conf.blacklisted:
            msg = '\tNOT WRITING: {0} [BLACKLISTED]'
            tty.debug(msg.format(self.spec.cshort_spec))
            return

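To make the terminology shift in these hunks concrete, here is a hypothetical configuration fragment, expressed as the Python dict the writer's `configuration()` call would return (spec strings invented), under the old and new key names:

# Old-style keys, still honored via get_deprecated() but with a warning:
conf = {'whitelist': ['gcc'], 'blacklist': ['%gcc@4.8'], 'blacklist_implicits': True}

# New-style equivalent:
conf = {'include': ['gcc'], 'exclude': ['%gcc@4.8'], 'exclude_implicits': True}

# In both schemes a module is skipped when an exclude/blacklist rule matches
# and no include/whitelist rule rescues it.
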
738
lib/spack/spack/monitor.py
Normal file
@@ -0,0 +1,738 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Interact with a Spack Monitor Service. Derived from
https://github.com/spack/spack-monitor/blob/main/script/spackmoncli.py
"""

import base64
import hashlib
import os
import re
from datetime import datetime

try:
    from urllib.error import URLError
    from urllib.request import Request, urlopen
except ImportError:
    from urllib2 import urlopen, Request, URLError  # type: ignore  # novm

from copy import deepcopy
from glob import glob

import llnl.util.tty as tty

import spack
import spack.config
import spack.hash_types as ht
import spack.main
import spack.paths
import spack.store
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml

# A global client to instantiate once
cli = None

def get_client(host, prefix="ms1", allow_fail=False, tags=None, save_local=False):
    """
    Get a monitor client for a particular host and prefix.

    If the client is not running, we exit early, unless allow_fail is set
    to true, indicating that we should continue the build even if the
    server is not present. Note that this client is defined globally as "cli"
    so we can instantiate it once (checking for credentials, etc.) and then
    always have access to it via spack.monitor.cli. Also note that
    typically, we call the monitor by way of hooks in spack.hooks.monitor.
    So if you want the monitor to have a new interaction with some part of
    the codebase, it's recommended to write a hook first, and then have
    the monitor use it.
    """
    global cli
    cli = SpackMonitorClient(host=host, prefix=prefix, allow_fail=allow_fail,
                             tags=tags, save_local=save_local)

    # Auth is always required unless we are saving locally
    if not save_local:
        cli.require_auth()

    # We will exit early if the monitoring service is not running, but
    # only if we aren't doing a local save
    if not save_local:
        info = cli.service_info()

        # If we allow failure, the response will be done
        if info:
            tty.debug("%s v.%s has status %s" % (
                info['id'],
                info['version'],
                info['status'])
            )
    return cli

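A minimal sketch of wiring the client up by hand (the host value is illustrative; normally the --monitor* command-line flags below drive this):

import spack.monitor

# Instantiate the global client; hooks then find it via spack.monitor.cli.
client = spack.monitor.get_client(host="http://127.0.0.1", prefix="ms1",
                                  save_local=True)  # local save needs no auth

# From here on, install hooks such as on_install_start(spec) post build
# records through this same global client.
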
def get_monitor_group(subparser):
    """
    Retrieve the monitor group for the argument parser.

    Since the monitor group is shared between commands, we provide a common
    function to generate the group for it. The user can pass the subparser,
    and the group is added and returned.
    """
    # Monitoring via https://github.com/spack/spack-monitor
    monitor_group = subparser.add_argument_group()
    monitor_group.add_argument(
        '--monitor', action='store_true', dest='use_monitor', default=False,
        help="interact with a monitor server during builds.")
    monitor_group.add_argument(
        '--monitor-save-local', action='store_true', dest='monitor_save_local',
        default=False, help="save monitor results to .spack instead of server.")
    monitor_group.add_argument(
        '--monitor-tags', dest='monitor_tags', default=None,
        help="One or more (comma separated) tags for a build.")
    monitor_group.add_argument(
        '--monitor-keep-going', action='store_true', dest='monitor_keep_going',
        default=False, help="continue the build if a request to monitor fails.")
    monitor_group.add_argument(
        '--monitor-host', dest='monitor_host', default="http://127.0.0.1",
        help="If using a monitor, customize the host.")
    monitor_group.add_argument(
        '--monitor-prefix', dest='monitor_prefix', default="ms1",
        help="The API prefix for the monitor service.")
    return monitor_group


class SpackMonitorClient:
    """Client to interact with a spack monitor server.

    We require the host url, along with the prefix to discover the
    service_info endpoint. If allow_fail is set to True, we will not exit
    on error with tty.die given that a request is not successful. The spack
    version is one of the fields to uniquely identify a spec, so we add it
    to the client on init.
    """

    def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
                 save_local=False):
        # We can control setting an arbitrary version if needed
        sv = spack.main.get_version()
        self.spack_version = os.environ.get("SPACKMON_SPACK_VERSION") or sv

        self.host = host or "http://127.0.0.1"
        self.baseurl = "%s/%s" % (self.host, prefix.strip("/"))
        self.token = os.environ.get("SPACKMON_TOKEN")
        self.username = os.environ.get("SPACKMON_USER")
        self.headers = {}
        self.allow_fail = allow_fail
        self.capture_build_environment()
        self.tags = tags
        self.save_local = save_local

        # We key lookup of build_id by dag_hash
        self.build_ids = {}
        self.setup_save()

    def setup_save(self):
        """Given a local save "save_local" ensure the output directory exists.
        """
        if not self.save_local:
            return

        save_dir = spack.util.path.canonicalize_path(
            spack.config.get('config:monitor_dir', spack.paths.default_monitor_path)
        )

        # Name based on timestamp
        now = datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%s')
        self.save_dir = os.path.join(save_dir, now)
        if not os.path.exists(self.save_dir):
            os.makedirs(self.save_dir)

    def save(self, obj, filename):
        """
        Save a monitor json result to the save directory.
        """
        filename = os.path.join(self.save_dir, filename)
        write_json(obj, filename)
        return {"message": "Build saved locally to %s" % filename}

    def load_build_environment(self, spec):
        """
        Load a build environment from install_environment.json.

        If we are running an analyze command, we will need to load previously
        used build environment metadata from install_environment.json to capture
        what was done during the build.
        """
        if not hasattr(spec, "package") or not spec.package:
            tty.die("A spec must have a package to load the environment.")

        pkg_dir = os.path.dirname(spec.package.install_log_path)
        env_file = os.path.join(pkg_dir, "install_environment.json")
        build_environment = read_json(env_file)
        if not build_environment:
            tty.warn(
                "install_environment.json not found in package folder. "
                " This means that the current environment metadata will be used."
            )
        else:
            self.build_environment = build_environment

    def capture_build_environment(self):
        """
        Capture the environment for the build.

        This uses spack.util.environment.get_host_environment_metadata to do so.
        This is important because it's a unique identifier, along with the spec,
        for a Build. It should look something like this:

        {'host_os': 'ubuntu20.04',
         'platform': 'linux',
         'host_target': 'skylake',
         'hostname': 'vanessa-ThinkPad-T490s',
         'spack_version': '0.16.1-1455-52d5b55b65',
         'kernel_version': '#73-Ubuntu SMP Mon Jan 18 17:25:17 UTC 2021'}

        This is saved to a package install's metadata folder as
        install_environment.json, and can be loaded by the monitor for uploading
        data relevant to a later analysis.
        """
        from spack.util.environment import get_host_environment_metadata
        self.build_environment = get_host_environment_metadata()
        keys = list(self.build_environment.keys())

        # Allow to customize any of these values via the environment
        for key in keys:
            envar_name = "SPACKMON_%s" % key.upper()
            envar = os.environ.get(envar_name)
            if envar:
                self.build_environment[key] = envar

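For instance, to pin one of the captured fields rather than trust auto-detection, export the matching SPACKMON_-prefixed variable before the client is constructed (the value below is illustrative):

import os

# Overrides the auto-detected 'host_os' key in build_environment, because
# capture_build_environment checks SPACKMON_<KEY UPPERCASED> for each key.
os.environ["SPACKMON_HOST_OS"] = "ubuntu20.04"

client = SpackMonitorClient(save_local=True)
assert client.build_environment["host_os"] == "ubuntu20.04"
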
    def require_auth(self):
        """
        Require authentication.

        The token and username must not be unset
        """
        if not self.save_local and (not self.token or not self.username):
            tty.die("You are required to export SPACKMON_TOKEN and SPACKMON_USER")

    def set_header(self, name, value):
        self.headers.update({name: value})

    def set_basic_auth(self, username, password):
        """
        A wrapper for adding basic authentication to the Request
        """
        auth_str = "%s:%s" % (username, password)
        auth_header = base64.b64encode(auth_str.encode("utf-8"))
        self.set_header("Authorization", "Basic %s" % auth_header.decode("utf-8"))

    def reset(self):
        """
        Reset and prepare for a new request.
        """
        if "Authorization" in self.headers:
            self.headers = {"Authorization": self.headers['Authorization']}
        else:
            self.headers = {}

    def prepare_request(self, endpoint, data, headers):
        """
        Prepare a request given an endpoint, data, and headers.

        If data is provided, urllib makes the request a POST
        """
        # Always reset headers for new request.
        self.reset()

        # Preserve previously used auth token
        headers = headers or self.headers

        # The calling function can provide a full or partial url
        if not endpoint.startswith("http"):
            endpoint = "%s/%s" % (self.baseurl, endpoint)

        # If we have data, the request will be POST
        if data:
            if not isinstance(data, str):
                data = sjson.dump(data)
            data = data.encode('ascii')

        return Request(endpoint, data=data, headers=headers)

    def issue_request(self, request, retry=True):
        """
        Given a prepared request, issue it.

        If we get an error, die. If
        there are times when we don't want to exit on error (but instead
        disable using the monitoring service) we could add that here.
        """
        try:
            response = urlopen(request)
        except URLError as e:

            # If we have an authorization request, retry once with auth
            if hasattr(e, "code") and e.code == 401 and retry:
                if self.authenticate_request(e):
                    request = self.prepare_request(
                        e.url,
                        sjson.load(request.data.decode('utf-8')),
                        self.headers
                    )
                    return self.issue_request(request, False)

            # Handle permanent re-directs!
            elif hasattr(e, "code") and e.code == 308:
                location = e.headers.get('Location')

                request_data = None
                if request.data:
                    request_data = sjson.load(request.data.decode('utf-8'))[0]

                if location:
                    request = self.prepare_request(
                        location,
                        request_data,
                        self.headers
                    )
                    return self.issue_request(request, True)

            # Otherwise, relay the message and exit on error
            msg = ""
            if hasattr(e, 'reason'):
                msg = e.reason
            elif hasattr(e, 'code'):
                msg = e.code

            # If we can parse the message, try it
            try:
                msg += "\n%s" % e.read().decode("utf8", 'ignore')
            except Exception:
                pass

            if self.allow_fail:
                tty.warn("Request to %s was not successful, but continuing." % e.url)
                return

            tty.die(msg)

        return response

    def do_request(self, endpoint, data=None, headers=None, url=None):
        """
        Do the actual request.

        If data is provided, it is POST, otherwise GET.
        If an entire URL is provided, don't use the endpoint
        """
        request = self.prepare_request(endpoint, data, headers)

        # If we have an authorization error, we retry with
        response = self.issue_request(request)

        # A 200/201 response indicates success
        if response.code in [200, 201]:
            return sjson.load(response.read().decode('utf-8'))

        return response

    def authenticate_request(self, originalResponse):
        """
        Authenticate the request.

        Given a response (an HTTPError 401), look for a Www-Authenticate
        header to parse. We return True/False to indicate if the request
        should be retried.
        """
        authHeaderRaw = originalResponse.headers.get("Www-Authenticate")
        if not authHeaderRaw:
            return False

        # If we have a username and password, set basic auth automatically
        if self.token and self.username:
            self.set_basic_auth(self.username, self.token)

        headers = deepcopy(self.headers)
        if "Authorization" not in headers:
            tty.error(
                "This endpoint requires a token. Please set "
                "client.set_basic_auth(username, password) first "
                "or export them to the environment."
            )
            return False

        # Prepare request to retry
        h = parse_auth_header(authHeaderRaw)
        headers.update({
            "service": h.Service,
            "Accept": "application/json",
            "User-Agent": "spackmoncli"}
        )

        # Currently we don't set a scope (it defaults to build)
        authResponse = self.do_request(h.Realm, headers=headers)

        # Request the token
        token = authResponse.get("token")
        if not token:
            return False

        # Set the token to the original request and retry
        self.headers.update({"Authorization": "Bearer %s" % token})
        return True

    # Functions correspond to endpoints
    def service_info(self):
        """
        Get the service information endpoint
        """
        # Base endpoint provides service info
        return self.do_request("")

    def new_configuration(self, specs):
        """
        Given a list of specs, generate a new configuration for each.

        We return a lookup of specs with their package names. This assumes
        that we are only installing one version of each package. We aren't
        starting or creating any builds, so we don't need a build environment.
        """
        configs = {}

        # There should only be one spec generally (what cases would have >1?)
        for spec in specs:
            # Not sure if this is needed here, but I see it elsewhere
            if spec.name in spack.repo.path or spec.virtual:
                spec.concretize()

            # Remove extra level of nesting
            # This is the only place in Spack we still use full_hash, as `spack monitor`
            # requires specs with full_hash-keyed dependencies.
            as_dict = {"spec": spec.to_dict(hash=ht.full_hash)['spec'],
                       "spack_version": self.spack_version}

            if self.save_local:
                filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
                self.save(as_dict, filename)
            else:
                response = self.do_request("specs/new/", data=sjson.dump(as_dict))
                configs[spec.package.name] = response.get('data', {})

        return configs

    def failed_concretization(self, specs):
        """
        Given a list of abstract specs, tell spack monitor concretization failed.
        """
        configs = {}

        # There should only be one spec generally (what cases would have >1?)
        for spec in specs:

            # update the spec to have build hash indicating that cannot be built
            meta = spec.to_dict()['spec']
            nodes = []
            for node in meta.get("nodes", []):
                node["full_hash"] = "FAILED_CONCRETIZATION"
                nodes.append(node)
            meta['nodes'] = nodes

            # We can't concretize / hash
            as_dict = {"spec": meta,
                       "spack_version": self.spack_version}

            if self.save_local:
                filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
                self.save(as_dict, filename)
            else:
                response = self.do_request("specs/new/", data=sjson.dump(as_dict))
                configs[spec.package.name] = response.get('data', {})

        return configs

    def new_build(self, spec):
        """
        Create a new build.

        This means sending the hash of the spec to be built,
        along with the build environment. These two sets of data uniquely can
        identify the build, and we will add objects (the binaries produced) to
        it. We return the build id to the calling client.
        """
        return self.get_build_id(spec, return_response=True)

    def get_build_id(self, spec, return_response=False, spec_exists=True):
        """
        Retrieve a build id, either in the local cache, or query the server.
        """
        dag_hash = spec.dag_hash()
        if dag_hash in self.build_ids:
            return self.build_ids[dag_hash]

        # Prepare build environment data (including spack version)
        data = self.build_environment.copy()
        data['full_hash'] = dag_hash

        # If the build should be tagged, add it
        if self.tags:
            data['tags'] = self.tags

        # If we allow the spec to not exist (meaning we create it) we need to
        # include the full specfile here
        if not spec_exists:
            meta_dir = os.path.dirname(spec.package.install_log_path)
            spec_file = os.path.join(meta_dir, "spec.json")
            if os.path.exists(spec_file):
                data['spec'] = sjson.load(read_file(spec_file))
            else:
                spec_file = os.path.join(meta_dir, "spec.yaml")
                data['spec'] = syaml.load(read_file(spec_file))

        if self.save_local:
            return self.get_local_build_id(data, dag_hash, return_response)
        return self.get_server_build_id(data, dag_hash, return_response)

    def get_local_build_id(self, data, dag_hash, return_response):
        """
        Generate a local build id based on hashing the expected data
        """
        hasher = hashlib.md5()
        hasher.update(str(data).encode('utf-8'))
        bid = hasher.hexdigest()
        filename = "build-metadata-%s.json" % bid
        response = self.save(data, filename)
        if return_response:
            return response
        return bid

    def get_server_build_id(self, data, dag_hash, return_response=False):
        """
        Retrieve a build id from the spack monitor server
        """
        response = self.do_request("builds/new/", data=sjson.dump(data))

        # Add the build id to the lookup
        bid = self.build_ids[dag_hash] = response['data']['build']['build_id']
        self.build_ids[dag_hash] = bid

        # If the function is called directly, the user might want output
        if return_response:
            return response
        return bid

    def update_build(self, spec, status="SUCCESS"):
        """
        Update a build with a new status.

        This typically updates the relevant package to indicate a
        successful install. This endpoint can take a general status to update.
        """
        data = {"build_id": self.get_build_id(spec), "status": status}
        if self.save_local:
            filename = "build-%s-status.json" % data['build_id']
            return self.save(data, filename)

        return self.do_request("builds/update/", data=sjson.dump(data))

    def fail_task(self, spec):
        """Given a spec, mark it as failed. This means that Spack Monitor
        marks all dependencies as cancelled, unless they are already successful
        """
        return self.update_build(spec, status="FAILED")

    def cancel_task(self, spec):
        """Given a spec, mark it as cancelled.
        """
        return self.update_build(spec, status="CANCELLED")

    def send_analyze_metadata(self, pkg, metadata):
        """
        Send spack analyzer metadata to the spack monitor server.

        Given a dictionary of analyzers (with key as analyzer type, and
        value as the data) upload the analyzer output to Spack Monitor.
        Spack Monitor should either have a known understanding of the analyzer,
        or if not (the key is not recognized), it's assumed to be a dictionary
        of objects/files, each with attributes to be updated. E.g.,

        {"analyzer-name": {"object-file-path": {"feature1": "value1"}}}
        """
        # Prepare build environment data (including spack version)
        # Since the build might not have been generated, we include the spec
        data = {"build_id": self.get_build_id(pkg.spec, spec_exists=False),
                "metadata": metadata}
        return self.do_request("analyze/builds/", data=sjson.dump(data))

    def send_phase(self, pkg, phase_name, phase_output_file, status):
        """
        Send the result of a phase during install.

        Given a package, phase name, and status, update the monitor endpoint
        to alert of the status of the stage. This includes parsing the package
        metadata folder for phase output and error files
        """
        data = {"build_id": self.get_build_id(pkg.spec)}

        # Send output specific to the phase (does this include error?)
        data.update({"status": status,
                     "output": read_file(phase_output_file),
                     "phase_name": phase_name})

        if self.save_local:
            filename = "build-%s-phase-%s.json" % (data['build_id'], phase_name)
            return self.save(data, filename)

        return self.do_request("builds/phases/update/", data=sjson.dump(data))

    def upload_specfile(self, filename):
        """
        Upload a spec file to the spack monitor server.

        Given a spec file (must be json) upload to the UploadSpec endpoint.
        This function is not used in the spack to server workflow, but could
        be useful if Spack Monitor is intended to send an already generated
        file in some kind of separate analysis. For the environment file, we
        parse out SPACK_* variables to include.
        """
        # We load as json just to validate it
        spec = read_json(filename)
        data = {"spec": spec, "spack_version": self.spack_version}

        if self.save_local:
            filename = "spec-%s-%s.json" % (spec.name, spec.version)
            return self.save(data, filename)

        return self.do_request("specs/new/", data=sjson.dump(data))

    def iter_read(self, pattern):
        """
        A helper to read json from a directory glob and return it loaded.
        """
        for filename in glob(pattern):
            basename = os.path.basename(filename)
            tty.info("Reading %s" % basename)
            yield read_json(filename)

    def upload_local_save(self, dirname):
        """
        Upload results from a locally saved directory to spack monitor.

        The general workflow will first include an install with save local:
        spack install --monitor --monitor-save-local
        And then a request to upload the root or specific directory.
        spack upload monitor ~/.spack/reports/monitor/<date>/
        """
        dirname = os.path.abspath(dirname)
        if not os.path.exists(dirname):
            tty.die("%s does not exist." % dirname)

        # We can't be sure the level of nesting the user has provided
        # So we walk recursively through and look for build metadata
        for subdir, dirs, files in os.walk(dirname):
            root = os.path.join(dirname, subdir)

            # A metadata file signals a monitor export
            metadata = glob("%s%sbuild-metadata*" % (root, os.sep))
            if not metadata or not files or not root or not subdir:
                continue
            self._upload_local_save(root)
        tty.info("Upload complete")

    def _upload_local_save(self, dirname):
        """
        Given a found metadata file, upload results to spack monitor.
        """
        # First find all the specs
        for spec in self.iter_read("%s%sspec*" % (dirname, os.sep)):
            self.do_request("specs/new/", data=sjson.dump(spec))

        # Load build metadata to generate an id
        metadata = glob("%s%sbuild-metadata*" % (dirname, os.sep))
        if not metadata:
            tty.die("Build metadata file(s) missing in %s" % dirname)

        # Create a build_id lookup based on hash
        hashes = {}
        for metafile in metadata:
            data = read_json(metafile)
            build = self.do_request("builds/new/", data=sjson.dump(data))
            localhash = os.path.basename(metafile).replace(".json", "")
            hashes[localhash.replace('build-metadata-', "")] = build

        # Next upload build phases
        for phase in self.iter_read("%s%sbuild*phase*" % (dirname, os.sep)):
            build_id = hashes[phase['build_id']]['data']['build']['build_id']
            phase['build_id'] = build_id
            self.do_request("builds/phases/update/", data=sjson.dump(phase))

        # Next find the status objects
        for status in self.iter_read("%s%sbuild*status*" % (dirname, os.sep)):
            build_id = hashes[status['build_id']]['data']['build']['build_id']
            status['build_id'] = build_id
            self.do_request("builds/update/", data=sjson.dump(status))


# Helper functions

def parse_auth_header(authHeaderRaw):
    """
    Parse an authentication header into relevant pieces
    """
    regex = re.compile('([a-zA-Z]+)="(.+?)"')
    matches = regex.findall(authHeaderRaw)
    lookup = dict()
    for match in matches:
        lookup[match[0]] = match[1]
    return authHeader(lookup)


class authHeader:
    def __init__(self, lookup):
        """Given a dictionary of values, match them to class attributes"""
        for key in lookup:
            if key in ["realm", "service", "scope"]:
                setattr(self, key.capitalize(), lookup[key])

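A worked example of the round trip, using a made-up header value in the usual Www-Authenticate format:

raw = 'Bearer realm="https://monitor.example.com/auth", service="spack-monitor", scope="build"'
h = parse_auth_header(raw)

# Matching keys are capitalized into attributes:
print(h.Realm)    # https://monitor.example.com/auth
print(h.Service)  # spack-monitor
print(h.Scope)    # build
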
def read_file(filename):
    """
    Read a file, if it exists. Otherwise return None
    """
    if not os.path.exists(filename):
        return
    with open(filename, 'r') as fd:
        content = fd.read()
    return content


def write_file(content, filename):
    """
    Write content to file
    """
    with open(filename, 'w') as fd:
        fd.writelines(content)
    return content


def write_json(obj, filename):
    """
    Write a json file, if the output directory exists.
    """
    if not os.path.exists(os.path.dirname(filename)):
        return
    return write_file(sjson.dump(obj), filename)


def read_json(filename):
    """
    Read a file and load into json, if it exists. Otherwise return None.
    """
    if not os.path.exists(filename):
        return
    return sjson.load(read_file(filename))
@@ -33,7 +33,7 @@

import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, match_predicate, memoized, nullcontext
from llnl.util.lang import match_predicate, memoized, nullcontext
from llnl.util.link_tree import LinkTree

import spack.compilers
@@ -50,7 +50,6 @@
import spack.multimethod
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.url
import spack.util.environment
@@ -63,7 +62,7 @@
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.util.prefix import Prefix
from spack.version import GitVersion, Version, VersionBase
from spack.version import Version

if sys.version_info[0] >= 3:
    FLAG_HANDLER_RETURN_TYPE = Tuple[
@@ -208,8 +207,8 @@ def __init__(cls, name, bases, attr_dict):
        # If a package has the executables or libraries attribute then it's
        # assumed to be detectable
        if hasattr(cls, 'executables') or hasattr(cls, 'libraries'):
            @classmethod
            def platform_executables(cls):
            @property
            def platform_executables(self):
                def to_windows_exe(exe):
                    if exe.endswith('$'):
                        exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
@@ -217,8 +216,8 @@ def to_windows_exe(exe):
                        exe += spack.util.path.win_exe_ext()
                    return exe
                plat_exe = []
                if hasattr(cls, 'executables'):
                    for exe in cls.executables:
                if hasattr(self, 'executables'):
                    for exe in self.executables:
                        if sys.platform == 'win32':
                            exe = to_windows_exe(exe)
                        plat_exe.append(exe)
@@ -398,6 +397,63 @@ def _decorator(func):
            return func
        return _decorator

    @property
    def package_dir(self):
        """Directory where the package.py file lives."""
        return os.path.abspath(os.path.dirname(self.module.__file__))

    @property
    def module(self):
        """Module object (not just the name) that this package is defined in.

        We use this to add variables to package modules. This makes
        install() methods easier to write (e.g., can call configure())
        """
        return __import__(self.__module__, fromlist=[self.__name__])

    @property
    def namespace(self):
        """Spack namespace for the package, which identifies its repo."""
        return spack.repo.namespace_from_fullname(self.__module__)

    @property
    def fullname(self):
        """Name of this package, including the namespace"""
        return '%s.%s' % (self.namespace, self.name)

    @property
    def fullnames(self):
        """
        Fullnames for this package and any packages from which it inherits.
        """
        fullnames = []
        for cls in inspect.getmro(self):
            namespace = getattr(cls, 'namespace', None)
            if namespace:
                fullnames.append('%s.%s' % (namespace, self.name))
            if namespace == 'builtin':
                # builtin packages cannot inherit from other repos
                break
        return fullnames

    @property
    def name(self):
        """The name of this package.

        The name of a package is the name of its Python module, without
        the containing module names.
        """
        if self._name is None:
            self._name = self.module.__name__
            if '.' in self._name:
                self._name = self._name[self._name.rindex('.') + 1:]
        return self._name

    @property
    def global_license_dir(self):
        """Returns the directory where license files for all packages are stored."""
        return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))


def run_before(*phases):
    """Registers a method of a package to be run before a given phase"""
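These properties live on the metaclass (so `self` here is the package class itself); the other side of this diff reaches the same behavior through llnl.util.lang's `classproperty` descriptor instead. For readers unfamiliar with the idiom, here is a minimal sketch of such a descriptor (a simplification, not the exact upstream code):

class classproperty(object):
    # Non-data descriptor: like @property, but computed from the class.

    def __init__(self, callback):
        self.callback = callback

    def __get__(self, instance, owner):
        # `owner` is the class through which the attribute was reached,
        # so access works on the class and on instances alike.
        return self.callback(owner)


class Example(object):
    @classproperty
    def greeting(cls):
        return "hello from %s" % cls.__name__

print(Example.greeting)    # hello from Example
print(Example().greeting)  # hello from Example
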
@@ -750,8 +806,7 @@ def __init__(self, spec):
        self._fetch_time = 0.0

        if self.is_extension:
            pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
            pkg_cls(self.extendee_spec)._check_extendable()
            spack.repo.get(self.extendee_spec)._check_extendable()

        super(PackageBase, self).__init__()

@@ -847,60 +902,60 @@ def possible_dependencies(

        return visited

    @classproperty
    def package_dir(cls):
    def enum_constraints(self, visited=None):
        """Return transitive dependency constraints on this package."""
        if visited is None:
            visited = set()
        visited.add(self.name)

        names = []
        clauses = []

        for name in self.dependencies:
            if name not in visited and not spack.spec.Spec(name).virtual:
                pkg = spack.repo.get(name)
                dvis, dnames, dclauses = pkg.enum_constraints(visited)
                visited |= dvis
                names.extend(dnames)
                clauses.extend(dclauses)

        return visited

    # package_dir and module are *class* properties (see PackageMeta),
    # but to make them work on instances we need these defs as well.
    @property
    def package_dir(self):
        """Directory where the package.py file lives."""
        return os.path.abspath(os.path.dirname(cls.module.__file__))
        return type(self).package_dir

    @classproperty
    def module(cls):
        """Module object (not just the name) that this package is defined in.
    @property
    def module(self):
        """Module object that this package is defined in."""
        return type(self).module

        We use this to add variables to package modules. This makes
        install() methods easier to write (e.g., can call configure())
        """
        return __import__(cls.__module__, fromlist=[cls.__name__])

    @classproperty
    def namespace(cls):
    @property
    def namespace(self):
        """Spack namespace for the package, which identifies its repo."""
        return spack.repo.namespace_from_fullname(cls.__module__)
        return type(self).namespace

    @classproperty
    def fullname(cls):
        """Name of this package, including the namespace"""
        return '%s.%s' % (cls.namespace, cls.name)
    @property
    def fullname(self):
        """Name of this package, including namespace: namespace.name."""
        return type(self).fullname

    @classproperty
    def fullnames(cls):
        """Fullnames for this package and any packages from which it inherits."""
        fullnames = []
        for cls in inspect.getmro(cls):
            namespace = getattr(cls, 'namespace', None)
            if namespace:
                fullnames.append('%s.%s' % (namespace, cls.name))
            if namespace == 'builtin':
                # builtin packages cannot inherit from other repos
                break
        return fullnames
    @property
    def fullnames(self):
        return type(self).fullnames

    @classproperty
    def name(cls):
        """The name of this package.
    @property
    def name(self):
        """Name of this package (the module without parent modules)."""
        return type(self).name

        The name of a package is the name of its Python module, without
        the containing module names.
        """
        if cls._name is None:
            cls._name = cls.module.__name__
            if '.' in cls._name:
                cls._name = cls._name[cls._name.rindex('.') + 1:]
        return cls._name

    @classproperty
    def global_license_dir(cls):
        """Returns the directory where license files for all packages are stored."""
        return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
    @property
    def global_license_dir(self):
        """Returns the directory where global license files are stored."""
        return type(self).global_license_dir

    @property
    def global_license_file(self):
@@ -918,9 +973,8 @@ def version(self):
                            " does not have a concrete version.")
        return self.spec.versions[0]

    @classmethod
    @memoized
    def version_urls(cls):
    def version_urls(self):
        """OrderedDict of explicitly defined URLs for versions of this package.

        Return:
@@ -932,7 +986,7 @@ def version_urls(cls):
           if a package only defines ``url`` at the top level.
        """
        version_urls = collections.OrderedDict()
        for v, args in sorted(cls.versions.items()):
        for v, args in sorted(self.versions.items()):
            if 'url' in args:
                version_urls[v] = args['url']
        return version_urls
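For a hypothetical package that declares explicit URLs on only some versions, the result would look like this (versions and URLs invented):

# Given directives like:
#   version('2.0', sha256='...', url='https://example.com/pkg-2.0.tar.gz')
#   version('1.0', sha256='...')   # no explicit url
#
# version_urls() yields an OrderedDict containing only the explicit
# entries, sorted by version:
OrderedDict([(Version('2.0'), 'https://example.com/pkg-2.0.tar.gz')])
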
@@ -972,12 +1026,14 @@ def url_for_version(self, version):
        """
        return self._implement_all_urls_for_version(version)[0]

    def all_urls_for_version(self, version):
        """Return all URLs derived from version_urls(), url, urls, and
    def all_urls_for_version(self, version, custom_url_for_version=None):
        """Returns all URLs derived from version_urls(), url, urls, and
        list_url (if it contains a version) in a package in that order.

        Args:
            version (spack.version.Version): the version for which a URL is sought
            version: class Version
                The version for which a URL is sought.

                See Class Version (version.py)
        """
        uf = None
        if type(self).url_for_version != Package.url_for_version:
@@ -985,7 +1041,7 @@ def all_urls_for_version(self, version):
        return self._implement_all_urls_for_version(version, uf)

    def _implement_all_urls_for_version(self, version, custom_url_for_version=None):
        if not isinstance(version, VersionBase):
        if not isinstance(version, Version):
            version = Version(version)

        urls = []
@@ -1273,8 +1329,7 @@ def fetcher(self, f):
        self._fetcher = f
        self._fetcher.set_package(self)

    @classmethod
    def dependencies_of_type(cls, *deptypes):
    def dependencies_of_type(self, *deptypes):
        """Get dependencies that can possibly have these deptypes.

        This analyzes the package and determines which dependencies *can*
@@ -1284,8 +1339,8 @@ def dependencies_of_type(cls, *deptypes):
        run dependency in another.
        """
        return dict(
            (name, conds) for name, conds in cls.dependencies.items()
            if any(dt in cls.dependencies[name][cond].type
            (name, conds) for name, conds in self.dependencies.items()
            if any(dt in self.dependencies[name][cond].type
               for cond in conds for dt in deptypes))

    @property
@@ -1316,8 +1371,8 @@ def extendee_spec(self):
            # TODO: do something sane here with more than one extendee
            # If it's not concrete, then return the spec from the
            # extends() directive since that is all we know so far.
            spec_str, kwargs = next(iter(self.extendees.items()))
            return spack.spec.Spec(spec_str)
            spec, kwargs = next(iter(self.extendees.items()))
            return spec

    @property
    def extendee_args(self):
@@ -1450,7 +1505,7 @@ def do_fetch(self, mirror_only=False):
|
||||
checksum = spack.config.get('config:checksum')
|
||||
fetch = self.stage.managed_by_spack
|
||||
if checksum and fetch and (self.version not in self.versions) \
|
||||
and (not isinstance(self.version, GitVersion)):
|
||||
and (not self.version.is_commit):
|
||||
tty.warn("There is no checksum on file to fetch %s safely." %
|
||||
self.spec.cformat('{name}{@version}'))
|
||||
|
||||
@@ -2652,15 +2707,14 @@ def do_clean(self):
|
||||
|
||||
self.stage.destroy()
|
||||
|
||||
@classmethod
|
||||
def format_doc(cls, **kwargs):
|
||||
def format_doc(self, **kwargs):
|
||||
"""Wrap doc string at 72 characters and format nicely"""
|
||||
indent = kwargs.get('indent', 0)
|
||||
|
||||
if not cls.__doc__:
|
||||
if not self.__doc__:
|
||||
return ""
|
||||
|
||||
doc = re.sub(r'\s+', ' ', cls.__doc__)
|
||||
doc = re.sub(r'\s+', ' ', self.__doc__)
|
||||
lines = textwrap.wrap(doc, 72)
|
||||
results = six.StringIO()
|
||||
for line in lines:
|
||||
|
||||
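The hunks above repeatedly trade class-level package access for instance-level access. A minimal sketch of the two styles, assuming a concrete spec; the 'zlib' name is illustrative and not part of this diff:

    import spack.repo
    import spack.spec

    s = spack.spec.Spec('zlib').concretized()

    # Class-level access: load the package class and read class
    # attributes such as `versions` without building an instance.
    pkg_cls = spack.repo.path.get_pkg_class(s.name)
    versions = pkg_cls.versions

    # Instance-level access: construct a package object from a concrete
    # spec; the same data is reached through the instance.
    pkg = spack.repo.get(s)
    versions = pkg.versions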
@@ -138,8 +138,8 @@ def has_preferred_targets(cls, pkg_name):
    @classmethod
    def preferred_variants(cls, pkg_name):
        """Return a VariantMap of preferred variants/values for a spec."""
        for pkg_cls in (pkg_name, 'all'):
            variants = spack.config.get('packages').get(pkg_cls, {}).get(
        for pkg in (pkg_name, 'all'):
            variants = spack.config.get('packages').get(pkg, {}).get(
                'variants', '')
            if variants:
                break
@@ -149,26 +149,21 @@ def preferred_variants(cls, pkg_name):
            variants = " ".join(variants)

        # Only return variants that are actually supported by the package
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = spack.repo.get(pkg_name)
        spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
        return dict((name, variant) for name, variant in spec.variants.items()
                    if name in pkg_cls.variants)
                    if name in pkg.variants)


def spec_externals(spec):
    """Return a list of external specs (w/external directory path filled in),
    one for each known external installation.
    """
    one for each known external installation."""
    # break circular import.
    from spack.util.module_cmd import path_from_modules  # NOQA: ignore=F401

    def _package(maybe_abstract_spec):
        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
        return pkg_cls(maybe_abstract_spec)

    allpkgs = spack.config.get('packages')
    names = set([spec.name])
    names |= set(vspec.name for vspec in _package(spec).virtuals_provided)
    names |= set(vspec.name for vspec in spec.package.virtuals_provided)

    external_specs = []
    for name in names:
@@ -195,21 +190,17 @@ def _package(maybe_abstract_spec):


def is_spec_buildable(spec):
    """Return true if the spec is configured as buildable"""
    """Return true if the spec pkgspec is configured as buildable"""

    allpkgs = spack.config.get('packages')
    all_buildable = allpkgs.get('all', {}).get('buildable', True)

    def _package(s):
        pkg_cls = spack.repo.path.get_pkg_class(s.name)
        return pkg_cls(s)

    # Get the list of names for which all_buildable is overridden
    reverse = [name for name, entry in allpkgs.items()
               if entry.get('buildable', all_buildable) != all_buildable]
    # Does this spec override all_buildable
    spec_reversed = (spec.name in reverse or
                     any(_package(spec).provides(name) for name in reverse))
                     any(spec.package.provides(name) for name in reverse))
    return not all_buildable if spec_reversed else all_buildable


@@ -284,11 +284,11 @@ def from_dict(dictionary):
        owner = dictionary.get('owner')
        if 'owner' not in dictionary:
            raise ValueError('Invalid patch dictionary: %s' % dictionary)
        pkg_cls = spack.repo.path.get_pkg_class(owner)
        pkg = spack.repo.get(owner)

        if 'url' in dictionary:
            return UrlPatch(
                pkg_cls,
                pkg,
                dictionary['url'],
                dictionary['level'],
                dictionary['working_dir'],
@@ -297,7 +297,7 @@ def from_dict(dictionary):

        elif 'relative_path' in dictionary:
            patch = FilePatch(
                pkg_cls,
                pkg,
                dictionary['relative_path'],
                dictionary['level'],
                dictionary['working_dir'])
@@ -404,8 +404,8 @@ def update_package(self, pkg_fullname):
            del self.index[sha256]

        # update the index with per-package patch indexes
        pkg_cls = spack.repo.path.get_pkg_class(pkg_fullname)
        partial_index = self._index_patches(pkg_cls)
        pkg = spack.repo.get(pkg_fullname)
        partial_index = self._index_patches(pkg)
        for sha256, package_to_patch in partial_index.items():
            p2p = self.index.setdefault(sha256, {})
            p2p.update(package_to_patch)
@@ -432,10 +432,10 @@ def _index_patches(pkg_class):
            for cond, dependency in conditions.items():
                for pcond, patch_list in dependency.patches.items():
                    for patch in patch_list:
                        dspec_cls = spack.repo.path.get_pkg_class(dependency.spec.name)
                        dspec = spack.repo.get(dependency.spec.name)
                        patch_dict = patch.to_dict()
                        patch_dict.pop('sha256')  # save some space
                        index[patch.sha256] = {dspec_cls.fullname: patch_dict}
                        index[patch.sha256] = {dspec.fullname: patch_dict}

        return index


@@ -862,6 +862,10 @@ def packages_with_tags(self, *tags):
            r |= set(repo.packages_with_tags(*tags))
        return sorted(r)

    def all_packages(self):
        for name in self.all_package_names():
            yield self.get(name)

    def all_package_classes(self):
        for name in self.all_package_names():
            yield self.get_pkg_class(name)
@@ -905,9 +909,7 @@ def providers_for(self, vpkg_spec):

    @autospec
    def extensions_for(self, extendee_spec):
        return [pkg_cls(spack.spec.Spec(pkg_cls.name))
                for pkg_cls in self.all_package_classes()
                if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)]
        return [p for p in self.all_packages() if p.extends(extendee_spec)]

    def last_mtime(self):
        """Time a package file in this repo was last updated."""
@@ -943,10 +945,9 @@ def repo_for_pkg(self, spec):
        # that can operate on packages that don't exist yet.
        return self.first_repo()

    @autospec
    def get(self, spec):
        """Returns the package associated with the supplied spec."""
        msg = "RepoPath.get can only be called on concrete specs"
        assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
        return self.repo_for_pkg(spec).get(spec)

    def get_pkg_class(self, pkg_name):
@@ -1106,10 +1107,9 @@ def _read_config(self):
            tty.die("Error reading %s when opening %s"
                    % (self.config_file, self.root))

    @autospec
    def get(self, spec):
        """Returns the package associated with the supplied spec."""
        msg = "Repo.get can only be called on concrete specs"
        assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
        # NOTE: we only check whether the package is None here, not whether it
        # actually exists, because we have to load it anyway, and that ends up
        # checking for existence. We avoid constructing FastPackageChecker,
@@ -1199,9 +1199,7 @@ def providers_for(self, vpkg_spec):

    @autospec
    def extensions_for(self, extendee_spec):
        return [pkg_cls(spack.spec.Spec(pkg_cls.name))
                for pkg_cls in self.all_package_classes()
                if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)]
        return [p for p in self.all_packages() if p.extends(extendee_spec)]

    def dirname_for_package_name(self, pkg_name):
        """Get the directory name for a particular package. This is the
@@ -1243,6 +1241,15 @@ def packages_with_tags(self, *tags):

        return sorted(v)

    def all_packages(self):
        """Iterator over all packages in the repository.

        Use this with care, because loading packages is slow.

        """
        for name in self.all_package_names():
            yield self.get(name)

    def all_package_classes(self):
        """Iterator over all package *classes* in the repository.

@@ -1391,6 +1398,11 @@ def _path(repo_dirs=None):
    sys.meta_path.append(ReposFinder())


def get(spec):
    """Convenience wrapper around ``spack.repo.get()``."""
    return path.get(spec)


def all_package_names(include_virtuals=False):
    """Convenience wrapper around ``spack.repo.all_package_names()``."""
    return path.all_package_names(include_virtuals)
@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Tools to produce reports of spec installations"""
import codecs
import collections
@@ -280,9 +281,9 @@ def __init__(self, cls, function, format_name, args):
                             .format(self.format_name))
        self.report_writer = report_writers[self.format_name](args)

    def __call__(self, type, dir=None):
    def __call__(self, type, dir=os.getcwd()):
        self.type = type
        self.dir = dir or os.getcwd()
        self.dir = dir
        return self

    def concretization_report(self, msg):

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from io import BufferedReader, IOBase
from io import BufferedReader

import six
import six.moves.urllib.error as urllib_error
@@ -23,15 +23,11 @@
# https://github.com/python/cpython/pull/3249
class WrapStream(BufferedReader):
    def __init__(self, raw):
        # In botocore >=1.23.47, StreamingBody inherits from IOBase, so we
        # only add missing attributes in older versions.
        # https://github.com/boto/botocore/commit/a624815eabac50442ed7404f3c4f2664cd0aa784
        if not isinstance(raw, IOBase):
            raw.readable = lambda: True
            raw.writable = lambda: False
            raw.seekable = lambda: False
            raw.closed = False
            raw.flush = lambda: None
        raw.readable = lambda: True
        raw.writable = lambda: False
        raw.seekable = lambda: False
        raw.closed = False
        raw.flush = lambda: None
        super(WrapStream, self).__init__(raw)

    def detach(self):

@@ -26,9 +26,6 @@
            "cpe-version": {"type": "string", "minLength": 1},
            "system-type": {"type": "string", "minLength": 1},
            "schema-version": {"type": "string", "minLength": 1},
            # Older schemas did not have "cpe-version", just the
            # schema version; in that case it was just called "version"
            "version": {"type": "string", "minLength": 1},
        }
    },
    "compilers": {
@@ -18,13 +18,9 @@
#:
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
spec_regex = (
    r'(?!hierarchy|core_specs|verbose|hash_length|defaults|'
    r'whitelist|blacklist|'  # DEPRECATED: remove in 0.20.
    r'include|exclude|'  # use these more inclusive/consistent options
    r'projections|naming_scheme|core_compilers|all)(^\w[\w-]*)'

)
spec_regex = r'(?!hierarchy|core_specs|verbose|hash_length|whitelist|' \
             r'blacklist|projections|naming_scheme|core_compilers|all|' \
             r'defaults)(^\w[\w-]*)'

#: Matches a valid name for a module set
valid_module_set_name = r'^(?!arch_folder$|lmod$|roots$|enable$|prefix_inspections$|'\
@@ -54,21 +50,12 @@
    'default': {},
    'additionalProperties': False,
    'properties': {
        # DEPRECATED: remove in 0.20.
        'environment_blacklist': {
            'type': 'array',
            'default': [],
            'items': {
                'type': 'string'
            }
        },
        # use exclude_env_vars instead
        'exclude_env_vars': {
            'type': 'array',
            'default': [],
            'items': {
                'type': 'string'
            }
        }
    }
},
@@ -108,20 +95,12 @@
    'minimum': 0,
    'default': 7
},
# DEPRECATED: remove in 0.20.
'whitelist': array_of_strings,
'blacklist': array_of_strings,
'blacklist_implicits': {
    'type': 'boolean',
    'default': False
},
# whitelist/blacklist have been replaced with include/exclude
'include': array_of_strings,
'exclude': array_of_strings,
'exclude_implicits': {
    'type': 'boolean',
    'default': False
},
'defaults': array_of_strings,
'naming_scheme': {
    'type': 'string'  # Can we be more specific here?
@@ -245,51 +224,14 @@ def deprecation_msg_default_module_set(instance, props):
}


# deprecated keys and their replacements
exclude_include_translations = {
    "whitelist": "include",
    "blacklist": "exclude",
    "blacklist_implicits": "exclude_implicits",
    "environment_blacklist": "exclude_env_vars",
}
def update(data):
    """Update the data in place to remove deprecated properties.

    Args:
        data (dict): dictionary to be updated

def update_keys(data, key_translations):
    """Change blacklist/whitelist to exclude/include.

    Arguments:
        data (dict): data from a valid modules configuration.
        key_translations (dict): A dictionary of keys to translate to
            their respective values.

    Return:
        (bool) whether anything was changed in data
    """
    changed = False

    if isinstance(data, dict):
        keys = list(data.keys())
        for key in keys:
            value = data[key]

            translation = key_translations.get(key)
            if translation:
                data[translation] = data.pop(key)
                changed = True

            changed |= update_keys(value, key_translations)

    elif isinstance(data, list):
        for elt in data:
            changed |= update_keys(elt, key_translations)

    return changed


def update_default_module_set(data):
    """Update module configuration to move top-level keys inside default module set.

    This change was introduced in v0.18 (see 99083f1706 or #28659).
    Returns:
        True if data was changed, False otherwise
    """
    changed = False

@@ -316,21 +258,3 @@ def update_default_module_set(data):
        data['default'] = default

    return changed


def update(data):
    """Update the data in place to remove deprecated properties.

    Args:
        data (dict): dictionary to be updated

    Returns:
        True if data was changed, False otherwise
    """
    # deprecated top-level module config (everything in default module set)
    changed = update_default_module_set(data)

    # translate blacklist/whitelist to exclude/include
    changed |= update_keys(data, exclude_include_translations)

    return changed
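The `update_keys` helper above renames deprecated keys recursively, in place. A short usage sketch; the sample data is invented for illustration:

    data = {
        'tcl': {
            'whitelist': ['gcc'],
            'all': {'environment_blacklist': ['CPATH']},
        }
    }
    changed = update_keys(data, exclude_include_translations)
    # changed is True; data now reads:
    # {'tcl': {'include': ['gcc'], 'all': {'exclude_env_vars': ['CPATH']}}}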
@@ -631,7 +631,7 @@ def visit(node):

        # Load the file itself
        self.control.load(os.path.join(parent_dir, 'concretize.lp'))
        self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
        self.control.load(os.path.join(parent_dir, "os_facts.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        timer.phase("load")

@@ -1310,16 +1310,13 @@ class Body(object):
        if not spec.concrete:
            reserved_names = spack.directives.reserved_names
            if not spec.virtual and vname not in reserved_names:
                pkg_cls = spack.repo.path.get_pkg_class(spec.name)
                try:
                    variant_def, _ = pkg_cls.variants[vname]
                    variant_def, _ = spec.package.variants[vname]
                except KeyError:
                    msg = 'variant "{0}" not found in package "{1}"'
                    raise RuntimeError(msg.format(vname, spec.name))
                else:
                    variant_def.validate_or_raise(
                        variant, spack.repo.path.get_pkg_class(spec.name)
                    )
                    variant_def.validate_or_raise(variant, spec.package)

        clauses.append(f.variant_value(spec.name, vname, value))

@@ -1394,7 +1391,7 @@ def build_version_dict(self, possible_pkgs, specs):
        packages_yaml = spack.config.get("packages")
        packages_yaml = _normalize_packages_yaml(packages_yaml)
        for pkg_name in possible_pkgs:
            pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
            pkg = spack.repo.get(pkg_name)

            # All the versions from the corresponding package.py file. Since concepts
            # like being a "develop" version or being preferred exist only at a
@@ -1407,7 +1404,7 @@ def key_fn(item):
                return info.get('preferred', False), not version.isdevelop(), version

            for idx, item in enumerate(sorted(
                pkg_cls.versions.items(), key=key_fn, reverse=True
                pkg.versions.items(), key=key_fn, reverse=True
            )):
                v, version_info = item
                self.possible_versions[pkg_name].add(v)
@@ -1432,16 +1429,11 @@ def key_fn(item):
                continue

            known_versions = self.possible_versions[dep.name]
            if (not isinstance(dep.version, spack.version.GitVersion) and
            if (not dep.version.is_commit and
                    any(v.satisfies(dep.version) for v in known_versions)):
                # some version we know about satisfies this constraint, so we
                # should use that one. e.g, if the user asks for qt@5 and we
                # know about qt@5.5. This ensures we don't add under-specified
                # versions to the solver
                #
                # For git versions, we know the version is already fully specified
                # so we don't have to worry about whether it's an under-specified
                # version
                # know about qt@5.5.
                continue

            # if there is a concrete version on the CLI *that we know nothing
@@ -1686,7 +1678,7 @@ def define_virtual_constraints(self):

        # extract all the real versions mentioned in version ranges
        def versions_for(v):
            if isinstance(v, spack.version.VersionBase):
            if isinstance(v, spack.version.Version):
                return [v]
            elif isinstance(v, spack.version.VersionRange):
                result = [v.start] if v.start else []
@@ -2195,8 +2187,8 @@ def build_specs(self, function_tuples):
        # concretization process)
        for root in self._specs.values():
            for spec in root.traverse():
                if isinstance(spec.version, spack.version.GitVersion):
                    spec.version.generate_git_lookup(spec.fullname)
                if spec.version.is_commit:
                    spec.version.generate_commit_lookup(spec.fullname)

        return self._specs
@@ -1517,9 +1517,8 @@ def root(self):

    @property
    def package(self):
        assert self.concrete, "Spec.package can only be called on concrete specs"
        if not self._package:
            self._package = spack.repo.path.get(self)
            self._package = spack.repo.get(self)
        return self._package

    @property
@@ -2501,9 +2500,8 @@ def validate_detection(self):
        assert isinstance(self.extra_attributes, Mapping), msg

        # Validate the spec calling a package specific method
        pkg_cls = spack.repo.path.get_pkg_class(self.name)
        validate_fn = getattr(
            pkg_cls, 'validate_detected_spec', lambda x, y: None
            self.package, 'validate_detected_spec', lambda x, y: None
        )
        validate_fn(self, self.extra_attributes)

@@ -2731,8 +2729,7 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
        visited_user_specs = set()
        for dep in self.traverse():
            visited_user_specs.add(dep.name)
            pkg_cls = spack.repo.path.get_pkg_class(dep.name)
            visited_user_specs.update(x.name for x in pkg_cls(dep).provided)
            visited_user_specs.update(x.name for x in dep.package.provided)

        extra = set(user_spec_deps.keys()).difference(visited_user_specs)
        if extra:
@@ -2866,12 +2863,10 @@ def ensure_external_path_if_external(external_spec):
            for mod in compiler.modules:
                md.load_module(mod)

            # Get the path from the module the package can override the default
            # (this is mostly needed for Cray)
            pkg_cls = spack.repo.path.get_pkg_class(external_spec.name)
            package = pkg_cls(external_spec)
            # get the path from the module
            # the package can override the default
            external_spec.external_path = getattr(
                package, 'external_prefix',
                external_spec.package, 'external_prefix',
                md.path_from_modules(external_spec.external_modules)
            )

@@ -3382,7 +3377,7 @@ def validate_or_raise(self):
        for spec in self.traverse():
            # raise an UnknownPackageError if the spec's package isn't real.
            if (not spec.virtual) and spec.name:
                spack.repo.path.get_pkg_class(spec.fullname)
                spack.repo.get(spec.fullname)

            # validate compiler in addition to the package name.
            if spec.compiler:
@@ -3449,8 +3444,8 @@ def update_variant_validate(self, variant_name, values):
            variant = pkg_variant.make_variant(value)
            self.variants[variant_name] = variant

            pkg_cls = spack.repo.path.get_pkg_class(self.name)
            pkg_variant.validate_or_raise(self.variants[variant_name], pkg_cls)
            pkg_variant.validate_or_raise(
                self.variants[variant_name], self.package)

    def constrain(self, other, deps=True):
        """Merge the constraints of other with self.
@@ -3638,9 +3633,7 @@ def satisfies(self, other, deps=True, strict=False, strict_deps=False):
        # A concrete provider can satisfy a virtual dependency.
        if not self.virtual and other.virtual:
            try:
                # Here we might get an abstract spec
                pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
                pkg = pkg_cls(self)
                pkg = spack.repo.get(self.fullname)
            except spack.repo.UnknownEntityError:
                # If we can't get package info on this spec, don't treat
                # it as a provider of this vdep.
@@ -3778,8 +3771,7 @@ def patches(self):
        if self._patches_assigned():
            for sha256 in self.variants["patches"]._patches_in_order_of_appearance:
                index = spack.repo.path.patch_index
                pkg_cls = spack.repo.path.get_pkg_class(self.name)
                patch = index.patch_for_package(sha256, pkg_cls)
                patch = index.patch_for_package(sha256, self.package)
                self._patches.append(patch)

        return self._patches
@@ -5162,9 +5154,9 @@ def do_parse(self):
        # Note: VersionRange(x, x) is currently concrete, hence isinstance(...).
        if (
            spec.name and spec.versions.concrete and
            isinstance(spec.version, vn.GitVersion)
            isinstance(spec.version, vn.Version) and spec.version.is_commit
        ):
            spec.version.generate_git_lookup(spec.fullname)
            spec.version.generate_commit_lookup(spec.fullname)

        return specs
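The version hunks above swap an `isinstance` check against `spack.version.GitVersion` for the older `is_commit` flag on `Version`. A hedged sketch of a check that tolerates both APIs (the helper name is invented for illustration):

    import spack.version as vn

    def is_git_based(v):
        # Newer API: git-based versions are a distinct class. The getattr
        # fallback keeps isinstance() valid when GitVersion does not exist.
        if isinstance(v, getattr(vn, 'GitVersion', ())):
            return True
        # Older API: a plain Version carries a boolean attribute.
        return getattr(v, 'is_commit', False)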
@@ -118,7 +118,7 @@ def update_package(self, pkg_name):
            pkg_name (str): name of the package to be removed from the index

        """
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        package = spack.repo.path.get(pkg_name)

        # Remove the package from the list of packages, if present
        for pkg_list in self._tag_dict.values():
@@ -126,9 +126,9 @@ def update_package(self, pkg_name):
                pkg_list.remove(pkg_name)

        # Add it again under the appropriate tags
        for tag in getattr(pkg_cls, 'tags', []):
        for tag in getattr(package, 'tags', []):
            tag = tag.lower()
            self._tag_dict[tag].append(pkg_cls.name)
            self._tag_dict[tag].append(package.name)


class TagIndexError(spack.error.SpackError):
@@ -7,7 +7,6 @@
import os
import sys

import py.path
import pytest

import llnl.util.filesystem as fs
@@ -25,77 +24,131 @@
                                reason="does not run on windows")


@pytest.fixture()
def concretize_and_setup():
    def _func(spec_str):
        s = Spec('mpich').concretized()
        setup_package(s.package, False)
        return s
    return _func
@pytest.mark.parametrize(
    'directory',
    glob.iglob(os.path.join(DATA_PATH, 'make', 'affirmative', '*'))
)
def test_affirmative_make_check(directory, config, mock_packages, working_env):
    """Tests that Spack correctly detects targets in a Makefile."""

    # Get a fake package
    s = Spec('mpich')
    s.concretize()
    pkg = spack.repo.get(s)
    setup_package(pkg, False)

    with fs.working_dir(directory):
        assert pkg._has_make_target('check')

        pkg._if_make_target_execute('check')


@pytest.fixture()
def test_dir(tmpdir):
    def _func(dir_str):
        py.path.local(dir_str).copy(tmpdir)
        return str(tmpdir)
    return _func
@pytest.mark.parametrize(
    'directory',
    glob.iglob(os.path.join(DATA_PATH, 'make', 'negative', '*'))
)
@pytest.mark.regression('9067')
def test_negative_make_check(directory, config, mock_packages, working_env):
    """Tests that Spack correctly ignores false positives in a Makefile."""

    # Get a fake package
    s = Spec('mpich')
    s.concretize()
    pkg = spack.repo.get(s)
    setup_package(pkg, False)

    with fs.working_dir(directory):
        assert not pkg._has_make_target('check')

        pkg._if_make_target_execute('check')


@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
class TestTargets(object):
    @pytest.mark.parametrize(
        'input_dir', glob.iglob(os.path.join(DATA_PATH, 'make', 'affirmative', '*'))
    )
    def test_affirmative_make_check(self, input_dir, test_dir, concretize_and_setup):
        """Tests that Spack correctly detects targets in a Makefile."""
        s = concretize_and_setup('mpich')
        with fs.working_dir(test_dir(input_dir)):
            assert s.package._has_make_target('check')
            s.package._if_make_target_execute('check')
@pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
@pytest.mark.parametrize(
    'directory',
    glob.iglob(os.path.join(DATA_PATH, 'ninja', 'affirmative', '*'))
)
def test_affirmative_ninja_check(
        directory, config, mock_packages, working_env):
    """Tests that Spack correctly detects targets in a Ninja build script."""

    @pytest.mark.parametrize(
        'input_dir', glob.iglob(os.path.join(DATA_PATH, 'make', 'negative', '*'))
    )
    @pytest.mark.regression('9067')
    def test_negative_make_check(self, input_dir, test_dir, concretize_and_setup):
        """Tests that Spack correctly ignores false positives in a Makefile."""
        s = concretize_and_setup('mpich')
        with fs.working_dir(test_dir(input_dir)):
            assert not s.package._has_make_target('check')
            s.package._if_make_target_execute('check')
    # Get a fake package
    s = Spec('mpich')
    s.concretize()
    pkg = spack.repo.get(s)
    setup_package(pkg, False)

    @pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
    @pytest.mark.parametrize(
        'input_dir', glob.iglob(os.path.join(DATA_PATH, 'ninja', 'affirmative', '*'))
    )
    def test_affirmative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
        """Tests that Spack correctly detects targets in a Ninja build script."""
        s = concretize_and_setup('mpich')
        with fs.working_dir(test_dir(input_dir)):
            assert s.package._has_ninja_target('check')
            s.package._if_ninja_target_execute('check')
    with fs.working_dir(directory):
        assert pkg._has_ninja_target('check')

    @pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
    @pytest.mark.parametrize(
        'input_dir', glob.iglob(os.path.join(DATA_PATH, 'ninja', 'negative', '*'))
    )
    def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
        """Tests that Spack correctly ignores false positives in a Ninja
        build script.
        """
        s = concretize_and_setup('mpich')
        with fs.working_dir(test_dir(input_dir)):
            assert not s.package._has_ninja_target('check')
            s.package._if_ninja_target_execute('check')
        pkg._if_ninja_target_execute('check')

        # Clean up Ninja files
        for filename in glob.iglob('.ninja_*'):
            os.remove(filename)


@pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
@pytest.mark.parametrize(
    'directory',
    glob.iglob(os.path.join(DATA_PATH, 'ninja', 'negative', '*'))
)
def test_negative_ninja_check(directory, config, mock_packages, working_env):
    """Tests that Spack correctly ignores false positives in a Ninja
    build script."""

    # Get a fake package
    s = Spec('mpich')
    s.concretize()
    pkg = spack.repo.get(s)
    setup_package(pkg, False)

    with fs.working_dir(directory):
        assert not pkg._has_ninja_target('check')

        pkg._if_ninja_target_execute('check')


def test_cmake_std_args(config, mock_packages):
    # Call the function on a CMakePackage instance
    s = Spec('cmake-client')
    s.concretize()
    pkg = spack.repo.get(s)
    assert pkg.std_cmake_args == get_std_cmake_args(pkg)

    # Call it on another kind of package
    s = Spec('mpich')
    s.concretize()
    pkg = spack.repo.get(s)
    assert get_std_cmake_args(pkg)


def test_cmake_bad_generator(config, mock_packages):
    s = Spec('cmake-client')
    s.concretize()
    pkg = spack.repo.get(s)
    pkg.generator = 'Yellow Sticky Notes'
    with pytest.raises(spack.package_base.InstallError):
        get_std_cmake_args(pkg)


def test_cmake_secondary_generator(config, mock_packages):
    s = Spec('cmake-client')
    s.concretize()
    pkg = spack.repo.get(s)
    pkg.generator = 'CodeBlocks - Unix Makefiles'
    assert get_std_cmake_args(pkg)


@pytest.mark.usefixtures('config', 'mock_packages')
class TestAutotoolsPackage(object):

    def test_with_or_without(self):
        s = Spec('a').concretized()
        options = s.package.with_or_without('foo')
        s = Spec('a')
        s.concretize()
        pkg = spack.repo.get(s)

        options = pkg.with_or_without('foo')

        # Ensure that values that are not representing a feature
        # are not used by with_or_without
@@ -107,27 +160,30 @@ def test_with_or_without(self):
        def activate(value):
            return 'something'

        options = s.package.with_or_without('foo', activation_value=activate)
        options = pkg.with_or_without('foo', activation_value=activate)
        assert '--without-none' not in options
        assert '--with-bar=something' in options
        assert '--without-baz' in options
        assert '--no-fee' in options

        options = s.package.enable_or_disable('foo')
        options = pkg.enable_or_disable('foo')
        assert '--disable-none' not in options
        assert '--enable-bar' in options
        assert '--disable-baz' in options
        assert '--disable-fee' in options

        options = s.package.with_or_without('bvv')
        options = pkg.with_or_without('bvv')
        assert '--with-bvv' in options

        options = s.package.with_or_without('lorem-ipsum', variant='lorem_ipsum')
        options = pkg.with_or_without('lorem-ipsum', variant='lorem_ipsum')
        assert '--without-lorem-ipsum' in options

    def test_none_is_allowed(self):
        s = Spec('a foo=none').concretized()
        options = s.package.with_or_without('foo')
        s = Spec('a foo=none')
        s.concretize()
        pkg = spack.repo.get(s)

        options = pkg.with_or_without('foo')

        # Ensure that values that are not representing a feature
        # are not used by with_or_without
@@ -140,7 +196,8 @@ def test_libtool_archive_files_are_deleted_by_default(
            self, mutable_database
    ):
        # Install a package that creates a mock libtool archive
        s = Spec('libtool-deletion').concretized()
        s = Spec('libtool-deletion')
        s.concretize()
        s.package.do_install(explicit=True)

        # Assert the libtool archive is not there and we have
@@ -157,7 +214,8 @@ def test_libtool_archive_files_might_be_installed_on_demand(
    ):
        # Install a package that creates a mock libtool archive,
        # patch its package to preserve the installation
        s = Spec('libtool-deletion').concretized()
        s = Spec('libtool-deletion')
        s.concretize()
        monkeypatch.setattr(s.package, 'install_libtool_archives', True)
        s.package.do_install(explicit=True)

@@ -250,93 +308,135 @@ def test_broken_external_gnuconfig(self, mutable_database, tmpdir):

@pytest.mark.usefixtures('config', 'mock_packages')
class TestCMakePackage(object):
    def test_cmake_std_args(self):
        # Call the function on a CMakePackage instance
        s = Spec('cmake-client').concretized()
        assert s.package.std_cmake_args == get_std_cmake_args(s.package)

        # Call it on another kind of package
        s = Spec('mpich').concretized()
        assert get_std_cmake_args(s.package)

    def test_cmake_bad_generator(self):
        s = Spec('cmake-client').concretized()
        s.package.generator = 'Yellow Sticky Notes'
        with pytest.raises(spack.package_base.InstallError):
            get_std_cmake_args(s.package)

    def test_cmake_secondary_generator(config, mock_packages):
        s = Spec('cmake-client').concretized()
        s.package.generator = 'CodeBlocks - Unix Makefiles'
        assert get_std_cmake_args(s.package)

    def test_define(self):
        s = Spec('cmake-client').concretized()
        s = Spec('cmake-client')
        s.concretize()
        pkg = spack.repo.get(s)

        define = s.package.define
        for cls in (list, tuple):
            assert define('MULTI', cls(['right', 'up'])) == '-DMULTI:STRING=right;up'
            arg = pkg.define('MULTI', cls(['right', 'up']))
            assert arg == '-DMULTI:STRING=right;up'

        file_list = fs.FileList(['/foo', '/bar'])
        assert define('MULTI', file_list) == '-DMULTI:STRING=/foo;/bar'
        arg = pkg.define('MULTI', fs.FileList(['/foo', '/bar']))
        assert arg == '-DMULTI:STRING=/foo;/bar'

        assert define('ENABLE_TRUTH', False) == '-DENABLE_TRUTH:BOOL=OFF'
        assert define('ENABLE_TRUTH', True) == '-DENABLE_TRUTH:BOOL=ON'
        arg = pkg.define('ENABLE_TRUTH', False)
        assert arg == '-DENABLE_TRUTH:BOOL=OFF'
        arg = pkg.define('ENABLE_TRUTH', True)
        assert arg == '-DENABLE_TRUTH:BOOL=ON'

        assert define('SINGLE', 'red') == '-DSINGLE:STRING=red'
        arg = pkg.define('SINGLE', 'red')
        assert arg == '-DSINGLE:STRING=red'

    def test_define_from_variant(self):
        s = Spec('cmake-client multi=up,right ~truthy single=red').concretized()
        s = Spec('cmake-client multi=up,right ~truthy single=red')
        s.concretize()
        pkg = spack.repo.get(s)

        arg = s.package.define_from_variant('MULTI')
        arg = pkg.define_from_variant('MULTI')
        assert arg == '-DMULTI:STRING=right;up'

        arg = s.package.define_from_variant('ENABLE_TRUTH', 'truthy')
        arg = pkg.define_from_variant('ENABLE_TRUTH', 'truthy')
        assert arg == '-DENABLE_TRUTH:BOOL=OFF'

        arg = s.package.define_from_variant('SINGLE')
        arg = pkg.define_from_variant('SINGLE')
        assert arg == '-DSINGLE:STRING=red'

        with pytest.raises(KeyError, match="not a variant"):
            s.package.define_from_variant('NONEXISTENT')
            pkg.define_from_variant('NONEXISTENT')


@pytest.mark.usefixtures('config', 'mock_packages')
class TestDownloadMixins(object):
    """Test GnuMirrorPackage, SourceforgePackage, SourcewarePackage and XorgPackage."""
    @pytest.mark.parametrize('spec_str,expected_url', [
        # GnuMirrorPackage
        ('mirror-gnu', 'https://ftpmirror.gnu.org/make/make-4.2.1.tar.gz'),
        # SourceforgePackage
        ('mirror-sourceforge',
         'https://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz'),
        # SourcewarePackage
        ('mirror-sourceware', 'https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz'),
        # XorgPackage
        ('mirror-xorg',
         'https://www.x.org/archive/individual/util/util-macros-1.19.1.tar.bz2')
    ])
    def test_attributes_defined(self, spec_str, expected_url):
        s = Spec(spec_str).concretized()
        assert s.package.urls[0] == expected_url
class TestGNUMirrorPackage(object):

    @pytest.mark.parametrize('spec_str,error_fmt', [
        # GnuMirrorPackage
        ('mirror-gnu-broken', r'{0} must define a `gnu_mirror_path` attribute'),
        # SourceforgePackage
        ('mirror-sourceforge-broken',
         r'{0} must define a `sourceforge_mirror_path` attribute'),
        # SourcewarePackage
        ('mirror-sourceware-broken',
         r'{0} must define a `sourceware_mirror_path` attribute'),
        # XorgPackage
        ('mirror-xorg-broken', r'{0} must define a `xorg_mirror_path` attribute'),
    ])
    def test_attributes_missing(self, spec_str, error_fmt):
        s = Spec(spec_str).concretized()
        error_msg = error_fmt.format(type(s.package).__name__)
        with pytest.raises(AttributeError, match=error_msg):
            s.package.urls
    def test_define(self):
        s = Spec('mirror-gnu')
        s.concretize()
        pkg = spack.repo.get(s)

        s = Spec('mirror-gnu-broken')
        s.concretize()
        pkg_broken = spack.repo.get(s)

        cls_name = type(pkg_broken).__name__
        with pytest.raises(AttributeError,
                           match=r'{0} must define a `gnu_mirror_path` '
                                 r'attribute \[none defined\]'
                                 .format(cls_name)):
            pkg_broken.urls

        assert pkg.urls[0] == 'https://ftpmirror.gnu.org/' \
                              'make/make-4.2.1.tar.gz'


@pytest.mark.usefixtures('config', 'mock_packages')
class TestSourceforgePackage(object):

    def test_define(self):
        s = Spec('mirror-sourceforge')
        s.concretize()
        pkg = spack.repo.get(s)

        s = Spec('mirror-sourceforge-broken')
        s.concretize()
        pkg_broken = spack.repo.get(s)

        cls_name = type(pkg_broken).__name__
        with pytest.raises(AttributeError,
                           match=r'{0} must define a `sourceforge_mirror_path`'
                                 r' attribute \[none defined\]'
                                 .format(cls_name)):
            pkg_broken.urls

        assert pkg.urls[0] == 'https://prdownloads.sourceforge.net/' \
                              'tcl/tcl8.6.5-src.tar.gz'


@pytest.mark.usefixtures('config', 'mock_packages')
class TestSourcewarePackage(object):

    def test_define(self):
        s = Spec('mirror-sourceware')
        s.concretize()
        pkg = spack.repo.get(s)

        s = Spec('mirror-sourceware-broken')
        s.concretize()
        pkg_broken = spack.repo.get(s)

        cls_name = type(pkg_broken).__name__
        with pytest.raises(AttributeError,
                           match=r'{0} must define a `sourceware_mirror_path` '
                                 r'attribute \[none defined\]'
                                 .format(cls_name)):
            pkg_broken.urls

        assert pkg.urls[0] == 'https://sourceware.org/pub/' \
                              'bzip2/bzip2-1.0.8.tar.gz'


@pytest.mark.usefixtures('config', 'mock_packages')
class TestXorgPackage(object):

    def test_define(self):
        s = Spec('mirror-xorg')
        s.concretize()
        pkg = spack.repo.get(s)

        s = Spec('mirror-xorg-broken')
        s.concretize()
        pkg_broken = spack.repo.get(s)

        cls_name = type(pkg_broken).__name__
        with pytest.raises(AttributeError,
                           match=r'{0} must define a `xorg_mirror_path` '
                                 r'attribute \[none defined\]'
                                 .format(cls_name)):
            pkg_broken.urls

        assert pkg.urls[0] == 'https://www.x.org/archive/individual/' \
                              'util/util-macros-1.19.1.tar.bz2'


def test_cmake_define_from_variant_conditional(config, mock_packages):
@@ -23,10 +23,9 @@ def test_build_request_errors(install_mockery):
    with pytest.raises(ValueError, match='must be a package'):
        inst.BuildRequest('abc', {})

    spec = spack.spec.Spec('trivial-install-test-package')
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg = spack.repo.get('trivial-install-test-package')
    with pytest.raises(ValueError, match='must have a concrete spec'):
        inst.BuildRequest(pkg_cls(spec), {})
        inst.BuildRequest(pkg, {})


def test_build_request_basics(install_mockery):

@@ -14,11 +14,11 @@ def test_build_task_errors(install_mockery):
    with pytest.raises(ValueError, match='must be a package'):
        inst.BuildTask('abc', None, False, 0, 0, 0, [])

    spec = spack.spec.Spec('trivial-install-test-package')
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg = spack.repo.get('trivial-install-test-package')
    with pytest.raises(ValueError, match='must have a concrete spec'):
        inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, [])
        inst.BuildTask(pkg, None, False, 0, 0, 0, [])

    spec = spack.spec.Spec('trivial-install-test-package')
    spec.concretize()
    assert spec.concrete
    with pytest.raises(ValueError, match='must have a build request'):
180
lib/spack/spack/test/cmd/analyze.py
Normal file
@@ -0,0 +1,180 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import sys

import pytest

import spack.analyzers
import spack.cmd.install
import spack.config
import spack.package_base
import spack.util.spack_json as sjson
from spack.main import SpackCommand
from spack.spec import Spec

install = SpackCommand('install')
analyze = SpackCommand('analyze')

pytestmark = pytest.mark.skipif(sys.platform == 'win32',
                                reason="Test is unsupported on Windows")


def test_test_package_not_installed(mock_fetch, install_mockery_mutable_config):
    # We cannot run an analysis for a package not installed
    out = analyze('run', 'libdwarf', fail_on_error=False)
    assert "==> Error: Spec 'libdwarf' matches no installed packages.\n" in out


def test_analyzer_get_install_dir(mock_fetch, install_mockery_mutable_config):
    """
    Test that we cannot get an analyzer directory without a spec package.
    """
    spec = Spec('libdwarf').concretized()
    assert 'libdwarf' in spack.analyzers.analyzer_base.get_analyzer_dir(spec)

    # Case 1: spec is missing attribute for package
    with pytest.raises(SystemExit):
        spack.analyzers.analyzer_base.get_analyzer_dir(None)

    class Packageless(object):
        package = None

    # Case 2: spec has package attribute, but it's None
    with pytest.raises(SystemExit):
        spack.analyzers.analyzer_base.get_analyzer_dir(Packageless())


def test_malformed_analyzer(mock_fetch, install_mockery_mutable_config):
    """
    Test that an analyzer missing needed attributes is invalid.
    """
    from spack.analyzers.analyzer_base import AnalyzerBase

    # Missing attribute description
    class MyAnalyzer(AnalyzerBase):
        name = "my_analyzer"
        outfile = "my_analyzer_output.txt"

    spec = Spec('libdwarf').concretized()
    with pytest.raises(SystemExit):
        MyAnalyzer(spec)


def test_analyze_output(tmpdir, mock_fetch, install_mockery_mutable_config):
    """
    Test that an analyzer errors if the requested name does not exist.
    """
    install('libdwarf')
    install('python@3.8')
    analyzer_dir = tmpdir.join('analyzers')

    # An analyzer that doesn't exist should not work
    out = analyze('run', '-a', 'pusheen', 'libdwarf', fail_on_error=False)
    assert '==> Error: Analyzer pusheen does not exist\n' in out

    # We will output to this analyzer directory
    analyzer_dir = tmpdir.join('analyzers')
    out = analyze('run', '-a', 'install_files', '-p', str(analyzer_dir), 'libdwarf')

    # Ensure that if we run again without overwrite, we don't run
    out = analyze('run', '-a', 'install_files', '-p', str(analyzer_dir), 'libdwarf')
    assert "skipping" in out

    # With overwrite it should run
    out = analyze('run', '-a', 'install_files', '-p', str(analyzer_dir),
                  '--overwrite', 'libdwarf')
    assert "==> Writing result to" in out


def _run_analyzer(name, package, tmpdir):
    """
    A shared function to test that an analyzer runs.

    We return the output file for further inspection.
    """
    analyzer = spack.analyzers.get_analyzer(name)
    analyzer_dir = tmpdir.join('analyzers')
    out = analyze('run', '-a', analyzer.name, '-p', str(analyzer_dir), package)

    assert "==> Writing result to" in out
    assert "/%s/%s\n" % (analyzer.name, analyzer.outfile) in out

    # The output file should exist
    output_file = out.strip('\n').split(' ')[-1].strip()
    assert os.path.exists(output_file)
    return output_file


def test_installfiles_analyzer(tmpdir, mock_fetch, install_mockery_mutable_config):
    """
    Test the install files analyzer.
    """
    install('libdwarf')
    output_file = _run_analyzer("install_files", "libdwarf", tmpdir)

    # Ensure it's the correct content
    with open(output_file, 'r') as fd:
        content = sjson.load(fd.read())

    basenames = set()
    for key, attrs in content.items():
        basenames.add(os.path.basename(key))

    # Check for a few expected files
    for key in ['.spack', 'libdwarf', 'packages', 'repo.yaml', 'repos']:
        assert key in basenames


def test_environment_analyzer(tmpdir, mock_fetch, install_mockery_mutable_config):
    """
    Test the environment variables analyzer.
    """
    install('libdwarf')
    output_file = _run_analyzer("environment_variables", "libdwarf", tmpdir)
    with open(output_file, 'r') as fd:
        content = sjson.load(fd.read())

    # Check a few expected keys
    for key in ['SPACK_CC', 'SPACK_COMPILER_SPEC', 'SPACK_ENV_PATH']:
        assert key in content

    # The analyzer should return no result if the output file does not exist.
    spec = Spec('libdwarf').concretized()
    env_file = os.path.join(spec.package.prefix, '.spack', 'spack-build-env.txt')
    assert os.path.exists(env_file)
    os.remove(env_file)
    analyzer = spack.analyzers.get_analyzer("environment_variables")
    analyzer_dir = tmpdir.join('analyzers')
    result = analyzer(spec, analyzer_dir).run()
    assert "environment_variables" in result
    assert not result['environment_variables']


def test_list_analyzers():
    """
    Test that listing analyzers shows all the possible analyzers.
    """
    from spack.analyzers import analyzer_types

    # all cannot be an analyzer
    assert "all" not in analyzer_types

    # All types should be present!
    out = analyze('list-analyzers')
    for analyzer_type in analyzer_types:
        assert analyzer_type in out


def test_configargs_analyzer(tmpdir, mock_fetch, install_mockery_mutable_config):
    """
    Test the config args analyzer.

    Since we don't have any, this should return an empty result.
    """
    install('libdwarf')
    analyzer_dir = tmpdir.join('analyzers')
    out = analyze('run', '-a', 'config_args', '-p', str(analyzer_dir), 'libdwarf')
    assert out == ''
@@ -57,8 +57,9 @@ def _get_number(*args, **kwargs):


def test_checksum_versions(mock_packages, mock_fetch, mock_stage):
    pkg_cls = spack.repo.path.get_pkg_class('preferred-test')
    versions = [str(v) for v in pkg_cls.versions if not v.isdevelop()]
    pkg = spack.repo.get('preferred-test')

    versions = [str(v) for v in pkg.versions if not v.isdevelop()]
    output = spack_checksum('preferred-test', versions[0])
    assert 'Found 1 version' in output
    assert 'version(' in output

@@ -3,13 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import sys

import pytest

import llnl.util.filesystem as fs

import spack.caches
import spack.main
import spack.package_base
@@ -44,13 +41,11 @@ def __call__(self, *args, **kwargs):
        spack.caches.misc_cache, 'destroy', Counter('caches'))
    monkeypatch.setattr(
        spack.installer, 'clear_failures', Counter('failures'))
    monkeypatch.setattr(spack.cmd.clean, 'remove_python_cache',
                        Counter('python_cache'))

    yield counts


all_effects = ['stages', 'downloads', 'caches', 'failures', 'python_cache']
all_effects = ['stages', 'downloads', 'caches', 'failures']


@pytest.mark.usefixtures(
@@ -62,7 +57,6 @@ def __call__(self, *args, **kwargs):
    ('-sd', ['stages', 'downloads']),
    ('-m', ['caches']),
    ('-f', ['failures']),
    ('-p', ['python_cache']),
    ('-a', all_effects),
    ('', []),
])
@@ -75,43 +69,3 @@ def test_function_calls(command_line, effects, mock_calls_for_clean):
    # number of times
    for name in ['package'] + all_effects:
        assert mock_calls_for_clean[name] == (1 if name in effects else 0)


def test_remove_python_cache(tmpdir, monkeypatch):
    cache_files = ['file1.pyo', 'file2.pyc']
    source_file = 'file1.py'

    def _setup_files(directory):
        # Create a python cache and source file.
        cache_dir = fs.join_path(directory, '__pycache__')
        fs.mkdirp(cache_dir)
        fs.touch(fs.join_path(directory, source_file))
        fs.touch(fs.join_path(directory, cache_files[0]))
        for filename in cache_files:
            # Ensure byte code files in python cache directory
            fs.touch(fs.join_path(cache_dir, filename))

    def _check_files(directory):
        # Ensure the python cache created by _setup_files is removed
        # and the source file is not.
        assert os.path.exists(fs.join_path(directory, source_file))
        assert not os.path.exists(fs.join_path(directory, cache_files[0]))
        assert not os.path.exists(fs.join_path(directory, '__pycache__'))

    source_dir = fs.join_path(tmpdir, 'lib', 'spack', 'spack')
    var_dir = fs.join_path(tmpdir, 'var', 'spack', 'stuff')

    for d in [source_dir, var_dir]:
        _setup_files(d)

    # Patching the path variables from-import'd by spack.cmd.clean is needed
    # to ensure the paths used by the command for this test reflect the
    # temporary directory locations and not those from spack.paths when
    # the clean command's module was imported.
    monkeypatch.setattr(spack.cmd.clean, "lib_path", source_dir)
    monkeypatch.setattr(spack.cmd.clean, "var_path", var_dir)

    spack.cmd.clean.remove_python_cache()

    for d in [source_dir, var_dir]:
        _check_files(d)
@@ -262,19 +262,17 @@ def test_dev_build_multiple(tmpdir, mock_packages, install_mockery,
    # root and dependency if they wanted a dev build for both.
    leaf_dir = tmpdir.mkdir('leaf')
    leaf_spec = spack.spec.Spec('dev-build-test-install@0.0.0')
    leaf_pkg_cls = spack.repo.path.get_pkg_class(leaf_spec.name)
    with leaf_dir.as_cwd():
        with open(leaf_pkg_cls.filename, 'w') as f:
            f.write(leaf_pkg_cls.original_string)
        with open(leaf_spec.package.filename, 'w') as f:
            f.write(leaf_spec.package.original_string)

    # setup dev-build-test-dependent package for dev build
    # don't concretize outside environment -- dev info will be wrong
    root_dir = tmpdir.mkdir('root')
    root_spec = spack.spec.Spec('dev-build-test-dependent@0.0.0')
    root_pkg_cls = spack.repo.path.get_pkg_class(root_spec.name)
    with root_dir.as_cwd():
        with open(root_pkg_cls.filename, 'w') as f:
            f.write(root_pkg_cls.original_string)
        with open(root_spec.package.filename, 'w') as f:
            f.write(root_spec.package.original_string)

    # setup environment
    envdir = tmpdir.mkdir('env')
@@ -321,9 +319,8 @@ def test_dev_build_env_dependency(tmpdir, mock_packages, install_mockery,
    dep_spec = spack.spec.Spec('dev-build-test-install')

    with build_dir.as_cwd():
        dep_pkg_cls = spack.repo.path.get_pkg_class(dep_spec.name)
        with open(dep_pkg_cls.filename, 'w') as f:
            f.write(dep_pkg_cls.original_string)
        with open(dep_spec.package.filename, 'w') as f:
            f.write(dep_spec.package.original_string)

    # setup environment
    envdir = tmpdir.mkdir('env')
@@ -22,8 +22,7 @@


@pytest.mark.usefixtures(
    'mutable_mock_env_path', 'mock_packages', 'mock_fetch', 'config'
)
    'mutable_mock_env_path', 'mock_packages', 'mock_fetch')
class TestDevelop(object):
    def check_develop(self, env, spec, path=None):
        path = path or spec.name

@@ -452,9 +452,9 @@ def test_env_repo():
    with ev.read('test'):
        concretize()

    pkg_cls = e.repo.get_pkg_class('mpileaks')
    assert pkg_cls.name == 'mpileaks'
    assert pkg_cls.namespace == 'builtin.mock'
    package = e.repo.get('mpileaks')
    assert package.name == 'mpileaks'
    assert package.namespace == 'builtin.mock'


def test_user_removed_spec():
@@ -2898,8 +2898,14 @@ def test_environment_depfile_makefile(tmpdir, mock_packages):
    with ev.read('test') as e:
        for _, root in e.concretized_specs():
            for spec in root.traverse(root=True):
                tgt = os.path.join('prefix', '.install', spec.dag_hash())
                assert 'touch {}'.format(tgt) in all_out
                for task in ('.fetch', '.install'):
                    tgt = os.path.join('prefix', task, spec.dag_hash())
                    assert 'touch {}'.format(tgt) in all_out

    # Check whether make prefix/fetch-all only fetches
    fetch_out = make('prefix/fetch-all', '-n', '-f', makefile, output=str)
    assert '.install/' not in fetch_out
    assert '.fetch/' in fetch_out


def test_environment_depfile_out(tmpdir, mock_packages):
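The depfile hunk above drives per-task makefile targets. A sketch of the same workflow from the test's point of view, assuming the surrounding module's `env` and `make` command wrappers and a target prefix of 'prefix' (both assumptions, inferred from the hunk rather than shown in it):

    env('depfile', '-o', makefile, '--make-target-prefix', 'prefix')

    # Dry-run only the fetch phase: expect .fetch/<hash> targets and
    # no .install/<hash> targets in the output.
    fetch_out = make('prefix/fetch-all', '-n', '-f', makefile, output=str)
    assert '.fetch/' in fetch_out and '.install/' not in fetch_out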
@@ -8,8 +8,6 @@

import pytest

from llnl.util.filesystem import getuid, touch

import spack
import spack.detection
import spack.detection.path
@@ -45,7 +43,7 @@ def define_plat_exe(exe):

def test_find_external_single_package(mock_executable, executables_found,
                                      _platform_executables):
    pkgs_to_check = [spack.repo.path.get_pkg_class('cmake')]
    pkgs_to_check = [spack.repo.get('cmake')]
    executables_found({
        mock_executable("cmake", output='echo cmake version 1.foo'):
            define_plat_exe('cmake')
@@ -61,7 +59,7 @@ def test_find_external_single_package(mock_executable, executables_found,

def test_find_external_two_instances_same_package(mock_executable, executables_found,
                                                  _platform_executables):
    pkgs_to_check = [spack.repo.path.get_pkg_class('cmake')]
    pkgs_to_check = [spack.repo.get('cmake')]

    # Each of these cmake instances is created in a different prefix
    # In Windows, quoted strings are echo'd with quotes includes
@@ -196,53 +194,6 @@ def test_find_external_empty_default_manifest_dir(
    external('find')


@pytest.mark.skipif(sys.platform == 'win32',
                    reason="Can't chmod on Windows")
@pytest.mark.skipif(getuid() == 0, reason='user is root')
def test_find_external_manifest_with_bad_permissions(
        mutable_config, working_env, mock_executable, mutable_mock_repo,
        _platform_executables, tmpdir, monkeypatch):
    """The user runs 'spack external find'; the default path for storing
    manifest files exists but with insufficient permissions. Check that
    the command does not fail.
    """
    test_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
    test_manifest_file_path = os.path.join(test_manifest_dir, 'badperms.json')
    touch(test_manifest_file_path)
    monkeypatch.setenv('PATH', '')
    monkeypatch.setattr(spack.cray_manifest, 'default_path',
                        test_manifest_dir)
    try:
        os.chmod(test_manifest_file_path, 0)
        output = external('find')
        assert 'insufficient permissions' in output
        assert 'Skipping manifest and continuing' in output
    finally:
        os.chmod(test_manifest_file_path, 0o700)


def test_find_external_manifest_failure(
        mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
    """The user runs 'spack external find'; the manifest parsing fails with
    some exception. Ensure that the command still succeeds (i.e. moves on
    to other external detection mechanisms).
    """
    # First, create an empty manifest file (without a file to read, the
    # manifest parsing is skipped)
    test_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
    test_manifest_file_path = os.path.join(test_manifest_dir, 'test.json')
    touch(test_manifest_file_path)

    def fail():
        raise Exception()

    monkeypatch.setattr(
        spack.cmd.external, '_collect_and_consume_cray_manifest_files', fail)
    monkeypatch.setenv('PATH', '')
    output = external('find')
    assert 'Skipping manifest and continuing' in output


def test_find_external_nonempty_default_manifest_dir(
        mutable_database, mutable_mock_repo,
        _platform_executables, tmpdir, monkeypatch,
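
The recurring one-line swap in these hunks is the API this diff toggles between: `spack.repo.path.get_pkg_class(name)` returns the package class without requiring a spec, while the older `spack.repo.get(...)` returns a package instance. A minimal side-by-side sketch, using only the calls that appear in the tests above:

import spack.repo

pkg_cls = spack.repo.path.get_pkg_class('cmake')   # package class; no spec needed
pkg = spack.repo.get('cmake')                      # package instance via the repo
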
@@ -27,13 +27,16 @@

@pytest.fixture
def mock_spec():
    spec = spack.spec.Spec('externaltest').concretized()
    pkg = spack.repo.get(spec)

    # Make it look like the source was actually expanded.
    s = spack.spec.Spec('externaltest').concretized()
    source_path = s.package.stage.source_path
    source_path = pkg.stage.source_path
    mkdirp(source_path)
    yield s, s.package
    yield spec, pkg

    # Remove the spec from the mock stage area.
    shutil.rmtree(s.package.stage.path)
    shutil.rmtree(pkg.stage.path)


def test_location_build_dir(mock_spec):
@@ -85,13 +85,13 @@ def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config,

@pytest.fixture
def source_for_pkg_with_hash(mock_packages, tmpdir):
    s = spack.spec.Spec('trivial-pkg-with-valid-hash').concretized()
    local_url_basename = os.path.basename(s.package.url)
    pkg = spack.repo.get('trivial-pkg-with-valid-hash')
    local_url_basename = os.path.basename(pkg.url)
    local_path = os.path.join(str(tmpdir), local_url_basename)
    with open(local_path, 'w') as f:
        f.write(s.package.hashed_content)
        f.write(pkg.hashed_content)
    local_url = "file://" + local_path
    s.package.versions[spack.version.Version('1.0')]['url'] = local_url
    pkg.versions[spack.version.Version('1.0')]['url'] = local_url


def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config,
@@ -149,20 +149,16 @@ def test_find_recursive():


@pytest.mark.db
# DEPRECATED: remove blacklist in v0.20
@pytest.mark.parametrize("config_name", ["exclude", "blacklist"])
def test_find_recursive_excluded(database, module_configuration, config_name):
    module_configuration(config_name)
def test_find_recursive_blacklisted(database, module_configuration):
    module_configuration('blacklist')

    module('lmod', 'refresh', '-y', '--delete-tree')
    module('lmod', 'find', '-r', 'mpileaks ^mpich')


@pytest.mark.db
# DEPRECATED: remove blacklist in v0.20
@pytest.mark.parametrize("config_name", ["exclude", "blacklist"])
def test_loads_recursive_excluded(database, module_configuration, config_name):
    module_configuration(config_name)
def test_loads_recursive_blacklisted(database, module_configuration):
    module_configuration('blacklist')

    module('lmod', 'refresh', '-y', '--delete-tree')
    output = module('lmod', 'loads', '-r', 'mpileaks ^mpich')
@@ -170,7 +166,7 @@ def test_loads_recursive_excluded(database, module_configuration, config_name):

    assert any(re.match(r'[^#]*module load.*mpileaks', ln) for ln in lines)
    assert not any(re.match(r'[^#]module load.*callpath', ln) for ln in lines)
    assert any(re.match(r'## excluded or missing.*callpath', ln)
    assert any(re.match(r'## blacklisted or missing.*callpath', ln)
               for ln in lines)

    # TODO: currently there is no way to separate stdout and stderr when
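
The parametrized variants above follow a common deprecation-window pattern: one test body runs once with the new config key ('exclude') and once with the deprecated one ('blacklist') until the old spelling is dropped in v0.20. A minimal sketch of the shape, with a hypothetical normalization helper standing in for Spack's real schema handling:

import pytest

def normalize_exclude_key(config):
    # Hypothetical helper: accept the deprecated spelling but store it
    # under the new key, so one code path serves both.
    if 'blacklist' in config:
        config['exclude'] = config.pop('blacklist')
    return config

@pytest.mark.parametrize("config_name", ["exclude", "blacklist"])
def test_both_spellings(config_name):
    # Both spellings must behave identically during the deprecation window.
    conf = normalize_exclude_key({config_name: ['mpileaks']})
    assert conf['exclude'] == ['mpileaks']
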
@@ -8,7 +8,6 @@

import pytest

import spack.config
import spack.environment as ev
import spack.repo
from spack.main import SpackCommand
@@ -123,13 +122,3 @@ def fake_stage(pkg, mirror_only=False):

    # assert that all were staged
    assert len(expected) == 0


@pytest.mark.disable_clean_stage_check
def test_concretizer_arguments(mock_packages, mock_fetch):
    """Make sure stage also has --reuse and --fresh flags."""
    stage("--reuse", "trivial-install-test-package")
    assert spack.config.get("concretizer:reuse", None) is True

    stage("--fresh", "trivial-install-test-package")
    assert spack.config.get("concretizer:reuse", None) is False
@@ -47,8 +47,9 @@ class tag_path():


def test_tags_installed(install_mockery, mock_fetch):
    s = spack.spec.Spec('mpich').concretized()
    s.package.do_install()
    spec = spack.spec.Spec('mpich').concretized()
    pkg = spack.repo.get(spec)
    pkg.do_install()

    out = tags('-i')
    for tag in ['tag1', 'tag2']:
@@ -22,6 +22,7 @@
import spack.variant as vt
from spack.concretize import find_spec
from spack.spec import Spec
from spack.util.mock_package import MockPackageMultiRepo
from spack.version import ver

is_windows = sys.platform == 'win32'
@@ -43,7 +44,7 @@ def check_spec(abstract, concrete):
        cflag = concrete.compiler_flags[flag]
        assert set(aflag) <= set(cflag)

    for name in spack.repo.path.get_pkg_class(abstract.name).variants:
    for name in abstract.package.variants:
        assert name in concrete.variants

    for flag in concrete.compiler_flags.valid_compiler_flags():
@@ -355,11 +356,20 @@ def test_architecture_deep_inheritance(self, mock_targets):
        information from the root even when partial architecture information
        is provided by an intermediate dependency.
        """
        spec_str = ('mpileaks %gcc@4.5.0 os=CNL target=nocona'
                    ' ^dyninst os=CNL ^callpath os=CNL')
        spec = Spec(spec_str).concretized()
        for s in spec.traverse(root=False):
            assert s.architecture.target == spec.architecture.target
        default_dep = ('link', 'build')

        mock_repo = MockPackageMultiRepo()
        bazpkg = mock_repo.add_package('bazpkg', [], [])
        barpkg = mock_repo.add_package('barpkg', [bazpkg], [default_dep])
        mock_repo.add_package('foopkg', [barpkg], [default_dep])

        with spack.repo.use_repositories(mock_repo):
            spec = Spec('foopkg %gcc@4.5.0 os=CNL target=nocona' +
                        ' ^barpkg os=CNL ^bazpkg os=CNL')
            spec.concretize()

            for s in spec.traverse(root=False):
                assert s.architecture.target == spec.architecture.target

    def test_compiler_flags_from_user_are_grouped(self):
        spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
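
The rewritten test body above leans on two utilities worth noting: MockPackageMultiRepo builds an in-memory package hierarchy, and spack.repo.use_repositories() makes it the active repository for the duration of the with block, so the concretizer sees only the mock DAG. A condensed sketch of the same pattern, with hypothetical package names:

import spack.repo
from spack.util.mock_package import MockPackageMultiRepo

default_dep = ('link', 'build')
mock_repo = MockPackageMultiRepo()
leafpkg = mock_repo.add_package('leafpkg', [], [])
mock_repo.add_package('rootpkg', [leafpkg], [default_dep])

with spack.repo.use_repositories(mock_repo):
    # Inside this block, Spec('rootpkg') resolves against the mock repo only.
    pass
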
@@ -1160,16 +1160,15 @@ def test_bad_path_double_override(config):

def test_license_dir_config(mutable_config, mock_packages):
    """Ensure license directory is customizable"""
    expected_dir = spack.paths.default_license_dir
    assert spack.config.get("config:license_dir") == expected_dir
    assert spack.package.Package.global_license_dir == expected_dir
    assert spack.repo.path.get_pkg_class("a").global_license_dir == expected_dir
    assert spack.config.get("config:license_dir") == spack.paths.default_license_dir
    assert spack.package.Package.global_license_dir == spack.paths.default_license_dir
    assert spack.repo.get("a").global_license_dir == spack.paths.default_license_dir

    rel_path = os.path.join(os.path.sep, "foo", "bar", "baz")
    spack.config.set("config:license_dir", rel_path)
    assert spack.config.get("config:license_dir") == rel_path
    assert spack.package.Package.global_license_dir == rel_path
    assert spack.repo.path.get_pkg_class("a").global_license_dir == rel_path
    assert spack.repo.get("a").global_license_dir == rel_path


@pytest.mark.regression('22547')
@@ -751,7 +751,8 @@ def _populate(mock_db):
    """
    def _install(spec):
        s = spack.spec.Spec(spec).concretized()
        s.package.do_install(fake=True, explicit=True)
        pkg = spack.repo.get(s)
        pkg.do_install(fake=True, explicit=True)

    _install('mpileaks ^mpich')
    _install('mpileaks ^mpich2')
@@ -1003,7 +1004,7 @@ def __call__(self, filename):


@pytest.fixture()
def module_configuration(monkeypatch, request, mutable_config):
def module_configuration(monkeypatch, request):
    """Reads the module configuration file from the mock ones prepared
    for tests and monkeypatches the right classes to hook it in.
    """
@@ -1018,8 +1019,6 @@ def module_configuration(monkeypatch, request, mutable_config):
        spack.paths.test_path, 'data', 'modules', writer_key
    )

    # ConfigUpdate, when called, will modify configuration, so we need to use
    # the mutable_config fixture
    return ConfigUpdate(root_for_conf, writer_mod, writer_key, monkeypatch)
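
The comment added in the hunk above explains the fixture signature change: ConfigUpdate mutates Spack's configuration when invoked, so the fixture must depend on mutable_config to get a scratch configuration scope per test rather than the shared read-only one. A schematic of that dependency, assuming pytest's usual fixture semantics and Spack's own conftest fixtures:

import pytest

@pytest.fixture()
def module_configuration(monkeypatch, request, mutable_config):
    # Depending on mutable_config (not the plain config fixture) means
    # each test gets its own configuration scopes, so ConfigUpdate's
    # writes are rolled back automatically when the test ends.
    ...
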
@@ -10,7 +10,6 @@
logic needs to consume all related specs in a single pass).
"""
import json
import os

import pytest

@@ -33,7 +32,7 @@
    },
    "compiler": {
        "name": "gcc",
        "version": "10.2.0.cray"
        "version": "10.2.0"
    },
    "dependencies": {
        "packagey": {
@@ -157,7 +156,7 @@ def spec_json(self):
# Intended to match example_compiler_entry above
_common_compiler = JsonCompilerEntry(
    name='gcc',
    version='10.2.0.cray',
    version='10.2.0',
    arch={
        "os": "centos8",
        "target": "x86_64"
@@ -310,16 +309,8 @@ def test_failed_translate_compiler_name():

def create_manifest_content():
    return {
        # Note: the cray_manifest module doesn't use the _meta section right
        # now, but it is anticipated to be useful
        '_meta': {
            "file-type": "cray-pe-json",
            "system-type": "test",
            "schema-version": "1.3",
            "cpe-version": "22.06"
        },
        'specs': list(x.to_dict() for x in generate_openmpi_entries()),
        'compilers': [_common_compiler.compiler_json()]
        'compilers': []
    }
@@ -345,45 +336,3 @@ def test_read_cray_manifest(
        ' ^/openmpifakehasha'.split(),
        concretize=True)
    assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'


def test_read_cray_manifest_twice_no_compiler_duplicates(
        tmpdir, mutable_config, mock_packages, mutable_database):
    if spack.config.get('config:concretizer') == 'clingo':
        pytest.skip("The ASP-based concretizer is currently picky about "
                    " OS matching and will fail.")

    with tmpdir.as_cwd():
        test_db_fname = 'external-db.json'
        with open(test_db_fname, 'w') as db_file:
            json.dump(create_manifest_content(), db_file)

        # Read the manifest twice
        cray_manifest.read(test_db_fname, True)
        cray_manifest.read(test_db_fname, True)

        compilers = spack.compilers.all_compilers()
        filtered = list(c for c in compilers if
                        c.spec == spack.spec.CompilerSpec('gcc@10.2.0.cray'))
        assert(len(filtered) == 1)


def test_read_old_manifest_v1_2(
        tmpdir, mutable_config, mock_packages, mutable_database):
    """Test reading a file using the older format
    ('version' instead of 'schema-version').
    """
    manifest_dir = str(tmpdir.mkdir('manifest_dir'))
    manifest_file_path = os.path.join(manifest_dir, 'test.json')
    with open(manifest_file_path, 'w') as manifest_file:
        manifest_file.write("""\
{
  "_meta": {
    "file-type": "cray-pe-json",
    "system-type": "EX",
    "version": "1.3"
  },
  "specs": []
}
""")
    cray_manifest.read(manifest_file_path, True)
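
The round-trip exercised by the removed test above is small enough to sketch: serialize the manifest dict to a JSON file, then hand the path to cray_manifest.read; a second read must not duplicate compiler entries. A minimal sketch, assuming the import spelling `spack.cray_manifest` used elsewhere in this diff:

import json
import spack.cray_manifest as cray_manifest

def write_and_read_manifest(path, content, times=2):
    # Write the manifest once, then read it `times` times; reads must be
    # idempotent with respect to compiler registration.
    with open(path, 'w') as f:
        json.dump(content, f)
    for _ in range(times):
        cray_manifest.read(path, True)
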
@@ -9,6 +9,8 @@

from llnl.util.filesystem import mkdirp, touch, working_dir

import spack.config
import spack.repo
from spack.fetch_strategy import CvsFetchStrategy
from spack.spec import Spec
from spack.stage import Stage
@@ -45,14 +47,16 @@ def test_fetch(
    get_date = mock_cvs_repository.get_date

    # Construct the package under test
    spec = Spec('cvs-test').concretized()
    spec.package.versions[ver('cvs')] = test.args
    spec = Spec('cvs-test')
    spec.concretize()
    pkg = spack.repo.get(spec)
    pkg.versions[ver('cvs')] = test.args

    # Enter the stage directory and check some properties
    with spec.package.stage:
        spec.package.do_stage()
    with pkg.stage:
        pkg.do_stage()

        with working_dir(spec.package.stage.source_path):
        with working_dir(pkg.stage.source_path):
            # Check branch
            if test.branch is not None:
                assert get_branch() == test.branch
@@ -61,8 +65,8 @@ def test_fetch(
            if test.date is not None:
                assert get_date() <= test.date

            file_path = os.path.join(spec.package.stage.source_path, test.file)
            assert os.path.isdir(spec.package.stage.source_path)
            file_path = os.path.join(pkg.stage.source_path, test.file)
            assert os.path.isdir(pkg.stage.source_path)
            assert os.path.isfile(file_path)

            os.unlink(file_path)
@@ -71,10 +75,10 @@ def test_fetch(
            untracked_file = 'foobarbaz'
            touch(untracked_file)
            assert os.path.isfile(untracked_file)
            spec.package.do_restage()
            pkg.do_restage()
            assert not os.path.isfile(untracked_file)

            assert os.path.isdir(spec.package.stage.source_path)
            assert os.path.isdir(pkg.stage.source_path)
            assert os.path.isfile(file_path)
@@ -10,7 +10,7 @@ lmod:
  all:
    autoload: none
    filter:
      exclude_env_vars:
      environment_blacklist:
      - CMAKE_PREFIX_PATH
    environment:
      set:
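
The one-line config rename above (exclude_env_vars replacing environment_blacklist) has the same deprecation-window shape as the module tests earlier in this diff. A tiny sketch of tolerating both keys when reading the filter section, with a hypothetical helper name:

def env_var_filter(filter_section):
    # Prefer the new key, fall back to the deprecated one.
    return filter_section.get('exclude_env_vars',
                              filter_section.get('environment_blacklist', []))

assert env_var_filter({'environment_blacklist': ['CMAKE_PREFIX_PATH']}) == \
    ['CMAKE_PREFIX_PATH']
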