Compare commits
24 Commits
develop-20...hs/fix/rep

Commits (SHA1):
931fa7ff51, 68b08498b7, d951c4f112, ef2b596e3f, f07789febf, 4316c4fb00,
72871ebde8, 3b2163c718, 16067871e2, 53ae44163d, 02880a866c, a242e77e81,
e9b822a86a, 3ea16482a6, a13557ac94, 69e9841262, 65279dc6f3, 3623d5d20e,
50cc87500c, b4e039ad7b, 6227bd7986, da9fa24d15, 2929ea02a1, c99e654650
@@ -66,7 +66,7 @@ on these ideas for each distinct build system that Spack supports:

    build_systems/rocmpackage
    build_systems/sourceforgepackage

-For reference, the :py:mod:`Build System API docs <spack.build_systems>`
+For reference, the :py:mod:`Build System API docs <spack_repo.builtin.build_systems>`
 provide a list of build systems and methods/attributes that can be
 overridden. If you are curious about the implementation of a particular
 build system, you can view the source code by running:

@@ -90,7 +90,7 @@ packages. You can quickly find examples by running:

 You can then view these packages with ``spack edit``.

 This guide is intended to supplement the
-:py:mod:`Build System API docs <spack.build_systems>` with examples of
+:py:mod:`Build System API docs <spack_repo.builtin.build_systems>` with examples of
 how to override commonly used methods. It also provides rules of thumb
 and suggestions for package developers who are unfamiliar with a
 particular build system.

@@ -129,8 +129,8 @@ Adding flags to cmake

 To add additional flags to the ``cmake`` call, simply override the
 ``cmake_args`` function. The following example defines values for the flags
 ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
-and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
-:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
+and without the :meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define` and
+:meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:

 .. code-block:: python
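The body of that code block is not part of this hunk. For illustration only (not part of this changeset), a hypothetical package using the two helpers could look like this; the flag and variant names are the ones discussed above, everything else is made up::

    from spack.package import *


    class Mypackage(CMakePackage):
        """Hypothetical package, only to illustrate define()/define_from_variant()."""

        variant("hdf5", default=True, description="Enable HDF5 support")
        variant("threads", default=True, description="Enable threading")

        def cmake_args(self):
            return [
                # Spelled out by hand:
                "-DWHATEVER:STRING=somevalue",
                # With the define() helper:
                self.define("ENABLE_BROKEN_FEATURE", False),
                # Derived from variants with define_from_variant():
                self.define_from_variant("DETECT_HDF5", "hdf5"),
                self.define_from_variant("THREADS", "threads"),
            ]

``define_from_variant`` keeps the CMake flag and the Spack variant in sync, which is why the docs recommend it over hand-written ``-D`` strings.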
@@ -36,6 +36,7 @@

 os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
 sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
+sys.path.append(os.path.abspath("_spack_root/var/spack/repos/"))

 # Add the Spack bin directory to the path so that we can use its output in docs.
 os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root")

@@ -75,11 +76,20 @@

     apidoc_args
     + [
         "_spack_root/lib/spack/spack",
         "_spack_root/lib/spack/spack/package.py",  # sphinx struggles with os.chdir re-export.
         "_spack_root/lib/spack/spack/test/*.py",
         "_spack_root/lib/spack/spack/test/cmd/*.py",
     ]
 )
 sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])
+sphinx_apidoc(
+    apidoc_args
+    + [
+        "--implicit-namespaces",
+        "_spack_root/var/spack/repos/spack_repo",
+        "_spack_root/var/spack/repos/spack_repo/builtin/packages",
+    ]
+)

 # Enable todo items
 todo_include_todos = True

@@ -208,7 +218,7 @@ def setup(sphinx):

     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
+    ("py:class", "spack_repo.builtin.build_systems._checks.BuilderWithDefaults"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
@@ -103,6 +103,7 @@ or refer to the full manual below.

    :caption: API Docs

    Spack API Docs <spack>
+   Spack Builtin Repo <spack_repo>
    LLNL API Docs <llnl>

 ==================
@@ -69,7 +69,7 @@ An example for ``CMake`` is, for instance:

 The predefined steps for each build system are called "phases".
 In general, the name and order in which the phases will be executed can be
-obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or
+obtained by either reading the API docs at :py:mod:`~.spack_repo.builtin.build_systems`, or
 using the ``spack info`` command:

 .. code-block:: console

@@ -158,7 +158,7 @@ builder class explicitly. Using the same example as above, this reads:

         url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
         return url_fmt.format(version)

-class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
     def cmake_args(self):
         args = [
             self.define_from_variant("BUILD_CODEC", "codec"),

@@ -256,7 +256,7 @@ for details):

 #
 # See the Spack documentation for more information on packaging.
 # ----------------------------------------------------------------------------
-import spack.build_systems.autotools
+import spack_repo.builtin.build_systems.autotools
 from spack.package import *
@@ -3697,60 +3697,57 @@ the build system. The build systems currently supported by Spack are:

 +----------------------------------------------------------+----------------------------------+
 | **API docs** | **Description** |
 +==========================================================+==================================+
-| :class:`~spack.build_systems.generic` | Generic build system without any |
+| :class:`~spack_repo.builtin.build_systems.generic` | Generic build system without any |
 | | base implementation |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.makefile` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.makefile` | Specialized build system for |
 | | software built invoking |
 | | hand-written Makefiles |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.autotools` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.autotools` | Specialized build system for |
 | | software built using |
 | | GNU Autotools |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.cmake` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.cmake` | Specialized build system for |
 | | software built using CMake |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.maven` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.maven` | Specialized build system for |
 | | software built using Maven |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.meson` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.meson` | Specialized build system for |
 | | software built using Meson |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.nmake` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.nmake` | Specialized build system for |
 | | software built using NMake |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.qmake` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.qmake` | Specialized build system for |
 | | software built using QMake |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.scons` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.scons` | Specialized build system for |
 | | software built using SCons |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.waf` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.waf` | Specialized build system for |
 | | software built using Waf |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.r` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.r` | Specialized build system for |
 | | R extensions |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.octave` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.octave` | Specialized build system for |
 | | Octave packages |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.python` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.python` | Specialized build system for |
 | | Python extensions |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.perl` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.perl` | Specialized build system for |
 | | Perl extensions |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.ruby` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.ruby` | Specialized build system for |
 | | Ruby extensions |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.intel` | Specialized build system for |
-| | licensed Intel software |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.oneapi` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.oneapi` | Specialized build system for |
 | | Intel oneAPI software |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.aspell_dict` | Specialized build system for |
+| :class:`~spack_repo.builtin.build_systems.aspell_dict` | Specialized build system for |
 | | Aspell dictionaries |
 +----------------------------------------------------------+----------------------------------+
@@ -3762,7 +3759,7 @@ the build system. The build systems currently supported by Spack are:

 rare cases where manual intervention is needed we need to stress that a
 package base class depends on the *build system* being used, not the language of the package.
 For example, a Python extension installed with CMake would ``extends("python")`` and
-subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
+subclass from :class:`~spack_repo.builtin.build_systems.cmake.CMakePackage`.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 Overriding builder methods

@@ -3770,7 +3767,7 @@ Overriding builder methods

 Build-system "phases" have default implementations that fit most of the common cases:

-.. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/build_systems/autotools.py
    :pyobject: AutotoolsBuilder.configure
    :linenos:

@@ -3784,7 +3781,7 @@ configure arguments:

 Each specific build system has a list of attributes and methods that can be overridden to
 fine-tune the installation of a package without overriding an entire phase. To
-have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems`
+have more information on them the place to go is the API docs of the :py:mod:`~.spack_repo.builtin.build_systems`
 module.
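For illustration (not part of this changeset), overriding such an attribute usually means touching only one method of the package while the default phases stay intact; the package and variant names below are hypothetical::

    from spack.package import *


    class Libfoo(AutotoolsPackage):
        """Hypothetical Autotools-based package."""

        variant("shared", default=True, description="Build shared libraries")

        def configure_args(self):
            # Only the argument list changes; the default configure/build/install
            # phases provided by AutotoolsBuilder are reused unchanged.
            args = ["--disable-dependency-tracking"]
            args += self.enable_or_disable("shared")
            return args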
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -3826,7 +3823,7 @@ If the ``package.py`` has build instructions in a separate

 .. code-block:: python

-   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+   class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
        def install(self, pkg, spec, prefix):
            ...
@@ -3839,31 +3836,32 @@ Mixin base classes

 Besides build systems, there are other cases where common metadata and behavior can be extracted
 and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm``, share
 common dependencies and constraints. To factor these attributes into a single place, Spack provides
-a few mixin classes in the ``spack.build_systems`` module:
+a few mixin classes in the ``spack_repo.builtin.build_systems`` module:

-+---------------------------------------------------------------+----------------------------------+
-| **API docs** | **Description** |
-+===============================================================+==================================+
-| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
-| | use CUDA |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
-| | use ROCm |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.python.PythonExtension` | A helper class for Python |
-| | extensions |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
-| | from sourceforge.org |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
-| | from sourceware.org |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.xorg.XorgPackage` | A helper class for x.org |
-| | packages |
-+---------------------------------------------------------------+----------------------------------+
++----------------------------------------------------------------------------+----------------------------------+
+| **API docs** | **Description** |
++============================================================================+==================================+
+| :class:`~spack_repo.builtin.build_systems.cuda.CudaPackage` | A helper class for packages that |
+| | use CUDA |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.rocm.ROCmPackage` | A helper class for packages that |
+| | use ROCm |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
+| | |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.python.PythonExtension` | A helper class for Python |
+| | extensions |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
+| | from sourceforge.org |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
+| | from sourceware.org |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.xorg.XorgPackage` | A helper class for x.org |
+| | packages |
++----------------------------------------------------------------------------+----------------------------------+

 These classes should be used by adding them to the inheritance tree of the package that needs them,
 for instance:
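The example the documentation goes on to give is not part of this hunk; a sketch of such an inheritance tree (illustrative names only) would be::

    from spack.package import *


    class MySimulation(CMakePackage, CudaPackage):
        """Hypothetical package that picks up the CudaPackage mixin in
        addition to its build-system base class."""

        def cmake_args(self):
            args = [self.define_from_variant("USE_CUDA", "cuda")]
            if self.spec.satisfies("+cuda"):
                # cuda_arch is the multi-valued variant contributed by CudaPackage.
                archs = self.spec.variants["cuda_arch"].value
                args.append(self.define("CMAKE_CUDA_ARCHITECTURES", ";".join(archs)))
            return args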
@@ -3907,13 +3905,13 @@ Additional build instructions are split into separate builder classes:

 .. code-block:: python

-   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+   class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
        def cmake_args(self):
            return [
                self.define_from_variant("MY_FEATURE", "my_feature")
            ]

-   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
+   class AutotoolsBuilder(spack_repo.builtin.build_systems.autotools.AutotoolsBuilder):
        def configure_args(self):
            return self.with_or_without("my-feature", variant="my_feature")
@@ -1,660 +0,0 @@

====================================
Development Notes on Intel Packages
====================================

These are notes for concepts and development of
lib/spack/spack/build_systems/intel.py .

For documentation on how to *use* ``IntelPackage``, see
lib/spack/docs/build_systems/intelpackage.rst .

-------------------------------------------------------------------------------
Installation and path handling as implemented in ./intel.py
-------------------------------------------------------------------------------


***************************************************************************
Prefix differences between Spack-external and Spack-internal installations
***************************************************************************


Problem summary
~~~~~~~~~~~~~~~~

For Intel packages that were installed external to Spack, ``self.prefix`` will
be a *component-specific* path (e.g. to an MKL-specific dir hierarchy), whereas
for a package installed by Spack itself, ``self.prefix`` will be a
*vendor-level* path that holds one or more components (or parts thereof), and
must be further qualified down to a particular desired component.

It is possible that a similar conceptual difference is inherent to other
package families that use a common vendor-style installer.


Description
~~~~~~~~~~~~

Spack makes packages available through two routes, let's call them A and B:

A. Packages pre-installed external to Spack and configured *for* Spack
B. Packages built and installed *by* Spack.

For a user who is interested in building end-user applications, it should not
matter through which route any of its dependent packages has been installed.
Most packages natively support a ``prefix`` concept which unifies the two
routes just fine.

Intel packages, however, are more complicated because they consist of a number
of components that are released as a suite of varying extent, like "Intel
Parallel Studio *Foo* Edition", or subsetted into products like "MKL" or "MPI",
each of which also contain libraries from other components like the compiler
runtime and multithreading libraries. For this reason, an Intel package is
"anchored" during installation at a directory level higher than just the
user-facing directory that has the conventional hierarchy of ``bin``, ``lib``,
and others relevant for the end-product.

As a result, internal to Spack, there is a conceptual difference in what
``self.prefix`` represents for the two routes.

For route A, consider MKL installed outside of Spack. It will likely be one
product component among other products, at one particular release among others
that are installed in sibling or cousin directories on the local system.
Therefore, the path given to Spack in ``packages.yaml`` should be a
*product-specific and fully version-specific* directory. E.g., for an
``intel-mkl`` package, ``self.prefix`` should look like::

    /opt/intel/compilers_and_libraries_2018.1.163/linux/mkl

In this route, the interaction point with the user is encapsulated in an
environment variable which will be (in pseudo-code)::

    MKLROOT := {self.prefix}

For route B, a Spack-based installation of MKL will be placed in the directory
given to the ``./install.sh`` script of Intel's package distribution. This
directory is taken to be the *vendor*-specific anchor directory, playing the
same role as the default ``/opt/intel``. In this case, ``self.prefix`` will
be::

    $SPACK_ROOT/opt/spack/linux-centos6-x86_64/gcc-4.9.3/intel-mkl-2018.1.163-<HASH>

However, now the environment variable will have to be constructed as *several
directory levels down*::

    MKLROOT := {self.prefix}/compilers_and_libraries_2018.1.163/linux/mkl

A recent post on the Spack mailing list illustrates the confusion when route A
was taken while route B was the only one that was coded in Spack:
https://groups.google.com/d/msg/spack/x28qlmqPAys/Ewx6220uAgAJ


Solution
~~~~~~~~~

Introduce a series of functions which will return the appropriate
directories, regardless of whether the Intel package has been installed
external or internal to Spack:

========================== ==================================================
Function                   Example return values
-------------------------- --------------------------------------------------
normalize_suite_dir()      Spack-external installation:
                           /opt/intel/compilers_and_libraries_2018.1.163
                           Spack-internal installation:
                           $SPACK_ROOT/...<HASH>/compilers_and_libraries_2018.1.163
-------------------------- --------------------------------------------------
normalize_path('mkl')      <suite_dir>/linux/mkl
component_bin_dir()        <suite_dir>/linux/mkl/bin
component_lib_dir()        <suite_dir>/linux/mkl/lib/intel64
-------------------------- --------------------------------------------------
normalize_path('mpi')      <suite_dir>/linux/mpi
component_bin_dir('mpi')   <suite_dir>/linux/mpi/intel64/bin
component_lib_dir('mpi')   <suite_dir>/linux/mpi/intel64/lib
========================== ==================================================
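A sketch of how a package might consume these helpers when exporting the component's environment (hypothetical usage; only the helper names come from the table above)::

    class IntelMkl(IntelPackage):  # hypothetical
        def setup_run_environment(self, env):
            # normalize_path() hides whether self.prefix is the component dir
            # (Spack-external install) or the vendor-level anchor (Spack-internal).
            env.set("MKLROOT", self.normalize_path("mkl"))
            env.prepend_path("LD_LIBRARY_PATH", self.component_lib_dir("mkl"))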
*********************************
Analysis of directory layouts
*********************************

Let's look at some sample directory layouts, using ``ls -lF``,
but focusing on names and symlinks only.

Spack-born installation of ``intel-mkl@2018.1.163``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

::

    $ ls -l <prefix>

    bin/
        - compilervars.*sh (symlinked) ONLY

    compilers_and_libraries -> compilers_and_libraries_2018
        - generically-named entry point, stable across versions (one hopes)

    compilers_and_libraries_2018/
        - vaguely-versioned dirname, holding a stub hierarchy --ignorable

        $ ls -l compilers_and_libraries_2018/linux/
        bin            - actual compilervars.*sh (reg. files) ONLY
        documentation  -> ../../documentation_2018/
        lib            -> ../../compilers_and_libraries_2018.1.163/linux/compiler/lib/
        mkl            -> ../../compilers_and_libraries_2018.1.163/linux/mkl/
        pkg_bin        -> ../../compilers_and_libraries_2018.1.163/linux/bin/
        samples        -> ../../samples_2018/
        tbb            -> ../../compilers_and_libraries_2018.1.163/linux/tbb/

    compilers_and_libraries_2018.1.163/
        - Main "product" + a minimal set of libs from related products

        $ ls -l compilers_and_libraries_2018.1.163/linux/
        bin/       - compilervars.*sh, link_install*sh ONLY
        mkl/       - Main Product ==> to be assigned to MKLROOT
        compiler/  - lib/intel64_lin/libiomp5* ONLY
        tbb/       - tbb/lib/intel64_lin/gcc4.[147]/libtbb*.so* ONLY

    parallel_studio_xe_2018 -> parallel_studio_xe_2018.1.038/
    parallel_studio_xe_2018.1.038/
        - Alternate product packaging - ignorable

        $ ls -l parallel_studio_xe_2018.1.038/
        bin/                          - actual psxevars.*sh (reg. files)
        compilers_and_libraries_2018  -> <full_path>/comp...aries_2018.1.163
        documentation_2018            -> <full_path_prefix>/documentation_2018
        samples_2018                  -> <full_path_prefix>/samples_2018
        ...

    documentation_2018/
    samples_2018/
    lib -> compilers_and_libraries/linux/lib/
    mkl -> compilers_and_libraries/linux/mkl/
    tbb -> compilers_and_libraries/linux/tbb/
        - auxiliaries and convenience links

Spack-external installation of Intel-MPI 2018
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For MPI, the layout is slightly different than MKL. The prefix will have to
include an architecture directory (typically ``intel64``), which then contains
bin/, lib/, ..., all without further architecture branching. The environment
variable ``I_MPI_ROOT`` from the API documentation, however, must be the
package's top directory, not including the architecture.

FIXME: For MANPATH, need the parent dir.

::

    $ ls -lF /opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/
    bin64     -> intel64/bin/
    etc64     -> intel64/etc/
    include64 -> intel64/include/
    lib64     -> intel64/lib/

    benchmarks/
    binding/
    intel64/
    man/
    test/

The package contains an MPI-2019 preview; Curiously, its release notes contain
the tag: "File structure clean-up." I could not find further documentation on
this, however, so it is unclear what, if any, changes will make it to release.

https://software.intel.com/en-us/articles/restoring-legacy-path-structure-on-intel-mpi-library-2019

::

    $ ls -lF /opt/intel/compilers_and_libraries_2018.1.163/linux/mpi_2019/
    binding/
    doc/
    imb/
    intel64/
    man/
    test/

Spack-external installation of Intel Parallel Studio 2018
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This is the main product bundle that I actually downloaded and installed on my
system. Its nominal installation directory mostly holds merely symlinks
to components installed in sibling dirs::

    $ ls -lF /opt/intel/parallel_studio_xe_2018.1.038/
    advisor_2018                 -> /opt/intel/advisor_2018/
    clck_2018                    -> /opt/intel/clck/2018.1/
    compilers_and_libraries_2018 -> /opt/intel/comp....aries_2018.1.163/
    documentation_2018           -> /opt/intel/documentation_2018/
    ide_support_2018             -> /opt/intel/ide_support_2018/
    inspector_2018               -> /opt/intel/inspector_2018/
    itac_2018                    -> /opt/intel/itac/2018.1.017/
    man                          -> /opt/intel/man/
    samples_2018                 -> /opt/intel/samples_2018/
    vtune_amplifier_2018         -> /opt/intel/vtune_amplifier_2018/

    psxevars.csh -> ./bin/psxevars.csh*
    psxevars.sh  -> ./bin/psxevars.sh*
    bin/         - *vars.*sh scripts + sshconnectivity.exp ONLY

    licensing/
    uninstall*

The only relevant regular files are ``*vars.*sh``, but those also just churn
through the subordinate vars files of the components.
Installation model
~~~~~~~~~~~~~~~~~~~~

Intel packages come with an ``install.sh`` script that is normally run
interactively (in either text or GUI mode) but can run unattended with a
``--silent <file>`` option, which is of course what Spack uses.

Format of configuration file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The configuration file is conventionally called ``silent.cfg`` and has a simple
``token=value`` syntax. Before using the configuration file, the installer
calls ``<staging_dir>/pset/check.awk`` to validate it. Example paths to the
validator are::

    .../l_mkl_2018.1.163/pset/check.awk .
    .../parallel_studio_xe_2018_update1_cluster_edition/pset/check.awk

The tokens that are accepted in the configuration file vary between packages.
Tokens not supported for a given package **will cause the installer to stop
and fail.** This is particularly relevant for license-related tokens, which are
accepted only for packages that actually require a license.

Reference: [Intel's documentation](https://software.intel.com/en-us/articles/configuration-file-format)

See also: https://software.intel.com/en-us/articles/silent-installation-guide-for-intel-parallel-studio-xe-composer-edition-for-os-x

The following is from ``.../parallel_studio_xe_2018_update1_cluster_edition/pset/check.awk``:

* Tokens valid for all packages encountered::

    ACCEPT_EULA                         {accept, decline}
    CONTINUE_WITH_OPTIONAL_ERROR        {yes, no}
    PSET_INSTALL_DIR                    {/opt/intel, , filepat}
    CONTINUE_WITH_INSTALLDIR_OVERWRITE  {yes, no}
    COMPONENTS                          {ALL, DEFAULTS, , anythingpat}
    PSET_MODE                           {install, repair, uninstall}
    NONRPM_DB_DIR                       {, filepat}

    SIGNING_ENABLED                     {yes, no}
    ARCH_SELECTED                       {IA32, INTEL64, ALL}

* Mentioned but unexplained in ``check.awk``::

    NO_VALIDATE                         (?!)

* Only for licensed packages::

    ACTIVATION_SERIAL_NUMBER            {, snpat}
    ACTIVATION_LICENSE_FILE             {, lspat, filepat}
    ACTIVATION_TYPE                     {exist_lic, license_server,
                                         license_file, trial_lic,
                                         serial_number}
    PHONEHOME_SEND_USAGE_DATA           {yes, no}

* Only for Amplifier (obviously)::

    AMPLIFIER_SAMPLING_DRIVER_INSTALL_TYPE  {build, kit}
    AMPLIFIER_DRIVER_ACCESS_GROUP           {, anythingpat, vtune}
    AMPLIFIER_DRIVER_PERMISSIONS            {, anythingpat, 666}
    AMPLIFIER_LOAD_DRIVER                   {yes, no}
    AMPLIFIER_C_COMPILER                    {, filepat, auto, none}
    AMPLIFIER_KERNEL_SRC_DIR                {, filepat, auto, none}
    AMPLIFIER_MAKE_COMMAND                  {, filepat, auto, none}
    AMPLIFIER_INSTALL_BOOT_SCRIPT           {yes, no}
    AMPLIFIER_DRIVER_PER_USER_MODE          {yes, no}

* Only for MKL and Studio::

    CLUSTER_INSTALL_REMOTE              {yes, no}
    CLUSTER_INSTALL_TEMP                {, filepat}
    CLUSTER_INSTALL_MACHINES_FILE       {, filepat}

* "backward compatibility" (?)::

    INSTALL_MODE                        {RPM, NONRPM}
    download_only                       {yes}
    download_dir                        {, filepat}
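For illustration only, a minimal ``silent.cfg`` of the kind Spack has to generate could be emitted like this (the tokens are taken from the lists above; the values are purely illustrative)::

    # Sketch: write a token=value silent.cfg from a dictionary.
    tokens = {
        "ACCEPT_EULA": "accept",
        "PSET_INSTALL_DIR": "/opt/intel",
        "CONTINUE_WITH_INSTALLDIR_OVERWRITE": "yes",
        "COMPONENTS": "DEFAULTS",
        "PSET_MODE": "install",
        "ARCH_SELECTED": "INTEL64",
    }

    with open("silent.cfg", "w") as cfg:
        for token, value in tokens.items():
            cfg.write(f"{token}={value}\n")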
Details for licensing tokens
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Quoted from
https://software.intel.com/en-us/articles/configuration-file-format,
for reference:

[ed. note: As of 2018-05, the page incorrectly references ``ACTIVATION``, which
was used only until about 2012; this is corrected to ``ACTIVATION_TYPE`` here.]

    ...

    ``ACTIVATION_TYPE=exist_lic``
        This directive tells the install program to look for an existing
        license during the install process. This is the preferred method for
        silent installs. Take the time to register your serial number and get
        a license file (see below). Having a license file on the system
        simplifies the process. In addition, as an administrator it is good
        practice to know WHERE your licenses are saved on your system.
        License files are plain text files with a .lic extension. By default
        these are saved in /opt/intel/licenses which is searched by default.
        If you save your license elsewhere, perhaps under an NFS folder, set
        environment variable **INTEL_LICENSE_FILE** to the full path to your
        license file prior to starting the installation or use the
        configuration file directive ``ACTIVATION_LICENSE_FILE`` to specify the
        full pathname to the license file.

    Options for ``ACTIVATION_TYPE`` are ``{ exist_lic, license_file, server_lic,
    serial_number, trial_lic }``

    ``exist_lic``
        directs the installer to search for a valid license on the server.
        Searches will utilize the environment variable **INTEL_LICENSE_FILE**,
        search the default license directory /opt/intel/licenses, or use the
        ``ACTIVATION_LICENSE_FILE`` directive to find a valid license file.

    ``license_file``
        is similar to exist_lic but directs the installer to use
        ``ACTIVATION_LICENSE_FILE`` to find the license file.

    ``server_lic``
        is similar to exist_lic and exist_lic but directs the installer that
        this is a client installation and a floating license server will be
        contacted to active the product. This option will contact your
        floating license server on your network to retrieve the license
        information. BEFORE using this option make sure your client is
        correctly set up for your network including all networking, routing,
        name service, and firewall configuration. Insure that your client has
        direct access to your floating license server and that firewalls are
        set up to allow TCP/IP access for the 2 license server ports.
        server_lic will use **INTEL_LICENSE_FILE** containing a port@host format
        OR a client license file. The formats for these are described here
        https://software.intel.com/en-us/articles/licensing-setting-up-the-client-floating-license

    ``serial_number``
        directs the installer to use directive ``ACTIVATION_SERIAL_NUMBER`` for
        activation. This method will require the installer to contact an
        external Intel activation server over the Internet to confirm your
        serial number. Due to user and company firewalls, this method is more
        complex and hence error prone of the available activation methods. We
        highly recommend using a license file or license server for activation
        instead.

    ``trial_lic``
        is used only if you do not have an existing license and intend to
        temporarily evaluate the compiler. This method creates a temporary
        trial license in Trusted Storage on your system.

    ...
*******************
vars files
*******************

Intel's product packages contain a number of shell initialization files let's call them vars files.

There are three kinds:

#. Component-specific vars files, such as `mklvars` or `tbbvars`.
#. Toplevel vars files such as "psxevars". They will scan for all
   component-specific vars files associated with the product, and source them
   if found.
#. Symbolic links to either of them. Links may appear under a different name
   for backward compatibility.

At present, IntelPackage class is only concerned with the toplevel vars files,
generally found in the product's toplevel bin/ directory.

For reference, here is an overview of the names and locations of the vars files
in the 2018 product releases, as seen for Spack-native installation. NB: May be
incomplete as some components may have been omitted during installation.

Names of vars files seen::

    $ cd opt/spack/linux-centos6-x86_64
    $ find intel* -name \*vars.sh -printf '%f\n' | sort -u | nl
     1  advixe-vars.sh
     2  amplxe-vars.sh
     3  apsvars.sh
     4  compilervars.sh
     5  daalvars.sh
     6  debuggervars.sh
     7  iccvars.sh
     8  ifortvars.sh
     9  inspxe-vars.sh
    10  ippvars.sh
    11  mklvars.sh
    12  mpivars.sh
    13  pstlvars.sh
    14  psxevars.sh
    15  sep_vars.sh
    16  tbbvars.sh

Names and locations of vars files, sorted by Spack package name::

    $ cd opt/spack/linux-centos6-x86_64
    $ find intel* -name \*vars.sh -printf '%y\t%-15f\t%h\n' \
        | cut -d/ -f1,4- \
        | sed '/iccvars\|ifortvars/d; s,/,\t\t,; s,\.sh,,; s, */\(intel[/-]\),\1,' \
        | sort -k3,3 -k2,2 \
        | nl \
        | awk '{printf "%6i %-2s %-16s %-24s %s\n", $1, $2, $3, $4, $5}'

    --------------------------------------------------------------------------------------------------------
    item no.
    file or link
    name of vars file
    Spack package name
    dir relative to Spack install dir
    --------------------------------------------------------------------------------------------------------

     1 f mpivars intel compilers_and_libraries_2018.1.163/linux/mpi/intel64/bin
     2 f mpivars intel compilers_and_libraries_2018.1.163/linux/mpirt/bin/ia32_lin
     3 f tbbvars intel compilers_and_libraries_2018.1.163/linux/tbb/bin
     4 f pstlvars intel compilers_and_libraries_2018.1.163/linux/pstl/bin
     5 f compilervars intel compilers_and_libraries_2018.1.163/linux/bin
     6 f compilervars intel compilers_and_libraries_2018/linux/bin
     7 l compilervars intel bin
     8 f daalvars intel-daal compilers_and_libraries_2018.2.199/linux/daal/bin
     9 f psxevars intel-daal parallel_studio_xe_2018.2.046/bin
    10 l psxevars intel-daal parallel_studio_xe_2018.2.046
    11 f compilervars intel-daal compilers_and_libraries_2018.2.199/linux/bin
    12 f compilervars intel-daal compilers_and_libraries_2018/linux/bin
    13 l compilervars intel-daal bin
    14 f ippvars intel-ipp compilers_and_libraries_2018.2.199/linux/ipp/bin
    15 f psxevars intel-ipp parallel_studio_xe_2018.2.046/bin
    16 l psxevars intel-ipp parallel_studio_xe_2018.2.046
    17 f compilervars intel-ipp compilers_and_libraries_2018.2.199/linux/bin
    18 f compilervars intel-ipp compilers_and_libraries_2018/linux/bin
    19 l compilervars intel-ipp bin
    20 f mklvars intel-mkl compilers_and_libraries_2018.2.199/linux/mkl/bin
    21 f psxevars intel-mkl parallel_studio_xe_2018.2.046/bin
    22 l psxevars intel-mkl parallel_studio_xe_2018.2.046
    23 f compilervars intel-mkl compilers_and_libraries_2018.2.199/linux/bin
    24 f compilervars intel-mkl compilers_and_libraries_2018/linux/bin
    25 l compilervars intel-mkl bin
    26 f mpivars intel-mpi compilers_and_libraries_2018.2.199/linux/mpi_2019/intel64/bin
    27 f mpivars intel-mpi compilers_and_libraries_2018.2.199/linux/mpi/intel64/bin
    28 f psxevars intel-mpi parallel_studio_xe_2018.2.046/bin
    29 l psxevars intel-mpi parallel_studio_xe_2018.2.046
    30 f compilervars intel-mpi compilers_and_libraries_2018.2.199/linux/bin
    31 f compilervars intel-mpi compilers_and_libraries_2018/linux/bin
    32 l compilervars intel-mpi bin
    33 f apsvars intel-parallel-studio vtune_amplifier_2018.1.0.535340
    34 l apsvars intel-parallel-studio performance_snapshots_2018.1.0.535340
    35 f ippvars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/ipp/bin
    36 f ippvars intel-parallel-studio composer_xe_2015.6.233/ipp/bin
    37 f mklvars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/mkl/bin
    38 f mklvars intel-parallel-studio composer_xe_2015.6.233/mkl/bin
    39 f mpivars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/mpi/intel64/bin
    40 f mpivars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/mpirt/bin/ia32_lin
    41 f tbbvars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/tbb/bin
    42 f tbbvars intel-parallel-studio composer_xe_2015.6.233/tbb/bin
    43 f daalvars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/daal/bin
    44 f pstlvars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/pstl/bin
    45 f psxevars intel-parallel-studio parallel_studio_xe_2018.1.038/bin
    46 l psxevars intel-parallel-studio parallel_studio_xe_2018.1.038
    47 f sep_vars intel-parallel-studio vtune_amplifier_2018.1.0.535340
    48 f sep_vars intel-parallel-studio vtune_amplifier_2018.1.0.535340/target/android_v4.1_x86_64
    49 f advixe-vars intel-parallel-studio advisor_2018.1.1.535164
    50 f amplxe-vars intel-parallel-studio vtune_amplifier_2018.1.0.535340
    51 f inspxe-vars intel-parallel-studio inspector_2018.1.1.535159
    52 f compilervars intel-parallel-studio compilers_and_libraries_2018.1.163/linux/bin
    53 f compilervars intel-parallel-studio compilers_and_libraries_2018/linux/bin
    54 l compilervars intel-parallel-studio bin
    55 f debuggervars intel-parallel-studio debugger_2018/bin
********************
MPI linkage
********************


Library selection
~~~~~~~~~~~~~~~~~~~~~

In the Spack code so far, the library selections for MPI are:

::

    libnames = ['libmpifort', 'libmpi']
    if 'cxx' in self.spec.last_query.extra_parameters:
        libnames = ['libmpicxx'] + libnames
    return find_libraries(libnames,
                          root=self.component_lib_dir('mpi'),
                          shared=True, recursive=False)

The problem is that there are multiple library versions under ``component_lib_dir``::

    $ cd $I_MPI_ROOT
    $ find . -name libmpi.so | sort
    ./intel64/lib/debug/libmpi.so
    ./intel64/lib/debug_mt/libmpi.so
    ./intel64/lib/libmpi.so
    ./intel64/lib/release/libmpi.so
    ./intel64/lib/release_mt/libmpi.so

"mt" refers to multi-threading, not in the explicit sense but in the sense of being thread-safe::

    $ mpiifort -help | grep mt
        -mt_mpi         link the thread safe version of the Intel(R) MPI Library

Well, why should we not inspect what the canonical script does? The wrapper
has its own hardcoded "prefix=..." and can thus tell us what it will do, from a
*wiped environment* no less!::

    $ env - intel64/bin/mpiicc -show hello.c | ld-unwrap-args
    icc 'hello.c' \
        -I/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/include \
        -L/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib/release_mt \
        -L/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib \
        -Xlinker --enable-new-dtags \
        -Xlinker -rpath=/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib/release_mt \
        -Xlinker -rpath=/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib \
        -Xlinker -rpath=/opt/intel/mpi-rt/2017.0.0/intel64/lib/release_mt \
        -Xlinker -rpath=/opt/intel/mpi-rt/2017.0.0/intel64/lib \
        -lmpifort \
        -lmpi \
        -lmpigi \
        -ldl \
        -lrt \
        -lpthread
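A sketch of how that finding could translate into code, preferring the wrapper's default ``release_mt`` directory when it exists (illustrative only, not the logic that was in intel.py)::

    import os

    from llnl.util.filesystem import find_libraries


    def mpi_libs(component_lib_dir, libnames):
        # Prefer the thread-safe release_mt variant that mpiicc itself links
        # against; fall back to the plain lib directory otherwise.
        root = os.path.join(component_lib_dir, "release_mt")
        if not os.path.isdir(root):
            root = component_lib_dir
        return find_libraries(libnames, root=root, shared=True, recursive=False)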
MPI Wrapper options
~~~~~~~~~~~~~~~~~~~~~

For reference, here's the wrapper's builtin help output::

    $ mpiifort -help
    Simple script to compile and/or link MPI programs.
    Usage: mpiifort [options] <files>
    ----------------------------------------------------------------------------
    The following options are supported:
       -fc=<name> | -f90=<name>
                       specify a FORTRAN compiler name: i.e. -fc=ifort
       -echo           print the scripts during their execution
       -show           show command lines without real calling
       -config=<name>  specify a configuration file: i.e. -config=ifort for mpif90-ifort.conf file
       -v              print version info of mpiifort and its native compiler
       -profile=<name> specify a profile configuration file (an MPI profiling
                       library): i.e. -profile=myprofile for the myprofile.cfg file.
                       As a special case, lib<name>.so or lib<name>.a may be used
                       if the library is found
       -check_mpi      link against the Intel(R) Trace Collector (-profile=vtmc).
       -static_mpi     link the Intel(R) MPI Library statically
       -mt_mpi         link the thread safe version of the Intel(R) MPI Library
       -ilp64          link the ILP64 support of the Intel(R) MPI Library
       -no_ilp64       disable ILP64 support explicitly
       -fast           the same as -static_mpi + pass -fast option to a compiler.
       -t or -trace
                       link against the Intel(R) Trace Collector
       -trace-imbalance
                       link against the Intel(R) Trace Collector imbalance library
                       (-profile=vtim)
       -dynamic_log    link against the Intel(R) Trace Collector dynamically
       -static         use static linkage method
       -nostrip        turn off the debug information stripping during static linking
       -O              enable optimization
       -link_mpi=<name>
                       link against the specified version of the Intel(R) MPI Library
    All other options will be passed to the compiler without changing.
    ----------------------------------------------------------------------------
    The following environment variables are used:
       I_MPI_ROOT      the Intel(R) MPI Library installation directory path
       I_MPI_F90 or MPICH_F90
                       the path/name of the underlying compiler to be used
       I_MPI_FC_PROFILE or I_MPI_F90_PROFILE or MPIF90_PROFILE
                       the name of profile file (without extension)
       I_MPI_COMPILER_CONFIG_DIR
                       the folder which contains configuration files *.conf
       I_MPI_TRACE_PROFILE
                       specify a default profile for the -trace option
       I_MPI_CHECK_PROFILE
                       specify a default profile for the -check_mpi option
       I_MPI_CHECK_COMPILER
                       enable compiler setup checks
       I_MPI_LINK      specify the version of the Intel(R) MPI Library
       I_MPI_DEBUG_INFO_STRIP
                       turn on/off the debug information stripping during static linking
       I_MPI_FCFLAGS
                       special flags needed for compilation
       I_MPI_LDFLAGS
                       special flags needed for linking
    ----------------------------------------------------------------------------


Side Note: MPI version divergence in 2015 release
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The package `intel-parallel-studio@cluster.2015.6` contains both a full MPI
development version in `$prefix/impi` and an MPI Runtime under the
`composer_xe*` suite directory. Curiously, these have *different versions*,
with a release date nearly 1 year apart::

    $ $SPACK_ROOT/...uaxaw7/impi/5.0.3.049/intel64/bin/mpiexec --version
    Intel(R) MPI Library for Linux* OS, Version 5.0 Update 3 Build 20150804 (build id: 12452)
    Copyright (C) 2003-2015, Intel Corporation. All rights reserved.

    $ $SPACK_ROOT/...uaxaw7/composer_xe_2015.6.233/mpirt/bin/intel64/mpiexec --version
    Intel(R) MPI Library for Linux* OS, Version 5.0 Update 1 Build 20140709
    Copyright (C) 2003-2014, Intel Corporation. All rights reserved.

I'm not sure what to make of it.
**************
macOS support
**************

- On macOS, the Spack methods here only include support to integrate an
  externally installed MKL.

- URLs in child packages will be Linux-specific; macOS download packages
  are located in differently numbered dirs and are named m_*.dmg.
(File diff suppressed because it is too large.)
@@ -23,7 +23,7 @@

 _BUILDERS: Dict[int, "Builder"] = {}


-def builder(build_system_name: str):
+def register_builder(build_system_name: str):
     """Class decorator used to register the default builder
     for a given build-system.
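For illustration (a simplified stand-in, not the actual ``spack.builder`` module), the registration pattern behind the renamed decorator looks like this::

    from typing import Dict

    BUILDERS: Dict[str, type] = {}


    def register_builder(build_system_name: str):
        """Class decorator recording the default builder class for a
        build-system name in a module-level registry (sketch only)."""

        def _decorator(cls: type) -> type:
            BUILDERS[build_system_name] = cls
            return cls

        return _decorator


    @register_builder("cmake")
    class CMakeBuilder:
        ...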
@@ -52,6 +52,7 @@

 # See the Spack documentation for more information on packaging.
 # ----------------------------------------------------------------------------

+{package_class_import}
 from spack.package import *


@@ -85,6 +86,7 @@ class BundlePackageTemplate:
     """

     base_class_name = "BundlePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.bundle import BundlePackage"

     dependencies = """\
         # FIXME: Add dependencies if required.

@@ -114,6 +116,7 @@ def write(self, pkg_path):
             name=self.name,
             class_name=self.class_name,
             base_class_name=self.base_class_name,
+            package_class_import=self.package_class_import,
             url_def=self.url_def,
             versions=self.versions,
             dependencies="\n".join(all_deps),

@@ -126,6 +129,7 @@ class PackageTemplate(BundlePackageTemplate):
     """Provides the default values to be used for the package file template"""

     base_class_name = "Package"
+    package_class_import = "from spack_repo.builtin.build_systems.generic import Package"

     body_def = """\
     def install(self, spec, prefix):

@@ -146,6 +150,9 @@ class AutotoolsPackageTemplate(PackageTemplate):
     that *do* come with a ``configure`` script"""

     base_class_name = "AutotoolsPackage"
+    package_class_import = (
+        "from spack_repo.builtin.build_systems.autotools import AutotoolsPackage"
+    )

     body_def = """\
     def configure_args(self):

@@ -160,6 +167,9 @@ class AutoreconfPackageTemplate(PackageTemplate):
     that *do not* come with a ``configure`` script"""

     base_class_name = "AutotoolsPackage"
+    package_class_import = (
+        "from spack_repo.builtin.build_systems.autotools import AutotoolsPackage"
+    )

     dependencies = """\
         depends_on("autoconf", type="build")

@@ -186,6 +196,7 @@ class CargoPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for cargo-based packages"""

     base_class_name = "CargoPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.cargo import CargoPackage"

     body_def = ""


@@ -194,6 +205,7 @@ class CMakePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for CMake-based packages"""

     base_class_name = "CMakePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.cmake import CMakePackage"

     body_def = """\
     def cmake_args(self):
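With ``package_class_import`` substituted into the file template, a freshly generated ``package.py`` would presumably start along these lines (an illustrative rendering, not literal ``spack create`` output)::

    # Hypothetical head of a file generated from CMakePackageTemplate:
    from spack_repo.builtin.build_systems.cmake import CMakePackage

    from spack.package import *


    class MyLib(CMakePackage):
        """FIXME: Put a proper description of your package here."""

        def cmake_args(self):
            # FIXME: Add arguments other than the defaults.
            args = []
            return args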
@@ -208,6 +220,7 @@ class GoPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Go-module-based packages"""

     base_class_name = "GoPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.go import GoPackage"

     body_def = ""


@@ -216,6+229,7 @@ class LuaPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for LuaRocks-based packages"""

     base_class_name = "LuaPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.lua import LuaPackage"

     body_def = """\
     def luarocks_args(self):

@@ -237,6 +251,7 @@ class MesonPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for meson-based packages"""

     base_class_name = "MesonPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.meson import MesonPackage"

     body_def = """\
     def meson_args(self):

@@ -249,6 +264,7 @@ class QMakePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for QMake-based packages"""

     base_class_name = "QMakePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.qmake import QMakePackage"

     body_def = """\
     def qmake_args(self):

@@ -261,6 +277,7 @@ class MavenPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Maven-based packages"""

     base_class_name = "MavenPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.maven import MavenPackage"

     body_def = """\
     def build(self, spec, prefix):

@@ -272,6 +289,7 @@ class SconsPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for SCons-based packages"""

     base_class_name = "SConsPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.scons import SConsPackage"

     body_def = """\
     def build_args(self, spec, prefix):

@@ -285,6 +303,7 @@ class WafPackageTemplate(PackageTemplate):
     """Provides appropriate override for Waf-based packages"""

     base_class_name = "WafPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.waf import WafPackage"

     body_def = """\
     # FIXME: Override configure_args(), build_args(),

@@ -308,6 +327,7 @@ class RacketPackageTemplate(PackageTemplate):
     """Provides approriate overrides for Racket extensions"""

     base_class_name = "RacketPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.racket import RacketPackage"

     url_line = """\
     # FIXME: set the proper location from which to fetch your package

@@ -345,6 +365,7 @@ class PythonPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for python extensions"""

     base_class_name = "PythonPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.python import PythonPackage"

     dependencies = """\
     # FIXME: Only add the python/pip/wheel dependencies if you need specific versions

@@ -432,6 +453,7 @@ class RPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for R extensions"""

     base_class_name = "RPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.r import RPackage"

     dependencies = """\
     # FIXME: Add dependencies if required.

@@ -472,6 +494,7 @@ class PerlmakePackageTemplate(PackageTemplate):
     that come with a Makefile.PL"""

     base_class_name = "PerlPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.perl import PerlPackage"

     dependencies = """\
     # FIXME: Add dependencies if required:

@@ -509,6 +532,7 @@ class OctavePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for octave packages"""

     base_class_name = "OctavePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.octave import OctavePackage"

     dependencies = """\
     extends("octave")

@@ -531,6 +555,7 @@ class RubyPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Ruby packages"""

     base_class_name = "RubyPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.ruby import RubyPackage"

     dependencies = """\
     # FIXME: Add dependencies if required. Only add the ruby dependency

@@ -559,6 +584,7 @@ class MakefilePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Makefile packages"""

     base_class_name = "MakefilePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.makefile import MakefilePackage"

     body_def = """\
     def edit(self, spec, prefix):

@@ -573,6 +599,7 @@ class IntelPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for licensed Intel software"""

     base_class_name = "IntelOneApiPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.oneapi import IntelOneApiPackage"

     body_def = """\
     # FIXME: Override `setup_environment` if necessary."""

@@ -582,6 +609,7 @@ class SIPPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for SIP packages."""

     base_class_name = "SIPPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.sip import SIPPackage"

     body_def = """\
     def configure_args(self, spec, prefix):
@@ -28,7 +28,7 @@ def setup_parser(subparser):
"--build-system",
dest="path",
action="store_const",
const=spack.paths.build_systems_path,
const=os.path.join(spack.repo.PATH.repos[0].root, "build_systems"),
help="edit the build system with the supplied name",
)
excl_args.add_argument(

@@ -183,7 +183,7 @@ def pkg_grep(args, unknown_args):
grep.add_default_arg("--color=auto")

# determines number of files to grep at a time
grouper = lambda e: e[0] // 500
grouper = lambda e: e[0] // 100

# set up iterator and save the first group to ensure we don't end up with a group of size 1
groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)

@@ -332,18 +332,8 @@ def process_files(file_list, is_args):

rewrite_and_print_output(output, args, pat, replacement)

packages_isort_args = (
"--rm",
"spack.pkgkit",
"--rm",
"spack.package_defs",
"-a",
"from spack.package import *",
)
packages_isort_args = packages_isort_args + isort_args

# packages
process_files(filter(is_package, file_list), packages_isort_args)
process_files(filter(is_package, file_list), isort_args)
# non-packages
process_files(filter(lambda f: not is_package(f), file_list), isort_args)
@@ -5,6 +5,7 @@
import collections.abc
import contextlib
import errno
import glob
import os
import pathlib
import re
@@ -2424,19 +2425,11 @@ def display_specs(specs):

def make_repo_path(root):
"""Make a RepoPath from the repo subdirectories in an environment."""
path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)

if os.path.isdir(root):
for repo_root in os.listdir(root):
repo_root = os.path.join(root, repo_root)

if not os.path.isdir(repo_root):
continue

repo = spack.repo.from_path(repo_root)
path.put_last(repo)

return path
repos = [
spack.repo.from_path(os.path.dirname(p))
for p in glob.glob(os.path.join(root, "**", "repo.yaml"), recursive=True)
]
return spack.repo.RepoPath(*repos, cache=spack.caches.MISC_CACHE)


def manifest_file(env_name_or_dir):
@@ -2,7 +2,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# flake8: noqa: F401, E402
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""
@@ -13,17 +12,6 @@
# import most common types used in packages
from typing import Dict, List, Optional


class tty:
import llnl.util.tty as _tty

debug = _tty.debug
error = _tty.error
info = _tty.info
msg = _tty.msg
warn = _tty.warn


from llnl.util.filesystem import (
FileFilter,
FileList,
@@ -61,52 +49,7 @@ class tty:
from llnl.util.symlink import symlink

from spack.build_environment import MakeExecutable
from spack.build_systems.aspell_dict import AspellDictPackage
from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.bundle import BundlePackage
from spack.build_systems.cached_cmake import (
CachedCMakePackage,
cmake_cache_filepath,
cmake_cache_option,
cmake_cache_path,
cmake_cache_string,
)
from spack.build_systems.cargo import CargoPackage
from spack.build_systems.cmake import CMakePackage, generator
from spack.build_systems.compiler import CompilerPackage
from spack.build_systems.cuda import CudaPackage
from spack.build_systems.generic import Package
from spack.build_systems.gnu import GNUMirrorPackage
from spack.build_systems.go import GoPackage
from spack.build_systems.intel import IntelPackage
from spack.build_systems.lua import LuaPackage
from spack.build_systems.makefile import MakefilePackage
from spack.build_systems.maven import MavenPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.msbuild import MSBuildPackage
from spack.build_systems.nmake import NMakePackage
from spack.build_systems.octave import OctavePackage
from spack.build_systems.oneapi import (
INTEL_MATH_LIBRARIES,
IntelOneApiLibraryPackage,
IntelOneApiLibraryPackageWithSdk,
IntelOneApiPackage,
IntelOneApiStaticLibraryList,
)
from spack.build_systems.perl import PerlPackage
from spack.build_systems.python import PythonExtension, PythonPackage
from spack.build_systems.qmake import QMakePackage
from spack.build_systems.r import RPackage
from spack.build_systems.racket import RacketPackage
from spack.build_systems.rocm import ROCmPackage
from spack.build_systems.ruby import RubyPackage
from spack.build_systems.scons import SConsPackage
from spack.build_systems.sip import SIPPackage
from spack.build_systems.sourceforge import SourceforgePackage
from spack.build_systems.sourceware import SourcewarePackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.xorg import XorgPackage
from spack.builder import BaseBuilder
from spack.builder import BaseBuilder, Builder, register_builder
from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import (
@@ -138,7 +81,13 @@ class tty:
)
from spack.mixins import filter_compiler_wrappers
from spack.multimethod import default_args, when
from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
from spack.package_base import (
PackageBase,
build_system_flags,
env_flags,
inject_flags,
on_package_attributes,
)
from spack.package_completions import (
bash_completion_path,
fish_completion_path,
@@ -158,6 +107,126 @@ class tty:
cd = chdir
pwd = getcwd


class tty:
import llnl.util.tty as _tty

debug = _tty.debug
error = _tty.error
info = _tty.info
msg = _tty.msg
warn = _tty.warn


__all__ = [
"chdir",
"environ",
"getcwd",
"makedirs",
"mkdir",
"remove",
"removedirs",
"move",
"rmtree",
"Dict",
"List",
"Optional",
"FileFilter",
"FileList",
"HeaderList",
"LibraryList",
"ancestor",
"can_access",
"change_sed_delimiter",
"copy",
"copy_tree",
"filter_file",
"find",
"find_all_headers",
"find_first",
"find_headers",
"find_libraries",
"find_system_libraries",
"force_remove",
"force_symlink",
"install",
"install_tree",
"is_exe",
"join_path",
"keep_modification_time",
"library_extensions",
"mkdirp",
"remove_directory_contents",
"remove_linked_tree",
"rename",
"set_executable",
"set_install_permissions",
"touch",
"working_dir",
"symlink",
"MakeExecutable",
"BaseBuilder",
"determine_number_of_jobs",
"all_deptypes",
"build_system",
"can_splice",
"conditional",
"conflicts",
"depends_on",
"extends",
"license",
"maintainers",
"patch",
"provides",
"redistribute",
"requires",
"resource",
"variant",
"version",
"InstallError",
"NoHeadersError",
"NoLibrariesError",
"SkipTest",
"cache_extra_test_sources",
"check_outputs",
"find_required_file",
"get_escaped_text_output",
"install_test_root",
"test_part",
"filter_compiler_wrappers",
"default_args",
"when",
"build_system_flags",
"env_flags",
"inject_flags",
"on_package_attributes",
"bash_completion_path",
"fish_completion_path",
"zsh_completion_path",
"run_after",
"run_before",
"Spec",
"EnvironmentModifications",
"Executable",
"ProcessError",
"which",
"which_string",
"fix_darwin_install_name",
"Prefix",
"any_combination_of",
"auto_or_any_combination_of",
"disjoint_sets",
"Version",
"ver",
"env",
"cd",
"pwd",
"tty",
"Builder",
"PackageBase",
"register_builder",
]

# These are just here for editor support; they may be set when the build env is set up.
configure: Executable
make_jobs: int
@@ -583,7 +583,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
like ``homepage`` and, for a code-based package, ``url``, or functions
such as ``install()``.
There are many custom ``Package`` subclasses in the
``spack.build_systems`` package that make things even easier for
``spack_repo.builtin.build_systems`` package that make things even easier for
specific build systems.

"""

@@ -58,7 +58,7 @@
repos_path = os.path.join(var_path, "repos")
test_repos_path = os.path.join(var_path, "test_repos")
packages_path = os.path.join(repos_path, "spack_repo", "builtin")
mock_packages_path = os.path.join(test_repos_path, "builtin.mock")
mock_packages_path = os.path.join(test_repos_path, "spack_repo", "builtin_mock")

#
# Writable things in $spack/var/spack

@@ -85,7 +85,7 @@ def __init__(self, fullname: str, repo: "Repo", package_name: str) -> None:
self.package_name = package_name
path = repo.filename_for_package_name(package_name)
self.fullname = fullname
self.prepend = b"from spack.build_systems._package_api_v1 import *\n"
self.prepend = b"from spack_repo.builtin.build_systems._package_api_v1 import *\n"
super().__init__(self.fullname, path)

def path_stats(self, path):
@@ -173,7 +173,7 @@ def compute_loader(self, fullname: str):
def builtin_repo() -> "Repo":
"""Get the test repo if it is active, otherwise the builtin repo."""
try:
return PATH.get_repo("builtin.mock")
return PATH.get_repo("builtin_mock")
except UnknownNamespaceError:
return PATH.get_repo("builtin")
@@ -3,6 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import ast
import difflib
import os
import re
import shutil
@@ -82,7 +83,8 @@ def migrate_v1_to_v2(

errors = False

stack: List[Tuple[str, int]] = [(repo.root, 0)]
stack: List[Tuple[str, int]] = [(repo.packages_path, 0)]

while stack:
path, depth = stack.pop()

@@ -100,7 +102,11 @@ def migrate_v1_to_v2(
ino_to_relpath[entry.inode()] = entry.path[prefix_len:]

if entry.is_symlink():
symlink_to_ino[rel_path] = entry.stat(follow_symlinks=True).st_ino
try:
symlink_to_ino[rel_path] = entry.stat(follow_symlinks=True).st_ino
except OSError:
symlink_to_ino[rel_path] = -1 # dangling or no access

continue

elif entry.is_dir(follow_symlinks=False):
@@ -108,11 +114,7 @@ def migrate_v1_to_v2(
continue

# check if this is a package
if (
depth == 1
and rel_path.startswith(f"{subdirectory}{os.sep}")
and os.path.exists(os.path.join(entry.path, "package.py"))
):
if depth == 0 and os.path.exists(os.path.join(entry.path, "package.py")):
if "_" in entry.name:
print(
f"Invalid package name '{entry.name}': underscores are not allowed in "
@@ -140,12 +142,16 @@ def migrate_v1_to_v2(
rename_regex = re.compile("^(" + "|".join(re.escape(k) for k in rename.keys()) + ")")

if fix:
os.makedirs(new_root, exist_ok=True)
os.makedirs(os.path.join(new_root, repo.subdirectory), exist_ok=True)

def _relocate(rel_path: str) -> Tuple[str, str]:
return os.path.join(repo.root, rel_path), os.path.join(
new_root, rename_regex.sub(lambda m: rename[m.group(0)], rel_path)
)
old = os.path.join(repo.root, rel_path)
if rename:
new_rel = rename_regex.sub(lambda m: rename[m.group(0)], rel_path)
else:
new_rel = rel_path
new = os.path.join(new_root, new_rel)
return old, new

if not fix:
print("The following directories, files and symlinks will be created:\n", file=out)
@@ -215,6 +221,16 @@ def _relocate(rel_path: str) -> Tuple[str, str]:
return result, (updated_repo if fix else None)


def _spack_pkg_to_spack_repo(modulename: str) -> str:
# rewrite spack.pkg.builtin.foo -> spack_repo.builtin.packages.foo.package
parts = modulename.split(".")
assert parts[:2] == ["spack", "pkg"]
parts[0:2] = ["spack_repo"]
parts.insert(2, "packages")
parts.append("package")
return ".".join(parts)


def migrate_v2_imports(
packages_dir: str, root: str, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
) -> bool:
@@ -239,7 +255,6 @@ def migrate_v2_imports(
"Package": "spack_repo.builtin.build_systems.generic",
"GNUMirrorPackage": "spack_repo.builtin.build_systems.gnu",
"GoPackage": "spack_repo.builtin.build_systems.go",
"IntelPackage": "spack_repo.builtin.build_systems.intel",
"LuaPackage": "spack_repo.builtin.build_systems.lua",
"MakefilePackage": "spack_repo.builtin.build_systems.makefile",
"MavenPackage": "spack_repo.builtin.build_systems.maven",
@@ -292,12 +307,41 @@ def migrate_v2_imports(
#: Set of symbols of interest that are already defined through imports, assignments, or
#: function definitions.
defined_symbols: Set[str] = set()

best_line: Optional[int] = None

seen_import = False
module_replacements: Dict[str, str] = {}
parent: Dict[int, ast.AST] = {}

#: List of (line, col start, old, new) tuples of strings to be replaced inline.
inline_updates: List[Tuple[int, int, str, str]] = []

#: List of (line from, line to, new lines) tuples of line replacements
multiline_updates: List[Tuple[int, int, List[str]]] = []

with open(pkg_path, "r", encoding="utf-8", newline="") as file:
original_lines = file.readlines()

if len(original_lines) < 2: # assume packagepy files have at least 2 lines...
continue

if original_lines[0].endswith("\r\n"):
newline = "\r\n"
elif original_lines[0].endswith("\n"):
newline = "\n"
elif original_lines[0].endswith("\r"):
newline = "\r"
else:
success = False
print(f"{pkg_path}: unknown line ending, cannot fix", file=err)
continue

updated_lines = original_lines.copy()

for node in ast.walk(tree):
for child in ast.iter_child_nodes(node):
if isinstance(child, ast.Attribute):
parent[id(child)] = node

# Get the last import statement from the first block of top-level imports
if isinstance(node, ast.Module):
for child in ast.iter_child_nodes(node):
@@ -317,7 +361,7 @@ def migrate_v2_imports(

if is_import:
if isinstance(child, (ast.stmt, ast.expr)):
best_line = (child.end_lineno or child.lineno) + 1
best_line = (getattr(child, "end_lineno", None) or child.lineno) + 1

if not seen_import and is_import:
seen_import = True
@@ -346,12 +390,89 @@ def migrate_v2_imports(
elif isinstance(node, ast.Name) and node.id in symbol_to_module:
referenced_symbols.add(node.id)

# Register imported symbols to make this operation idempotent
# Find lines where spack.pkg is used.
elif (
isinstance(node, ast.Attribute)
and isinstance(node.value, ast.Name)
and node.value.id == "spack"
and node.attr == "pkg"
):
# go as many attrs up until we reach a known module name to be replaced
known_module = "spack.pkg"
ancestor = node
while True:
next_parent = parent.get(id(ancestor))
if next_parent is None or not isinstance(next_parent, ast.Attribute):
break
ancestor = next_parent
known_module = f"{known_module}.{ancestor.attr}"
if known_module in module_replacements:
break

inline_updates.append(
(
ancestor.lineno,
ancestor.col_offset,
known_module,
module_replacements[known_module],
)
)

elif isinstance(node, ast.ImportFrom):
# Keep track of old style spack.pkg imports, to be replaced.
if node.module and node.module.startswith("spack.pkg.") and node.level == 0:

depth = node.module.count(".")

# simple case of find and replace
# from spack.pkg.builtin.my_pkg import MyPkg
# -> from spack_repo.builtin.packages.my_pkg.package import MyPkg
if depth == 3:
module_replacements[node.module] = _spack_pkg_to_spack_repo(node.module)
inline_updates.append(
(
node.lineno,
node.col_offset,
node.module,
module_replacements[node.module],
)
)

# non-trivial possible multiline case
# from spack.pkg.builtin import (boost, cmake as foo)
# -> import spack_repo.builtin.packages.boost.package as boost
# -> import spack_repo.builtin.packages.cmake.package as foo
elif depth == 2 and node.end_lineno is not None:
_, _, namespace = node.module.rpartition(".")
indent = original_lines[node.lineno - 1][: node.col_offset]
multiline_updates.append(
(
node.lineno,
node.end_lineno + 1,
[
f"{indent}import spack_repo.{namespace}.packages."
f"{alias.name}.package as {alias.asname or alias.name}"
f"{newline}"
for alias in node.names
],
)
)

else:
success = False
print(
f"{pkg_path}:{node.lineno}: don't know how to rewrite `{node.module}`",
file=err,
)

# Subtract the symbols that are imported so we don't repeatedly add imports.
for alias in node.names:
if alias.name in symbol_to_module:
defined_symbols.add(alias.name)
if node.module == "spack.package":
if alias.asname is None:
defined_symbols.add(alias.name)

# error when symbols are explicitly imported that are no longer available
if node.module == "spack.package" and node.level == 0:
success = False
print(
f"{pkg_path}:{node.lineno}: `{alias.name}` is imported from "
@@ -362,59 +483,84 @@ def migrate_v2_imports(
if alias.asname and alias.asname in symbol_to_module:
defined_symbols.add(alias.asname)

elif isinstance(node, ast.Import):
# normal imports are easy find and replace since they are single lines.
for alias in node.names:
if alias.asname and alias.asname in symbol_to_module:
defined_symbols.add(alias.name)
elif alias.asname is None and alias.name.startswith("spack.pkg."):
module_replacements[alias.name] = _spack_pkg_to_spack_repo(alias.name)
inline_updates.append(
(
alias.lineno,
alias.col_offset,
alias.name,
module_replacements[alias.name],
)
)

# Remove imported symbols from the referenced symbols
referenced_symbols.difference_update(defined_symbols)

if not referenced_symbols:
# Sort from last to first so we can modify without messing up the line / col offsets
inline_updates.sort(reverse=True)

# Nothing to change here.
if not inline_updates and not referenced_symbols:
continue

if best_line is None:
print(f"{pkg_path}: failed to update imports", file=err)
success = False
continue
# First do module replacements of spack.pkg imports
for line, col, old, new in inline_updates:
updated_lines[line - 1] = updated_lines[line - 1][:col] + updated_lines[line - 1][
col:
].replace(old, new, 1)

# Add the missing imports right after the last import statement
with open(pkg_path, "r", encoding="utf-8", newline="") as file:
lines = file.readlines()
# Then insert new imports for symbols referenced in the package
if referenced_symbols:
if best_line is None:
print(f"{pkg_path}: failed to update imports", file=err)
success = False
continue

# Group missing symbols by their module
missing_imports_by_module: Dict[str, list] = {}
for symbol in referenced_symbols:
module = symbol_to_module[symbol]
if module not in missing_imports_by_module:
missing_imports_by_module[module] = []
missing_imports_by_module[module].append(symbol)
# Group missing symbols by their module
missing_imports_by_module: Dict[str, list] = {}
for symbol in referenced_symbols:
module = symbol_to_module[symbol]
if module not in missing_imports_by_module:
missing_imports_by_module[module] = []
missing_imports_by_module[module].append(symbol)

new_lines = [
f"from {module} import {', '.join(sorted(symbols))}\n"
for module, symbols in sorted(missing_imports_by_module.items())
]
new_lines = [
f"from {module} import {', '.join(sorted(symbols))}{newline}"
for module, symbols in sorted(missing_imports_by_module.items())
]

if not seen_import:
new_lines.extend(("\n", "\n"))
if not seen_import:
new_lines.extend((newline, newline))
if not fix: # only print the diff
success = False # packages need to be fixed, but we didn't do it
diff_start, diff_end = max(1, best_line - 3), min(best_line + 2, len(lines))
num_changed = diff_end - diff_start + 1
num_added = num_changed + len(new_lines)
multiline_updates.append((best_line, best_line, new_lines))

multiline_updates.sort(reverse=True)
for start, end, new_lines in multiline_updates:
updated_lines[start - 1 : end - 1] = new_lines

if not fix:
rel_pkg_path = os.path.relpath(pkg_path, start=root)
out.write(f"--- a/{rel_pkg_path}\n+++ b/{rel_pkg_path}\n")
out.write(f"@@ -{diff_start},{num_changed} +{diff_start},{num_added} @@\n")
for line in lines[diff_start - 1 : best_line - 1]:
out.write(f" {line}")
for line in new_lines:
out.write(f"+{line}")
for line in lines[best_line - 1 : diff_end]:
out.write(f" {line}")
diff = difflib.unified_diff(
original_lines,
updated_lines,
n=3,
fromfile=f"a/{rel_pkg_path}",
tofile=f"b/{rel_pkg_path}",
)
out.write("".join(diff))
continue

lines[best_line - 1 : best_line - 1] = new_lines

tmp_file = pkg_path + ".tmp"

with open(tmp_file, "w", encoding="utf-8", newline="") as file:
file.writelines(lines)
# binary mode to avoid newline conversion issues; utf-8 was already required upon read.
with open(tmp_file, "wb") as file:
file.write("".join(updated_lines).encode("utf-8"))

os.replace(tmp_file, pkg_path)
@@ -12,8 +12,7 @@

import llnl.util.filesystem as fs

import spack.build_systems.autotools
import spack.build_systems.cmake
import spack
import spack.builder
import spack.concretize
import spack.environment
@@ -28,6 +27,8 @@

DATA_PATH = os.path.join(spack.paths.test_path, "data")

pytestmark = pytest.mark.skip(reason="build_systems module is moved out of spack")


@pytest.fixture()
def concretize_and_setup(default_mock_concretization, monkeypatch):

@@ -15,7 +15,7 @@

@pytest.fixture()
def builder_test_repository(config):
builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
builder_test_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "builder_test")
with spack.repo.use_repositories(builder_test_path) as mock_repo:
yield mock_repo

@@ -549,11 +549,10 @@ def test_url_buildcache_entry_v2_exists(
):
"""Test existence check for v2 buildcache entries"""
test_mirror_path = v2_buildcache_layout("unsigned")
mirror_url = f"file://{test_mirror_path}"
mirror_url = pathlib.Path(test_mirror_path).as_uri()
mirror("add", "v2mirror", mirror_url)

with capsys.disabled():
output = buildcache("list", "-a", "-l")
output = buildcache("list", "-a", "-l")

assert "Fetching an index from a v2 binary mirror layout" in output
assert "is deprecated" in output
@@ -2,134 +2,39 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import pytest

import spack.cmd.diff
import spack.concretize
import spack.main
import spack.paths
import spack.repo
import spack.util.spack_json as sjson
from spack.test.conftest import create_test_repo

install_cmd = spack.main.SpackCommand("install")
diff_cmd = spack.main.SpackCommand("diff")
find_cmd = spack.main.SpackCommand("find")


_p1 = (
"p1",
"""\
from spack.package import *

class P1(Package):
version("1.0")

variant("p1var", default=True)
variant("usev1", default=True)

depends_on("p2")
depends_on("v1", when="+usev1")
""",
)


_p2 = (
"p2",
"""\
from spack.package import *

class P2(Package):
version("1.0")

variant("p2var", default=True)

depends_on("p3")
""",
)


_p3 = (
"p3",
"""\
from spack.package import *

class P3(Package):
version("1.0")

variant("p3var", default=True)
""",
)

_i1 = (
"i1",
"""\
from spack.package import *

class I1(Package):
version("1.0")

provides("v1")

variant("i1var", default=True)

depends_on("p3")
depends_on("p4")
""",
)

_i2 = (
"i2",
"""\
from spack.package import *

class I2(Package):
version("1.0")

provides("v1")

variant("i2var", default=True)

depends_on("p3")
depends_on("p4")
""",
)
_p4 = (
"p4",
"""\
from spack.package import *

class P4(Package):
version("1.0")

variant("p4var", default=True)
""",
)


# Note that the hash of p1 will differ depending on the variant chosen
# we probably always want to omit that from diffs
@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
"""
p1____
| \
p2 v1
| ____/ |
p3 p4
# p1____
# | \
# p2 v1
# | ____/ |
# p3 p4

i1 and i2 provide v1 (and both have the same dependencies)
# i1 and i2 provide v1 (and both have the same dependencies)

All packages have an associated variant
"""
yield create_test_repo(tmpdir, [_p1, _p2, _p3, _i1, _i2, _p4])
# All packages have an associated variant


@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
yield mock_repo_path
def test_repo(config):
builder_test_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "diff")
with spack.repo.use_repositories(builder_test_path) as mock_repo:
yield mock_repo


def test_diff_ignore(test_repo):
@@ -6,7 +6,6 @@

import spack.repo
import spack.util.editor
from spack.build_systems import autotools, cmake
from spack.main import SpackCommand

edit = SpackCommand("edit")
@@ -29,13 +28,15 @@ def editor(*args: str, **kwargs):
assert called


def test_edit_files(monkeypatch):
def test_edit_files(monkeypatch, mock_packages):
"""Test spack edit --build-system autotools cmake"""
called = False

def editor(*args: str, **kwargs):
nonlocal called
called = True
from spack_repo.builtin_mock.build_systems import autotools, cmake # type: ignore

assert os.path.samefile(args[0], autotools.__file__)
assert os.path.samefile(args[1], cmake.__file__)

@@ -886,12 +886,12 @@ def test_env_activate_broken_view(
with spack.repo.use_repositories(mock_custom_repository):
wrong_repo = env("activate", "--sh", "test")
assert "Warning: could not load runtime environment" in wrong_repo
assert "Unknown namespace: builtin.mock" in wrong_repo
assert "Unknown namespace: builtin_mock" in wrong_repo

# test replacing repo fixes it
normal_repo = env("activate", "--sh", "test")
assert "Warning: could not load runtime environment" not in normal_repo
assert "Unknown namespace: builtin.mock" not in normal_repo
assert "Unknown namespace: builtin_mock" not in normal_repo


def test_to_lockfile_dict():
@@ -916,7 +916,7 @@ def test_env_repo():

pkg_cls = e.repo.get_pkg_class("mpileaks")
assert pkg_cls.name == "mpileaks"
assert pkg_cls.namespace == "builtin.mock"
assert pkg_cls.namespace == "builtin_mock"


def test_user_removed_spec(environment_from_manifest):
@@ -102,25 +102,25 @@ def _determine_version(cls, exe):
["detectable"],
[],
[
"builtin.mock.cmake",
"builtin.mock.find-externals1",
"builtin.mock.gcc",
"builtin.mock.intel-oneapi-compilers",
"builtin.mock.llvm",
"builtin.mock.mpich",
"builtin_mock.cmake",
"builtin_mock.find-externals1",
"builtin_mock.gcc",
"builtin_mock.intel-oneapi-compilers",
"builtin_mock.llvm",
"builtin_mock.mpich",
],
),
# find --all --exclude find-externals1
(
None,
["detectable"],
["builtin.mock.find-externals1"],
["builtin_mock.find-externals1"],
[
"builtin.mock.cmake",
"builtin.mock.gcc",
"builtin.mock.intel-oneapi-compilers",
"builtin.mock.llvm",
"builtin.mock.mpich",
"builtin_mock.cmake",
"builtin_mock.gcc",
"builtin_mock.intel-oneapi-compilers",
"builtin_mock.llvm",
"builtin_mock.mpich",
],
),
(
@@ -128,11 +128,11 @@ def _determine_version(cls, exe):
["detectable"],
["find-externals1"],
[
"builtin.mock.cmake",
"builtin.mock.gcc",
"builtin.mock.intel-oneapi-compilers",
"builtin.mock.llvm",
"builtin.mock.mpich",
"builtin_mock.cmake",
"builtin_mock.gcc",
"builtin_mock.intel-oneapi-compilers",
"builtin_mock.llvm",
"builtin_mock.mpich",
],
),
# find hwloc (and mock hwloc is not detectable)
@@ -14,12 +14,12 @@
import spack.cmd.find
import spack.concretize
import spack.environment as ev
import spack.paths
import spack.repo
import spack.store
import spack.user_environment as uenv
from spack.enums import InstallRecordStatus
from spack.main import SpackCommand
from spack.test.conftest import create_test_repo
from spack.test.utilities import SpackCommandArgs
from spack.util.pattern import Bunch

@@ -129,7 +129,7 @@ def test_tag2_tag3(parser, specs):
@pytest.mark.db
def test_namespaces_shown_correctly(args, with_namespace, database):
"""Test that --namespace(s) works. Old syntax is --namespace"""
assert ("builtin.mock.zmpi" in find(*args)) == with_namespace
assert ("builtin_mock.zmpi" in find(*args)) == with_namespace


@pytest.mark.db
@@ -462,89 +462,16 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path, mock_p
assert "zlib" in output


_pkga = (
"a0",
"""\
from spack.package import *

class A0(Package):
version("1.2")
version("1.1")

depends_on("b0")
depends_on("c0")
""",
)


_pkgb = (
"b0",
"""\
from spack.package import *

class B0(Package):
version("1.2")
version("1.1")
""",
)


_pkgc = (
"c0",
"""\
from spack.package import *

class C0(Package):
version("1.2")
version("1.1")

tags = ["tag0", "tag1"]
""",
)
_pkgd = (
"d0",
"""\
from spack.package import *

class D0(Package):
version("1.2")
version("1.1")

depends_on("c0")
depends_on("e0")
""",
)


_pkge = (
"e0",
"""\
from spack.package import *

class E0(Package):
tags = ["tag1", "tag2"]

version("1.2")
version("1.1")
""",
)
# a0 d0
# / \ / \
# b0 c0 e0


@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
r"""
a0 d0
/ \ / \
b0 c0 e0
"""
yield create_test_repo(tmpdir, [_pkga, _pkgb, _pkgc, _pkgd, _pkge])


@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
def test_repo(mock_stage):
with spack.repo.use_repositories(
os.path.join(spack.paths.test_repos_path, "spack_repo", "find")
) as mock_repo_path:
yield mock_repo_path
@@ -143,13 +143,13 @@ def test_list_count():

def test_list_repos():
with spack.repo.use_repositories(
os.path.join(spack.paths.test_repos_path, "builtin.mock"),
os.path.join(spack.paths.test_repos_path, "builder.test"),
os.path.join(spack.paths.test_repos_path, "spack_repo", "builtin_mock"),
os.path.join(spack.paths.test_repos_path, "spack_repo", "builder_test"),
):
total_pkgs = len(list().strip().split())
mock_pkgs = len(list("-r", "builtin.mock").strip().split())
builder_pkgs = len(list("-r", "builder.test").strip().split())
both_repos = len(list("-r", "builtin.mock", "-r", "builder.test").strip().split())
mock_pkgs = len(list("-r", "builtin_mock").strip().split())
builder_pkgs = len(list("-r", "builder_test").strip().split())
both_repos = len(list("-r", "builtin_mock", "-r", "builder_test").strip().split())

assert total_pkgs > mock_pkgs > builder_pkgs
assert both_repos == total_pkgs
@@ -39,7 +39,9 @@ def install(self, spec, prefix):
def mock_pkg_git_repo(git, tmp_path_factory):
"""Copy the builtin.mock repo and make a mutable git repo inside it."""
root_dir = tmp_path_factory.mktemp("mock_pkg_git_repo")
repo_dir = root_dir / "builtin.mock"
# create spack_repo subdir
(root_dir / "spack_repo").mkdir()
repo_dir = root_dir / "spack_repo" / "builtin_mock"
shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))

repo_cache = spack.util.file_cache.FileCache(root_dir / "cache")
@@ -57,25 +59,25 @@ def mock_pkg_git_repo(git, tmp_path_factory):
git("-c", "commit.gpgsign=false", "commit", "-m", "initial mock repo commit")

# add commit with mockpkg-a, mockpkg-b, mockpkg-c packages
mkdirp("mockpkg-a", "mockpkg-b", "mockpkg-c")
with open("mockpkg-a/package.py", "w", encoding="utf-8") as f:
mkdirp("mockpkg_a", "mockpkg_b", "mockpkg_c")
with open("mockpkg_a/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgA"))
with open("mockpkg-b/package.py", "w", encoding="utf-8") as f:
with open("mockpkg_b/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgB"))
with open("mockpkg-c/package.py", "w", encoding="utf-8") as f:
with open("mockpkg_c/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgC"))
git("add", "mockpkg-a", "mockpkg-b", "mockpkg-c")
git("add", "mockpkg_a", "mockpkg_b", "mockpkg_c")
git("-c", "commit.gpgsign=false", "commit", "-m", "add mockpkg-a, mockpkg-b, mockpkg-c")

# remove mockpkg-c, add mockpkg-d
with open("mockpkg-b/package.py", "a", encoding="utf-8") as f:
with open("mockpkg_b/package.py", "a", encoding="utf-8") as f:
f.write("\n# change mockpkg-b")
git("add", "mockpkg-b")
mkdirp("mockpkg-d")
with open("mockpkg-d/package.py", "w", encoding="utf-8") as f:
git("add", "mockpkg_b")
mkdirp("mockpkg_d")
with open("mockpkg_d/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgD"))
git("add", "mockpkg-d")
git("rm", "-rf", "mockpkg-c")
git("add", "mockpkg_d")
git("rm", "-rf", "mockpkg_c")
git(
"-c",
"commit.gpgsign=false",
@@ -90,7 +92,7 @@ def mock_pkg_git_repo(git, tmp_path_factory):

@pytest.fixture(scope="module")
def mock_pkg_names():
repo = spack.repo.PATH.get_repo("builtin.mock")
repo = spack.repo.PATH.get_repo("builtin_mock")

# Be sure to include virtual packages since packages with stand-alone
# tests may inherit additional tests from the virtuals they provide,
@@ -117,22 +119,22 @@ def test_builtin_repo():


def test_mock_builtin_repo(mock_packages):
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin.mock")
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin_mock")


def test_pkg_add(git, mock_pkg_git_repo):
with working_dir(mock_pkg_git_repo):
mkdirp("mockpkg-e")
with open("mockpkg-e/package.py", "w", encoding="utf-8") as f:
mkdirp("mockpkg_e")
with open("mockpkg_e/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgE"))

pkg("add", "mockpkg-e")

with working_dir(mock_pkg_git_repo):
try:
assert "A mockpkg-e/package.py" in git("status", "--short", output=str)
assert "A mockpkg_e/package.py" in git("status", "--short", output=str)
finally:
shutil.rmtree("mockpkg-e")
shutil.rmtree("mockpkg_e")
# Removing a package mid-run disrupts Spack's caching
if spack.repo.PATH.repos[0]._fast_package_checker:
spack.repo.PATH.repos[0]._fast_package_checker.invalidate()
@@ -95,24 +95,47 @@ class _7zip(Package):
pass
"""

OLD_NUMPY = b"""\
# some comment
# this is written like this to be explicit about line endings and indentation
OLD_NUMPY = (
b"# some comment\r\n"
b"\r\n"
b"import spack.pkg.builtin.foo, spack.pkg.builtin.bar\r\n"
b"from spack.package import *\r\n"
b"from something.unrelated import AutotoolsPackage\r\n"
b"\r\n"
b"if True:\r\n"
b"\tfrom spack.pkg.builtin import (\r\n"
b"\t\tfoo,\r\n"
b"\t\tbar as baz,\r\n"
b"\t)\r\n"
b"\r\n"
b"class PyNumpy(CMakePackage, AutotoolsPackage):\r\n"
b"\tgenerator('ninja')\r\n"
b"\r\n"
b"\tdef example(self):\r\n"
b"\t\t# unchanged comment: spack.pkg.builtin.foo.something\r\n"
b"\t\treturn spack.pkg.builtin.foo.example(), foo, baz\r\n"
)

from spack.package import *

class PyNumpy(CMakePackage):
generator("ninja")
"""

NEW_NUMPY = b"""\
# some comment

from spack_repo.builtin.build_systems.cmake import CMakePackage, generator
from spack.package import *

class PyNumpy(CMakePackage):
generator("ninja")
"""
NEW_NUMPY = (
b"# some comment\r\n"
b"\r\n"
b"import spack_repo.builtin.packages.foo.package, spack_repo.builtin.packages.bar.package\r\n"
b"from spack_repo.builtin.build_systems.cmake import CMakePackage, generator\r\n"
b"from spack.package import *\r\n"
b"from something.unrelated import AutotoolsPackage\r\n"
b"\r\n"
b"if True:\r\n"
b"\timport spack_repo.builtin.packages.foo.package as foo\r\n"
b"\timport spack_repo.builtin.packages.bar.package as baz\r\n"
b"\r\n"
b"class PyNumpy(CMakePackage, AutotoolsPackage):\r\n"
b"\tgenerator('ninja')\r\n"
b"\r\n"
b"\tdef example(self):\r\n"
b"\t\t# unchanged comment: spack.pkg.builtin.foo.something\r\n"
b"\t\treturn spack_repo.builtin.packages.foo.package.example(), foo, baz\r\n"
)
def test_repo_migrate(tmp_path: pathlib.Path, config):
@@ -142,7 +165,6 @@ def test_repo_migrate(tmp_path: pathlib.Path, config):
assert pkg_py_numpy_new.read_bytes() == NEW_NUMPY


@pytest.mark.not_on_windows("Known failure on windows")
def test_migrate_diff(git: Executable, tmp_path: pathlib.Path):
root, _ = spack.repo.create_repo(str(tmp_path), "foo", package_api=(2, 0))
r = pathlib.Path(root)

@@ -48,11 +48,13 @@ def test_resource_list(mock_packages, capfd):
assert "path:" in out

assert (
os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
os.path.join(
"spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
)
in out
)
assert "applies to: builtin.mock.libelf" in out
assert "patched by: builtin.mock.patch-a-dependency" in out
assert "applies to: builtin_mock.libelf" in out
assert "patched by: builtin_mock.patch-a-dependency" in out


def test_resource_list_only_hashes(mock_packages, capfd):
@@ -74,10 +76,12 @@ def test_resource_show(mock_packages, capfd):

assert out.startswith(test_hash)
assert (
os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
os.path.join(
"spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
)
in out
)
assert "applies to: builtin.mock.libelf" in out
assert "patched by: builtin.mock.patch-a-dependency" in out
assert "applies to: builtin_mock.libelf" in out
assert "patched by: builtin_mock.patch-a-dependency" in out

assert len(out.strip().split("\n")) == 4
@@ -241,14 +241,14 @@ def test_external_root(external_style_root, capfd):
assert "%s Imports are incorrectly sorted" % str(py_file) in output

# mypy error
assert 'lib/spack/spack/dummy.py:9: error: Name "Package" is not defined' in output
assert 'lib/spack/spack/dummy.py:47: error: Name "version" is not defined' in output

# black error
assert "--- lib/spack/spack/dummy.py" in output
assert "+++ lib/spack/spack/dummy.py" in output

# flake8 error
assert "lib/spack/spack/dummy.py:6: [F401] 'os' imported but unused" in output
assert "lib/spack/spack/dummy.py:8: [F401] 'os' imported but unused" in output


@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@@ -311,8 +311,10 @@ def test_run_import_check(tmp_path: pathlib.Path):
import spack.repo
import spack.repo_utils

from spack_repo.builtin_mock.build_systems import autotools

# this comment about spack.error should not be removed
class Example(spack.build_systems.autotools.AutotoolsPackage):
class Example(autotools.AutotoolsPackage):
"""this is a docstring referencing unused spack.error.SpackError, which is fine"""
pass

@@ -339,7 +341,6 @@ def foo(config: "spack.error.SpackError"):
assert "issues.py: redundant import: spack.repo" in output
assert "issues.py: redundant import: spack.config" not in output # comment prevents removal
assert "issues.py: missing import: spack" in output # used by spack.__version__
assert "issues.py: missing import: spack.build_systems.autotools" in output
assert "issues.py: missing import: spack.util.executable" in output
assert "issues.py: missing import: spack.error" not in output # not directly used
assert exit_code == 1
@@ -359,7 +360,6 @@ def foo(config: "spack.error.SpackError"):
assert exit_code == 1
assert "issues.py: redundant import: spack.cmd" in output
assert "issues.py: missing import: spack" in output
assert "issues.py: missing import: spack.build_systems.autotools" in output
assert "issues.py: missing import: spack.util.executable" in output

# after fix a second fix is idempotent
@@ -380,7 +380,6 @@ def foo(config: "spack.error.SpackError"):
new_contents = file.read_text()
assert "import spack.cmd" not in new_contents
assert "import spack\n" in new_contents
assert "import spack.build_systems.autotools\n" in new_contents
assert "import spack.util.executable\n" in new_contents


@@ -36,7 +36,7 @@ def test_remote_versions_only():
@pytest.mark.usefixtures("mock_packages")
def test_new_versions_only(monkeypatch):
"""Test a package for which new versions should be available."""
from spack.pkg.builtin.mock.brillig import Brillig # type: ignore[import]
from spack_repo.builtin_mock.packages.brillig.package import Brillig # type: ignore[import]

def mock_fetch_remote_versions(*args, **kwargs):
mock_remote_versions = {
@@ -29,7 +29,7 @@ def _concretize_with_reuse(*, root_str, reused_str):

@pytest.fixture
def runtime_repo(mutable_config):
repo = os.path.join(spack.paths.test_repos_path, "compiler_runtime.test")
repo = os.path.join(spack.paths.test_repos_path, "spack_repo", "compiler_runtime_test")
with spack.repo.use_repositories(repo) as mock_repo:
yield mock_repo


@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import platform
import sys

@@ -79,7 +80,7 @@ def binary_compatibility(monkeypatch, request):
return

if "mock_packages" not in request.fixturenames:
# Only builtin.mock has a mock glibc package
# Only builtin_mock has a mock glibc package
return

if "database" in request.fixturenames or "mutable_database" in request.fixturenames:
@@ -170,18 +171,12 @@ def reverser(pkg_name):

@pytest.fixture()
def repo_with_changing_recipe(tmp_path_factory, mutable_mock_repo):
repo_namespace = "changing"
repo_dir = tmp_path_factory.mktemp(repo_namespace)
repos_dir: pathlib.Path = tmp_path_factory.mktemp("repos_dir")
root, _ = spack.repo.create_repo(str(repos_dir), "changing")
packages_dir = pathlib.Path(root, "packages")

(repo_dir / "repo.yaml").write_text(
"""
repo:
namespace: changing
"""
)

packages_dir = repo_dir / "packages"
root_pkg_str = """
from spack_repo.builtin_mock.build_systems.generic import Package
from spack.package import *

class Root(Package):
@@ -199,6 +194,7 @@ class Root(Package):
package_py.write_text(root_pkg_str)

middle_pkg_str = """
from spack_repo.builtin_mock.build_systems.generic import Package
from spack.package import *

class Middle(Package):
@@ -213,6 +209,7 @@ class Middle(Package):
package_py.write_text(middle_pkg_str)

changing_template = """
from spack_repo.builtin_mock.build_systems.generic import Package
from spack.package import *

class Changing(Package):
@@ -235,7 +232,7 @@ class Changing(Package):
{% endif %}
"""

with spack.repo.use_repositories(str(repo_dir), override=False) as repository:
with spack.repo.use_repositories(root, override=False) as repos:
class _ChangingPackage:
default_context = [
@@ -244,27 +241,22 @@ class _ChangingPackage:
("add_variant", False),
]

def __init__(self, repo_directory):
self.repo_dir = repo_directory
def __init__(self):
cache_dir = tmp_path_factory.mktemp("cache")
self.repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
self.repo = spack.repo.Repo(str(repo_directory), cache=self.repo_cache)
self.repo = spack.repo.Repo(root, cache=self.repo_cache)

def change(self, changes=None):
changes = changes or {}
context = dict(self.default_context)
context.update(changes)
# Remove the repo object and delete Python modules
repository.remove(self.repo)
repos.remove(self.repo)
# TODO: this mocks a change in the recipe that should happen in a
# TODO: different process space. Leaving this comment as a hint
# TODO: in case tests using this fixture start failing.
if sys.modules.get("spack.pkg.changing.changing"):
del sys.modules["spack.pkg.changing.changing"]
if sys.modules.get("spack.pkg.changing.root"):
del sys.modules["spack.pkg.changing.root"]
if sys.modules.get("spack.pkg.changing"):
del sys.modules["spack.pkg.changing"]
for module in [x for x in sys.modules if x.startswith("spack_repo.changing")]:
del sys.modules[module]

# Change the recipe
t = _vendoring.jinja2.Template(changing_template)
@@ -274,10 +266,10 @@ def change(self, changes=None):
package_py.write_text(changing_pkg_str)

# Re-add the repository
self.repo = spack.repo.Repo(str(self.repo_dir), cache=self.repo_cache)
repository.put_first(self.repo)
self.repo = spack.repo.Repo(root, cache=self.repo_cache)
repos.put_first(self.repo)

_changing_pkg = _ChangingPackage(repo_dir)
_changing_pkg = _ChangingPackage()
_changing_pkg.change(
{"delete_version": False, "delete_variant": False, "add_variant": False}
)
@@ -374,11 +366,11 @@ def test_provides_handles_multiple_providers_of_same_version(self):
# Note that providers are repo-specific, so we don't misinterpret
# providers, but vdeps are not namespace-specific, so we can
# associate vdeps across repos.
assert Spec("builtin.mock.multi-provider-mpi@1.10.3") in providers
assert Spec("builtin.mock.multi-provider-mpi@1.10.2") in providers
assert Spec("builtin.mock.multi-provider-mpi@1.10.1") in providers
assert Spec("builtin.mock.multi-provider-mpi@1.10.0") in providers
assert Spec("builtin.mock.multi-provider-mpi@1.8.8") in providers
assert Spec("builtin_mock.multi-provider-mpi@1.10.3") in providers
assert Spec("builtin_mock.multi-provider-mpi@1.10.2") in providers
assert Spec("builtin_mock.multi-provider-mpi@1.10.1") in providers
assert Spec("builtin_mock.multi-provider-mpi@1.10.0") in providers
assert Spec("builtin_mock.multi-provider-mpi@1.8.8") in providers

def test_different_compilers_get_different_flags(
self, mutable_config, clang12_with_flags, gcc11_with_flags
@@ -1716,12 +1708,12 @@ def test_reuse_with_unknown_namespace_dont_raise(
|
||||
):
|
||||
with spack.repo.use_repositories(mock_custom_repository, override=False):
|
||||
s = spack.concretize.concretize_one("pkg-c")
|
||||
assert s.namespace != "builtin.mock"
|
||||
assert s.namespace != "builtin_mock"
|
||||
PackageInstaller([s.package], fake=True, explicit=True).install()
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = spack.concretize.concretize_one("pkg-c")
|
||||
assert s.namespace == "builtin.mock"
|
||||
assert s.namespace == "builtin_mock"
|
||||
|
||||
@pytest.mark.regression("45538")
|
||||
def test_reuse_from_other_namespace_no_raise(self, tmpdir, temporary_store, monkeypatch):
|
||||
@@ -1752,7 +1744,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
|
||||
repos.repos[0]._pkg_checker.invalidate()
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = spack.concretize.concretize_one("pkg-c")
|
||||
assert s.namespace == "builtin.mock"
|
||||
assert s.namespace == "builtin_mock"
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"specs,checks",
|
||||
@@ -2329,10 +2321,10 @@ def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):
|
||||
"spec_str,expected_namespaces",
|
||||
[
|
||||
# Single node with fully qualified namespace
|
||||
("builtin.mock.gmake", {"gmake": "builtin.mock"}),
|
||||
("builtin_mock.gmake", {"gmake": "builtin_mock"}),
|
||||
# Dependency with fully qualified namespace
|
||||
("hdf5 ^builtin.mock.gmake", {"gmake": "builtin.mock", "hdf5": "duplicates.test"}),
|
||||
("hdf5 ^gmake", {"gmake": "duplicates.test", "hdf5": "duplicates.test"}),
|
||||
("hdf5 ^builtin_mock.gmake", {"gmake": "builtin_mock", "hdf5": "duplicates_test"}),
|
||||
("hdf5 ^gmake", {"gmake": "duplicates_test", "hdf5": "duplicates_test"}),
|
||||
],
|
||||
)
|
||||
def test_select_lower_priority_package_from_repository_stack(
|
||||
@@ -2341,8 +2333,10 @@ def test_select_lower_priority_package_from_repository_stack(
|
||||
"""Tests that a user can explicitly select a lower priority, fully qualified dependency
|
||||
from the CLI.
|
||||
"""
|
||||
# 'builtin.mock" and "duplicates.test" share a 'gmake' package
|
||||
additional_repo = os.path.join(spack.paths.test_repos_path, "duplicates.test")
|
||||
# 'builtin_mock" and "duplicates_test" share a 'gmake' package
|
||||
additional_repo = os.path.join(
|
||||
spack.paths.test_repos_path, "spack_repo", "duplicates_test"
|
||||
)
|
||||
with spack.repo.use_repositories(additional_repo, override=False):
|
||||
s = spack.concretize.concretize_one(spec_str)
|
||||
|
||||
@@ -2586,7 +2580,7 @@ def test_correct_external_is_selected_from_packages_yaml(self, mutable_config):
|
||||
|
||||
@pytest.fixture()
|
||||
def duplicates_test_repository():
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "duplicates.test")
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "duplicates_test")
|
||||
with spack.repo.use_repositories(repository_path) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
@@ -2821,7 +2815,7 @@ def test_adding_specs(self, input_specs, default_mock_concretization):
|
||||
|
||||
@pytest.fixture()
|
||||
def edges_test_repository():
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "edges.test")
|
||||
repository_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "edges_test")
|
||||
with spack.repo.use_repositories(repository_path) as mock_repo:
|
||||
yield mock_repo
|
||||
|
||||
|
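The docstring in the hunk above describes selecting a fully qualified, lower-priority dependency from the command line. As a non-authoritative illustration (not part of this changeset), the following sketch reuses the fixtures, repository path, and spec strings that appear in this diff to show that flow under the renamed repositories:

import os

import spack.concretize
import spack.paths
import spack.repo


def test_lower_priority_gmake_sketch(mock_packages, mutable_config):
    # "duplicates_test" is layered on top of the active "builtin_mock" repo, so
    # an unqualified "gmake" would resolve to the higher-priority copy.
    extra = os.path.join(spack.paths.test_repos_path, "spack_repo", "duplicates_test")
    with spack.repo.use_repositories(extra, override=False):
        # Qualifying the dependency with the underscore namespace picks the
        # lower-priority package explicitly.
        s = spack.concretize.concretize_one("hdf5 ^builtin_mock.gmake")
        assert s.namespace == "duplicates_test"
        assert s["gmake"].namespace == "builtin_mock"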
@@ -46,7 +46,7 @@
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(mutable_config, monkeypatch, mock_stage):
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "flags.test"
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "spack_repo" / "flags_test"
|
||||
with spack.repo.use_repositories(str(repo_dir)) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
@@ -28,7 +28,7 @@ def update_packages_config(conf_str):
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(mutable_config, monkeypatch, mock_stage):
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "requirements.test"
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path) / "spack_repo" / "requirements_test"
|
||||
with spack.repo.use_repositories(str(repo_dir)) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
@@ -766,21 +766,21 @@ def test_skip_requirement_when_default_requirement_condition_cannot_be_met(
|
||||
|
||||
def test_requires_directive(mock_packages, config):
|
||||
# This package requires either clang or gcc
|
||||
s = spack.concretize.concretize_one("requires_clang_or_gcc")
|
||||
s = spack.concretize.concretize_one("requires-clang-or-gcc")
|
||||
assert s.satisfies("%gcc")
|
||||
s = spack.concretize.concretize_one("requires_clang_or_gcc %gcc")
|
||||
s = spack.concretize.concretize_one("requires-clang-or-gcc %gcc")
|
||||
assert s.satisfies("%gcc")
|
||||
s = spack.concretize.concretize_one("requires_clang_or_gcc %clang")
|
||||
s = spack.concretize.concretize_one("requires-clang-or-gcc %clang")
|
||||
# Test both the real package (llvm) and its alias (clang)
|
||||
assert s.satisfies("%llvm") and s.satisfies("%clang")
|
||||
|
||||
# This package can only be compiled with clang
|
||||
s = spack.concretize.concretize_one("requires_clang")
|
||||
s = spack.concretize.concretize_one("requires-clang")
|
||||
assert s.satisfies("%llvm")
|
||||
s = spack.concretize.concretize_one("requires_clang %clang")
|
||||
s = spack.concretize.concretize_one("requires-clang %clang")
|
||||
assert s.satisfies("%llvm")
|
||||
with pytest.raises(spack.error.SpackError, match="can only be compiled with Clang"):
|
||||
spack.concretize.concretize_one("requires_clang %gcc")
|
||||
spack.concretize.concretize_one("requires-clang %gcc")
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
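The assertions above exercise compiler requirements on mock packages; for orientation, here is a rough sketch of the kind of package definition they imply. The class name, homepage, and version are made up, and the exact contents of the mock "requires-clang" package are an assumption; only the requires() directive and the error message come from the tests above.

from spack.package import *


class RequiresClangSketch(Package):
    """Hypothetical package that can only be compiled with Clang."""

    homepage = "https://example.com/requires-clang-sketch"

    version("1.0")

    # Concretizing this package with %gcc fails with the message matched by the
    # test above; %clang (i.e. llvm) satisfies the requirement.
    requires("%clang", msg="can only be compiled with Clang")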
|
@@ -654,7 +654,7 @@ def mock_pkg_install(monkeypatch):
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mock_packages(mock_repo_path, mock_pkg_install, request):
|
||||
"""Use the 'builtin.mock' repository instead of 'builtin'"""
|
||||
"""Use the 'builtin_mock' repository instead of 'builtin'"""
|
||||
ensure_configuration_fixture_run_before(request)
|
||||
with spack.repo.use_repositories(mock_repo_path) as mock_repo:
|
||||
yield mock_repo
|
||||
@@ -1434,7 +1434,7 @@ def mock_git_repository(git, tmpdir_factory):
|
||||
of these refers to a repository with a single commit.
|
||||
|
||||
c0, c1, and c2 include information to define explicit versions in the
|
||||
associated builtin.mock package 'git-test'. c3 is a commit in the
|
||||
associated builtin_mock package 'git-test'. c3 is a commit in the
|
||||
repository but does not have an associated explicit package version.
|
||||
"""
|
||||
suburls = []
|
||||
@@ -2100,35 +2100,6 @@ def mock_modules_root(tmp_path, monkeypatch):
|
||||
monkeypatch.setattr(spack.modules.common, "root_path", fn)
|
||||
|
||||
|
||||
_repo_name_id = 0
|
||||
|
||||
|
||||
def create_test_repo(tmpdir, pkg_name_content_tuples):
|
||||
global _repo_name_id
|
||||
|
||||
repo_path = str(tmpdir)
|
||||
repo_yaml = tmpdir.join("repo.yaml")
|
||||
with open(str(repo_yaml), "w", encoding="utf-8") as f:
|
||||
f.write(
|
||||
f"""\
|
||||
repo:
|
||||
namespace: testrepo{str(_repo_name_id)}
|
||||
"""
|
||||
)
|
||||
|
||||
_repo_name_id += 1
|
||||
|
||||
packages_dir = tmpdir.join("packages")
|
||||
for pkg_name, pkg_str in pkg_name_content_tuples:
|
||||
pkg_dir = packages_dir.ensure(pkg_name, dir=True)
|
||||
pkg_file = pkg_dir.join("package.py")
|
||||
with open(str(pkg_file), "w", encoding="utf-8") as f:
|
||||
f.write(pkg_str)
|
||||
|
||||
repo_cache = spack.util.file_cache.FileCache(str(tmpdir.join("cache")))
|
||||
return spack.repo.Repo(repo_path, cache=repo_cache)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def compiler_factory():
|
||||
"""Factory for a compiler dict, taking a spec and an OS as arguments."""
|
||||
|
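The create_test_repo helper shown in the hunk above builds a throwaway Package API v1 repository from (name, contents) tuples. A rough usage sketch, with hypothetical package names and assuming the helper is in scope (this is an illustration, not part of the changeset):

import spack.repo

# Hypothetical one-package repository; the tuple follows the same pattern as
# the ones passed to create_test_repo elsewhere in the test suite.
_zlib_sketch = (
    "zlib-sketch",
    """\
from spack.package import *

class ZlibSketch(Package):
    version("1.0")
""",
)


def test_throwaway_repo_sketch(tmpdir, mutable_config):
    # create_test_repo writes repo.yaml plus packages/zlib-sketch/package.py
    # and returns a Repo object that use_repositories can activate directly.
    repo = create_test_repo(tmpdir, [_zlib_sketch])
    with spack.repo.use_repositories(repo):
        assert spack.repo.PATH.get_pkg_class("zlib-sketch") is not None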
@@ -2,10 +2,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class DiffTest(AutotoolsPackage):
|
||||
class DiffTest(PackageBase):
|
||||
"""zlib replacement with optimizations for next generation systems."""
|
||||
|
||||
homepage = "https://github.com/zlib-ng/zlib-ng"
|
||||
|
@@ -2,10 +2,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class DiffTest(AutotoolsPackage):
|
||||
class DiffTest(PackageBase):
|
||||
"""zlib replacement with optimizations for next generation systems."""
|
||||
|
||||
homepage = "https://github.com/zlib-ng/zlib-ng"
|
||||
|
@@ -2,10 +2,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import *
|
||||
|
||||
|
||||
class DiffTest(AutotoolsPackage):
|
||||
class DiffTest(PackageBase):
|
||||
"""zlib replacement with optimizations for next generation systems."""
|
||||
|
||||
homepage = "https://github.com/zlib-ng/zlib-ng"
|
||||
|
@@ -26,7 +26,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -70,7 +70,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -114,7 +114,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -137,7 +137,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -160,7 +160,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -183,7 +183,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -215,7 +215,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -253,7 +253,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -276,7 +276,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -314,7 +314,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -337,7 +337,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -368,7 +368,7 @@
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
@@ -57,7 +57,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -133,7 +133,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -209,7 +209,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -264,7 +264,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -319,7 +319,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -374,7 +374,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -438,7 +438,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -508,7 +508,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -563,7 +563,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -633,7 +633,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -688,7 +688,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -751,7 +751,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -806,7 +806,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -882,7 +882,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
@@ -58,7 +58,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -136,7 +136,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -214,7 +214,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -268,7 +268,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -322,7 +322,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -376,7 +376,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -440,7 +440,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -511,7 +511,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -565,7 +565,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -636,7 +636,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -690,7 +690,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -753,7 +753,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -807,7 +807,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
@@ -885,7 +885,7 @@
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
@@ -18,7 +18,7 @@ spec:
|
||||
compiler:
|
||||
name: gcc
|
||||
version: 4.5.0
|
||||
namespace: builtin.mock
|
||||
namespace: builtin_mock
|
||||
parameters:
|
||||
optimize: true
|
||||
pic: true
|
||||
|
@@ -1,29 +0,0 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGf23+EBEAC6UqaiE43cF9jFuVjA8xJ5j31BMhufpnk0cwoE5Iks/GgR/Hki
|
||||
LMYbzy36V7TZGObel+5DtFKipX+WCwWj2XsjbeqHeuCkxZhzHFwfi1UJl9FO2T28
|
||||
iNn6OsBiGeU6ULNmehSia2hx0uhj1re/FUwJExOAvuYv8nc7M+nozqi7Pp/WjP8v
|
||||
UTiqP2onzZJbidlSBvmZ2nheWk7G78e617gcV/ye+UyXZvciiF2UQBg9YV6D8JuD
|
||||
YhBbNAVOzJOiyOdTBmZmOkmYsGx58sEbFVqGeOMB0xoxZrqKjMm9NhvjqjJF/sWs
|
||||
hN/PD5ylW1UR05/fGxlG2GLKKfBInbdqnC101OFWXP5HenYHmKaBJoCKCAUfsoJ0
|
||||
r/t/GVh3z3w/99p0TRDONnTecKm5S9z3/5QjjE5RsWcd4ll7mRikUiVpe1WhKRwT
|
||||
4T76pQLq3XwNJqiOmuMQuSHoBE9OMufvRFiTYC0QHyLoCV2H5PCWtS2xSsIDN4PB
|
||||
0RNd0hnHKanVV7d2TkIrGOagoAo0wXqyW/Op6KUG1NdaFYYziDFEHeZxfGoPKytO
|
||||
iS5PEwZG2FqambAZhJU5OXwzgnCRIoE5DCZad4YS6U5YD/2zg+RrQ/5GUxl5Cc+W
|
||||
Zwesn9FV5jywx/oFePYbTSNQVPQ6jbUDvhmHvZ8c/OfGOVXQr0VpvfIwdwARAQAB
|
||||
tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv
|
||||
ZHlAbm93aGVyZS5jb20+iQJRBBMBCAA7FiEEqYoEuILhnYX9Nu4GlWXYCwVckv8F
|
||||
Amf23+ECGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQlWXYCwVckv9i
|
||||
pg//eGjBR9ph9hUYRsekzKWM1xB5zFOFfNoqlpCut/W7LAfy0XXkFy/y6EvPdcgn
|
||||
lLWRWPsOFfsKGwZd7LgSovhEMQ2MRsAUUB/KNZx7s6vO/P773PmJspF3odQ/lcrM
|
||||
1fum2lShChWqimdBdNLrXxG+8duO9uWaMBIp28diBCyB25M/MqpHtKYu00FB/QJ6
|
||||
ZwQH4OsgXVQHRjyrtIGx/2FQoWt0ah3eJMJCEw46GgkgiojtoTfXQQc4fIJP324b
|
||||
O1sxz5lx3xVBG/EZYzyV3xnSoG9aZNJ1cJq8EKO7ZoNKc/8jwkVu5gewGaXYI0LK
|
||||
/WkOeiXcSHPMSdu7TpnitvLYFCjc9YAEKQnjooXdt7+BElwC3+5hZJNXEnoGPMzn
|
||||
3UL60sQE/ViCsGcW+l9rtzXPNTmLMjEg4rGRqOhX+UmwyhvGD2QYbZtXlayu5xn+
|
||||
5m/PfmdqgL1xsdvNsLo/BOo+6kizMdBk48Xfp0YM8AC4BzUEENypGzC4T0WYF0k1
|
||||
Jfc6/eSwiytIcIkJ42GlaVfEFE8UxfYc1/2zqTBN9EdzWJqy0Bh+mVOgOaeb0Dzi
|
||||
xWpUpChi1fBB3PXWJ5iAS/w0HSVn4G5/JAIEFAs7r6ju2YtKBfuk+u/K5Q28mo7W
|
||||
6LrZQywN44nBMTvSQUhhXpSNYG+juyotXJUJ3F2u9Cf/jVU=
|
||||
=TkbL
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@@ -0,0 +1,29 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGgnhhYBEAC5LOSkJlxL4rRDBLDatswpzAw7NQnONW37hwOauEf6rlw/wk6J
|
||||
2D1l/jjmGwyo1iHOEu1/26fMuXMmG0vAxOQJFrkoKAgxDUD9nL0GqTJyg0+yTCN6
|
||||
xsWsrIZi+8oNDXYzLiejICZorc+ri11kcZdA+WE2hWPRStmJH75afpSd7XfNijqb
|
||||
MPfDZBcr+pLeARSH11BTfb8Dtm9qN//+X+pNIUqeHL9hLu/W9hb3GCfXqnsCQJA1
|
||||
WMFTrbCcPYm0R7EevMnscFvS8xbhocBPDwZ12f4W5CugrL29X4Vx9SaUlIyy/+SC
|
||||
2Gwi8Yq78Y4dTN7N5aA8L169/uqy4Tx7/966wMkUYXk7UxmH9E0ol5EZYnY9SCj6
|
||||
xLtMNKA+NLwESj0azaWEzxfztyNdTYfG8Eaa/QGFs1YVGhYdmcEp8KDbQg5FBeCA
|
||||
I6MUcH0XWOTJaZI/oEtukMYHzBt9jyyq6Gp45TiQvOou0wE+w/zJcd9Td23R81KW
|
||||
GfMh5r80NET/bx88vee4NNHkWCphhqs53rIrhWV3y3WKaWp7DfP3WMiTBJ+Yc+PI
|
||||
0vMIHKYNy+OqwTjmwgKdN1w1xZhLG7hx0sAdcZGP7q0A6381HtucgS/fucDogMnW
|
||||
H3anE8UGx4HBRjyXsuOaOAgNw2K4IwancUSf67WSzji3AiP46sUun5ERNQARAQAB
|
||||
tBlTcGFjayA8c3BhY2tAc3BhY2suc3BhY2s+iQJXBBMBCgBBFiEEy6ssEDLG/1B4
|
||||
BJ7A+mHVDBLK034FAmgnhhYCGwMFCQWjmoAFCwkIBwICIgIGFQoJCAsCBBYCAwEC
|
||||
HgcCF4AACgkQ+mHVDBLK034zWhAAtjm802qaTSCvB9WvY1RM65/B1GUK3ZEv3fw/
|
||||
Dvt3xd3mh+rzWBTJ8t7+/cPaOq7qOGnfUateHgou+0T6lgCLkrwr4lFa6yZSUATb
|
||||
xcnopcA0Dal218UcIRb20PjPtoKu3Tt9JFceXJGCTYoGz5HbkOemwkR8B+4qMRPW
|
||||
sn1IhV32eig2HUzrUXVOv6WomMtk2qUpND0WnTlZo3EoInJeTzdlXkOR3lRLADM9
|
||||
yPM6Rp8AV/ykM9DztL4SinzyZjqEM7o1H7EFITZSlkjcBPvqDlvowZGN8TVbG9TQ
|
||||
8Nfz8BYF3SVaPduwXwhbE9D8jqtNt652IZ1+1KbMii1l4deu0UYx8BSfJjNANTTU
|
||||
jFDiyNaGnn5OsZXNllsyAHWky6ApyBD9qFxxNr0kiWbVrrN6s2u4ghm5Hgtdx40v
|
||||
hA9+kvB2mtV/HklUkwDTJ6Ytgp5veh8GKvBD9eAWIitl6w153Rba5LkZbk2ijK6k
|
||||
oyN9Ge/YloSMwXpIEnE7/SRE1o5vye294BZjyqnr+U+wzbEYbC7eXJ0peDCbpbZc
|
||||
0kxMDDbrhmHeEaHeWF30hm6WBaUT4SUcPj5BiV3mt3BhtRgAwA3SvuSenk2yRzR8
|
||||
tBES4b/RBmOczfs4w4m5rAmfVNkNwykry4M2jPCJhVA2qG8q1gLxf+AvaPcAvQ8D
|
||||
kmDeNLI=
|
||||
=CYuA
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@@ -1,29 +0,0 @@
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGfHlp4BEAC5wkZSHqF9z6GcymuHpk1m9aNXCJdt4ZWvE8ck8GcuVu1nbzlZ
|
||||
h959jqtwk7nFMki5YaNMz6jcQf0eeS75viL4CoPAqFiVyhyCCh5am75h9F7vTBq6
|
||||
190017lhu9IgkAkiklnjfDbyXH+BwqJ78nXp6e6R4ShFMHNGGvYLem1wmPKzqPlZ
|
||||
zN0yjc0+d5pw4hu+IEFrM63yqGp2BVX1X132IKUEcROCQt1QOma5oORhYEtSCieX
|
||||
PuhuHJOA7q6nJuFccPCs5OcDS4IbQgGAbWL4L1+LAGVLVGpK4IVtqEZ831Srclh8
|
||||
0ruyFFeV/hqOONThwwile0Jwh5Jz/2sYxT5c+nlumXWK+CXTm4OCfGt1UuGy6c6u
|
||||
Rz84PHfanbKnATp6RUjz4DMREkmA6qBnUFqGLLGaBKBsm42b7kbo7m5aeItuOwLE
|
||||
U7AcnBEqqHLfI7O1zrHKjQCxhEWP/iok0kgEdiJ4tlPhfDjQRG6thlmZnVdt/08V
|
||||
+bvVkbYZyWPzjbG3QHyFew1+uzPHb2UopgpByVKYEWhCgNfcFtE56lEI9c40Ba5o
|
||||
LaZl0VlgfSLP4c+LoFB6gZp1gcVQuPo1JKd1v5WP60f1iHhazL5LEeMYcW6kvujK
|
||||
58Q683gSH5DsVAnxaj1uU4nvtKDh8IF1CNKKXk8RVsltdpv9bGhV8b4qVQARAQAB
|
||||
tD1UZXN0IFNpZ25pbmcgS2V5IChHUEcgY3JlYXRlZCBmb3IgU3BhY2spIDxub2Jv
|
||||
ZHlAbm93aGVyZS5jb20+iQJOBBMBCgA4FiEE6J1JcfAJex56PrVzcbSEgC54180F
|
||||
AmfHlp4CGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQcbSEgC54180aDg//
|
||||
f7GqIW5LzYqIqkey+IjdkSSfeD47tlWc2ukKYStHu0gTlHhrUp4rHNJ/s8XQ1o6o
|
||||
jwzWfNMYh68wt9sjuM2BEkkh3RUFEjVqqW+k562gS5ibfKTDtJb2Yj0n/CQKWvoi
|
||||
vUUzO88xW0AnZFieP+vD5iI5Zw4H2dY8cH4X1XlWAJufFdH4WBaZjujNwNOcCsnd
|
||||
w2nE050wKTR2wroWq0HKn1Ni3QNtKWPpLoHGAlhW6ACLa+EFqxHU6D3KhW6IV4Jc
|
||||
sdt36nHNiRiy6nT99asqtN6Z0Yw+EnQSuIDosIbmSgZoieINh0gU6AKwgydxLUxL
|
||||
Cu1w2fZHGuFR/ym0c/tTpM893DxHMc/EZ/SpU8fXkC9lYnQO3or/Y0mLHd0kSEv7
|
||||
XoonvcOu1tOQzmvrvUQUtTn4+6OKpGViyZG5C8Lbk8/yKWFv5b+Gpss/EiGTHSsk
|
||||
bPTHf5jMsWElv0GgFq2TpybtIcY52yJoZ1fBMEA9Nk76Y/MNFlN0d7HyS6tWGr6E
|
||||
8FWJB7RYG5XHMEDIKSheq+Q5cORwz92JPFI+sovZukp+20G7f7/gwos441KamJPc
|
||||
y1+M4uO21aKX2fA07bcgFtm25gNLoHyvjQLcmyDis6xogvciCV3iQ/mtunewgYp/
|
||||
lUX1dv0R5o8TteaAIkbJicbdLtur/iuAWN404E/QShc=
|
||||
=8P00
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
@@ -1 +1 @@
|
||||
{"keys":{"A98A04B882E19D85FD36EE069565D80B055C92FF":{},"E89D4971F0097B1E7A3EB57371B484802E78D7CD":{}}}
|
||||
{"keys":{"CBAB2C1032C6FF5078049EC0FA61D50C12CAD37E":{}}}
|
||||
|
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
7f94d6038bb4e5e7fff817151da5b22d7dd6d1e2d9ad51bd55504676786c17bd
|
||||
81a5add9d75b27fc4d16a4f72685b54903973366531b98c65e8cf5376758a817
|
||||
|
@@ -33,7 +33,7 @@ Hash: SHA512
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -85,7 +85,7 @@ Hash: SHA512
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -108,17 +108,17 @@ Hash: SHA512
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54
|
||||
180hlxAAisLofFhr/PQvLcQ79T3t3V0tqGgz9x6QnPKfbPCgvb66tTNlny+ML0fY
|
||||
y1H9xXQO53QOxfN9cdXcf2EVbRQ2eT6ltmwekI3ZZuCaTguflNu/i11UV6UnDy3x
|
||||
dXOYQhky5QjtPbhJ0NxG5XDKoRFoUPR/rgXsiNG5O0sk3M5H9ldpsj8af5W/6LCL
|
||||
gCTNM8fF0TVbd4MF9TiIECFBng2CrxhHwpl2gPHHxab1zxLRCF6t1lZvL6To0hmC
|
||||
e/Tqre+42PhRSCtXuwhK22r0rvreVUaiglYn8udjOJHwNVKdzLnTZ1OBAFeIq00U
|
||||
9uuroyaF841pq9+8PitwUORurv0lsnHUbfbi/+ou0HzMiaXzz+MPdOXt8nUuyScs
|
||||
oKOi8ExvpWJ7vn6klkvQtMK/Gakzd4YOxO/nk9K8BJgVN3qrODwHYSORk8RrdITS
|
||||
tkjiEJiIoklddiwCf3NUzlxiIYWbiqKqNbY+Pxh4B+OpVDnvRmpkJHgoSuVoCS8b
|
||||
coaOTIgqDpnIClHIj7ogxO+ureRjIIkGNNh6wVhlHDlgm1GzxNUOklMrzDkYMD01
|
||||
eTYxrbicw7ZVwqhFtR8olODKT9QAqXUJOkGHS9IA6FJctatkUkIOG1DSI52AZV1r
|
||||
PYzgdKtTxS60EkN8Igl6VMTkaC05anLygCTyOvGaV7sqVKmzHY8=
|
||||
=8OR5
|
||||
iQIzBAEBCgAdFiEEy6ssEDLG/1B4BJ7A+mHVDBLK034FAmgnhqIACgkQ+mHVDBLK
|
||||
0373kg/+Iy7pfWoAa465XtWUyf87KcjmJ1hE4OmfMc9sA7kdKNYPfmttxfp8jCU5
|
||||
gRc8RnQ5K+h4GWGl9nd6bFOT3oZSBH9WnH33gcnStHubwvHzhY05ZmlKjXKKTJmG
|
||||
rcQ8+vVv/e8KfMatydPuXQmAzbJ0pr2bGnicT8fs/W35hgcyygDZvDqJo3m+q4H7
|
||||
uu4C3LnaixAf7kCZefdxReYvFBNz9Qovws3+LqVFPxWgqo4zYt1PcI24UhCpL2YJ
|
||||
6XJySW7e0rR64bwCZR/owy504aUC64wr8kM19MMJAoB0R4zciJ0YyY8xLfRMI3Tr
|
||||
JTPetuTN7ncKJ2kZJ5L+KbeYnr4+CA5ZYmjyAM5NSJ3fTXuEu477H+1XovcJtP1s
|
||||
IZS10UWX452QEBXE5nWAludmiw4BenyR2Lccg2QfER8jbiZf3U3do43aGoI5U8rg
|
||||
qf1kQ/dMcIX6oSrbxMKymdsuf6e8UCSys3KNwb44UdSBiihgYFtiMfGtQ6Ixsvky
|
||||
TB+EwweUY6LtBuep1fh+M1tHgo9qCxUH79duor0JRDgQ/VLeO6e1RCptc7EHnQZQ
|
||||
mZK7YjVtHYWzyOZ4KsWuLYBSAMvKDhrTxI8cxp816NNGUfj1jmBQR/5vn6d7nMwX
|
||||
PmWrQV9O2e899Mv30VVR9XDf6tJoT+BPvS4Kc5hw/LxjaBbAxXo=
|
||||
=Zprh
|
||||
-----END PGP SIGNATURE-----
|
||||
|
@@ -33,7 +33,7 @@ Hash: SHA512
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -56,17 +56,17 @@ Hash: SHA512
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCgAdFiEE6J1JcfAJex56PrVzcbSEgC54180FAmfHlp8ACgkQcbSEgC54
|
||||
182ezg/7Bkil1mY6d4recJMkFhpBzzDs8aMD+WQOBPoy/bWHIGsPb1DyOOW7lTLa
|
||||
QC9jh9Rq02oMeX0LWvNg7k6iMTayWcrPzJwk1rgh3pg/ySgCTZ576/aP/UOZwA8h
|
||||
HT/3RzsDFlq7Wkh4yYaDgSEDVc5PgUevb1p2f126Z9HMFjG8siEWmuZQOcy4I9JG
|
||||
osQFtwWTLmx96sBMzweZTu2i3iGTPNz4Ae1hu+v5clmSFg43eW7EWChEVoob+3hb
|
||||
hLRxajZEPsIho4yR5yynoxduXeXrLLP7GH6XGnYt7Z2GJR0UamIrPfxYuWBK76V1
|
||||
03Ie2rRXwOKfsjDWw9Z8ziTVu25G0aZ274DX6eQyaWKfvzz69cBXO0fgw1lU8B9S
|
||||
K0j9k/xtnDCrIkPSh4QGQpFRlbzxkj20E+EnwgDCGIlK1rBzo2V5na4YNj+SbC91
|
||||
0BmWrj6dRkQZUMJHeb95kBMfFpKG5B6u7HQxZtIwHFAfF0nypbiB7xmdy/gAmUao
|
||||
ej3Cu34DvWtLVeSh7lRimeEc44WyBDk2YSPqYleAwYMZBn4WSozUS/KVLU2T/AhZ
|
||||
VlLaEBaFrVngmsw5PCdck0XRSNSAN9HUgPItpOzYig20NeT1/69wIlUZVNpLEYGT
|
||||
yvZsmqHFnkunAs6av3XmGl0i8rSA6DujunpNXML6hUciFEK5wg4=
|
||||
=Aq8h
|
||||
iQIzBAEBCgAdFiEEy6ssEDLG/1B4BJ7A+mHVDBLK034FAmgnhqIACgkQ+mHVDBLK
|
||||
036q+Q//XkOoRoZ5g3uyQTXTV3w6YCUezkvGv+WRV4oZfj0CElKf4KoW5bhdtWEM
|
||||
EBRC4UuFturk7m1KrgztKsEFq7vx0TxvbWjj5R64swrwczKkD7i5xjMhWZn0nrpk
|
||||
kzeKJw8zCr+o+qAHUoqTZAAf1GaMOwCKN8rZ5zrulbkrugPY783UKJtfyJc8+BPT
|
||||
dixOerTC5cvzFNHENIKXMTh7Pbww2jdnFCn2eGA1kmyJGkRFhKKQ9kerlUcfOdQB
|
||||
w51jMfgZRoG/hvSnrlrYHJQx1hpUiBV5eyEcLHnlbiJj7cNTvqcrt2nHpy/1Co1H
|
||||
5uiQou5I8ETTvTQrtWNgCtUBg1ZqaKZw8tanSY4cHXoeP5s4uQl1yTEGCEDDFB9y
|
||||
E/yO9xTfak3Avv1h6FZ2Lw+ipVLnlurtpo/jGmr4UgoKV4MZ1hFSseIEWQVyXJ+4
|
||||
kP2gZ/LZF84eYqRKANYGWbKp/fKJQgnn/nhKgySfx4dKHJFRpVNgiGzNYyYwOtOC
|
||||
BWrLIqgvETl+MZZPMPwt8T7ZCYIR5fzQ1itGM3ffmsh9DIvRyu32DRWBcqgiDE7o
|
||||
866L+C6Kk2RyCS8dB3Ep4LW7kO42k0Rq6cvkO8wV+CjbTF/i8OQEclDMxr+ruoN0
|
||||
IKEp2thRZA39iDHGAIPyCsryrZhpEJ+uOfMykWKc0j957CpXLck=
|
||||
=Qmpp
|
||||
-----END PGP SIGNATURE-----
|
||||
|
@@ -1,5 +1,5 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec":{
|
||||
@@ -57,7 +57,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -169,7 +169,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -193,7 +193,7 @@ Hash: SHA256
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
@@ -275,7 +275,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -353,7 +353,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -413,17 +413,17 @@ Hash: SHA256
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv9Xlg//d7uWhVbHjujSXRpoN3hzH5sUvvTSZ9xzvXGAXCoAu2oEGg4hxZPIFQJ3
|
||||
pZzKysZMfeFg+UKwDzex5TlKZ3JtKgCTKYl64zZfUl2EQgo/d/Fjz5mSFHW/6sa1
|
||||
1uTe3+sVt+HlijN72t2412Qbp+/uGvU+KBvXPA7kgkp88Kd/PL9xe3jlT9ytH5Nw
|
||||
3LIghe++JiepjFAKXTfIA04EjLb8c50AAxsK5Xx37HOOVHHQ8L9anFnOVYM+DxAz
|
||||
gn4dBYUQ9Uu5k5uEu5CwtxsED2/Yar7YWIepEnyp6z4zQVbwjO4/w0vZ3wSJ9c4P
|
||||
UhZs8V2akuqIWyzlQuBOjywnEQc/nw9v0py+Dr/Qr3U4XWh/LARWABMxa4IqXMOK
|
||||
aVmd6weVjV4U929gaOT/FCtZPfaFNRbk97YP8yAxuLhSdiGS0Mp16Ygz21fVWB7C
|
||||
UjkGGsKK1cdiJQ0m1CffmydU/nbDjSuw4WZIoIgDzvN7SFm7YBtE+xY+RUPsHU22
|
||||
QMAXojF5abwn48HJeP47MYdfR7+nUJq6XJiJ7/80a7Ciy8SAVxinQWqvigf/hmTf
|
||||
kAiQaqOVSlRBJ2yry5fYBKHSIRvghCqS4t4es8o13R7n2wz68VqKu0JkNlT3Ijjc
|
||||
QjJYtI+844PCDNetPVV8iNWF6upnTJnPHcFmKAEO1663hOc3Dh8=
|
||||
=3fA5
|
||||
iQIzBAEBCgAdFiEEy6ssEDLG/1B4BJ7A+mHVDBLK034FAmgnhqIACgkQ+mHVDBLK
|
||||
035oXBAAj12qztxIYhTbNRq0jpk7/ZfCLRDz/XyqzKx2JbS+p3DfZruVZV/OMZ9I
|
||||
Hlj9GYxQEwLGVsEMXoZDWtUytcte3m6sCG6H8fZGKw6IWQ6eiDR5i7TJWSuPvWGU
|
||||
NMH57kvSJlICLP9x6NWjQeyLAI4I3kASk+Ei/WHAGqIiP9CR1O5IXheMusPDAEjd
|
||||
2IR7khPvJTwpD6rzMHPou9BWk0Jqefb9qHhaJnc0Ga1D5HCS2VdGltViQ0XCX7/7
|
||||
nkWV9ad9NOvbO9oQYIW1jRY8D9Iw9vp2d77Dv5eUzI8or5c5x0VFAHpQL0FUxIR9
|
||||
LpHWUohDiAp3M4kmZqLBPl1Qf2jAXFXiSmcrLhKD5eWhdiwn3Bkhs2JiSiJpHt6K
|
||||
Sa970evIFcGw6sUBGznsuFxmXFfp84LYvzIVjacuzkm9WDvbEE/5pa2b5Pxr7BmH
|
||||
d2xDmAYmZVOso6INf3ZEXOyMBPWyGyq9Hy/8Nyg/+7w2d4ICEG/z/N13VsTqRoXc
|
||||
rb8I0xDE9iCXCelQJYlJcJ2UMZk9E76zd3Bd2WcgCTrrnHsg0fBjmNeyPJcBN8hA
|
||||
am5Lq/Cxqm2Jo2qnjoVmCt8/TBkvT2w8PTpR5uTEbLDl2ghyzxyBkX7a8ldKx55f
|
||||
aL8/OxN+u0pyISTDs5AoZ1YbhgDMiBiZV8ZDIB8PzU8pE78De3Q=
|
||||
=YbRr
|
||||
-----END PGP SIGNATURE-----
|
||||
|
@@ -1,5 +1,5 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec":{
|
||||
@@ -57,7 +57,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -157,7 +157,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -181,7 +181,7 @@ Hash: SHA256
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
@@ -263,7 +263,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -301,17 +301,17 @@ Hash: SHA256
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv/zSg/+NrS4JjT9TFSFR/q2vaN9aL7fSTunxp+M8eAzTmg0sgHc/D6ov2PMpUF7
|
||||
1E2mnZ2gL5a5dHtsSCf30ILFzQoD+m+I9yOwcJopcbEjr8pcnXBFe6TT8lkxlXtI
|
||||
EHNsYGMUHFbFvc+hFdWatQJicdDaIbdyEMGAC7Kobs/4KpdBF5VWV+sIrzD5+XzO
|
||||
ACiKRjBmcaJpa950nuEaFzBITgq1aDtZ0EEZdXYvjRnzj9Bm6gbqmWzlllW1wf4r
|
||||
5hSMTpAsRED4TxL433nuf0nKIvTD5Mywzs88kiLCtEABfDy1qccyBAnjyNypFF6B
|
||||
fPqSDnr33s+JQ35t7RcHKfrgowk69UablE25YOUrQP6LtH4QzLBLj4/Z0zuz33hO
|
||||
v+YYe51DgixsMQ2WCKWEO6sNcrcrLBJMFVwUP2FyTTdW3jCYRlFiTYLSfoDhTRJ/
|
||||
4o7f2eEp3sVoOe12jKI6dw/P+c70dl8K4+1ICcnZkwsb0pd0vt2z4J2kPs2+1/0g
|
||||
vpywJO1HL5Zy7/ZRlmeeSMHYEDX2eKhm7QRFbxw1IEbg3stQCA7a425JWztyJ05K
|
||||
sfhFQgPt7F/xanJVFYk/hdza+3+5pFr1K/ARcLFBdLBKGxAXTMMR+NkMp3J5NiOo
|
||||
SMZJ3jG6xA2ntvSkyx/GFawD0FpnlgEByU3E+R/WiQA4VojLpvo=
|
||||
=kfWI
|
||||
iQIzBAEBCgAdFiEEy6ssEDLG/1B4BJ7A+mHVDBLK034FAmgnhqIACgkQ+mHVDBLK
|
||||
0356nQ//aVMUZU8Ly8/b1H4nvKM8Vyd275aFK64rvO89mERDNiYIOKk1pmYSMldU
|
||||
+ltx2iIfVTUCEWYYJb/4UXWmw6SLAXIZ5mtrkALDAeDSih4wqIdevM3yii7pn8Oh
|
||||
/OEyDX8N5k05pnxFLYqR/2gA6vvdxHFd9/h4/zy2Z5w6m1hXb5jtS2ECyYN72nYN
|
||||
8QnnkXWZYturOhb4GawWY1l/rHIBqAseCQXSGR6UyrHTEGLUgT0+VQZwgxLNM4uG
|
||||
xj4xCDTgKiOesa5+3WE8Ug2wDIm48Prvg4qFmNrofguRNiIsNrl5k7wRiJWdfkjc
|
||||
gzs9URYddoCTRR2wpN0CaAQ268UlwZUCjPSrxgCNeqRi4Ob9Q4n37TKXNcVw46Ud
|
||||
MXRezAf+wyPGkq4vudh7cu11mHUcTeev82GM5bYQa6dSna3WvPpie/rx0TZYRkKE
|
||||
hesDW/41ZtFDANfXa7r011ngS5zZwak3zUaoqOdLNhN/xL4TFsZ19uSUdSZHAgSk
|
||||
9Sr3xodwV2D5H6gDuOtAo1vRod1Fx+yoi3BubX0sI5QuFgvtJrHVZmVj2bnGMBKI
|
||||
gR17q1ZHOmp3yPhVE9ZsiLKn9r3yIsfVhoTB6mXOnvq2q1fBxyrEpIGzIUmWfuTm
|
||||
vLn4nXt7PD78msiG/GZt6fShYBAwVfuvG+M1AQrsyGGoW2Bty7M=
|
||||
=hLvB
|
||||
-----END PGP SIGNATURE-----
|
||||
|
@@ -1,5 +1,5 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec":{
|
||||
@@ -57,7 +57,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -83,17 +83,17 @@ Hash: SHA256
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv/T8BAAhK/v7CP6lMIKILj35nEi+Gftjs7B7f6qvb4QNtqcGHum6z9t3JxkOOrd
|
||||
+q+Wd329kLYAFs/y9eaGe5X7wY1U7/f863i3XrxHbtmrnMci61D8qMjA1xnBGC+5
|
||||
yd746aVeV/VRbJxTeB9kGcKPMcIQYcearlDMgj5fKfpCKM8a+VyJfw7qHNUyrTnu
|
||||
d6LSGsEey6tGkJecgnJZTNSwryO3BZbg/4EviivMXm38AKGZrSib06qjkoHrPRvB
|
||||
8ftGSGlK4YmFs5/YjKFL7QzuNJeqPNJt4mD64tsk21urOfbQJe5AmdMLPGY0PbW/
|
||||
w++06c8lsd/6FmzUwlnTBUa39lKJjhkhoK7KFGVqZROcXZfhwAyqPZt7ReA5FDMV
|
||||
l5X7sytjQuSFaQPGi5g1xXQGEI394T2I55p5T5/RuQ2PXcFxxSOmIcEcD8o6Z7+x
|
||||
XWLq44KUWQyQP/StjaVhIz9YPogeBBJllA9hN+GzVrr2i+Esu1QO5uDgVuJP7pTA
|
||||
9wwCLV/t0hf2TZcpU2fwEu+DMniaHm6haVwqiu6QGkbkMBx49zkV9b5i9L441GoC
|
||||
Q86R2Gs9O0+QzHuN6egbQ0xKm/lfU8dmJSzV0snXawAeQ/vgCpdinx40EMc7Nz03
|
||||
rgZ3j88c/ADvCb1DVKmu1Phf6U7WqG6/AvB9tYl4Zl30VX7ETaw=
|
||||
=ifvQ
|
||||
iQIzBAEBCgAdFiEEy6ssEDLG/1B4BJ7A+mHVDBLK034FAmgnhqIACgkQ+mHVDBLK
|
||||
037BIQ//U30gx1qTt5cQs+I6fwqQSase8DT7Hi8VdYxMuBTVbEpnPScNpcH03ITC
|
||||
KWVbXvEAPBdoWEfAHpuOJr2pm013dYXaWp1k0G6pLSvnR17LEDTJs0ixAurH4vDr
|
||||
4VXPerPR57sMi0WYomi1+dJhvA3S85+m6KBPLhXgi9Y28leDrFpjBwxVoIN7yUP2
|
||||
tenMI9jAoGh/hts1pIPbALmKbeGUKC2MPu9MF0CtkbbE1VOkeJ6jkZLGki7AAYZ0
|
||||
TSWAeWDk6EG90TZ6ls2anUPI1mNc7JdPqq8L0+jWAwLJi3i/JiDAGUM99hpu9cCF
|
||||
NvZn+eQFOKrE0WG1KsF4vQilOAuE3P+QLomcfZdf2UNi73XPWIF5j46r50oPmXZE
|
||||
+mVUyw7CUbHMZlXvWml0pdugEER1Kyc2nLZdLZYAT92AsPbAcDBQKsm1xf66lOB+
|
||||
FPPLc97oybcFFldrjmUJAASJBeAihZG1aDm6dYBxtynMzzRGdq2+R1chHMOQ5Wej
|
||||
8ZvyRv+TOPUTtRkAxrUpq6wA+BUoq+OBDltOs9mXUIcV3rpOq5nTjKZ5FLMtGaDw
|
||||
No0E5gwceDDLeshT9nAHaqcmSY1LK+/5+aDxOFRm4yRTI+GLJzg8FZCJbJRLstrD
|
||||
Ts4zKdcb0kukKdE9raqWw7xuhbjz2ORiEicZzckzvB1Lx38bG2s=
|
||||
=T5l5
|
||||
-----END PGP SIGNATURE-----
|
||||
|
@@ -1,5 +1,5 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
Hash: SHA512
|
||||
|
||||
{
|
||||
"spec":{
|
||||
@@ -57,7 +57,7 @@ Hash: SHA256
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -93,7 +93,7 @@ Hash: SHA256
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
@@ -135,17 +135,17 @@ Hash: SHA256
|
||||
}
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEqYoEuILhnYX9Nu4GlWXYCwVckv8FAmf23+QACgkQlWXYCwVc
|
||||
kv+MsRAAsaQjZbB9iW/Lq9b87H/E5Zmv6RrClvpjSnwvhLR4nhPL3p0G70k6tI/b
|
||||
NEdXctDyvBOJOEoLaEBrCODl/3GjV8B9Gj7OhT/BIKQjlOfJqVdwIrnHgav5ri+Q
|
||||
UUXLtejhJiUNoxeILI/xZx2CoKT9q/3EpQ5ysqdybJmYJCf/hv+lXEhnwUIv8vV/
|
||||
xdRYY//rfeMowCNIZtFPjSejMywXJfFKjl7h5dN5kwM63D6z/sh4zW7tqHq4kk+A
|
||||
2m0WcorVg93wAm+YoJaQJVx8bYeMGfV/TjmY/cSouCt8PM4Vi93vwieZCkzEpXbM
|
||||
BkVN4X3PTMZSOf0WTkEbnQD5v090/DoQPZyBrcDoJ/HmWDiz5Is2wUI0mLVkbg2L
|
||||
+rKNC3ZajJhsWElMGNNtZRLmGeTIe8hT+LNAejo221vrOJbnUmpIjKxVjStDbXmW
|
||||
nulgyEPSTfsJaXgbXmeJ8LOk0tWpBAGC16VzgXrPxoGD2XKxoiPCGLNrF/l1wyl+
|
||||
n+nw3TchNFrofpPrqJzT/vS71B6KDb0PVSTQZfM9+FahrQ+YbsIkzDAuxVZb5t3q
|
||||
HUME95RgoIBbccUGxAPwkaNme2OLaLzsJZ/Xhl5I8T1fraLYapsKNjQ5+CSKO8+t
|
||||
MlJYgSHuazWSetRbZ2H7g7QJWqeHUAWi9i1szpNDYxTFSs8wgDY=
|
||||
=edPy
|
||||
iQIzBAEBCgAdFiEEy6ssEDLG/1B4BJ7A+mHVDBLK034FAmgnhqIACgkQ+mHVDBLK
|
||||
037zHBAAsqy4wItctMqauuna+JjxT1HM7YJElXzqjOWmxyuAzUzjXlhR2DBd/2TI
|
||||
ZEN2q3Z3XY9sCjhZ/4c9wDfMNYLUBLMHuenyV3fOqsfIVL8NprrkGc5mOiJ8HbRk
|
||||
u00qXWogsYSEmbGrlfDKf4HmZtgPNs82+Li1MD5udDUzyApuVbObJumSRh6/1QHm
|
||||
BcQZgMlSCd8xsTxJudXKAnfpemqE41LF0znuU0x5Hj/hU1A3CELynQrLEYnJpzpR
|
||||
ja2l341cBQKNy86kX1/eHQtBJverjFoD3Nx4per8/qUc+xTH0ejMuseyd9P3RLnd
|
||||
WShY8Uk72f1OLGzq5RvayP1M/dBWedajKz5gYOD19pCuFEdQm1LkZhxRWJ35PYMV
|
||||
CqzY/uJgs33zyYkNJKO8CKG5j7Y8zOuZ3YFN8DKmoWa+lC4gFIsXm42BttqiQ5+x
|
||||
Q65YkX/DdPYO6dcUety1j3NuNr70W6PsLyqKBny1WOzKCx25nmzftS0OA76F6UZA
|
||||
hDneqltGrYEQTowU5I7V14f3SMeO8xje3BcqhOAn956/JJObd5VbwqcHwcslwEJA
|
||||
tL3361qbpkc7xURnhciV1eL3RYR9Q4xDnvI1i/k8J8E8W373TviK3r2MG/oKZ6N9
|
||||
n+ehBZhSIT+QUgqylATekoMQfohNVbDQEsQhj96Ky1CC2Iqo1/c=
|
||||
=UIyv
|
||||
-----END PGP SIGNATURE-----
|
||||
|
File diff suppressed because one or more lines are too long
@@ -1 +1 @@
|
||||
57cad2589fae55cda3c35cadf4286d2e7702f90a708da80d70a76213fc45a688
|
||||
fc129b8fab649ab4c5623c874c73bd998a76fd30d2218b9d99340d045c1ec759
|
||||
|
@@ -30,7 +30,7 @@
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -82,7 +82,7 @@
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
|
@@ -30,7 +30,7 @@
|
||||
"name":"gcc",
|
||||
"version":"10.2.1"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
|
@@ -54,7 +54,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -166,7 +166,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -190,7 +190,7 @@
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
@@ -272,7 +272,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -350,7 +350,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
|
@@ -54,7 +54,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -154,7 +154,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -178,7 +178,7 @@
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
@@ -260,7 +260,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
|
@@ -54,7 +54,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
|
@@ -54,7 +54,7 @@
|
||||
"cpupart":"0x022"
|
||||
}
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"cflags":[],
|
||||
@@ -90,7 +90,7 @@
|
||||
"platform_os":"debian6",
|
||||
"target":"aarch64"
|
||||
},
|
||||
"namespace":"builtin.mock",
|
||||
"namespace":"builtin_mock",
|
||||
"parameters":{
|
||||
"build_system":"generic",
|
||||
"languages":[
|
||||
|
@@ -31,7 +31,7 @@
|
||||
"name": "gcc",
|
||||
"version": "4.5.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"namespace": "builtin_mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
|
@@ -10,7 +10,6 @@
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.version
|
||||
from spack.test.conftest import create_test_repo
|
||||
|
||||
|
||||
def test_false_directives_do_not_exist(mock_packages):
|
||||
@@ -159,66 +158,19 @@ def test_version_type_validation():
|
||||
spack.directives._execute_version(package(name="python"), {})
|
||||
|
||||
|
||||
_pkgx = (
|
||||
"x",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class X(Package):
|
||||
version("1.3")
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
version("1.0")
|
||||
|
||||
variant("foo", default=False)
|
||||
|
||||
redistribute(binary=False, when="@1.1")
|
||||
redistribute(binary=False, when="@1.0:1.2+foo")
|
||||
redistribute(source=False, when="@1.0:1.2")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgy = (
|
||||
"y",
|
||||
"""\
|
||||
from spack.package import *
|
||||
|
||||
class Y(Package):
|
||||
version("2.1")
|
||||
version("2.0")
|
||||
|
||||
variant("bar", default=False)
|
||||
|
||||
redistribute(binary=False, source=False)
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _create_test_repo(tmpdir, mutable_config):
|
||||
yield create_test_repo(tmpdir, [_pkgx, _pkgy])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(_create_test_repo, monkeypatch, mock_stage):
|
||||
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str,distribute_src,distribute_bin",
|
||||
[
|
||||
("x@1.1~foo", False, False),
|
||||
("x@1.2+foo", False, False),
|
||||
("x@1.2~foo", False, True),
|
||||
("x@1.0~foo", False, True),
|
||||
("x@1.3+foo", True, True),
|
||||
("y@2.0", False, False),
|
||||
("y@2.1+bar", False, False),
|
||||
("redistribute-x@1.1~foo", False, False),
|
||||
("redistribute-x@1.2+foo", False, False),
|
||||
("redistribute-x@1.2~foo", False, True),
|
||||
("redistribute-x@1.0~foo", False, True),
|
||||
("redistribute-x@1.3+foo", True, True),
|
||||
("redistribute-y@2.0", False, False),
|
||||
("redistribute-y@2.1+bar", False, False),
|
||||
],
|
||||
)
|
||||
def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
|
||||
def test_redistribute_directive(mock_packages, spec_str, distribute_src, distribute_bin):
|
||||
spec = spack.spec.Spec(spec_str)
|
||||
assert spack.repo.PATH.get_pkg_class(spec.fullname).redistribute_source(spec) == distribute_src
|
||||
concretized_spec = spack.concretize.concretize_one(spec)
|
||||
|
@@ -1,7 +1,7 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Test class methods on Package objects.
|
||||
"""Test class methods on PackageBase objects.
|
||||
|
||||
This doesn't include methods on package *instances* (like do_patch(),
|
||||
etc.). Only methods like ``possible_dependencies()`` that deal with the
|
||||
@@ -20,13 +20,12 @@
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.install_test
|
||||
import spack.package
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
from spack.build_systems.generic import Package
|
||||
from spack.error import InstallError
|
||||
from spack.package_base import PackageBase
|
||||
from spack.solver.input_analysis import NoStaticAnalysis, StaticAnalysis
|
||||
|
||||
|
||||
@@ -241,7 +240,7 @@ def test_cache_extra_sources_fails(install_mockery):
|
||||
def test_package_exes_and_libs():
|
||||
with pytest.raises(spack.error.SpackError, match="defines both"):
|
||||
|
||||
class BadDetectablePackage(spack.package.Package):
|
||||
class BadDetectablePackage(PackageBase):
|
||||
executables = ["findme"]
|
||||
libraries = ["libFindMe.a"]
|
||||
|
||||
@@ -249,7 +248,7 @@ class BadDetectablePackage(spack.package.Package):
|
||||
def test_package_url_and_urls():
|
||||
UrlsPackage = type(
|
||||
"URLsPackage",
|
||||
(spack.package.Package,),
|
||||
(PackageBase,),
|
||||
{
|
||||
"__module__": "spack.pkg.builtin.urls_package",
|
||||
"url": "https://www.example.com/url-package-1.0.tgz",
|
||||
@@ -264,9 +263,7 @@ def test_package_url_and_urls():
|
||||
|
||||
def test_package_license():
|
||||
LicensedPackage = type(
|
||||
"LicensedPackage",
|
||||
(spack.package.Package,),
|
||||
{"__module__": "spack.pkg.builtin.licensed_package"},
|
||||
"LicensedPackage", (PackageBase,), {"__module__": "spack.pkg.builtin.licensed_package"}
|
||||
)
|
||||
|
||||
pkg = LicensedPackage(spack.spec.Spec("licensed-package"))
|
||||
@@ -276,7 +273,7 @@ def test_package_license():
|
||||
assert os.path.basename(pkg.global_license_file) == pkg.license_files[0]
|
||||
|
||||
|
||||
class BaseTestPackage(Package):
|
||||
class BaseTestPackage(PackageBase):
|
||||
extendees = None # currently a required attribute for is_extension()
|
||||
|
||||
|
||||
|
@@ -2,11 +2,13 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import importlib
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.build_systems.cmake as cmake
|
||||
import spack.concretize
|
||||
import spack.directives
|
||||
import spack.error
|
||||
@@ -43,7 +45,7 @@ def test_nonexisting_package_filename(self):
|
||||
repo = spack.repo.from_path(mock_packages_path)
|
||||
filename = repo.filename_for_package_name("some-nonexisting-package")
|
||||
assert filename == os.path.join(
|
||||
mock_packages_path, "packages", "some-nonexisting-package", "package.py"
|
||||
mock_packages_path, "packages", "some_nonexisting_package", "package.py"
|
||||
)
|
||||
|
||||
def test_package_class_names(self):
|
||||
@@ -57,16 +59,19 @@ def test_package_class_names(self):
|
||||
assert "_None" == pkg_name_to_class_name("none") # reserved keyword
|
||||
assert "Finally" == pkg_name_to_class_name("finally") # `Finally` is not reserved
|
||||
|
||||
# Below tests target direct imports of spack packages from the
|
||||
# spack.pkg namespace
|
||||
def test_import_package(self):
|
||||
import spack.pkg.builtin.mock.mpich # type: ignore[import] # noqa: F401
|
||||
# Below tests target direct imports of spack packages from the spack.pkg namespace
|
||||
def test_import_package(self, tmp_path: pathlib.Path):
|
||||
root, _ = spack.repo.create_repo(str(tmp_path), "testing_repo", package_api=(1, 0))
|
||||
pkg_path = pathlib.Path(root) / "packages" / "mpich" / "package.py"
|
||||
pkg_path.parent.mkdir(parents=True)
|
||||
pkg_path.write_text("foo = 1")
|
||||
|
||||
def test_import_package_as(self):
|
||||
import spack.pkg.builtin.mock # noqa: F401
|
||||
import spack.pkg.builtin.mock as m # noqa: F401
|
||||
import spack.pkg.builtin.mock.mpich as mp # noqa: F401
|
||||
from spack.pkg.builtin import mock # noqa: F401
|
||||
with spack.repo.use_repositories(root):
|
||||
importlib.import_module("spack.pkg.testing_repo")
|
||||
assert importlib.import_module("spack.pkg.testing_repo.mpich").foo == 1
|
||||
|
||||
del sys.modules["spack.pkg.testing_repo"]
|
||||
del sys.modules["spack.pkg.testing_repo.mpich"]
|
||||
|
||||
def test_inheritance_of_directives(self):
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class("simple-inheritance")
|
||||
@@ -97,28 +102,11 @@ def test_inheritance_of_patches(self):
|
||||
# Will error if inheritor package cannot find inherited patch files
|
||||
_ = spack.concretize.concretize_one("patch-inheritance")
|
||||
|
||||
def test_import_class_from_package(self):
|
||||
from spack.pkg.builtin.mock.mpich import Mpich # noqa: F401
|
||||
|
||||
def test_import_module_from_package(self):
|
||||
from spack.pkg.builtin.mock import mpich # noqa: F401
|
||||
|
||||
def test_import_namespace_container_modules(self):
|
||||
import spack.pkg # noqa: F401
|
||||
import spack.pkg as p # noqa: F401
|
||||
import spack.pkg.builtin # noqa: F401
|
||||
import spack.pkg.builtin as b # noqa: F401
|
||||
import spack.pkg.builtin.mock # noqa: F401
|
||||
import spack.pkg.builtin.mock as m # noqa: F401
|
||||
from spack import pkg # noqa: F401
|
||||
from spack.pkg import builtin # noqa: F401
|
||||
from spack.pkg.builtin import mock # noqa: F401
|
||||
|
||||
|
||||
@pytest.mark.regression("2737")
|
||||
def test_urls_for_versions(mock_packages, config):
|
||||
"""Version directive without a 'url' argument should use default url."""
|
||||
for spec_str in ("url_override@0.9.0", "url_override@1.0.0"):
|
||||
for spec_str in ("url-override@0.9.0", "url-override@1.0.0"):
|
||||
s = spack.concretize.concretize_one(spec_str)
|
||||
url = s.package.url_for_version("0.9.0")
|
||||
assert url == "http://www.anothersite.org/uo-0.9.0.tgz"
|
||||
@@ -140,12 +128,13 @@ def test_url_for_version_with_no_urls(mock_packages, config):
|
||||
pkg_cls(spec).url_for_version("1.1")
|
||||
|
||||
|
||||
@pytest.mark.skip(reason="spack.build_systems moved out of spack/spack")
|
||||
def test_custom_cmake_prefix_path(mock_packages, config):
|
||||
spec = spack.concretize.concretize_one("depends-on-define-cmake-prefix-paths")
|
||||
|
||||
assert cmake.get_cmake_prefix_path(spec.package) == [
|
||||
spec["define-cmake-prefix-paths"].prefix.test
|
||||
]
|
||||
pass
|
||||
# spec = spack.concretize.concretize_one("depends-on-define-cmake-prefix-paths")
|
||||
# assert spack.build_systems.cmake.get_cmake_prefix_path(spec.package) == [
|
||||
# spec["define-cmake-prefix-paths"].prefix.test
|
||||
# ]
|
||||
|
||||
|
||||
def test_url_for_version_with_only_overrides(mock_packages, config):
|
||||
|
@@ -360,11 +360,11 @@ def get_patch(spec, ending):
|
||||
assert foo_patch.path == os.path.join(package_dir, "foo.patch")
|
||||
assert foo_patch.sha256 == foo_sha256
|
||||
|
||||
assert bar_patch.owner == "builtin.mock.patch-several-dependencies"
|
||||
assert bar_patch.owner == "builtin_mock.patch-several-dependencies"
|
||||
assert bar_patch.path == os.path.join(package_dir, "bar.patch")
|
||||
assert bar_patch.sha256 == bar_sha256
|
||||
|
||||
assert baz_patch.owner == "builtin.mock.patch-several-dependencies"
|
||||
assert baz_patch.owner == "builtin_mock.patch-several-dependencies"
|
||||
assert baz_patch.path == os.path.join(package_dir, "baz.patch")
|
||||
assert baz_patch.sha256 == baz_sha256
|
||||
|
||||
@@ -376,11 +376,11 @@ def get_patch(spec, ending):
|
||||
url1_patch = get_patch(fake, "urlpatch.patch")
|
||||
url2_patch = get_patch(fake, "urlpatch2.patch.gz")
|
||||
|
||||
assert url1_patch.owner == "builtin.mock.patch-several-dependencies"
|
||||
assert url1_patch.owner == "builtin_mock.patch-several-dependencies"
|
||||
assert url1_patch.url == "http://example.com/urlpatch.patch"
|
||||
assert url1_patch.sha256 == url1_sha256
|
||||
|
||||
assert url2_patch.owner == "builtin.mock.patch-several-dependencies"
|
||||
assert url2_patch.owner == "builtin_mock.patch-several-dependencies"
|
||||
assert url2_patch.url == "http://example.com/urlpatch2.patch.gz"
|
||||
assert url2_patch.sha256 == url2_sha256
|
||||
assert url2_patch.archive_sha256 == url2_archive_sha256
|
||||
@@ -397,7 +397,7 @@ def test_conditional_patched_deps_with_conditions(mock_packages, config):
|
||||
fake = spec["fake"]
|
||||
|
||||
check_multi_dependency_patch_specs(
|
||||
libelf, libdwarf, fake, "builtin.mock.patch-several-dependencies", spec.package.package_dir
|
||||
libelf, libdwarf, fake, "builtin_mock.patch-several-dependencies", spec.package.package_dir
|
||||
)
|
||||
|
||||
|
||||
@@ -417,7 +417,7 @@ def test_write_and_read_sub_dags_with_patched_deps(mock_packages, config):
|
||||
|
||||
# make sure we can still read patches correctly for these specs
|
||||
check_multi_dependency_patch_specs(
|
||||
libelf, libdwarf, fake, "builtin.mock.patch-several-dependencies", spec.package.package_dir
|
||||
libelf, libdwarf, fake, "builtin_mock.patch-several-dependencies", spec.package.package_dir
|
||||
)
|
||||
|
||||
|
||||
|
@@ -43,19 +43,19 @@ def extra_repo(tmp_path_factory, request):
|
||||
|
||||
def test_repo_getpkg(mutable_mock_repo):
|
||||
mutable_mock_repo.get_pkg_class("pkg-a")
|
||||
mutable_mock_repo.get_pkg_class("builtin.mock.pkg-a")
|
||||
mutable_mock_repo.get_pkg_class("builtin_mock.pkg-a")
|
||||
|
||||
|
||||
def test_repo_multi_getpkg(mutable_mock_repo, extra_repo):
|
||||
mutable_mock_repo.put_first(extra_repo[0])
|
||||
mutable_mock_repo.get_pkg_class("pkg-a")
|
||||
mutable_mock_repo.get_pkg_class("builtin.mock.pkg-a")
|
||||
mutable_mock_repo.get_pkg_class("builtin_mock.pkg-a")
|
||||
|
||||
|
||||
def test_repo_multi_getpkgclass(mutable_mock_repo, extra_repo):
|
||||
mutable_mock_repo.put_first(extra_repo[0])
|
||||
mutable_mock_repo.get_pkg_class("pkg-a")
|
||||
mutable_mock_repo.get_pkg_class("builtin.mock.pkg-a")
|
||||
mutable_mock_repo.get_pkg_class("builtin_mock.pkg-a")
|
||||
|
||||
|
||||
def test_repo_pkg_with_unknown_namespace(mutable_mock_repo):
|
||||
@@ -65,7 +65,7 @@ def test_repo_pkg_with_unknown_namespace(mutable_mock_repo):
|
||||
|
||||
def test_repo_unknown_pkg(mutable_mock_repo):
|
||||
with pytest.raises(spack.repo.UnknownPackageError):
|
||||
mutable_mock_repo.get_pkg_class("builtin.mock.nonexistentpackage")
|
||||
mutable_mock_repo.get_pkg_class("builtin_mock.nonexistentpackage")
|
||||
|
||||
|
||||
def test_repo_last_mtime(mock_packages):
|
||||
@@ -93,15 +93,6 @@ def test_repo_invisibles(mutable_mock_repo, extra_repo):
|
||||
extra_repo[0].all_package_names()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("attr_name,exists", [("cmake", True), ("__sphinx_mock__", False)])
|
||||
@pytest.mark.regression("20661")
|
||||
def test_namespace_hasattr(attr_name, exists, mutable_mock_repo):
|
||||
# Check that we don't fail on 'hasattr' checks because
|
||||
# of a custom __getattr__ implementation
|
||||
nms = spack.repo.SpackNamespace("spack.pkg.builtin.mock")
|
||||
assert hasattr(nms, attr_name) == exists
|
||||
|
||||
|
||||
@pytest.mark.regression("24552")
|
||||
def test_all_package_names_is_cached_correctly(mock_packages):
|
||||
assert "mpi" in spack.repo.all_package_names(include_virtuals=True)
|
||||
@@ -120,25 +111,20 @@ def test_use_repositories_doesnt_change_class(mock_packages):
|
||||
assert id(zlib_cls_inner) == id(zlib_cls_outer)
|
||||
|
||||
|
||||
def test_import_repo_prefixes_as_python_modules(mock_packages):
|
||||
import spack.pkg.builtin.mock
|
||||
|
||||
assert isinstance(spack.pkg, spack.repo.SpackNamespace)
|
||||
assert isinstance(spack.pkg.builtin, spack.repo.SpackNamespace)
|
||||
assert isinstance(spack.pkg.builtin.mock, spack.repo.SpackNamespace)
|
||||
|
||||
|
||||
def test_absolute_import_spack_packages_as_python_modules(mock_packages):
|
||||
import spack.pkg.builtin.mock.mpileaks
|
||||
import spack_repo.builtin_mock.packages.mpileaks.package # type: ignore[import]
|
||||
|
||||
assert hasattr(spack.pkg.builtin.mock, "mpileaks")
|
||||
assert hasattr(spack.pkg.builtin.mock.mpileaks, "Mpileaks")
|
||||
assert isinstance(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.PackageMeta)
|
||||
assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.PackageBase)
|
||||
assert hasattr(spack_repo.builtin_mock.packages.mpileaks.package, "Mpileaks")
|
||||
assert isinstance(
|
||||
spack_repo.builtin_mock.packages.mpileaks.package.Mpileaks, spack.package_base.PackageMeta
|
||||
)
|
||||
assert issubclass(
|
||||
spack_repo.builtin_mock.packages.mpileaks.package.Mpileaks, spack.package_base.PackageBase
|
||||
)
|
||||
|
||||
|
||||
def test_relative_import_spack_packages_as_python_modules(mock_packages):
|
||||
from spack.pkg.builtin.mock.mpileaks import Mpileaks
|
||||
from spack_repo.builtin_mock.packages.mpileaks.package import Mpileaks
|
||||
|
||||
assert isinstance(Mpileaks, spack.package_base.PackageMeta)
|
||||
assert issubclass(Mpileaks, spack.package_base.PackageBase)
|
||||
@@ -160,7 +146,7 @@ def test_repo_path_handles_package_removal(tmpdir, mock_packages):
|
||||
builder.remove("pkg-c")
|
||||
with spack.repo.use_repositories(builder.root, override=False) as repos:
|
||||
r = repos.repo_for_pkg("pkg-c")
|
||||
assert r.namespace == "builtin.mock"
|
||||
assert r.namespace == "builtin_mock"
|
||||
|
||||
|
||||
def test_repo_dump_virtuals(tmpdir, mutable_mock_repo, mock_packages, ensure_debug, capsys):
|
||||
@@ -185,7 +171,7 @@ def _repo_paths(repos):
|
||||
for entry in repos:
|
||||
if entry == "mock":
|
||||
repo_paths.append(spack.paths.mock_packages_path)
|
||||
namespaces.append("builtin.mock")
|
||||
namespaces.append("builtin_mock")
|
||||
if entry == "extra":
|
||||
name = "extra_mock"
|
||||
repo_dir = tmp_path / name
|
||||
@@ -211,7 +197,7 @@ def test_path_computation_with_names(method_name, mock_repo_path):
|
||||
repo_path = spack.repo.RepoPath(mock_repo_path, cache=None)
|
||||
method = getattr(repo_path, method_name)
|
||||
unqualified = method("mpileaks")
|
||||
qualified = method("builtin.mock.mpileaks")
|
||||
qualified = method("builtin_mock.mpileaks")
|
||||
assert qualified == unqualified
|
||||
|
||||
|
||||
@@ -220,11 +206,11 @@ def test_use_repositories_and_import():
|
||||
import spack.paths
|
||||
|
||||
repo_dir = pathlib.Path(spack.paths.test_repos_path)
|
||||
with spack.repo.use_repositories(str(repo_dir / "compiler_runtime.test")):
|
||||
import spack.pkg.compiler_runtime.test.gcc_runtime
|
||||
with spack.repo.use_repositories(str(repo_dir / "spack_repo" / "compiler_runtime_test")):
|
||||
import spack_repo.compiler_runtime_test.packages.gcc_runtime.package # type: ignore[import] # noqa: E501
|
||||
|
||||
with spack.repo.use_repositories(str(repo_dir / "builtin.mock")):
|
||||
import spack.pkg.builtin.mock.cmake
|
||||
with spack.repo.use_repositories(str(repo_dir / "spack_repo" / "builtin_mock")):
|
||||
import spack_repo.builtin_mock.packages.cmake.package # type: ignore[import] # noqa: F401
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("nullify_globals")
|
||||
@@ -236,7 +222,7 @@ class TestRepo:
|
||||
def test_creation(self, mock_test_cache):
|
||||
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
|
||||
assert repo.config_file.endswith("repo.yaml")
|
||||
assert repo.namespace == "builtin.mock"
|
||||
assert repo.namespace == "builtin_mock"
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"name,expected", [("mpi", True), ("mpich", False), ("mpileaks", False)]
|
||||
@@ -248,7 +234,7 @@ def test_is_virtual(self, repo_cls, name, expected, mock_test_cache):
|
||||
assert repo.is_virtual_safe(name) is expected
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"module_name,expected",
|
||||
"module_name,pkg_name",
|
||||
[
|
||||
("dla_future", "dla-future"),
|
||||
("num7zip", "7zip"),
|
||||
@@ -256,12 +242,19 @@ def test_is_virtual(self, repo_cls, name, expected, mock_test_cache):
|
||||
("unknown", None),
|
||||
],
|
||||
)
|
||||
def test_real_name(self, module_name, expected, mock_test_cache):
|
||||
def test_real_name(self, module_name, pkg_name, mock_test_cache, tmp_path):
|
||||
"""Test that we can correctly compute the 'real' name of a package, from the one
|
||||
used to import the Python module.
|
||||
"""
|
||||
repo = spack.repo.Repo(spack.paths.mock_packages_path, cache=mock_test_cache)
|
||||
assert repo.real_name(module_name) == expected
|
||||
path, _ = spack.repo.create_repo(str(tmp_path), package_api=(1, 0))
|
||||
if pkg_name is not None:
|
||||
pkg_path = pathlib.Path(path) / "packages" / pkg_name / "package.py"
|
||||
pkg_path.parent.mkdir(parents=True)
|
||||
pkg_path.write_text("")
|
||||
repo = spack.repo.Repo(
|
||||
path, cache=spack.util.file_cache.FileCache(str(tmp_path / "cache"))
|
||||
)
|
||||
assert repo.real_name(module_name) == pkg_name
|
||||
|
||||
@pytest.mark.parametrize("name", ["mpileaks", "7zip", "dla-future"])
|
||||
def test_get(self, name, mock_test_cache):
|
||||
@@ -312,12 +305,12 @@ class TestRepoPath:
|
||||
def test_creation_from_string(self, mock_test_cache):
|
||||
repo = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=mock_test_cache)
|
||||
assert len(repo.repos) == 1
|
||||
assert repo.by_namespace["builtin.mock"] is repo.repos[0]
|
||||
assert repo.by_namespace["builtin_mock"] is repo.repos[0]
|
||||
|
||||
def test_get_repo(self, mock_test_cache):
|
||||
repo = spack.repo.RepoPath(spack.paths.mock_packages_path, cache=mock_test_cache)
|
||||
# builtin.mock is there
|
||||
assert repo.get_repo("builtin.mock") is repo.repos[0]
|
||||
# builtin_mock is there
|
||||
assert repo.get_repo("builtin_mock") is repo.repos[0]
|
||||
# foo is not there, raise
|
||||
with pytest.raises(spack.repo.UnknownNamespaceError):
|
||||
repo.get_repo("foo")
|
||||
@@ -407,7 +400,7 @@ def test_repo_v2_invalid_module_name(tmp_path: pathlib.Path, capsys):
|
||||
(repo_dir / "packages" / "zlib-ng").mkdir()
|
||||
(repo_dir / "packages" / "zlib-ng" / "package.py").write_text(
|
||||
"""
|
||||
from spack.package import Package
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
|
||||
class ZlibNg(Package):
|
||||
pass
|
||||
@@ -416,7 +409,7 @@ class ZlibNg(Package):
|
||||
(repo_dir / "packages" / "UPPERCASE").mkdir()
|
||||
(repo_dir / "packages" / "UPPERCASE" / "package.py").write_text(
|
||||
"""
|
||||
from spack.package import Package
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
|
||||
class Uppercase(Package):
|
||||
pass
|
||||
@@ -440,7 +433,7 @@ def test_repo_v2_module_and_class_to_package_name(tmp_path: pathlib.Path, capsys
|
||||
(repo_dir / "packages" / "_1example_2_test").mkdir()
|
||||
(repo_dir / "packages" / "_1example_2_test" / "package.py").write_text(
|
||||
"""
|
||||
from spack.package import Package
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
|
||||
class _1example2Test(Package):
|
||||
pass
|
||||
|
@@ -609,7 +609,7 @@ def test_indirect_unsatisfied_single_valued_variant(self):
|
||||
|
||||
def test_satisfied_namespace(self):
|
||||
spec = spack.concretize.concretize_one("zlib")
|
||||
assert spec.satisfies("namespace=builtin.mock")
|
||||
assert spec.satisfies("namespace=builtin_mock")
|
||||
assert not spec.satisfies("namespace=builtin")
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
@@ -1290,7 +1290,7 @@ def test_parse_filename_missing_slash_as_spec(specfile_for, tmpdir, filename):
|
||||
|
||||
# make sure that only happens when the spec ends in yaml
|
||||
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError) as exc_info:
|
||||
spack.concretize.concretize_one(SpecParser("builtin.mock.doesnotexist").next_spec())
|
||||
spack.concretize.concretize_one(SpecParser("builtin_mock.doesnotexist").next_spec())
|
||||
assert not exc_info.value.long_message or (
|
||||
"Did you mean to specify a filename with" not in exc_info.value.long_message
|
||||
)
|
||||
|
@@ -1,3 +1,4 @@
|
||||
from spack_repo.builtin_mock.build_systems.generic import Package
|
||||
from spack.package import *
|
||||
|
||||
class {{ cls_name }}(Package):
|
||||
|
130
var/spack/repos/spack_repo/builtin/build_systems/_checks.py
Normal file
@@ -0,0 +1,130 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
from typing import Callable, List
|
||||
|
||||
import spack.relocate
|
||||
from spack.package import Builder, InstallError, Spec, run_after
|
||||
|
||||
|
||||
def sanity_check_prefix(builder: Builder):
|
||||
"""Check that specific directories and files are created after installation.
|
||||
|
||||
The files to be checked are in the ``sanity_check_is_file`` attribute of the
|
||||
package object, while the directories are in the ``sanity_check_is_dir``.
|
||||
|
||||
Args:
|
||||
builder: builder that installed the package
|
||||
"""
|
||||
pkg = builder.pkg
|
||||
|
||||
def check_paths(path_list: List[str], filetype: str, predicate: Callable[[str], bool]) -> None:
|
||||
if isinstance(path_list, str):
|
||||
path_list = [path_list]
|
||||
|
||||
for path in path_list:
|
||||
if not predicate(os.path.join(pkg.prefix, path)):
|
||||
raise InstallError(
|
||||
f"Install failed for {pkg.name}. No such {filetype} in prefix: {path}"
|
||||
)
|
||||
|
||||
check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
|
||||
check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)
|
||||
|
||||
# Check that the prefix is not empty apart from the .spack/ directory
|
||||
with os.scandir(pkg.prefix) as entries:
|
||||
f = next(
|
||||
(f for f in entries if not (f.name == ".spack" and f.is_dir(follow_symlinks=False))),
|
||||
None,
|
||||
)
|
||||
|
||||
if f is None:
|
||||
raise InstallError(f"Install failed for {pkg.name}. Nothing was installed!")
|
||||
|
||||
|
||||
def apply_macos_rpath_fixups(builder: Builder):
|
||||
"""On Darwin, make installed libraries more easily relocatable.
|
||||
|
||||
Some build systems (handrolled, autotools, makefiles) can set their own
|
||||
rpaths that are duplicated by spack's compiler wrapper. This fixup
|
||||
interrogates, and postprocesses if necessary, all libraries installed
|
||||
by the code.
|
||||
|
||||
It should be added as a @run_after to packaging systems (or individual
|
||||
packages) that do not install relocatable libraries by default.
|
||||
|
||||
Args:
|
||||
builder: builder that installed the package
|
||||
"""
|
||||
spack.relocate.fixup_macos_rpaths(builder.spec)
|
||||
|
||||
|
||||
def ensure_build_dependencies_or_raise(spec: Spec, dependencies: List[str], error_msg: str):
|
||||
"""Ensure that some build dependencies are present in the concrete spec.
|
||||
|
||||
If not, raise a RuntimeError with a helpful error message.
|
||||
|
||||
Args:
|
||||
spec: concrete spec to be checked.
|
||||
dependencies: list of package names of required build dependencies
|
||||
error_msg: brief error message to be prepended to a longer description
|
||||
|
||||
Raises:
|
||||
RuntimeError: when the required build dependencies are not found
|
||||
"""
|
||||
assert spec.concrete, "Can ensure build dependencies only on concrete specs"
|
||||
build_deps = [d.name for d in spec.dependencies(deptype="build")]
|
||||
missing_deps = [x for x in dependencies if x not in build_deps]
|
||||
|
||||
if not missing_deps:
|
||||
return
|
||||
|
||||
# Raise an exception on missing deps.
|
||||
msg = (
|
||||
"{0}: missing dependencies: {1}.\n\nPlease add "
|
||||
"the following lines to the package:\n\n".format(
|
||||
error_msg, ", ".join(str(d) for d in missing_deps)
|
||||
)
|
||||
)
|
||||
|
||||
for dep in missing_deps:
|
||||
msg += ' depends_on("{0}", type="build", when="@{1} {2}")\n'.format(
|
||||
dep, spec.version, "build_system=autotools"
|
||||
)
|
||||
|
||||
msg += '\nUpdate the version (when="@{0}") as needed.'.format(spec.version)
|
||||
raise RuntimeError(msg)
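# Usage sketch: this helper is called by builders before phases that need extra tools;
# for instance the autotools builder further down in this change calls it as:
#
#     ensure_build_dependencies_or_raise(
#         spec=self.pkg.spec,
#         dependencies=["autoconf", "automake", "libtool"],
#         error_msg="Cannot generate configure",
#     )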
|
||||
|
||||
|
||||
def execute_build_time_tests(builder: Builder):
|
||||
"""Execute the build-time tests prescribed by builder.
|
||||
|
||||
Args:
|
||||
builder: builder prescribing the test callbacks. The name of the callbacks is
|
||||
stored as a list of strings in the ``build_time_test_callbacks`` attribute.
|
||||
"""
|
||||
if not builder.pkg.run_tests or not builder.build_time_test_callbacks:
|
||||
return
|
||||
|
||||
builder.pkg.tester.phase_tests(builder, "build", builder.build_time_test_callbacks)
|
||||
|
||||
|
||||
def execute_install_time_tests(builder: Builder):
|
||||
"""Execute the install-time tests prescribed by builder.
|
||||
|
||||
Args:
|
||||
builder: builder prescribing the test callbacks. The name of the callbacks is
|
||||
stored as a list of strings in the ``install_time_test_callbacks`` attribute.
|
||||
"""
|
||||
if not builder.pkg.run_tests or not builder.install_time_test_callbacks:
|
||||
return
|
||||
|
||||
builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)
|
||||
|
||||
|
||||
class BuilderWithDefaults(Builder):
|
||||
"""Base class for all specific builders with common callbacks registered."""
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
run_after("install")(sanity_check_prefix)
|
@@ -23,7 +23,6 @@
|
||||
from .generic import Package
|
||||
from .gnu import GNUMirrorPackage
|
||||
from .go import GoPackage
|
||||
from .intel import IntelPackage
|
||||
from .lua import LuaPackage
|
||||
from .makefile import MakefilePackage
|
||||
from .maven import MavenPackage
|
||||
@@ -69,7 +68,6 @@
|
||||
"Package",
|
||||
"GNUMirrorPackage",
|
||||
"GoPackage",
|
||||
"IntelPackage",
|
||||
"IntelOneApiLibraryPackageWithSdk",
|
||||
"IntelOneApiLibraryPackage",
|
||||
"IntelOneApiStaticLibraryList",
|
@@ -3,12 +3,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.directives
|
||||
import spack.spec
|
||||
import spack.util.executable
|
||||
import spack.util.prefix
|
||||
from spack.package import Executable, Prefix, Spec, extends, filter_file
|
||||
|
||||
from .autotools import AutotoolsBuilder, AutotoolsPackage
|
||||
|
||||
@@ -20,16 +15,13 @@ class AspellBuilder(AutotoolsBuilder):
|
||||
"""
|
||||
|
||||
def configure(
|
||||
self,
|
||||
pkg: "AspellDictPackage", # type: ignore[override]
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
self, pkg: "AspellDictPackage", spec: Spec, prefix: Prefix # type: ignore[override]
|
||||
):
|
||||
aspell = spec["aspell"].prefix.bin.aspell
|
||||
prezip = spec["aspell"].prefix.bin.prezip
|
||||
destdir = prefix
|
||||
|
||||
sh = spack.util.executable.Executable("/bin/sh")
|
||||
sh = Executable("/bin/sh")
|
||||
sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")
|
||||
|
||||
|
||||
@@ -42,7 +34,7 @@ def configure(
|
||||
class AspellDictPackage(AutotoolsPackage):
|
||||
"""Specialized class for building aspell dictionairies."""
|
||||
|
||||
spack.directives.extends("aspell", when="build_system=autotools")
|
||||
extends("aspell", when="build_system=autotools")
|
||||
|
||||
#: Override the default autotools builder
|
||||
AutotoolsBuilder = AspellBuilder
|
||||
@@ -54,5 +46,5 @@ def patch(self):
|
||||
datadir = aspell("dump", "config", "data-dir", output=str).strip()
|
||||
dictdir = os.path.relpath(dictdir, aspell_spec.prefix)
|
||||
datadir = os.path.relpath(datadir, aspell_spec.prefix)
|
||||
fs.filter_file(r"^dictdir=.*$", f"dictdir=/{dictdir}", "configure")
|
||||
fs.filter_file(r"^datadir=.*$", f"datadir=/{datadir}", "configure")
|
||||
filter_file(r"^dictdir=.*$", f"dictdir=/{dictdir}", "configure")
|
||||
filter_file(r"^datadir=.*$", f"datadir=/{datadir}", "configure")
|
889
var/spack/repos/spack_repo/builtin/build_systems/autotools.py
Normal file
@@ -0,0 +1,889 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import stat
|
||||
import subprocess
|
||||
from typing import Callable, List, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
import spack.compilers.libraries
|
||||
from spack.operating_systems.mac_os import macos_version
|
||||
from spack.package import (
|
||||
EnvironmentModifications,
|
||||
Executable,
|
||||
FileFilter,
|
||||
InstallError,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
Version,
|
||||
build_system,
|
||||
conflicts,
|
||||
copy,
|
||||
depends_on,
|
||||
find,
|
||||
force_remove,
|
||||
is_exe,
|
||||
keep_modification_time,
|
||||
mkdirp,
|
||||
register_builder,
|
||||
run_after,
|
||||
run_before,
|
||||
tty,
|
||||
when,
|
||||
working_dir,
|
||||
)
|
||||
|
||||
from ._checks import (
|
||||
BuilderWithDefaults,
|
||||
apply_macos_rpath_fixups,
|
||||
ensure_build_dependencies_or_raise,
|
||||
execute_build_time_tests,
|
||||
execute_install_time_tests,
|
||||
)
|
||||
|
||||
|
||||
class AutotoolsPackage(PackageBase):
|
||||
"""Specialized class for packages built using GNU Autotools."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = "AutotoolsPackage"
|
||||
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "autotools"
|
||||
|
||||
build_system("autotools")
|
||||
|
||||
with when("build_system=autotools"):
|
||||
depends_on("gnuconfig", type="build", when="target=ppc64le:")
|
||||
depends_on("gnuconfig", type="build", when="target=aarch64:")
|
||||
depends_on("gnuconfig", type="build", when="target=riscv64:")
|
||||
depends_on("gmake", type="build")
|
||||
conflicts("platform=windows")
|
||||
|
||||
def flags_to_build_system_args(self, flags):
|
||||
"""Produces a list of all command line arguments to pass specified
|
||||
compiler flags to configure."""
|
||||
# Has to be dynamic attribute due to caching.
|
||||
setattr(self, "configure_flag_args", [])
|
||||
for flag, values in flags.items():
|
||||
if values:
|
||||
var_name = "LIBS" if flag == "ldlibs" else flag.upper()
|
||||
values_str = "{0}={1}".format(var_name, " ".join(values))
|
||||
self.configure_flag_args.append(values_str)
|
||||
# Spack's fflags are meant for both F77 and FC, therefore we
|
||||
# additionally set FCFLAGS if required.
|
||||
values = flags.get("fflags", None)
|
||||
if values:
|
||||
values_str = "FCFLAGS={0}".format(" ".join(values))
|
||||
self.configure_flag_args.append(values_str)
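# Worked example (illustrative values): flags == {"cflags": ["-O2", "-g"], "ldlibs": ["-lm"]}
# yields configure_flag_args == ["CFLAGS=-O2 -g", "LIBS=-lm"]; an "fflags" entry would
# produce both an FFLAGS= argument and, via the block above, an additional FCFLAGS= one.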
|
||||
|
||||
# Legacy methods (used by too many packages to change them,
|
||||
# need to forward to the builder)
|
||||
def enable_or_disable(self, *args, **kwargs):
|
||||
return spack.builder.create(self).enable_or_disable(*args, **kwargs)
|
||||
|
||||
def with_or_without(self, *args, **kwargs):
|
||||
return spack.builder.create(self).with_or_without(*args, **kwargs)
|
||||
|
||||
|
||||
@register_builder("autotools")
|
||||
class AutotoolsBuilder(BuilderWithDefaults):
|
||||
"""The autotools builder encodes the default way of installing software built
|
||||
with autotools. It has four phases that can be overridden, if need be:
|
||||
|
||||
1. :py:meth:`~.AutotoolsBuilder.autoreconf`
|
||||
2. :py:meth:`~.AutotoolsBuilder.configure`
|
||||
3. :py:meth:`~.AutotoolsBuilder.build`
|
||||
4. :py:meth:`~.AutotoolsBuilder.install`
|
||||
|
||||
They all have sensible defaults and for many packages the only thing necessary
|
||||
is to override the helper method
|
||||
:meth:`~spack_repo.builtin.build_systems.autotools.AutotoolsBuilder.configure_args`.
|
||||
|
||||
For a finer tuning you may also override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:attr:`~.AutotoolsBuilder.build_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | build phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:attr:`~.AutotoolsBuilder.install_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | install phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.AutotoolsBuilder.check` | Run build time |
|
||||
| | tests if required |
|
||||
+-----------------------------------------------+--------------------+
|
||||
|
||||
"""
|
||||
|
||||
#: Phases of a GNU Autotools package
|
||||
phases = ("autoreconf", "configure", "build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("configure_args", "check", "installcheck")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = (
|
||||
"archive_files",
|
||||
"patch_libtool",
|
||||
"build_targets",
|
||||
"install_targets",
|
||||
"build_time_test_callbacks",
|
||||
"install_time_test_callbacks",
|
||||
"force_autoreconf",
|
||||
"autoreconf_extra_args",
|
||||
"install_libtool_archives",
|
||||
"patch_config_files",
|
||||
"configure_directory",
|
||||
"configure_abs_path",
|
||||
"build_directory",
|
||||
"autoreconf_search_path_args",
|
||||
)
|
||||
|
||||
#: Whether to update ``libtool`` (e.g. for Arm/Clang/Fujitsu/NVHPC compilers)
|
||||
patch_libtool = True
|
||||
|
||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
|
||||
build_targets: List[str] = []
|
||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
|
||||
install_targets = ["install"]
|
||||
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["installcheck"]
|
||||
|
||||
#: Set to true to force the autoreconf step even if configure is present
|
||||
force_autoreconf = False
|
||||
|
||||
#: Options to be passed to autoreconf when using the default implementation
|
||||
autoreconf_extra_args: List[str] = []
|
||||
|
||||
#: If False, delete all the .la files in the prefix folder after installation.
|
||||
#: If True, install them instead.
|
||||
install_libtool_archives = False
|
||||
|
||||
@property
|
||||
def patch_config_files(self) -> bool:
|
||||
"""Whether to update old ``config.guess`` and ``config.sub`` files
|
||||
distributed with the tarball.
|
||||
|
||||
This currently only applies to ``ppc64le:``, ``aarch64:``, and
|
||||
``riscv64`` target architectures.
|
||||
|
||||
The substitutes are taken from the ``gnuconfig`` package, which is
|
||||
automatically added as a build dependency for these architectures. In case
|
||||
system versions of these config files are required, the ``gnuconfig`` package
|
||||
can be marked external, with a prefix pointing to the directory containing the
|
||||
system ``config.guess`` and ``config.sub`` files.
|
||||
"""
|
||||
return (
|
||||
self.pkg.spec.satisfies("target=ppc64le:")
|
||||
or self.pkg.spec.satisfies("target=aarch64:")
|
||||
or self.pkg.spec.satisfies("target=riscv64:")
|
||||
)
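# The external-gnuconfig setup mentioned above would typically be expressed in
# packages.yaml roughly like this (a sketch; the version and prefix are placeholders):
#
#     packages:
#       gnuconfig:
#         buildable: false
#         externals:
#         - spec: gnuconfig@master
#           prefix: /usr/share/automake-1.16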
|
||||
|
||||
@property
|
||||
def _removed_la_files_log(self) -> str:
|
||||
"""File containing the list of removed libtool archives"""
|
||||
return os.path.join(self.build_directory, "removed_la_files.txt")
|
||||
|
||||
@property
|
||||
def archive_files(self) -> List[str]:
|
||||
"""Files to archive for packages based on autotools"""
|
||||
files = [os.path.join(self.build_directory, "config.log")]
|
||||
if not self.install_libtool_archives:
|
||||
files.append(self._removed_la_files_log)
|
||||
return files
|
||||
|
||||
@run_after("autoreconf")
|
||||
def _do_patch_config_files(self) -> None:
|
||||
"""Some packages ship with older config.guess/config.sub files and need to
|
||||
have these updated when installed on a newer architecture.
|
||||
|
||||
In particular, config.guess fails for PPC64LE for versions prior to a
|
||||
2013-06-10 build date (automake 1.13.4) and for AArch64 and RISC-V.
|
||||
"""
|
||||
if not self.patch_config_files:
|
||||
return
|
||||
|
||||
# TODO: Expand this to select the 'config.sub'-compatible architecture
|
||||
# for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
|
||||
# does accept 'ppc64le').
|
||||
if self.pkg.spec.satisfies("target=ppc64le:"):
|
||||
config_arch = "ppc64le"
|
||||
elif self.pkg.spec.satisfies("target=aarch64:"):
|
||||
config_arch = "aarch64"
|
||||
elif self.pkg.spec.satisfies("target=riscv64:"):
|
||||
config_arch = "riscv64"
|
||||
else:
|
||||
config_arch = "local"
|
||||
|
||||
def runs_ok(script_abs_path):
|
||||
# Construct the list of arguments for the call
|
||||
additional_args = {"config.sub": [config_arch]}
|
||||
script_name = os.path.basename(script_abs_path)
|
||||
args = [script_abs_path] + additional_args.get(script_name, [])
|
||||
|
||||
try:
|
||||
subprocess.check_call(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
except Exception as e:
|
||||
tty.debug(e)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
# Get the list of files that needs to be patched
|
||||
to_be_patched = find(self.pkg.stage.path, files=["config.sub", "config.guess"])
|
||||
to_be_patched = [f for f in to_be_patched if not runs_ok(f)]
|
||||
|
||||
# If there are no files to be patched, return early
|
||||
if not to_be_patched:
|
||||
return
|
||||
|
||||
# Otherwise, require `gnuconfig` to be a build dependency
|
||||
ensure_build_dependencies_or_raise(
|
||||
spec=self.pkg.spec, dependencies=["gnuconfig"], error_msg="Cannot patch config files"
|
||||
)
|
||||
|
||||
# Get the config files we need to patch (config.sub / config.guess).
|
||||
to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
|
||||
gnuconfig = self.pkg.spec["gnuconfig"]
|
||||
gnuconfig_dir = gnuconfig.prefix
|
||||
|
||||
# An external gnuconfig may not have a prefix.
|
||||
if gnuconfig_dir is None:
|
||||
raise InstallError(
|
||||
"Spack could not find substitutes for GNU config files because no "
|
||||
"prefix is available for the `gnuconfig` package. Make sure you set a "
|
||||
"prefix path instead of modules for external `gnuconfig`."
|
||||
)
|
||||
|
||||
candidates = find(gnuconfig_dir, files=to_be_found, recursive=False)
|
||||
|
||||
# For external packages the user may have specified an incorrect prefix.
|
||||
# otherwise the installation is just corrupt.
|
||||
if not candidates:
|
||||
msg = (
|
||||
"Spack could not find `config.guess` and `config.sub` "
|
||||
"files in the `gnuconfig` prefix `{0}`. This means the "
|
||||
"`gnuconfig` package is broken"
|
||||
).format(gnuconfig_dir)
|
||||
if gnuconfig.external:
|
||||
msg += (
|
||||
" or the `gnuconfig` package prefix is misconfigured as" " an external package"
|
||||
)
|
||||
raise InstallError(msg)
|
||||
|
||||
# Filter working substitutes
|
||||
candidates = [f for f in candidates if runs_ok(f)]
|
||||
substitutes = {}
|
||||
for candidate in candidates:
|
||||
config_file = os.path.basename(candidate)
|
||||
substitutes[config_file] = candidate
|
||||
to_be_found.remove(config_file)
|
||||
|
||||
# Check that we found everything we needed
|
||||
if to_be_found:
|
||||
msg = """\
|
||||
Spack could not find working replacements for the following autotools config
|
||||
files: {0}.
|
||||
|
||||
To resolve this problem, please try the following:
|
||||
1. Try to rebuild with `patch_config_files = False` in the package `{1}`, to
|
||||
rule out that Spack tries to replace config files not used by the build.
|
||||
2. Verify that the `gnuconfig` package is up-to-date.
|
||||
3. On some systems you need to use system-provided `config.guess` and `config.sub`
|
||||
files. In this case, mark `gnuconfig` as a non-buildable external package,
|
||||
and set the prefix to the directory containing the `config.guess` and
|
||||
`config.sub` files.
|
||||
"""
|
||||
raise InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
|
||||
|
||||
# Copy the good files over the bad ones
|
||||
for abs_path in to_be_patched:
|
||||
name = os.path.basename(abs_path)
|
||||
mode = os.stat(abs_path).st_mode
|
||||
os.chmod(abs_path, stat.S_IWUSR)
|
||||
copy(substitutes[name], abs_path)
|
||||
os.chmod(abs_path, mode)
|
||||
|
||||
@run_before("configure")
|
||||
def _patch_usr_bin_file(self) -> None:
|
||||
"""On NixOS file is not available in /usr/bin/file. Patch configure
|
||||
scripts to use file from path."""
|
||||
|
||||
if self.spec.os.startswith("nixos"):
|
||||
x = FileFilter(
|
||||
*filter(is_exe, find(self.build_directory, "configure", recursive=True))
|
||||
)
|
||||
with keep_modification_time(*x.filenames):
|
||||
x.filter(regex="/usr/bin/file", repl="file", string=True)
|
||||
|
||||
@run_before("configure")
|
||||
def _set_autotools_environment_variables(self) -> None:
|
||||
"""Many autotools builds use a version of mknod.m4 that fails when
|
||||
running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
|
||||
|
||||
We set this to 1 and expect the user to take responsibility if
|
||||
they are running as root. They have to anyway, as this variable
|
||||
doesn't actually prevent configure from doing bad things as root.
|
||||
Without it, configure just fails halfway through, but it can
|
||||
still run things *before* this check. Forcing this just removes a
|
||||
nuisance -- this is not circumventing any real protection.
|
||||
"""
|
||||
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
|
||||
|
||||
@run_before("configure")
|
||||
def _do_patch_libtool_configure(self) -> None:
|
||||
"""Patch bugs that propagate from libtool macros into "configure" and
|
||||
further into "libtool". Note that patches that can be fixed by patching
|
||||
"libtool" directly should be implemented in the _do_patch_libtool method
|
||||
below."""
|
||||
|
||||
# Exit early if we are required not to patch libtool-related problems:
|
||||
if not self.patch_libtool:
|
||||
return
|
||||
|
||||
x = FileFilter(*filter(is_exe, find(self.build_directory, "configure", recursive=True)))
|
||||
|
||||
# There are distributed automatically generated files that depend on the configure script
|
||||
# and require additional tools for rebuilding.
|
||||
# See https://github.com/spack/spack/pull/30768#issuecomment-1219329860
|
||||
with keep_modification_time(*x.filenames):
|
||||
# Fix parsing of compiler output when collecting predeps and postdeps
|
||||
# https://lists.gnu.org/archive/html/bug-libtool/2016-03/msg00003.html
|
||||
x.filter(regex=r'^(\s*if test x-L = )("\$p" \|\|\s*)$', repl=r"\1x\2")
|
||||
x.filter(
|
||||
regex=r'^(\s*test x-R = )("\$p")(; then\s*)$', repl=r'\1x\2 || test x-l = x"$p"\3'
|
||||
)
|
||||
# Support Libtool 2.4.2 and older:
|
||||
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
|
||||
# Configure scripts generated with libtool < 2.5.4 have a faulty test for the
|
||||
# -single_module linker flag. A deprecation warning makes it think the default is
|
||||
# -multi_module, triggering it to use problematic linker flags (such as ld -r). The
|
||||
# linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
|
||||
# `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
|
||||
# 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
|
||||
x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)
|
||||
|
||||
@run_after("configure")
|
||||
def _do_patch_libtool(self) -> None:
|
||||
"""If configure generates a "libtool" script that does not correctly
|
||||
detect the compiler (and patch_libtool is set), patch in the correct
|
||||
values for libtool variables.
|
||||
|
||||
The generated libtool script supports mixed compilers through tags:
|
||||
``libtool --tag=CC/CXX/FC/...```. For each tag there is a block with variables,
|
||||
which defines what flags to pass to the compiler. The default variables (which
|
||||
are used by the default tag CC) are set in a block enclosed by
|
||||
``# ### {BEGIN,END} LIBTOOL CONFIG``. For non-default tags, there are
|
||||
corresponding blocks ``# ### {BEGIN,END} LIBTOOL TAG CONFIG: {CXX,FC,F77}`` at
|
||||
the end of the file (after the exit command). libtool evals these blocks.
|
||||
Whenever we need to update variables that the configure script got wrong
|
||||
(for example, because it did not recognize the compiler), we should properly scope
|
||||
those changes to these tags/blocks so they only apply to the compiler we care
|
||||
about. Below, ``start_at`` and ``stop_at`` are used for that."""
|
||||
|
||||
# Exit early if we are required not to patch libtool:
|
||||
if not self.patch_libtool:
|
||||
return
|
||||
|
||||
x = FileFilter(*filter(is_exe, find(self.build_directory, "libtool", recursive=True)))
|
||||
|
||||
# Exit early if there is nothing to patch:
|
||||
if not x.filenames:
|
||||
return
|
||||
|
||||
markers = {"cc": "LIBTOOL CONFIG"}
|
||||
for tag in ["cxx", "fc", "f77"]:
|
||||
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
|
||||
|
||||
# Replace empty linker flag prefixes:
|
||||
if self.spec.satisfies("%nag"):
|
||||
# Nag is mixed with gcc and g++, which are recognized correctly.
|
||||
# Therefore, we change only Fortran values:
|
||||
nag_pkg = self.spec["fortran"].package
|
||||
for tag in ["fc", "f77"]:
|
||||
marker = markers[tag]
|
||||
x.filter(
|
||||
regex='^wl=""$',
|
||||
repl=f'wl="{nag_pkg.linker_arg}"',
|
||||
start_at=f"# ### BEGIN {marker}",
|
||||
stop_at=f"# ### END {marker}",
|
||||
)
|
||||
else:
|
||||
compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
|
||||
if compiler_spec:
|
||||
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
|
||||
|
||||
# Replace empty PIC flag values:
|
||||
for compiler, marker in markers.items():
|
||||
if compiler == "cc":
|
||||
language = "c"
|
||||
elif compiler == "cxx":
|
||||
language = "cxx"
|
||||
else:
|
||||
language = "fortran"
|
||||
|
||||
if language not in self.spec:
|
||||
continue
|
||||
|
||||
x.filter(
|
||||
regex='^pic_flag=""$',
|
||||
repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
|
||||
start_at=f"# ### BEGIN {marker}",
|
||||
stop_at=f"# ### END {marker}",
|
||||
)
|
||||
|
||||
# Other compiler-specific patches:
|
||||
if self.spec.satisfies("%fj"):
|
||||
x.filter(regex="-nostdlib", repl="", string=True)
|
||||
rehead = r"/\S*/"
|
||||
for o in [
|
||||
r"fjhpctag\.o",
|
||||
r"fjcrt0\.o",
|
||||
r"fjlang08\.o",
|
||||
r"fjomp\.o",
|
||||
r"crti\.o",
|
||||
r"crtbeginS\.o",
|
||||
r"crtendS\.o",
|
||||
]:
|
||||
x.filter(regex=(rehead + o), repl="")
|
||||
elif self.spec.satisfies("%nag"):
|
||||
for tag in ["fc", "f77"]:
|
||||
marker = markers[tag]
|
||||
start_at = "# ### BEGIN {0}".format(marker)
|
||||
stop_at = "# ### END {0}".format(marker)
|
||||
# Libtool 2.4.2 does not know the shared flag:
|
||||
x.filter(
|
||||
regex=r"\$CC -shared",
|
||||
repl=r"\$CC -Wl,-shared",
|
||||
string=True,
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
# Libtool does not know how to inject whole archives
|
||||
# (e.g. https://github.com/pmodels/mpich/issues/4358):
|
||||
x.filter(
|
||||
regex=r'^whole_archive_flag_spec="\\\$({?wl}?)--whole-archive'
|
||||
r'\\\$convenience \\\$\1--no-whole-archive"$',
|
||||
repl=r'whole_archive_flag_spec="\$\1--whole-archive'
|
||||
r"\`for conv in \$convenience\\\\\"\\\\\"; do test -n \\\\\"\$conv\\\\\" && "
|
||||
r"new_convenience=\\\\\"\$new_convenience,\$conv\\\\\"; done; "
|
||||
r'func_echo_all \\\\\"\$new_convenience\\\\\"\` \$\1--no-whole-archive"',
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
# The compiler requires special treatment in certain cases:
|
||||
x.filter(
|
||||
regex=r"^(with_gcc=.*)$",
|
||||
repl="\\1\n\n# Is the compiler the NAG compiler?\nwith_nag=yes",
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
|
||||
# Disable the special treatment for gcc and g++:
|
||||
for tag in ["cc", "cxx"]:
|
||||
marker = markers[tag]
|
||||
x.filter(
|
||||
regex=r"^(with_gcc=.*)$",
|
||||
repl="\\1\n\n# Is the compiler the NAG compiler?\nwith_nag=no",
|
||||
start_at="# ### BEGIN {0}".format(marker),
|
||||
stop_at="# ### END {0}".format(marker),
|
||||
)
|
||||
|
||||
# The compiler does not support -pthread flag, which might come
|
||||
# from the inherited linker flags. We prepend the flag with -Wl,
|
||||
# before using it:
|
||||
x.filter(
|
||||
regex=r"^(\s*)(for tmp_inherited_linker_flag in \$tmp_inherited_linker_flags; "
|
||||
r"do\s*)$",
|
||||
repl='\\1if test "x$with_nag" = xyes; then\n'
|
||||
"\\1 revert_nag_pthread=$tmp_inherited_linker_flags\n"
|
||||
"\\1 tmp_inherited_linker_flags="
|
||||
"`$ECHO \"$tmp_inherited_linker_flags\" | $SED 's% -pthread% -Wl,-pthread%g'`\n"
|
||||
'\\1 test x"$revert_nag_pthread" = x"$tmp_inherited_linker_flags" && '
|
||||
"revert_nag_pthread=no || revert_nag_pthread=yes\n"
|
||||
"\\1fi\n\\1\\2",
|
||||
start_at='if test -n "$inherited_linker_flags"; then',
|
||||
stop_at='case " $new_inherited_linker_flags " in',
|
||||
)
|
||||
# And revert the modification to produce '*.la' files that can be
|
||||
# used with gcc (normally, we do not install the files but they can
|
||||
# still be used during the building):
|
||||
start_at = '# Time to change all our "foo.ltframework" stuff back to "-framework foo"'
|
||||
stop_at = "# installed libraries to the beginning of the library search list"
|
||||
x.filter(
|
||||
regex=r"(\s*)(# move library search paths that coincide with paths to not "
|
||||
r"yet\s*)$",
|
||||
repl='\\1test x"$with_nag$revert_nag_pthread" = xyesyes &&\n'
|
||||
'\\1 new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | '
|
||||
"$SED 's% -Wl,-pthread% -pthread%g'`\n\\1\\2",
|
||||
start_at=start_at,
|
||||
stop_at=stop_at,
|
||||
)
|
||||
|
||||
@property
|
||||
def configure_directory(self) -> str:
|
||||
"""Return the directory where 'configure' resides."""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def configure_abs_path(self) -> str:
|
||||
# Absolute path to configure
|
||||
configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
|
||||
return configure_abs_path
|
||||
|
||||
@property
|
||||
def build_directory(self) -> str:
|
||||
"""Override to provide another place to build the package"""
|
||||
# Handle the case where the configure directory is set to a non-absolute path
|
||||
# Non-absolute paths are always relative to the staging source path
|
||||
build_dir = self.configure_directory
|
||||
if not os.path.isabs(build_dir):
|
||||
build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
|
||||
return build_dir
|
||||
|
||||
@run_before("autoreconf")
|
||||
def _delete_configure_to_force_update(self) -> None:
|
||||
if self.force_autoreconf:
|
||||
force_remove(self.configure_abs_path)
|
||||
|
||||
@property
|
||||
def autoreconf_search_path_args(self) -> List[str]:
|
||||
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
|
||||
of build deps, skips the default path of automake, move external include
|
||||
flags to the back, since they might pull in unrelated m4 files shadowing
|
||||
spack dependencies."""
|
||||
return _autoreconf_search_path_args(self.spec)
|
||||
|
||||
@run_after("autoreconf")
|
||||
def _set_configure_or_die(self) -> None:
|
||||
"""Ensure the presence of a "configure" script, or raise. If the "configure"
|
||||
is found, a module level attribute is set.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if the "configure" script is not found
|
||||
"""
|
||||
# Check if the "configure" script is there. If not raise a RuntimeError.
|
||||
if not os.path.exists(self.configure_abs_path):
|
||||
msg = "configure script not found in {0}"
|
||||
raise RuntimeError(msg.format(self.configure_directory))
|
||||
|
||||
# Monkey-patch the configure script in the corresponding module
|
||||
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
|
||||
globals_for_pkg.configure = Executable(self.configure_abs_path)
|
||||
globals_for_pkg.propagate_changes_to_mro()
|
||||
|
||||
def configure_args(self) -> List[str]:
|
||||
"""Return the list of all the arguments that must be passed to configure,
|
||||
except ``--prefix`` which will be pre-pended to the list.
|
||||
"""
|
||||
return []
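# A minimal sketch of how a package usually overrides this hook (the package, variant,
# and flag names are hypothetical):
#
#     class Libfoo(AutotoolsPackage):
#         variant("shared", default=True, description="Build shared libraries")
#
#         def configure_args(self):
#             return self.enable_or_disable("shared") + ["--disable-silent-rules"]
#
# The builder adds --prefix=<install prefix> itself, so packages never pass it here.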
|
||||
|
||||
def autoreconf(self, pkg: AutotoolsPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Not needed usually, configure should be already there"""
|
||||
|
||||
# If configure exists nothing needs to be done
|
||||
if os.path.exists(self.configure_abs_path):
|
||||
return
|
||||
|
||||
# Else try to regenerate it, which requires a few build dependencies
|
||||
ensure_build_dependencies_or_raise(
|
||||
spec=spec,
|
||||
dependencies=["autoconf", "automake", "libtool"],
|
||||
error_msg="Cannot generate configure",
|
||||
)
|
||||
|
||||
tty.msg("Configure script not found: trying to generate it")
|
||||
tty.warn("*********************************************************")
|
||||
tty.warn("* If the default procedure fails, consider implementing *")
|
||||
tty.warn("* a custom AUTORECONF phase in the package *")
|
||||
tty.warn("*********************************************************")
|
||||
with working_dir(self.configure_directory):
|
||||
# This line is what is needed most of the time
|
||||
# --install, --verbose, --force
|
||||
autoreconf_args = ["-ivf"]
|
||||
autoreconf_args += self.autoreconf_search_path_args
|
||||
autoreconf_args += self.autoreconf_extra_args
|
||||
self.pkg.module.autoreconf(*autoreconf_args)
|
||||
|
||||
def configure(self, pkg: AutotoolsPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Run "configure", with the arguments specified by the builder and an
|
||||
appropriately set prefix.
|
||||
"""
|
||||
options = getattr(self.pkg, "configure_flag_args", [])
|
||||
options += ["--prefix={0}".format(prefix)]
|
||||
options += self.configure_args()
|
||||
|
||||
with working_dir(self.build_directory, create=True):
|
||||
pkg.module.configure(*options)
|
||||
|
||||
def build(self, pkg: AutotoolsPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
# See https://autotools.io/automake/silent.html
|
||||
params = ["V=1"]
|
||||
params += self.build_targets
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.make(*params)
|
||||
|
||||
def install(self, pkg: AutotoolsPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.make(*self.install_targets)
|
||||
|
||||
run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self) -> None:
|
||||
"""Run "make" on the ``test`` and ``check`` targets, if found."""
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("test")
|
||||
self.pkg._if_make_target_execute("check")
|
||||
|
||||
def _activate_or_not(
|
||||
self,
|
||||
name: str,
|
||||
activation_word: str,
|
||||
deactivation_word: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant=None,
|
||||
) -> List[str]:
|
||||
"""This function contain the current implementation details of
|
||||
:meth:`~spack_repo.builtin.build_systems.autotools.AutotoolsBuilder.with_or_without` and
|
||||
:meth:`~spack_repo.builtin.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.
|
||||
|
||||
Args:
|
||||
name: name of the option that is being activated or not
|
||||
activation_word: the default activation word ('with' in the case of
|
||||
``with_or_without``)
|
||||
deactivation_word: the default deactivation word ('without' in the case of
|
||||
``with_or_without``)
|
||||
activation_value: callable that accepts a single value. This value is either one of the
|
||||
allowed values for a multi-valued variant or the name of a bool-valued variant.
|
||||
Returns the parameter to be used when the value is activated.
|
||||
|
||||
The special value "prefix" can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
variant: name of the variant that is being processed (if different from option name)
|
||||
|
||||
Examples:
|
||||
|
||||
Given a package with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant("foo", values=("x", "y"), description=")
|
||||
variant("bar", default=True, description=")
|
||||
variant("ba_z", default=True, description=")
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
_activate_or_not(
|
||||
"foo", "with", "without", activation_value="prefix"
|
||||
)
|
||||
_activate_or_not("bar", "with", "without")
|
||||
_activate_or_not("ba-z", "with", "without", variant="ba_z")
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
--with-x=<prefix-to-x> --without-y --with-bar --with-ba-z
|
||||
|
||||
for ``<spec-name> foo=x +bar``
|
||||
|
||||
Note: returns an empty list when the variant is conditional and its condition
|
||||
is not met.
|
||||
|
||||
Returns:
|
||||
list: list of strings that corresponds to the activation/deactivation
|
||||
of the variant that has been processed
|
||||
|
||||
Raises:
|
||||
KeyError: if name is not among known variants
|
||||
"""
|
||||
spec: Spec = self.pkg.spec
|
||||
args: List[str] = []
|
||||
|
||||
if activation_value == "prefix":
|
||||
activation_value = lambda x: spec[x].prefix
|
||||
|
||||
variant = variant or name
|
||||
|
||||
# Defensively look that the name passed as argument is among variants
|
||||
if not self.pkg.has_variant(variant):
|
||||
msg = '"{0}" is not a variant of "{1}"'
|
||||
raise KeyError(msg.format(variant, self.pkg.name))
|
||||
|
||||
if variant not in spec.variants:
|
||||
return []
|
||||
|
||||
# Create a list of pairs. Each pair includes a configuration
|
||||
# option and whether or not that option is activated
|
||||
vdef = self.pkg.get_variant(variant)
|
||||
if set(vdef.values) == set((True, False)): # type: ignore
|
||||
# BoolValuedVariant carry information about a single option.
|
||||
# Nonetheless, for uniformity of treatment we'll package them
|
||||
# in an iterable of one element.
|
||||
options = [(name, f"+{variant}" in spec)]
|
||||
else:
|
||||
# "feature_values" is used to track values which correspond to
|
||||
# features which can be enabled or disabled as understood by the
|
||||
# package's build system. It excludes values which have special
|
||||
# meanings and do not correspond to features (e.g. "none")
|
||||
feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
|
||||
options = [(v, f"{variant}={v}" in spec) for v in feature_values] # type: ignore
|
||||
|
||||
# For each allowed value in the list of values
|
||||
for option_value, activated in options:
|
||||
# Search for an override in the package for this value
|
||||
override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
|
||||
line_generator = getattr(self, override_name, None) or getattr(
|
||||
self.pkg, override_name, None
|
||||
)
|
||||
# If not available use a sensible default
|
||||
if line_generator is None:
|
||||
|
||||
def _default_generator(is_activated):
|
||||
if is_activated:
|
||||
line = f"--{activation_word}-{option_value}"
|
||||
if activation_value is not None and activation_value(
|
||||
option_value
|
||||
): # NOQA=ignore=E501
|
||||
line = f"{line}={activation_value(option_value)}"
|
||||
return line
|
||||
return f"--{deactivation_word}-{option_value}"
|
||||
|
||||
line_generator = _default_generator
|
||||
args.append(line_generator(activated))
|
||||
return args
|
||||
|
||||
def with_or_without(
|
||||
self,
|
||||
name: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
"""Inspects a variant and returns the arguments that activate
|
||||
or deactivate the selected feature(s) for the configure options.
|
||||
|
||||
This function works on all type of variants. For bool-valued variants
|
||||
it will return by default ``--with-{name}`` or ``--without-{name}``.
|
||||
For other kinds of variants it will cycle over the allowed values and
|
||||
return either ``--with-{value}`` or ``--without-{value}``.
|
||||
|
||||
If activation_value is given, then for each possible value of the
|
||||
variant, the option ``--with-{value}=activation_value(value)`` or
|
||||
``--without-{value}`` will be added depending on whether or not
|
||||
``variant=value`` is in the spec.
|
||||
|
||||
Args:
|
||||
name: name of a valid multi-valued variant
|
||||
activation_value: callable that accepts a single value and returns the parameter to be
|
||||
used leading to an entry of the type ``--with-{name}={parameter}``.
|
||||
|
||||
The special value "prefix" can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
|
||||
Returns:
|
||||
list of arguments to configure
|
||||
"""
|
||||
return self._activate_or_not(name, "with", "without", activation_value, variant)
|
||||
|
||||
def enable_or_disable(
|
||||
self,
|
||||
name: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
"""Same as
|
||||
:meth:`~spack_repo.builtin.build_systems.autotools.AutotoolsBuilder.with_or_without`
|
||||
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
|
||||
|
||||
Args:
|
||||
name: name of a valid multi-valued variant
|
||||
activation_value: if present accepts a single value and returns the parameter to be
|
||||
used leading to an entry of the type ``--enable-{name}={parameter}``
|
||||
|
||||
The special value "prefix" can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
|
||||
Returns:
|
||||
list of arguments to configure
|
||||
"""
|
||||
return self._activate_or_not(name, "enable", "disable", activation_value, variant)
|
||||
|
||||
run_after("install")(execute_install_time_tests)
|
||||
|
||||
def installcheck(self) -> None:
|
||||
"""Run "make" on the ``installcheck`` target, if found."""
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("installcheck")
|
||||
|
||||
@run_after("install")
|
||||
def _remove_libtool_archives(self) -> None:
|
||||
"""Remove all .la files in prefix sub-folders if the package sets
|
||||
``install_libtool_archives`` to be False.
|
||||
"""
|
||||
# If .la files are to be installed there's nothing to do
|
||||
if self.install_libtool_archives:
|
||||
return
|
||||
|
||||
# Remove the files and create a log of what was removed
|
||||
libtool_files = find(str(self.pkg.prefix), "*.la", recursive=True)
|
||||
with fs.safe_remove(*libtool_files):
|
||||
mkdirp(os.path.dirname(self._removed_la_files_log))
|
||||
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
|
||||
f.write("\n".join(libtool_files))
|
||||
|
||||
def setup_build_environment(self, env: EnvironmentModifications) -> None:
|
||||
if self.spec.platform == "darwin" and macos_version() >= Version("11"):
|
||||
# Many configure files rely on matching '10.*' for macOS version
|
||||
# detection and fail to add flags if it shows as version 11.
|
||||
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
|
||||
|
||||
def _autoreconf_search_path_args(spec: Spec) -> List[str]:
|
||||
dirs_seen: Set[Tuple[int, int]] = set()
|
||||
flags_spack: List[str] = []
|
||||
flags_external: List[str] = []
|
||||
|
||||
# We don't want to add an include flag for automake's default search path.
|
||||
for automake in spec.dependencies(name="automake", deptype="build"):
|
||||
try:
|
||||
s = os.stat(automake.prefix.share.aclocal)
|
||||
if stat.S_ISDIR(s.st_mode):
|
||||
dirs_seen.add((s.st_ino, s.st_dev))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
for dep in spec.dependencies(deptype="build"):
|
||||
path = dep.prefix.share.aclocal
|
||||
# Skip non-existing aclocal paths
|
||||
try:
|
||||
s = os.stat(path)
|
||||
except OSError:
|
||||
continue
|
||||
# Skip things seen before, as well as non-dirs.
|
||||
if (s.st_ino, s.st_dev) in dirs_seen or not stat.S_ISDIR(s.st_mode):
|
||||
continue
|
||||
dirs_seen.add((s.st_ino, s.st_dev))
|
||||
flags = flags_external if dep.external else flags_spack
|
||||
flags.extend(["-I", path])
|
||||
return flags_spack + flags_external
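# Hedged illustration of the result: a flat flag list suitable for autoreconf, with the
# aclocal dirs of Spack-built dependencies first and external ones last, e.g.
#
#     ["-I", "<spack prefix>/libtool-2.4.7/share/aclocal",
#      "-I", "<spack prefix>/pkgconf-2.1.0/share/aclocal",
#      "-I", "/usr/share/aclocal"]
#
# (all paths are placeholders).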
|
28
var/spack/repos/spack_repo/builtin/build_systems/bundle.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from spack.package import Builder, PackageBase, Prefix, Spec, build_system, register_builder
|
||||
|
||||
|
||||
class BundlePackage(PackageBase):
|
||||
"""General purpose bundle, or no-code, package class."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know which
|
||||
#: build-system class we are using
|
||||
build_system_class = "BundlePackage"
|
||||
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "bundle"
|
||||
|
||||
#: Bundle packages do not have associated source or binary code.
|
||||
has_code = False
|
||||
|
||||
build_system("bundle")
|
||||
|
||||
|
||||
@register_builder("bundle")
|
||||
class BundleBuilder(Builder):
|
||||
phases = ("install",)
|
||||
|
||||
def install(self, pkg: BundlePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
pass
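# A minimal sketch of a package built on this class (name and dependencies invented):
#
#     class MyDevTools(BundlePackage):
#         """Meta-package that only pulls in a set of run-time tools."""
#
#         version("1.0")
#
#         depends_on("cmake", type="run")
#         depends_on("ninja", type="run")
#
# Since has_code is False, nothing is fetched or compiled; the install phase above is a
# no-op and installing the bundle simply installs its dependencies.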
|
@@ -7,14 +7,7 @@
|
||||
import re
|
||||
from typing import Optional, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import depends_on
|
||||
from spack.util.executable import which_string
|
||||
from spack.package import Prefix, Spec, depends_on, install, mkdirp, run_after, tty, which_string
|
||||
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
|
||||
@@ -375,9 +368,7 @@ def initconfig_package_entries(self):
|
||||
"""This method is to be overwritten by the package"""
|
||||
return []
|
||||
|
||||
def initconfig(
|
||||
self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def initconfig(self, pkg: "CachedCMakePackage", spec: Spec, prefix: Prefix) -> None:
|
||||
cache_entries = (
|
||||
self.std_initconfig_entries()
|
||||
+ self.initconfig_compiler_entries()
|
||||
@@ -397,10 +388,10 @@ def std_cmake_args(self):
|
||||
args.extend(["-C", self.cache_path])
|
||||
return args
|
||||
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
@run_after("install")
|
||||
def install_cmake_cache(self):
|
||||
fs.mkdirp(self.pkg.spec.prefix.share.cmake)
|
||||
fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
|
||||
mkdirp(self.pkg.spec.prefix.share.cmake)
|
||||
install(self.cache_path, self.pkg.spec.prefix.share.cmake)
|
||||
|
||||
|
||||
class CachedCMakePackage(CMakePackage):
|
@@ -2,21 +2,24 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.multimethod import when
|
||||
from spack.package import (
|
||||
EnvironmentModifications,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
build_system,
|
||||
depends_on,
|
||||
install_tree,
|
||||
register_builder,
|
||||
run_after,
|
||||
when,
|
||||
working_dir,
|
||||
)
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
class CargoPackage(spack.package_base.PackageBase):
|
||||
class CargoPackage(PackageBase):
|
||||
"""Specialized class for packages built using cargo."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
@@ -29,7 +32,7 @@ class CargoPackage(spack.package_base.PackageBase):
|
||||
depends_on("rust", type="build")
|
||||
|
||||
|
||||
@spack.builder.builder("cargo")
|
||||
@register_builder("cargo")
|
||||
class CargoBuilder(BuilderWithDefaults):
|
||||
"""The Cargo builder encodes the most common way of building software with
|
||||
a rust Cargo.toml file. It has two phases that can be overridden, if need be:
|
||||
@@ -87,30 +90,24 @@ def check_args(self):
|
||||
"""Argument for ``cargo test`` during check phase"""
|
||||
return []
|
||||
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
def setup_build_environment(self, env: EnvironmentModifications) -> None:
|
||||
env.set("CARGO_HOME", self.stage.path)
|
||||
|
||||
def build(
|
||||
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg: CargoPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Runs ``cargo install`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.cargo(
|
||||
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
|
||||
)
|
||||
|
||||
def install(
|
||||
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg: CargoPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Copy build files into package prefix."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
fs.install_tree("out", prefix)
|
||||
with working_dir(self.build_directory):
|
||||
install_tree("out", prefix)
|
||||
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
run_after("install")(execute_install_time_tests)
|
||||
|
||||
def check(self):
|
||||
"""Run "cargo test"."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg.module.cargo("test", *self.check_args)
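A minimal Cargo-based recipe sketch (hypothetical name, URL, and placeholder checksum; assumes the standard recipe preamble):

from spack.package import *


class ExampleRustCli(CargoPackage):
    """Hypothetical CLI tool built and installed with ``cargo install``."""

    homepage = "https://example.org/example-rust-cli"
    url = "https://example.org/example-rust-cli-1.0.0.tar.gz"

    # Placeholder checksum for illustration only.
    version("1.0.0", sha256="0" * 64)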
|
var/spack/repos/spack_repo/builtin/build_systems/cmake.py (new file, 644 lines)
@@ -0,0 +1,644 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections.abc
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from itertools import chain
|
||||
from typing import Any, List, Optional, Tuple
|
||||
|
||||
from llnl.util.lang import stable_partition
|
||||
|
||||
import spack.deptypes as dt
|
||||
from spack import traverse
|
||||
from spack.package import (
|
||||
InstallError,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
build_system,
|
||||
conflicts,
|
||||
depends_on,
|
||||
register_builder,
|
||||
run_after,
|
||||
tty,
|
||||
variant,
|
||||
when,
|
||||
working_dir,
|
||||
)
|
||||
from spack.util.environment import filter_system_paths
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
# Regex to extract the primary generator from the CMake generator
|
||||
# string.
|
||||
_primary_generator_extractor = re.compile(r"(?:.* - )?(.*)")
|
||||
|
||||
|
||||
def _extract_primary_generator(generator):
|
||||
"""Use the compiled regex _primary_generator_extractor to extract the
|
||||
primary generator from the generator string which may contain an
|
||||
optional secondary generator.
|
||||
"""
|
||||
return _primary_generator_extractor.match(generator).group(1)
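For example, CMake "extra generators" are spelled "<Extra> - <Primary>", and only the primary part matters here (illustrative values):

_extract_primary_generator("Ninja")                        # -> "Ninja"
_extract_primary_generator("CodeBlocks - Unix Makefiles")  # -> "Unix Makefiles"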
|
||||
|
||||
|
||||
def _maybe_set_python_hints(pkg: PackageBase, args: List[str]) -> None:
|
||||
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
|
||||
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
|
||||
``find_python_hints`` for context."""
|
||||
if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
|
||||
"python", dt.BUILD | dt.LINK
|
||||
):
|
||||
return
|
||||
python_executable = pkg.spec["python"].command.path
|
||||
args.extend(
|
||||
[
|
||||
define("PYTHON_EXECUTABLE", python_executable),
|
||||
define("Python_EXECUTABLE", python_executable),
|
||||
define("Python3_EXECUTABLE", python_executable),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def _supports_compilation_databases(pkg: PackageBase) -> bool:
|
||||
"""Check if this package (and CMake) can support compilation databases."""
|
||||
|
||||
# CMAKE_EXPORT_COMPILE_COMMANDS only exists for CMake >= 3.5
|
||||
if not pkg.spec.satisfies("^cmake@3.5:"):
|
||||
return False
|
||||
|
||||
# CMAKE_EXPORT_COMPILE_COMMANDS is only implemented for Makefile and Ninja generators
|
||||
if not (pkg.spec.satisfies("generator=make") or pkg.spec.satisfies("generator=ninja")):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _conditional_cmake_defaults(pkg: PackageBase, args: List[str]) -> None:
|
||||
"""Set a few default defines for CMake, depending on its version."""
|
||||
cmakes = pkg.spec.dependencies("cmake", dt.BUILD)
|
||||
|
||||
if len(cmakes) != 1:
|
||||
return
|
||||
|
||||
cmake = cmakes[0]
|
||||
|
||||
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
|
||||
try:
|
||||
ipo = pkg.spec.variants["ipo"].value
|
||||
except KeyError:
|
||||
ipo = False
|
||||
|
||||
if cmake.satisfies("@3.9:"):
|
||||
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
|
||||
|
||||
# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
|
||||
# find_package may search there. This is not what we want.
|
||||
|
||||
# Do not populate CMake User Package Registry
|
||||
if cmake.satisfies("@3.15:"):
|
||||
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
|
||||
args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
|
||||
elif cmake.satisfies("@3.1:"):
|
||||
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
|
||||
args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
|
||||
|
||||
# Do not use CMake User/System Package Registry
|
||||
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
|
||||
if cmake.satisfies("@3.16:"):
|
||||
args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
|
||||
elif cmake.satisfies("@3.1:3.15"):
|
||||
args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
|
||||
args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
|
||||
|
||||
# Export a compilation database if supported.
|
||||
if _supports_compilation_databases(pkg):
|
||||
args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
|
||||
|
||||
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
|
||||
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
|
||||
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
|
||||
args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
|
||||
|
||||
# Disable find package's config mode for versions of Boost that
|
||||
# didn't provide it. See https://github.com/spack/spack/issues/20169
|
||||
# and https://cmake.org/cmake/help/latest/module/FindBoost.html
|
||||
if pkg.spec.satisfies("^boost@:1.69.0"):
|
||||
args.append(define("Boost_NO_BOOST_CMAKE", True))
|
||||
|
||||
|
||||
def generator(*names: str, default: Optional[str] = None) -> None:
|
||||
"""The build system generator to use.
|
||||
|
||||
See ``cmake --help`` for a list of valid generators.
|
||||
Currently, "Unix Makefiles" and "Ninja" are the only generators
|
||||
that Spack supports. Defaults to "Unix Makefiles".
|
||||
|
||||
See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
for more information.
|
||||
|
||||
Args:
|
||||
names: allowed generators for this package
|
||||
default: default generator
|
||||
"""
|
||||
allowed_values = ("make", "ninja")
|
||||
if any(x not in allowed_values for x in names):
|
||||
msg = "only 'make' and 'ninja' are allowed for CMake's 'generator' directive"
|
||||
raise ValueError(msg)
|
||||
|
||||
default = default or names[0]
|
||||
not_used = [x for x in allowed_values if x not in names]
|
||||
|
||||
def _values(x):
|
||||
return x in allowed_values
|
||||
|
||||
_values.__doc__ = f"{','.join(names)}"
|
||||
|
||||
variant(
|
||||
"generator",
|
||||
default=default,
|
||||
values=_values,
|
||||
description="the build system generator to use",
|
||||
when="build_system=cmake",
|
||||
)
|
||||
for x in not_used:
|
||||
conflicts(f"generator={x}")
|
||||
|
||||
|
||||
def get_cmake_prefix_path(pkg: PackageBase) -> List[str]:
|
||||
"""Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
|
||||
attribute of direct build/test and transitive link dependencies."""
|
||||
edges = traverse.traverse_topo_edges_generator(
|
||||
traverse.with_artificial_edges([pkg.spec]),
|
||||
visitor=traverse.MixedDepthVisitor(
|
||||
direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
|
||||
),
|
||||
key=traverse.by_dag_hash,
|
||||
root=False,
|
||||
all_edges=False, # cover all nodes, not all edges
|
||||
)
|
||||
ordered_specs = [edge.spec for edge in edges]
|
||||
# Separate out externals so they do not shadow Spack prefixes
|
||||
externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
|
||||
|
||||
return filter_system_paths(
|
||||
path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
|
||||
)
|
||||
|
||||
|
||||
class CMakePackage(PackageBase):
|
||||
"""Specialized class for packages built using CMake
|
||||
|
||||
For more information on the CMake build system, see:
|
||||
https://cmake.org/cmake/help/latest/
|
||||
"""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = "CMakePackage"
|
||||
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "cmake"
|
||||
|
||||
#: When this package depends on Python and ``find_python_hints`` is set to True, pass the
|
||||
#: defines {Python3,Python,PYTHON}_EXECUTABLE explicitly, so that CMake locates the right
|
||||
#: Python in its builtin FindPython3, FindPython, and FindPythonInterp modules. Spack does
|
||||
#: CMake's job because CMake's modules by default only search for Python versions known at the
|
||||
#: time of release.
|
||||
find_python_hints = True
|
||||
|
||||
build_system("cmake")
|
||||
|
||||
with when("build_system=cmake"):
|
||||
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
# See https://github.com/spack/spack/pull/36679 and related issues for a
|
||||
# discussion of the trade-offs between Release and RelWithDebInfo for default
|
||||
# builds. Release is chosen to maximize performance and reduce disk-space burden,
|
||||
# at the cost of more difficulty in debugging.
|
||||
variant(
|
||||
"build_type",
|
||||
default="Release",
|
||||
description="CMake build type",
|
||||
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
|
||||
)
|
||||
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
|
||||
# https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
|
||||
variant(
|
||||
"ipo",
|
||||
default=False,
|
||||
when="^cmake@3.9:",
|
||||
description="CMake interprocedural optimization",
|
||||
)
|
||||
|
||||
if sys.platform == "win32":
|
||||
generator("ninja")
|
||||
else:
|
||||
generator("ninja", "make", default="make")
|
||||
|
||||
depends_on("cmake", type="build")
|
||||
depends_on("gmake", type="build", when="generator=make")
|
||||
depends_on("ninja", type="build", when="generator=ninja")
|
||||
|
||||
def flags_to_build_system_args(self, flags):
|
||||
"""Return a list of all command line arguments to pass the specified
|
||||
compiler flags to cmake. Note CMAKE does not have a cppflags option,
|
||||
so cppflags will be added to cflags, cxxflags, and fflags to mimic the
|
||||
behavior in other tools.
|
||||
"""
|
||||
# Has to be dynamic attribute due to caching
|
||||
setattr(self, "cmake_flag_args", [])
|
||||
|
||||
flag_string = "-DCMAKE_{0}_FLAGS={1}"
|
||||
langs = {"C": "c", "CXX": "cxx", "Fortran": "f"}
|
||||
|
||||
# Handle language compiler flags
|
||||
for lang, pre in langs.items():
|
||||
flag = pre + "flags"
|
||||
# cmake has no explicit cppflags support -> add it to all langs
|
||||
lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", []))
|
||||
if lang_flags:
|
||||
self.cmake_flag_args.append(flag_string.format(lang, lang_flags))
|
||||
|
||||
# CMake has different linker arguments for different build types.
|
||||
# We specify for each of them.
|
||||
if flags["ldflags"]:
|
||||
ldflags = " ".join(flags["ldflags"])
|
||||
# cmake has separate linker arguments for types of builds.
|
||||
self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}")
|
||||
self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}")
|
||||
self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}")
|
||||
|
||||
# CMake has libs options separated by language. Apply ours to each.
|
||||
if flags["ldlibs"]:
|
||||
libs_flags = " ".join(flags["ldlibs"])
|
||||
libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}"
|
||||
for lang in langs:
|
||||
self.cmake_flag_args.append(libs_string.format(lang, libs_flags))
|
||||
|
||||
# Legacy methods (used by too many packages to change them,
|
||||
# need to forward to the builder)
|
||||
def define(self, cmake_var: str, value: Any) -> str:
|
||||
return define(cmake_var, value)
|
||||
|
||||
def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
|
||||
return define_from_variant(self, cmake_var, variant)
|
||||
|
||||
|
||||
@register_builder("cmake")
|
||||
class CMakeBuilder(BuilderWithDefaults):
|
||||
"""The cmake builder encodes the default way of building software with CMake. IT
|
||||
has three phases that can be overridden:
|
||||
|
||||
1. :py:meth:`~.CMakeBuilder.cmake`
|
||||
2. :py:meth:`~.CMakeBuilder.build`
|
||||
3. :py:meth:`~.CMakeBuilder.install`
|
||||
|
||||
They all have sensible defaults and for many packages the only thing
|
||||
necessary will be to override :py:meth:`~.CMakeBuilder.cmake_args`.
|
||||
|
||||
For a finer tuning you may also override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:meth:`~.CMakeBuilder.root_cmakelists_dir` | Location of the |
|
||||
| | root CMakeLists.txt|
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.CMakeBuilder.build_directory` | Directory where to |
|
||||
| | build the package |
|
||||
+-----------------------------------------------+--------------------+
|
||||
"""
|
||||
|
||||
#: Phases of a CMake package
|
||||
phases: Tuple[str, ...] = ("cmake", "build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods: Tuple[str, ...] = ("cmake_args", "check")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes: Tuple[str, ...] = (
|
||||
"build_targets",
|
||||
"install_targets",
|
||||
"build_time_test_callbacks",
|
||||
"archive_files",
|
||||
"root_cmakelists_dir",
|
||||
"std_cmake_args",
|
||||
"build_dirname",
|
||||
"build_directory",
|
||||
)
|
||||
|
||||
#: Targets to be used during the build phase
|
||||
build_targets: List[str] = []
|
||||
#: Targets to be used during the install phase
|
||||
install_targets = ["install"]
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
@property
|
||||
def archive_files(self) -> List[str]:
|
||||
"""Files to archive for packages based on CMake"""
|
||||
files = [os.path.join(self.build_directory, "CMakeCache.txt")]
|
||||
if _supports_compilation_databases(self.pkg):
|
||||
files.append(os.path.join(self.build_directory, "compile_commands.json"))
|
||||
return files
|
||||
|
||||
@property
|
||||
def root_cmakelists_dir(self) -> str:
|
||||
"""The relative path to the directory containing CMakeLists.txt
|
||||
|
||||
This path is relative to the root of the extracted tarball,
|
||||
not to the ``build_directory``. Defaults to the current directory.
|
||||
"""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def generator(self) -> str:
|
||||
if self.spec.satisfies("generator=make"):
|
||||
return "Unix Makefiles"
|
||||
if self.spec.satisfies("generator=ninja"):
|
||||
return "Ninja"
|
||||
raise ValueError(
|
||||
f'{self.spec.format()} has an unsupported value for the "generator" variant'
|
||||
)
|
||||
|
||||
@property
|
||||
def std_cmake_args(self) -> List[str]:
|
||||
"""Standard cmake arguments provided as a property for
|
||||
convenience of package writers
|
||||
"""
|
||||
args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
|
||||
args += getattr(self.pkg, "cmake_flag_args", [])
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
def std_args(pkg: PackageBase, generator: Optional[str] = None) -> List[str]:
|
||||
"""Computes the standard cmake arguments for a generic package"""
|
||||
default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
|
||||
generator = generator or default_generator
|
||||
valid_primary_generators = ["Unix Makefiles", "Ninja"]
|
||||
primary_generator = _extract_primary_generator(generator)
|
||||
if primary_generator not in valid_primary_generators:
|
||||
msg = "Invalid CMake generator: '{0}'\n".format(generator)
|
||||
msg += "CMakePackage currently supports the following "
|
||||
msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
|
||||
raise InstallError(msg)
|
||||
|
||||
try:
|
||||
build_type = pkg.spec.variants["build_type"].value
|
||||
except KeyError:
|
||||
build_type = "RelWithDebInfo"
|
||||
|
||||
args = [
|
||||
"-G",
|
||||
generator,
|
||||
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
|
||||
define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
|
||||
# only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
|
||||
define(
|
||||
"CMAKE_INSTALL_RPATH",
|
||||
[
|
||||
pathlib.Path(pkg.prefix, "lib").as_posix(),
|
||||
pathlib.Path(pkg.prefix, "lib64").as_posix(),
|
||||
],
|
||||
),
|
||||
define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
|
||||
define("CMAKE_BUILD_TYPE", build_type),
|
||||
]
|
||||
|
||||
if primary_generator == "Unix Makefiles":
|
||||
args.append(define("CMAKE_VERBOSE_MAKEFILE", True))
|
||||
|
||||
if platform.mac_ver()[0]:
|
||||
args.extend(
|
||||
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
|
||||
)
|
||||
|
||||
_conditional_cmake_defaults(pkg, args)
|
||||
_maybe_set_python_hints(pkg, args)
|
||||
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
def define_cuda_architectures(pkg: PackageBase) -> str:
|
||||
return define_cuda_architectures(pkg)
|
||||
|
||||
@staticmethod
|
||||
def define_hip_architectures(pkg: PackageBase) -> str:
|
||||
return define_hip_architectures(pkg)
|
||||
|
||||
@staticmethod
|
||||
def define(cmake_var: str, value: Any) -> str:
|
||||
return define(cmake_var, value)
|
||||
|
||||
def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
|
||||
return define_from_variant(self.pkg, cmake_var, variant)
|
||||
|
||||
@property
|
||||
def build_dirname(self) -> str:
|
||||
"""Directory name to use when building the package."""
|
||||
return f"spack-build-{self.pkg.spec.dag_hash(7)}"
|
||||
|
||||
@property
|
||||
def build_directory(self) -> str:
|
||||
"""Full-path to the directory to use when building the package."""
|
||||
return os.path.join(self.pkg.stage.path, self.build_dirname)
|
||||
|
||||
def cmake_args(self) -> List[str]:
|
||||
"""List of all the arguments that must be passed to cmake, except:
|
||||
|
||||
* CMAKE_INSTALL_PREFIX
|
||||
* CMAKE_BUILD_TYPE
|
||||
|
||||
which will be set automatically.
|
||||
"""
|
||||
return []
|
||||
|
||||
def cmake(self, pkg: CMakePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Runs ``cmake`` in the build directory"""
|
||||
|
||||
if spec.is_develop:
|
||||
# skip cmake phase if it is an incremental develop build
|
||||
|
||||
# Look for files generated by a previous successful configure step; if they
# are present, the cmake phase can be skipped.
|
||||
primary_generator = _extract_primary_generator(self.generator)
|
||||
configure_artifact = "Makefile"
|
||||
if primary_generator == "Ninja":
|
||||
configure_artifact = "ninja.build"
|
||||
|
||||
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
|
||||
tty.msg(
|
||||
"Incremental build criteria satisfied."
|
||||
"Skipping CMake configure step. To force configuration run"
|
||||
f" `spack clean {pkg.name}`"
|
||||
)
|
||||
return
|
||||
|
||||
options = self.std_cmake_args
|
||||
options += self.cmake_args()
|
||||
options.append(os.path.abspath(self.root_cmakelists_dir))
|
||||
with working_dir(self.build_directory, create=True):
|
||||
pkg.module.cmake(*options)
|
||||
|
||||
def build(self, pkg: CMakePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Make the build targets"""
|
||||
with working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
pkg.module.make(*self.build_targets)
|
||||
elif self.generator == "Ninja":
|
||||
self.build_targets.append("-v")
|
||||
pkg.module.ninja(*self.build_targets)
|
||||
|
||||
def install(self, pkg: CMakePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Make the install targets"""
|
||||
with working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
pkg.module.make(*self.install_targets)
|
||||
elif self.generator == "Ninja":
|
||||
pkg.module.ninja(*self.install_targets)
|
||||
|
||||
run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self) -> None:
|
||||
"""Search the CMake-generated files for the targets ``test`` and ``check``,
|
||||
and runs them if found.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
self.pkg._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
|
||||
self.pkg._if_make_target_execute("check")
|
||||
elif self.generator == "Ninja":
|
||||
self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
|
||||
self.pkg._if_ninja_target_execute("check")
|
||||
|
||||
|
||||
def define(cmake_var: str, value: Any) -> str:
|
||||
"""Return a CMake command line argument that defines a variable.
|
||||
|
||||
The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
|
||||
semicolon-separated string lists. All other values will be interpreted as strings.
|
||||
|
||||
Examples:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[define("BUILD_SHARED_LIBS", True),
|
||||
define("CMAKE_CXX_STANDARD", 14),
|
||||
define("swr", ["avx", "avx2"])]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
["-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2]
|
||||
|
||||
"""
|
||||
# Pick the CMake type for the define and normalize the value to a string.
|
||||
if isinstance(value, bool):
|
||||
kind = "BOOL"
|
||||
value = "ON" if value else "OFF"
|
||||
else:
|
||||
kind = "STRING"
|
||||
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||
value = ";".join(str(v) for v in value)
|
||||
else:
|
||||
value = str(value)
|
||||
|
||||
return "".join(["-D", cmake_var, ":", kind, "=", value])
|
||||
|
||||
|
||||
def define_from_variant(pkg: PackageBase, cmake_var: str, variant: Optional[str] = None) -> str:
|
||||
"""Return a CMake command line argument from the given variant's value.
|
||||
|
||||
The optional ``variant`` argument defaults to the lower-case transform
|
||||
of ``cmake_var``.
|
||||
|
||||
Examples:
|
||||
|
||||
Given a package with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant("cxxstd", default="11", values=("11", "14"),
|
||||
multi=False, description="")
|
||||
variant("shared", default=True, description="")
|
||||
variant("swr", values=any_combination_of("avx", "avx2"),
|
||||
description="")
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[
|
||||
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
|
||||
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
|
||||
self.define_from_variant("SWR"),
|
||||
]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[
|
||||
"-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2",
|
||||
]
|
||||
|
||||
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
|
||||
|
||||
Note: if the provided variant is conditional, and the condition is not met, this function
|
||||
returns an empty string. CMake discards empty strings provided on the command line.
|
||||
"""
|
||||
if variant is None:
|
||||
variant = cmake_var.lower()
|
||||
|
||||
if not pkg.has_variant(variant):
|
||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
|
||||
|
||||
if variant not in pkg.spec.variants:
|
||||
return ""
|
||||
|
||||
value = pkg.spec.variants[variant].value
|
||||
if isinstance(value, (tuple, list)):
|
||||
# Sort multi-valued variants for reproducibility
|
||||
value = sorted(value)
|
||||
|
||||
return define(cmake_var, value)
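A sketch of the conditional-variant behavior mentioned above (hypothetical variants):

class ConditionalExample(CMakePackage):
    variant("cxx", default=True, description="")
    variant("cxxstd", default="17", values=("14", "17"), multi=False,
            when="+cxx", description="")

    def cmake_args(self):
        # For a spec with ~cxx the conditional variant is absent, so this
        # returns [""] and CMake simply discards the empty argument.
        return [self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")]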
|
||||
|
||||
|
||||
def define_hip_architectures(pkg: PackageBase) -> str:
|
||||
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
|
||||
|
||||
``amdgpu_target`` is a variant composed of a list of the target HIP
|
||||
architectures and it is declared in the rocm package.
|
||||
|
||||
This method is a no-op for cmake<3.21 and when the ``amdgpu_target`` variant is
|
||||
not set.
|
||||
|
||||
"""
|
||||
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
|
||||
return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def define_cuda_architectures(pkg: PackageBase) -> str:
|
||||
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
|
||||
|
||||
``cuda_arch`` is a variant composed of a list of target CUDA architectures and
|
||||
it is declared in the cuda package.
|
||||
|
||||
This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.
|
||||
|
||||
"""
|
||||
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
|
||||
return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
|
||||
return ""
|
var/spack/repos/spack_repo/builtin/build_systems/compiler.py (new file, 257 lines)
@@ -0,0 +1,257 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import itertools
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict, List, Optional, Sequence, Tuple, Union
|
||||
|
||||
from llnl.util.lang import classproperty, memoized
|
||||
|
||||
import spack.compilers.error
|
||||
from spack.package import Executable, PackageBase, ProcessError, Spec, tty, which_string
|
||||
|
||||
# Local "type" for type hints
|
||||
Path = Union[str, pathlib.Path]
|
||||
|
||||
|
||||
class CompilerPackage(PackageBase):
|
||||
"""A Package mixin for all common logic for packages that implement compilers"""
|
||||
|
||||
# TODO: how do these play nicely with other tags
|
||||
tags: Sequence[str] = ["compiler"]
|
||||
|
||||
#: Optional suffix regexes for searching for this type of compiler.
|
||||
#: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
|
||||
#: version suffix for gcc.
|
||||
compiler_suffixes: List[str] = [r"-.*"]
|
||||
|
||||
#: Optional prefix regexes for searching for this compiler
|
||||
compiler_prefixes: List[str] = []
|
||||
|
||||
#: Compiler argument(s) that produces version information
|
||||
#: If multiple arguments, the earlier arguments must produce errors when invalid
|
||||
compiler_version_argument: Union[str, Tuple[str, ...]] = "-dumpversion"
|
||||
|
||||
#: Regex used to extract version from compiler's output
|
||||
compiler_version_regex: str = "(.*)"
|
||||
|
||||
#: Static definition of languages supported by this class
|
||||
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
|
||||
|
||||
#: Relative path to compiler wrappers
|
||||
compiler_wrapper_link_paths: Dict[str, str] = {}
|
||||
|
||||
#: Optimization flags
|
||||
opt_flags: Sequence[str] = []
|
||||
#: Flags for generating debug information
|
||||
debug_flags: Sequence[str] = []
|
||||
|
||||
def __init__(self, spec: Spec):
|
||||
super().__init__(spec)
|
||||
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
|
||||
msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
|
||||
assert set(self.supported_languages) <= set(self.compiler_languages), msg
|
||||
|
||||
@property
|
||||
def supported_languages(self) -> Sequence[str]:
|
||||
"""Dynamic definition of languages supported by this package"""
|
||||
return self.compiler_languages
|
||||
|
||||
@classproperty
|
||||
def compiler_names(cls) -> Sequence[str]:
|
||||
"""Construct list of compiler names from per-language names"""
|
||||
names = []
|
||||
for language in cls.compiler_languages:
|
||||
names.extend(getattr(cls, f"{language}_names"))
|
||||
return names
|
||||
|
||||
@classproperty
|
||||
def executables(cls) -> Sequence[str]:
|
||||
"""Construct executables for external detection from names, prefixes, and suffixes."""
|
||||
regexp_fmt = r"^({0}){1}({2})$"
|
||||
prefixes = [""] + cls.compiler_prefixes
|
||||
suffixes = [""] + cls.compiler_suffixes
|
||||
if sys.platform == "win32":
|
||||
ext = r"\.(?:exe|bat)"
|
||||
suffixes += [suf + ext for suf in suffixes]
|
||||
return [
|
||||
regexp_fmt.format(prefix, re.escape(name), suffix)
|
||||
for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def determine_version(cls, exe: Path) -> str:
|
||||
version_argument = cls.compiler_version_argument
|
||||
if isinstance(version_argument, str):
|
||||
version_argument = (version_argument,)
|
||||
|
||||
for va in version_argument:
|
||||
try:
|
||||
output = compiler_output(exe, version_argument=va)
|
||||
match = re.search(cls.compiler_version_regex, output)
|
||||
if match:
|
||||
return ".".join(match.groups())
|
||||
except ProcessError:
|
||||
pass
|
||||
except Exception as e:
|
||||
tty.debug(
|
||||
f"[{__file__}] Cannot detect a valid version for the executable "
|
||||
f"{str(exe)}, for package '{cls.name}': {e}"
|
||||
)
|
||||
return ""
|
||||
|
||||
@classmethod
|
||||
def compiler_bindir(cls, prefix: Path) -> Path:
|
||||
"""Overridable method for the location of the compiler bindir within the prefix"""
|
||||
return os.path.join(prefix, "bin")
|
||||
|
||||
@classmethod
|
||||
def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
|
||||
"""Compute the paths to compiler executables associated with this package
|
||||
|
||||
This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
|
||||
to include with each spec object."""
|
||||
# There are often at least two copies (not symlinks) of each compiler executable in the
|
||||
# same directory: one with a canonical name, e.g. "gfortran", and another one with the
|
||||
# target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
|
||||
# with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
|
||||
# of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
|
||||
# with canonical names if possible), we iterate over the executables in the reversed sorted
|
||||
# order:
|
||||
# First pass over languages identifies exes that are perfect matches for canonical names
|
||||
# Second pass checks for names with prefix/suffix
|
||||
# Second pass is sorted by language name length because longer named languages
|
||||
# e.g. cxx can often contain the names of shorter named languages
|
||||
# e.g. c (e.g. clang/clang++)
|
||||
paths = {}
|
||||
exes = sorted(exes, reverse=True)
|
||||
languages = {
|
||||
lang: getattr(cls, f"{lang}_names")
|
||||
for lang in sorted(cls.compiler_languages, key=len, reverse=True)
|
||||
}
|
||||
for exe in exes:
|
||||
for lang, names in languages.items():
|
||||
if os.path.basename(exe) in names:
|
||||
paths[lang] = exe
|
||||
break
|
||||
else:
|
||||
for lang, names in languages.items():
|
||||
if any(name in os.path.basename(exe) for name in names):
|
||||
paths[lang] = exe
|
||||
break
|
||||
|
||||
return paths
|
||||
|
||||
@classmethod
|
||||
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
|
||||
# path determination is separated so it can be reused in subclasses
|
||||
return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
|
||||
|
||||
#: Returns the argument needed to set the RPATH, or None if it does not exist
|
||||
rpath_arg: Optional[str] = "-Wl,-rpath,"
|
||||
#: Flag that needs to be used to pass an argument to the linker
|
||||
linker_arg: str = "-Wl,"
|
||||
#: Flag used to produce Position Independent Code
|
||||
pic_flag: str = "-fPIC"
|
||||
#: Flag used to get verbose output
|
||||
verbose_flags: str = "-v"
|
||||
#: Flag to activate OpenMP support
|
||||
openmp_flag: str = "-fopenmp"
|
||||
|
||||
implicit_rpath_libs: List[str] = []
|
||||
|
||||
def standard_flag(self, *, language: str, standard: str) -> str:
|
||||
"""Returns the flag used to enforce a given standard for a language"""
|
||||
if language not in self.supported_languages:
|
||||
raise spack.compilers.error.UnsupportedCompilerFlag(
|
||||
f"{self.spec} does not provide the '{language}' language"
|
||||
)
|
||||
try:
|
||||
return self._standard_flag(language=language, standard=standard)
|
||||
except (KeyError, RuntimeError) as e:
|
||||
raise spack.compilers.error.UnsupportedCompilerFlag(
|
||||
f"{self.spec} does not provide the '{language}' standard {standard}"
|
||||
) from e
|
||||
|
||||
def _standard_flag(self, *, language: str, standard: str) -> str:
|
||||
raise NotImplementedError("Must be implemented by derived classes")
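A derived compiler package is expected to fill in this flag mapping; a minimal sketch with hypothetical names (a real recipe would also mix in a build system class):

class MyCompiler(CompilerPackage):
    compiler_languages = ["c", "cxx"]
    c_names = ["mycc"]
    cxx_names = ["my++"]

    def _standard_flag(self, *, language: str, standard: str) -> str:
        # A missing entry raises KeyError, which standard_flag() turns into
        # an UnsupportedCompilerFlag error.
        flags = {"cxx": {"14": "-std=c++14", "17": "-std=c++17"}}
        return flags[language][standard]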
|
||||
|
||||
def archspec_name(self) -> str:
|
||||
"""Name that archspec uses to refer to this compiler"""
|
||||
return self.spec.name
|
||||
|
||||
@property
|
||||
def cc(self) -> Optional[str]:
|
||||
assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes.get("compilers", {}).get("c", None)
|
||||
return self._cc_path()
|
||||
|
||||
def _cc_path(self) -> Optional[str]:
|
||||
"""Returns the path to the C compiler, if the package was installed by Spack"""
|
||||
return None
|
||||
|
||||
@property
|
||||
def cxx(self) -> Optional[str]:
|
||||
assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes.get("compilers", {}).get("cxx", None)
|
||||
return self._cxx_path()
|
||||
|
||||
def _cxx_path(self) -> Optional[str]:
|
||||
"""Returns the path to the C++ compiler, if the package was installed by Spack"""
|
||||
return None
|
||||
|
||||
@property
|
||||
def fortran(self):
|
||||
assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
|
||||
if self.spec.external:
|
||||
return self.spec.extra_attributes.get("compilers", {}).get("fortran", None)
|
||||
return self._fortran_path()
|
||||
|
||||
def _fortran_path(self) -> Optional[str]:
|
||||
"""Returns the path to the Fortran compiler, if the package was installed by Spack"""
|
||||
return None
|
||||
|
||||
|
||||
@memoized
|
||||
def _compiler_output(
|
||||
compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
|
||||
) -> str:
|
||||
"""Returns the output from the compiler invoked with the given version argument.
|
||||
|
||||
Args:
|
||||
compiler_path: path of the compiler to be invoked
|
||||
version_argument: the argument used to extract version information
|
||||
"""
|
||||
compiler = Executable(compiler_path)
|
||||
if not version_argument:
|
||||
return compiler(
|
||||
output=str, error=str, ignore_errors=ignore_errors, timeout=120, fail_on_error=True
|
||||
)
|
||||
return compiler(
|
||||
version_argument,
|
||||
output=str,
|
||||
error=str,
|
||||
ignore_errors=ignore_errors,
|
||||
timeout=120,
|
||||
fail_on_error=True,
|
||||
)
|
||||
|
||||
|
||||
def compiler_output(
|
||||
compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
|
||||
) -> str:
|
||||
"""Wrapper for _get_compiler_version_output()."""
|
||||
# This ensures that we memoize compiler output by *absolute path*,
|
||||
# not just executable name. If we don't do this, and the path changes
|
||||
# (e.g., during testing), we can get incorrect results.
|
||||
if not os.path.isabs(compiler_path):
|
||||
compiler_path = which_string(str(compiler_path), required=True)
|
||||
|
||||
return _compiler_output(
|
||||
compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
|
||||
)
|
@@ -5,10 +5,7 @@
|
||||
import re
|
||||
from typing import Iterable, List
|
||||
|
||||
import spack.variant
|
||||
from spack.directives import conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
from spack.package_base import PackageBase
|
||||
from spack.package import PackageBase, any_combination_of, conflicts, depends_on, variant, when
|
||||
|
||||
|
||||
class CudaPackage(PackageBase):
|
||||
@@ -71,7 +68,7 @@ class CudaPackage(PackageBase):
|
||||
variant(
|
||||
"cuda_arch",
|
||||
description="CUDA architecture",
|
||||
values=spack.variant.any_combination_of(*cuda_arch_values),
|
||||
values=any_combination_of(*cuda_arch_values),
|
||||
sticky=True,
|
||||
when="+cuda",
|
||||
)
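In a recipe, the values of the multi-valued ``cuda_arch`` variant are plain strings, e.g. for ``example +cuda cuda_arch=70,80`` (hypothetical sketch):

class Example(CMakePackage, CudaPackage):
    def cmake_args(self):
        args = []
        if self.spec.satisfies("+cuda") and not self.spec.satisfies("cuda_arch=none"):
            archs = self.spec.variants["cuda_arch"].value
            # define() joins the list with ";" for CMake.
            args.append(self.define("CMAKE_CUDA_ARCHITECTURES", archs))
        return args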
|
var/spack/repos/spack_repo/builtin/build_systems/generic.py (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from typing import Tuple
|
||||
|
||||
from spack.package import PackageBase, Prefix, Spec, build_system, register_builder, run_after
|
||||
|
||||
from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
|
||||
|
||||
|
||||
class Package(PackageBase):
|
||||
"""General purpose class with a single ``install`` phase that needs to be
|
||||
coded by packagers.
|
||||
"""
|
||||
|
||||
#: This attribute is used in UI queries that need to know which
|
||||
#: build-system class we are using
|
||||
build_system_class = "Package"
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "generic"
|
||||
|
||||
build_system("generic")
|
||||
|
||||
|
||||
@register_builder("generic")
|
||||
class GenericBuilder(BuilderWithDefaults):
|
||||
"""A builder for a generic build system, that require packagers
|
||||
to implement an "install" phase.
|
||||
"""
|
||||
|
||||
#: A generic package has only the "install" phase
|
||||
phases = ("install",)
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods: Tuple[str, ...] = ()
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")
|
||||
|
||||
#: Callback names for post-install phase tests
|
||||
install_time_test_callbacks = []
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
|
||||
# unconditionally perform any post-install phase tests
|
||||
run_after("install")(execute_install_time_tests)
|
||||
|
||||
def install(self, pkg: Package, spec: Spec, prefix: Prefix) -> None:
|
||||
raise NotImplementedError
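A minimal generic recipe sketch (hypothetical name, URL, and placeholder checksum):

from spack.package import *


class PrebuiltTool(Package):
    """Hypothetical package that just copies a prebuilt tree into the prefix."""

    homepage = "https://example.org/prebuilt-tool"
    url = "https://example.org/prebuilt-tool-2.1.tar.gz"

    version("2.1", sha256="0" * 64)  # placeholder checksum

    def install(self, spec, prefix):
        install_tree(".", prefix)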
|
var/spack/repos/spack_repo/builtin/build_systems/gnu.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import spack.util.url
|
||||
from spack.package import PackageBase
|
||||
|
||||
|
||||
class GNUMirrorPackage(PackageBase):
|
||||
"""Mixin that takes care of setting url and mirrors for GNU packages."""
|
||||
|
||||
#: Path of the package in a GNU mirror
|
||||
gnu_mirror_path: Optional[str] = None
|
||||
|
||||
#: List of GNU mirrors used by Spack
|
||||
base_mirrors = [
|
||||
"https://ftpmirror.gnu.org/",
|
||||
"https://ftp.gnu.org/gnu/",
|
||||
# Fall back to http if https didn't work (for instance because
|
||||
# Spack is bootstrapping curl)
|
||||
"http://ftpmirror.gnu.org/",
|
||||
]
|
||||
|
||||
@property
|
||||
def urls(self):
|
||||
self._ensure_gnu_mirror_path_is_set_or_raise()
|
||||
return [
|
||||
spack.util.url.join(m, self.gnu_mirror_path, resolve_href=True)
|
||||
for m in self.base_mirrors
|
||||
]
|
||||
|
||||
def _ensure_gnu_mirror_path_is_set_or_raise(self):
|
||||
if self.gnu_mirror_path is None:
|
||||
cls_name = type(self).__name__
|
||||
msg = "{0} must define a `gnu_mirror_path` attribute" " [none defined]"
|
||||
raise AttributeError(msg.format(cls_name))
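A recipe only needs to set the mirror-relative path; the mixin expands it against every base mirror via ``urls``. Hypothetical sketch (placeholder checksum, assumes an Autotools-based recipe):

class Hello(AutotoolsPackage, GNUMirrorPackage):
    gnu_mirror_path = "hello/hello-2.12.tar.gz"

    version("2.12", sha256="0" * 64)  # placeholder checksum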
|
@@ -2,21 +2,26 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.multimethod import when
|
||||
from spack.package import (
|
||||
EnvironmentModifications,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
build_system,
|
||||
depends_on,
|
||||
install,
|
||||
join_path,
|
||||
mkdirp,
|
||||
register_builder,
|
||||
run_after,
|
||||
when,
|
||||
working_dir,
|
||||
)
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
class GoPackage(spack.package_base.PackageBase):
|
||||
class GoPackage(PackageBase):
|
||||
"""Specialized class for packages built using the Go toolchain."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
@@ -32,7 +37,7 @@ class GoPackage(spack.package_base.PackageBase):
|
||||
depends_on("go", type="build")
|
||||
|
||||
|
||||
@spack.builder.builder("go")
|
||||
@register_builder("go")
|
||||
class GoBuilder(BuilderWithDefaults):
|
||||
"""The Go builder encodes the most common way of building software with
|
||||
a golang go.mod file. It has two phases that can be overridden, if need be:
|
||||
@@ -69,12 +74,10 @@ class GoBuilder(BuilderWithDefaults):
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["check"]
|
||||
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
def setup_build_environment(self, env: EnvironmentModifications) -> None:
|
||||
env.set("GO111MODULE", "on")
|
||||
env.set("GOTOOLCHAIN", "local")
|
||||
env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
|
||||
env.set("GOPATH", join_path(self.pkg.stage.path, "go"))
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
@@ -100,24 +103,20 @@ def check_args(self):
|
||||
"""Argument for ``go test`` during check phase"""
|
||||
return []
|
||||
|
||||
def build(
|
||||
self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def build(self, pkg: GoPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Runs ``go build`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.go("build", *self.build_args)
|
||||
|
||||
def install(
|
||||
self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg: GoPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Install built binaries into prefix bin."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
fs.mkdirp(prefix.bin)
|
||||
fs.install(pkg.name, prefix.bin)
|
||||
with working_dir(self.build_directory):
|
||||
mkdirp(prefix.bin)
|
||||
install(pkg.name, prefix.bin)
|
||||
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
run_after("install")(execute_install_time_tests)
|
||||
|
||||
def check(self):
|
||||
"""Run ``go test .`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg.module.go("test", *self.check_args)
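A minimal Go recipe sketch (hypothetical name, URL, and placeholder checksum):

from spack.package import *


class ExampleGoCli(GoPackage):
    """Hypothetical CLI built with ``go build`` and installed into prefix.bin."""

    homepage = "https://example.org/example-go-cli"
    url = "https://example.org/example-go-cli-0.3.0.tar.gz"

    version("0.3.0", sha256="0" * 64)  # placeholder checksum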
|
@@ -3,19 +3,23 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
|
||||
from llnl.util.filesystem import find
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.multimethod import when
|
||||
from spack.package import (
|
||||
Builder,
|
||||
EnvironmentModifications,
|
||||
Executable,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
build_system,
|
||||
depends_on,
|
||||
extends,
|
||||
find,
|
||||
register_builder,
|
||||
when,
|
||||
)
|
||||
|
||||
|
||||
class LuaPackage(spack.package_base.PackageBase):
|
||||
class LuaPackage(PackageBase):
|
||||
"""Specialized class for lua packages"""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
@@ -40,16 +44,16 @@ class LuaPackage(spack.package_base.PackageBase):
|
||||
|
||||
@property
|
||||
def lua(self):
|
||||
return spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.lua)
|
||||
return Executable(self.spec["lua-lang"].prefix.bin.lua)
|
||||
|
||||
@property
|
||||
def luarocks(self):
|
||||
lr = spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.luarocks)
|
||||
lr = Executable(self.spec["lua-lang"].prefix.bin.luarocks)
|
||||
return lr
|
||||
|
||||
|
||||
@spack.builder.builder("lua")
|
||||
class LuaBuilder(spack.builder.Builder):
|
||||
@register_builder("lua")
|
||||
class LuaBuilder(Builder):
|
||||
phases = ("unpack", "generate_luarocks_config", "preprocess", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
@@ -58,9 +62,7 @@ class LuaBuilder(spack.builder.Builder):
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ()
|
||||
|
||||
def unpack(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def unpack(self, pkg: LuaPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
|
||||
directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
|
||||
dirlines = directory.split("\n")
|
||||
@@ -71,9 +73,7 @@ def unpack(
|
||||
def _generate_tree_line(name, prefix):
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
|
||||
|
||||
def generate_luarocks_config(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def generate_luarocks_config(self, pkg: LuaPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
spec = self.pkg.spec
|
||||
table_entries = []
|
||||
for d in spec.traverse(deptype=("build", "run")):
|
||||
@@ -92,18 +92,14 @@ def generate_luarocks_config(
|
||||
)
|
||||
)
|
||||
|
||||
def preprocess(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def preprocess(self, pkg: LuaPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Override this to preprocess source before building with luarocks"""
|
||||
pass
|
||||
|
||||
def luarocks_args(self):
|
||||
return []
|
||||
|
||||
def install(
|
||||
self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
|
||||
) -> None:
|
||||
def install(self, pkg: LuaPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
rock = "."
|
||||
specs = find(".", "*.rockspec", recursive=False)
|
||||
if specs:
|
||||
@@ -115,7 +111,5 @@ def install(
|
||||
def _luarocks_config_path(self):
|
||||
return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
|
||||
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
def setup_build_environment(self, env: EnvironmentModifications) -> None:
|
||||
env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
|
var/spack/repos/spack_repo/builtin/build_systems/makefile.py (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
from typing import List
|
||||
|
||||
from spack.package import (
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
build_system,
|
||||
conflicts,
|
||||
depends_on,
|
||||
register_builder,
|
||||
run_after,
|
||||
when,
|
||||
working_dir,
|
||||
)
|
||||
|
||||
from ._checks import (
|
||||
BuilderWithDefaults,
|
||||
apply_macos_rpath_fixups,
|
||||
execute_build_time_tests,
|
||||
execute_install_time_tests,
|
||||
)
|
||||
|
||||
|
||||
class MakefilePackage(PackageBase):
|
||||
"""Specialized class for packages built using Makefiles."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = "MakefilePackage"
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "makefile"
|
||||
|
||||
build_system("makefile")
|
||||
|
||||
with when("build_system=makefile"):
|
||||
conflicts("platform=windows")
|
||||
depends_on("gmake", type="build")
|
||||
|
||||
|
||||
@register_builder("makefile")
|
||||
class MakefileBuilder(BuilderWithDefaults):
|
||||
"""The Makefile builder encodes the most common way of building software with
|
||||
Makefiles. It has three phases that can be overridden, if need be:
|
||||
|
||||
1. :py:meth:`~.MakefileBuilder.edit`
|
||||
2. :py:meth:`~.MakefileBuilder.build`
|
||||
3. :py:meth:`~.MakefileBuilder.install`
|
||||
|
||||
It is usually necessary to override the :py:meth:`~.MakefileBuilder.edit`
|
||||
phase (which is by default a no-op), while the other two have sensible defaults.
|
||||
|
||||
For a finer tuning you may override:
|
||||
|
||||
+-----------------------------------------------+--------------------+
|
||||
| **Method** | **Purpose** |
|
||||
+===============================================+====================+
|
||||
| :py:attr:`~.MakefileBuilder.build_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | build phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:attr:`~.MakefileBuilder.install_targets` | Specify ``make`` |
|
||||
| | targets for the |
|
||||
| | install phase |
|
||||
+-----------------------------------------------+--------------------+
|
||||
| :py:meth:`~.MakefileBuilder.build_directory` | Directory where the|
|
||||
| | Makefile is located|
|
||||
+-----------------------------------------------+--------------------+
|
||||
"""
|
||||
|
||||
phases = ("edit", "build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("check", "installcheck")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = (
|
||||
"build_targets",
|
||||
"install_targets",
|
||||
"build_time_test_callbacks",
|
||||
"install_time_test_callbacks",
|
||||
"build_directory",
|
||||
)
|
||||
|
||||
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
|
||||
build_targets: List[str] = []
|
||||
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
|
||||
install_targets = ["install"]
|
||||
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["installcheck"]
|
||||
|
||||
@property
|
||||
def build_directory(self) -> str:
|
||||
"""Return the directory containing the main Makefile."""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
def edit(self, pkg: MakefilePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Edit the Makefile before calling make. The default is a no-op."""
|
||||
pass
|
||||
|
||||
def build(self, pkg: MakefilePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.make(*self.build_targets)
|
||||
|
||||
def install(self, pkg: MakefilePackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with working_dir(self.build_directory):
|
||||
pkg.module.make(*self.install_targets)
|
||||
|
||||
run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self) -> None:
|
||||
"""Run "make" on the ``test`` and ``check`` targets, if found."""
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("test")
|
||||
self.pkg._if_make_target_execute("check")
|
||||
|
||||
run_after("install")(execute_install_time_tests)
|
||||
|
||||
def installcheck(self) -> None:
|
||||
"""Searches the Makefile for an ``installcheck`` target
|
||||
and runs it if found.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("installcheck")
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
@@ -1,20 +1,23 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.filesystem as fs

import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when
from spack.util.executable import which
from spack.package import (
    PackageBase,
    Prefix,
    Spec,
    build_system,
    depends_on,
    install_tree,
    register_builder,
    when,
    which,
    working_dir,
)

from ._checks import BuilderWithDefaults


class MavenPackage(spack.package_base.PackageBase):
class MavenPackage(PackageBase):
    """Specialized class for packages that are built using the
    Maven build system. See https://maven.apache.org/index.html
    for more information.
@@ -34,7 +37,7 @@ class MavenPackage(spack.package_base.PackageBase):
        depends_on("maven", type="build")


@spack.builder.builder("maven")
@register_builder("maven")
class MavenBuilder(BuilderWithDefaults):
    """The Maven builder encodes the default way to build software with Maven.
    It has two phases that can be overridden, if need be:
@@ -60,20 +63,16 @@ def build_args(self):
        """List of args to pass to build phase."""
        return []

    def build(
        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg: MavenPackage, spec: Spec, prefix: Prefix) -> None:
        """Compile code and package into a JAR file."""
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            mvn = which("mvn", required=True)
            if self.pkg.run_tests:
                mvn("verify", *self.build_args())
            else:
                mvn("package", "-DskipTests", *self.build_args())

    def install(
        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: MavenPackage, spec: Spec, prefix: Prefix) -> None:
        """Copy to installation prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree(".", prefix)
        with working_dir(self.build_directory):
            install_tree(".", prefix)
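Since the builder runs ``mvn package -DskipTests`` (or ``mvn verify`` when tests are requested) and then copies the staged tree into the prefix, a recipe usually only supplies extra ``build_args``. A minimal sketch, with hypothetical names and flags:

.. code-block:: python

   from spack.package import *


   class FooJava(MavenPackage):
       """Example Maven-built project (illustrative only)."""

       homepage = "https://example.com/foo-java"
       url = "https://example.com/foo-java-1.0.tar.gz"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       def build_args(self):
           # Appended to the `mvn package` / `mvn verify` command line
           return ["-Pdist", f"-Dmaven.repo.local={self.stage.path}"]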
@@ -4,20 +4,24 @@
import os
from typing import List

import llnl.util.filesystem as fs

import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.package import (
    PackageBase,
    Prefix,
    Spec,
    build_system,
    conflicts,
    depends_on,
    register_builder,
    run_after,
    variant,
    when,
    working_dir,
)

from ._checks import BuilderWithDefaults, execute_build_time_tests


class MesonPackage(spack.package_base.PackageBase):
class MesonPackage(PackageBase):
    """Specialized class for packages built using Meson. For more information
    on the Meson build system, see https://mesonbuild.com/
    """
@@ -66,7 +70,7 @@ def flags_to_build_system_args(self, flags):
        setattr(self, "meson_flag_args", [])


@spack.builder.builder("meson")
@register_builder("meson")
class MesonBuilder(BuilderWithDefaults):
    """The Meson builder encodes the default way to build software with Meson.
    The builder has three phases that can be overridden, if need be:
@@ -190,9 +194,7 @@ def meson_args(self) -> List[str]:
        """
        return []

    def meson(
        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def meson(self, pkg: MesonPackage, spec: Spec, prefix: Prefix) -> None:
        """Run ``meson`` in the build directory"""
        options = []
        if self.spec["meson"].satisfies("@0.64:"):
@@ -200,29 +202,25 @@ def meson(
        options.append(os.path.abspath(self.root_mesonlists_dir))
        options += self.std_meson_args
        options += self.meson_args()
        with fs.working_dir(self.build_directory, create=True):
        with working_dir(self.build_directory, create=True):
            pkg.module.meson(*options)

    def build(
        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg: MesonPackage, spec: Spec, prefix: Prefix) -> None:
        """Make the build targets"""
        options = ["-v"]
        options += self.build_targets
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            pkg.module.ninja(*options)

    def install(
        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: MesonPackage, spec: Spec, prefix: Prefix) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            pkg.module.ninja(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    run_after("build")(execute_build_time_tests)

    def check(self) -> None:
        """Search Meson-generated files for the target ``test`` and run it if found."""
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            self.pkg._if_ninja_target_execute("test")
            self.pkg._if_ninja_target_execute("check")
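Packages built on this class normally customize the ``meson`` phase through ``meson_args``, which the builder appends after ``std_meson_args``. A minimal sketch (the package name and the ``docs`` option are hypothetical):

.. code-block:: python

   from spack.package import *


   class FooMeson(MesonPackage):
       """Example Meson-built project (illustrative only)."""

       homepage = "https://example.com/foo"
       url = "https://example.com/foo-1.0.tar.gz"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       variant("docs", default=False, description="Build the documentation")

       def meson_args(self):
           # Appended after std_meson_args (prefix, buildtype, ...)
           return [f"-Ddocs={'true' if self.spec.satisfies('+docs') else 'false'}"]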
@@ -5,16 +5,20 @@

import llnl.util.filesystem as fs

import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts
from spack.package import (
    PackageBase,
    Prefix,
    Spec,
    build_system,
    conflicts,
    register_builder,
    working_dir,
)

from ._checks import BuilderWithDefaults


class MSBuildPackage(spack.package_base.PackageBase):
class MSBuildPackage(PackageBase):
    """Specialized class for packages built using Visual Studio project files or solutions."""

    #: This attribute is used in UI queries that need to know the build
@@ -30,7 +34,7 @@ def define(self, msbuild_arg, value):
        return define(msbuild_arg, value)


@spack.builder.builder("msbuild")
@register_builder("msbuild")
class MSBuildBuilder(BuilderWithDefaults):
    """The MSBuild builder encodes the most common way of building software with
    Mircosoft's MSBuild tool. It has two phases that can be overridden, if need be:
@@ -105,23 +109,19 @@ def msbuild_install_args(self):
        as `msbuild_args` by default."""
        return self.msbuild_args()

    def build(
        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg: MSBuildPackage, spec: Spec, prefix: Prefix) -> None:
        """Run "msbuild" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            pkg.module.msbuild(
                *self.std_msbuild_args,
                *self.msbuild_args(),
                self.define_targets(*self.build_targets),
            )

    def install(
        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: MSBuildPackage, spec: Spec, prefix: Prefix) -> None:
        """Run "msbuild" on the install targets specified by the builder.
        This is INSTALL by default"""
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            pkg.module.msbuild(
                *self.msbuild_install_args(), self.define_targets(*self.install_targets)
            )
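A recipe typically feeds MSBuild properties to the builder through ``msbuild_args``, using the ``define`` helper shown above to render ``/p:NAME=VALUE`` pairs. A rough sketch with hypothetical names:

.. code-block:: python

   from spack.package import *


   class FooWin(MSBuildPackage):
       """Example MSBuild-built project (illustrative only)."""

       homepage = "https://example.com/foo"
       url = "https://example.com/foo-1.0.zip"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       def msbuild_args(self):
           # `define` renders an MSBuild property as /p:NAME=VALUE
           return [self.define("Configuration", "Release")]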
@@ -5,16 +5,20 @@

import llnl.util.filesystem as fs

import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts
from spack.package import (
    PackageBase,
    Prefix,
    Spec,
    build_system,
    conflicts,
    register_builder,
    working_dir,
)

from ._checks import BuilderWithDefaults


class NMakePackage(spack.package_base.PackageBase):
class NMakePackage(PackageBase):
    """Specialized class for packages built using a Makefiles."""

    #: This attribute is used in UI queries that need to know the build
@@ -26,7 +30,7 @@ class NMakePackage(spack.package_base.PackageBase):
    conflicts("platform=darwin", when="build_system=nmake")


@spack.builder.builder("nmake")
@register_builder("nmake")
class NMakeBuilder(BuilderWithDefaults):
    """The NMake builder encodes the most common way of building software with
    Mircosoft's NMake tool. It has two phases that can be overridden, if need be:
@@ -125,20 +129,16 @@ def nmake_install_args(self):
        Individual packages should override to specify NMake args to command line"""
        return []

    def build(
        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg: NMakePackage, spec: Spec, prefix: Prefix) -> None:
        """Run "nmake" on the build targets specified by the builder."""
        opts = self.std_nmake_args
        opts += self.nmake_args()
        if self.makefile_name:
            opts.append("/F{}".format(self.makefile_name))
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)

    def install(
        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: NMakePackage, spec: Spec, prefix: Prefix) -> None:
        """Run "nmake" on the install targets specified by the builder.
        This is INSTALL by default"""
        opts = self.std_nmake_args
@@ -147,5 +147,5 @@ def install(
        if self.makefile_name:
            opts.append("/F{}".format(self.makefile_name))
        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
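Recipes usually provide ``nmake_args`` (extra macros for the nmake command line) and, when the makefile has a non-default name, a ``makefile_name`` that the builder passes as ``/F<name>``. A rough sketch; the names, makefile, and macros below are hypothetical:

.. code-block:: python

   from spack.package import *


   class FooNmake(NMakePackage):
       """Example NMake-built project (illustrative only)."""

       homepage = "https://example.com/foo"
       url = "https://example.com/foo-1.0.zip"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       @property
       def makefile_name(self):
           # Passed to nmake as /F<name>; only needed for non-default names
           return "makefile.vc"

       def nmake_args(self):
           # Extra macros appended to the nmake command line
           return ["CFG=release"]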
@@ -1,18 +1,21 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.builder
import spack.package_base
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, extends
from spack.multimethod import when
from spack.package import (
    EnvironmentModifications,
    PackageBase,
    Prefix,
    Spec,
    build_system,
    extends,
    register_builder,
    when,
)

from ._checks import BuilderWithDefaults


class OctavePackage(spack.package_base.PackageBase):
class OctavePackage(PackageBase):
    """Specialized class for Octave packages. See
    https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html
    for more information.
@@ -30,7 +33,7 @@ class OctavePackage(spack.package_base.PackageBase):
        extends("octave")


@spack.builder.builder("octave")
@register_builder("octave")
class OctaveBuilder(BuilderWithDefaults):
    """The octave builder provides the following phases that can be overridden:

@@ -45,9 +48,7 @@ class OctaveBuilder(BuilderWithDefaults):
    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()

    def install(
        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: OctavePackage, spec: Spec, prefix: Prefix) -> None:
        """Install the package from the archive file"""
        pkg.module.octave(
            "--quiet",
@@ -58,9 +59,7 @@ def install(
            "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
        )

    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        # octave does not like those environment variables to be set:
        env.unset("CC")
        env.unset("CXX")
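Because the builder drives ``pkg install`` on the downloaded archive itself, an Octave recipe is usually little more than metadata, a version, and dependency constraints. A minimal, hypothetical sketch:

.. code-block:: python

   from spack.package import *


   class OctaveFoo(OctavePackage):
       """Example Octave extension (illustrative only)."""

       homepage = "https://example.com/octave-foo"
       url = "https://example.com/octave-foo-1.0.tar.gz"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       # extends("octave") is already declared by the base class
       depends_on("octave@6:", type=("build", "run"))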
@@ -7,16 +7,25 @@
import shutil
from os.path import basename, isdir

from llnl.util import tty
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree

import spack.util.path
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, redistribute, variant
from spack.error import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.package import (
    EnvironmentModifications,
    Executable,
    HeaderList,
    InstallError,
    LibraryList,
    conflicts,
    find_libraries,
    join_path,
    license,
    mkdirp,
    redistribute,
    tty,
    variant,
)

from .generic import Package
var/spack/repos/spack_repo/builtin/build_systems/perl.py (new file, 197 lines)
@@ -0,0 +1,197 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
from typing import Iterable
|
||||
|
||||
from llnl.util.lang import memoized
|
||||
|
||||
from spack.package import (
|
||||
Executable,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
SkipTest,
|
||||
Spec,
|
||||
build_system,
|
||||
depends_on,
|
||||
extends,
|
||||
filter_file,
|
||||
find,
|
||||
register_builder,
|
||||
run_after,
|
||||
test_part,
|
||||
when,
|
||||
)
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
|
||||
class PerlPackage(PackageBase):
|
||||
"""Specialized class for packages that are built using Perl."""
|
||||
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = "PerlPackage"
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "perl"
|
||||
|
||||
build_system("perl")
|
||||
|
||||
with when("build_system=perl"):
|
||||
extends("perl")
|
||||
depends_on("gmake", type="build")
|
||||
|
||||
@property
|
||||
@memoized
|
||||
def _platform_dir(self):
|
||||
"""Name of platform-specific module subdirectory."""
|
||||
perl = self.spec["perl"].command
|
||||
options = "-E", "use Config; say $Config{archname}"
|
||||
out = perl(*options, output=str.split, error=str.split)
|
||||
return out.strip()
|
||||
|
||||
@property
|
||||
def use_modules(self) -> Iterable[str]:
|
||||
"""Names of the package's perl modules."""
|
||||
module_files = find(self.prefix.lib, ["*.pm"], recursive=True)
|
||||
|
||||
# Drop the platform directory, if present
|
||||
if self._platform_dir:
|
||||
platform_dir = self._platform_dir + os.sep
|
||||
module_files = [m.replace(platform_dir, "") for m in module_files]
|
||||
|
||||
# Drop the extension and library path
|
||||
prefix = self.prefix.lib + os.sep
|
||||
modules = [os.path.splitext(m)[0].replace(prefix, "") for m in module_files]
|
||||
|
||||
# Drop the perl subdirectory as well
|
||||
return ["::".join(m.split(os.sep)[1:]) for m in modules]
|
||||
|
||||
@property
|
||||
def skip_modules(self) -> Iterable[str]:
|
||||
"""Names of modules that should be skipped when running tests.
|
||||
|
||||
These are a subset of use_modules.
|
||||
|
||||
Returns:
|
||||
List of strings of module names.
|
||||
"""
|
||||
return []
|
||||
|
||||
def test_use(self):
|
||||
"""Test 'use module'"""
|
||||
if not self.use_modules:
|
||||
raise SkipTest("Test requires use_modules package property.")
|
||||
|
||||
perl = self.spec["perl"].command
|
||||
for module in self.use_modules:
|
||||
if module in self.skip_modules:
|
||||
continue
|
||||
|
||||
with test_part(self, f"test_use-{module}", purpose=f"checking use of {module}"):
|
||||
options = ["-we", f'use strict; use {module}; print("OK\n")']
|
||||
out = perl(*options, output=str.split, error=str.split)
|
||||
assert "OK" in out
|
||||
|
||||
|
||||
@register_builder("perl")
|
||||
class PerlBuilder(BuilderWithDefaults):
|
||||
"""The perl builder provides four phases that can be overridden, if required:
|
||||
|
||||
1. :py:meth:`~.PerlBuilder.configure`
|
||||
2. :py:meth:`~.PerlBuilder.build`
|
||||
3. :py:meth:`~.PerlBuilder.check`
|
||||
4. :py:meth:`~.PerlBuilder.install`
|
||||
|
||||
The default methods use, in order of preference:
|
||||
(1) Makefile.PL,
|
||||
(2) Build.PL.
|
||||
|
||||
Some packages may need to override :py:meth:`~.PerlBuilder.configure_args`,
|
||||
which produces a list of arguments for :py:meth:`~.PerlBuilder.configure`.
|
||||
|
||||
Arguments should not include the installation base directory.
|
||||
"""
|
||||
|
||||
#: Phases of a Perl package
|
||||
phases = ("configure", "build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("configure_args", "check", "test_use")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ()
|
||||
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
@property
|
||||
def build_method(self):
|
||||
"""Searches the package for either a Makefile.PL or Build.PL.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if neither Makefile.PL nor Build.PL exist
|
||||
"""
|
||||
if os.path.isfile("Makefile.PL"):
|
||||
build_method = "Makefile.PL"
|
||||
elif os.path.isfile("Build.PL"):
|
||||
build_method = "Build.PL"
|
||||
else:
|
||||
raise RuntimeError("Unknown build_method for perl package")
|
||||
return build_method
|
||||
|
||||
@property
|
||||
def build_executable(self):
|
||||
"""Returns the executable method to build the perl package"""
|
||||
if self.build_method == "Makefile.PL":
|
||||
build_executable = self.pkg.module.make
|
||||
elif self.build_method == "Build.PL":
|
||||
build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
|
||||
return build_executable
|
||||
|
||||
def configure_args(self):
|
||||
"""List of arguments passed to :py:meth:`~.PerlBuilder.configure`.
|
||||
|
||||
Arguments should not include the installation base directory, which
|
||||
is prepended automatically.
|
||||
"""
|
||||
return []
|
||||
|
||||
def configure(self, pkg: PerlPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Run Makefile.PL or Build.PL with arguments consisting of
|
||||
an appropriate installation base directory followed by the
|
||||
list returned by :py:meth:`~.PerlBuilder.configure_args`.
|
||||
"""
|
||||
if self.build_method == "Makefile.PL":
|
||||
options = ["Makefile.PL", "INSTALL_BASE={0}".format(prefix)]
|
||||
elif self.build_method == "Build.PL":
|
||||
options = ["Build.PL", "--install_base", prefix]
|
||||
options += self.configure_args()
|
||||
|
||||
pkg.module.perl(*options)
|
||||
|
||||
# It is possible that the shebang in the Build script that is created from
|
||||
# Build.PL may be too long causing the build to fail. Patching the shebang
|
||||
# does not happen until after install so set '/usr/bin/env perl' here in
|
||||
# the Build script.
|
||||
@run_after("configure")
|
||||
def fix_shebang(self):
|
||||
if self.build_method == "Build.PL":
|
||||
pattern = "#!{0}".format(self.spec["perl"].command.path)
|
||||
repl = "#!/usr/bin/env perl"
|
||||
filter_file(pattern, repl, "Build", backup=False)
|
||||
|
||||
def build(self, pkg: PerlPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Builds a Perl package."""
|
||||
self.build_executable()
|
||||
|
||||
# Ensure that tests run after build (if requested):
|
||||
run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self):
|
||||
"""Runs built-in tests of a Perl package."""
|
||||
self.build_executable("test")
|
||||
|
||||
def install(self, pkg: PerlPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Installs a Perl package."""
|
||||
self.build_executable("install")
|
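In practice a Perl recipe rarely overrides the phases above; it mostly adds dependencies and, when needed, ``configure_args`` that the builder appends after the ``INSTALL_BASE``/``--install_base`` argument it already supplies. A rough sketch with hypothetical distribution names and flags:

.. code-block:: python

   from spack.package import *


   class PerlFooBar(PerlPackage):
       """Example CPAN distribution (illustrative only)."""

       homepage = "https://metacpan.org/pod/Foo::Bar"
       url = "https://cpan.metacpan.org/authors/id/X/XX/XXX/Foo-Bar-1.00.tar.gz"

       version("1.00", sha256="0" * 64)  # placeholder checksum

       depends_on("perl-module-build", type="build")  # only if the dist ships Build.PL

       def configure_args(self):
           # Appended after INSTALL_BASE / --install_base set by the builder
           return ["--with-some-feature"]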
var/spack/repos/spack_repo/builtin/build_systems/python.py (new file, 559 lines)
@@ -0,0 +1,559 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import functools
|
||||
import operator
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
from typing import Dict, Iterable, List, Mapping, Optional, Tuple
|
||||
|
||||
import archspec
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util.lang import ClassProperty, classproperty, match_predicate
|
||||
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.detection
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.package import (
|
||||
HeaderList,
|
||||
LibraryList,
|
||||
NoHeadersError,
|
||||
NoLibrariesError,
|
||||
PackageBase,
|
||||
Prefix,
|
||||
Spec,
|
||||
build_system,
|
||||
depends_on,
|
||||
extends,
|
||||
filter_file,
|
||||
find,
|
||||
find_all_headers,
|
||||
join_path,
|
||||
register_builder,
|
||||
run_after,
|
||||
test_part,
|
||||
tty,
|
||||
when,
|
||||
working_dir,
|
||||
)
|
||||
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
|
||||
"""Iterable that yields KEY=VALUE paths through a dictionary.
|
||||
|
||||
Args:
|
||||
dictionary: Possibly nested dictionary of arbitrary keys and values.
|
||||
|
||||
Yields:
|
||||
A single path through the dictionary.
|
||||
"""
|
||||
for key, item in dictionary.items():
|
||||
if isinstance(item, dict):
|
||||
# Recursive case
|
||||
for value in _flatten_dict(item):
|
||||
yield f"{key}={value}"
|
||||
else:
|
||||
# Base case
|
||||
yield f"{key}={item}"
|
||||
|
||||
|
||||
class PythonExtension(PackageBase):
|
||||
@property
|
||||
def import_modules(self) -> Iterable[str]:
|
||||
"""Names of modules that the Python package provides.
|
||||
|
||||
These are used to test whether or not the installation succeeded.
|
||||
These names generally come from running:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
>> import setuptools
|
||||
>> setuptools.find_packages()
|
||||
|
||||
in the source tarball directory. If the module names are incorrectly
|
||||
detected, this property can be overridden by the package.
|
||||
|
||||
Returns:
|
||||
List of strings of module names.
|
||||
"""
|
||||
modules = []
|
||||
pkg = self.spec["python"].package
|
||||
|
||||
# Packages may be installed in platform-specific or platform-independent
|
||||
# site-packages directories
|
||||
for directory in {pkg.platlib, pkg.purelib}:
|
||||
root = os.path.join(self.prefix, directory)
|
||||
|
||||
# Some Python libraries are packages: collections of modules
|
||||
# distributed in directories containing __init__.py files
|
||||
for path in find(root, "__init__.py", recursive=True):
|
||||
modules.append(
|
||||
path.replace(root + os.sep, "", 1)
|
||||
.replace(os.sep + "__init__.py", "")
|
||||
.replace("/", ".")
|
||||
)
|
||||
|
||||
# Some Python libraries are modules: individual *.py files
|
||||
# found in the site-packages directory
|
||||
for path in find(root, "*.py", recursive=False):
|
||||
modules.append(
|
||||
path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".")
|
||||
)
|
||||
|
||||
modules = [
|
||||
mod
|
||||
for mod in modules
|
||||
if re.match("[a-zA-Z0-9._]+$", mod) and not any(map(mod.startswith, self.skip_modules))
|
||||
]
|
||||
|
||||
tty.debug("Detected the following modules: {0}".format(modules))
|
||||
|
||||
return modules
|
||||
|
||||
@property
|
||||
def skip_modules(self) -> Iterable[str]:
|
||||
"""Names of modules that should be skipped when running tests.
|
||||
|
||||
These are a subset of import_modules. If a module has submodules,
|
||||
they are skipped as well (meaning a.b is skipped if a is contained).
|
||||
|
||||
Returns:
|
||||
List of strings of module names.
|
||||
"""
|
||||
return []
|
||||
|
||||
@property
|
||||
def bindir(self) -> str:
|
||||
"""Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
|
||||
windows = self.spec.satisfies("platform=windows")
|
||||
return join_path(self.spec.prefix, "Scripts" if windows else "bin")
|
||||
|
||||
def view_file_conflicts(self, view, merge_map):
|
||||
"""Report all file conflicts, excepting special cases for python.
|
||||
Specifically, this does not report errors for duplicate
|
||||
__init__.py files for packages in the same namespace.
|
||||
"""
|
||||
conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst))
|
||||
|
||||
if conflicts and self.py_namespace:
|
||||
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
|
||||
namespaces = set(x.package.py_namespace for x in ext_map.values())
|
||||
namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
|
||||
find_namespace = match_predicate(namespace_re)
|
||||
if self.py_namespace in namespaces:
|
||||
conflicts = list(x for x in conflicts if not find_namespace(x))
|
||||
|
||||
return conflicts
|
||||
|
||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
|
||||
# view.
|
||||
if not self.extendee_spec:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||
|
||||
if python.external:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
# We only patch shebangs in the bin directory.
|
||||
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
|
||||
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
|
||||
bin_dir = self.spec.prefix.bin
|
||||
|
||||
for src, dst in merge_map.items():
|
||||
if skip_if_exists and os.path.lexists(dst):
|
||||
continue
|
||||
|
||||
if not fs.path_contains_subdirectory(src, bin_dir):
|
||||
view.link(src, dst)
|
||||
continue
|
||||
|
||||
s = os.lstat(src)
|
||||
|
||||
# Symlink is delayed because we may need to re-target if its target is copied in view
|
||||
if stat.S_ISLNK(s.st_mode):
|
||||
delayed_links.append((src, dst))
|
||||
continue
|
||||
|
||||
# If it's executable and has a shebang, copy and patch it.
|
||||
if (s.st_mode & 0b111) and fs.has_shebang(src):
|
||||
copied_files[(s.st_dev, s.st_ino)] = dst
|
||||
shutil.copy2(src, dst)
|
||||
filter_file(
|
||||
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
|
||||
)
|
||||
else:
|
||||
view.link(src, dst)
|
||||
|
||||
# Finally re-target the symlinks that point to copied files.
|
||||
for src, dst in delayed_links:
|
||||
try:
|
||||
s = os.stat(src)
|
||||
target = copied_files[(s.st_dev, s.st_ino)]
|
||||
except (OSError, KeyError):
|
||||
target = None
|
||||
if target:
|
||||
os.symlink(os.path.relpath(target, os.path.dirname(dst)), dst)
|
||||
else:
|
||||
view.link(src, dst, spec=self.spec)
|
||||
|
||||
def remove_files_from_view(self, view, merge_map):
|
||||
ignore_namespace = False
|
||||
if self.py_namespace:
|
||||
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
|
||||
remaining_namespaces = set(
|
||||
spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
|
||||
)
|
||||
if self.py_namespace in remaining_namespaces:
|
||||
namespace_init = match_predicate(
|
||||
r"site-packages/{0}/__init__.py".format(self.py_namespace)
|
||||
)
|
||||
ignore_namespace = True
|
||||
|
||||
bin_dir = self.spec.prefix.bin
|
||||
|
||||
to_remove = []
|
||||
for src, dst in merge_map.items():
|
||||
if ignore_namespace and namespace_init(dst):
|
||||
continue
|
||||
|
||||
if not fs.path_contains_subdirectory(src, bin_dir):
|
||||
to_remove.append(dst)
|
||||
else:
|
||||
os.remove(dst)
|
||||
|
||||
view.remove_files(to_remove)
|
||||
|
||||
def test_imports(self) -> None:
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = self.module.python
|
||||
for module in self.import_modules:
|
||||
with test_part(
|
||||
self,
|
||||
f"test_imports_{module}",
|
||||
purpose=f"checking import of {module}",
|
||||
work_dir="spack-test",
|
||||
):
|
||||
python("-c", f"import {module}")
|
||||
|
||||
def update_external_dependencies(self, extendee_spec=None):
|
||||
"""
|
||||
Ensure all external python packages have a python dependency
|
||||
|
||||
If another package in the DAG depends on python, we use that
|
||||
python for the dependency of the external. If not, we assume
|
||||
that the external PythonPackage is installed into the same
|
||||
directory as the python it depends on.
|
||||
"""
|
||||
# TODO: Include this in the solve, rather than instantiating post-concretization
|
||||
if "python" not in self.spec:
|
||||
if extendee_spec:
|
||||
python = extendee_spec
|
||||
elif "python" in self.spec.root:
|
||||
python = self.spec.root["python"]
|
||||
else:
|
||||
python = self.get_external_python_for_prefix()
|
||||
if not python.concrete:
|
||||
repo = spack.repo.PATH.repo_for_pkg(python)
|
||||
python.namespace = repo.namespace
|
||||
|
||||
# Ensure architecture information is present
|
||||
if not python.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
host_os = host_platform.default_operating_system()
|
||||
host_target = host_platform.default_target()
|
||||
python.architecture = spack.spec.ArchSpec(
|
||||
(str(host_platform), str(host_os), str(host_target))
|
||||
)
|
||||
else:
|
||||
if not python.architecture.platform:
|
||||
python.architecture.platform = spack.platforms.host()
|
||||
platform = spack.platforms.by_name(python.architecture.platform)
|
||||
if not python.architecture.os:
|
||||
python.architecture.os = platform.default_operating_system()
|
||||
if not python.architecture.target:
|
||||
python.architecture.target = archspec.cpu.host().family.name
|
||||
|
||||
python.external_path = self.spec.external_path
|
||||
python._mark_concrete()
|
||||
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
|
||||
|
||||
def get_external_python_for_prefix(self):
|
||||
"""
|
||||
For an external package that extends python, find the most likely spec for the python
|
||||
it depends on.
|
||||
|
||||
First search: an "installed" external that shares a prefix with this package
|
||||
Second search: a configured external that shares a prefix with this package
|
||||
Third search: search this prefix for a python package
|
||||
|
||||
Returns:
|
||||
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
|
||||
"""
|
||||
python_externals_installed = [
|
||||
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
|
||||
]
|
||||
if python_externals_installed:
|
||||
return python_externals_installed[0]
|
||||
|
||||
python_external_config = spack.config.get("packages:python:externals", [])
|
||||
python_externals_configured = [
|
||||
spack.spec.parse_with_version_concrete(item["spec"])
|
||||
for item in python_external_config
|
||||
if item["prefix"] == self.spec.external_path
|
||||
]
|
||||
if python_externals_configured:
|
||||
return python_externals_configured[0]
|
||||
|
||||
python_externals_detection = spack.detection.by_path(
|
||||
["python"], path_hints=[self.spec.external_path]
|
||||
)
|
||||
|
||||
python_externals_detected = [
|
||||
spec
|
||||
for spec in python_externals_detection.get("python", [])
|
||||
if spec.external_path == self.spec.external_path
|
||||
]
|
||||
if python_externals_detected:
|
||||
return python_externals_detected[0]
|
||||
|
||||
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
|
||||
|
||||
|
||||
def _homepage(cls: "PythonPackage") -> Optional[str]:
|
||||
"""Get the homepage from PyPI if available."""
|
||||
if cls.pypi:
|
||||
name = cls.pypi.split("/")[0]
|
||||
return f"https://pypi.org/project/{name}/"
|
||||
return None
|
||||
|
||||
|
||||
def _url(cls: "PythonPackage") -> Optional[str]:
|
||||
if cls.pypi:
|
||||
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
|
||||
return None
|
||||
|
||||
|
||||
def _list_url(cls: "PythonPackage") -> Optional[str]:
|
||||
if cls.pypi:
|
||||
name = cls.pypi.split("/")[0]
|
||||
return f"https://pypi.org/simple/{name}/"
|
||||
return None
|
||||
|
||||
|
||||
class PythonPackage(PythonExtension):
|
||||
"""Specialized class for packages that are built using pip."""
|
||||
|
||||
#: Package name, version, and extension on PyPI
|
||||
pypi: Optional[str] = None
|
||||
|
||||
# To be used in UI queries that require to know which
|
||||
# build-system class we are using
|
||||
build_system_class = "PythonPackage"
|
||||
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||
legacy_buildsystem = "python_pip"
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["test_imports"]
|
||||
|
||||
build_system("python_pip")
|
||||
|
||||
with when("build_system=python_pip"):
|
||||
extends("python")
|
||||
depends_on("py-pip", type="build")
|
||||
# FIXME: technically wheel is only needed when building from source, not when
|
||||
# installing a downloaded wheel, but I don't want to add wheel as a dep to every
|
||||
# package manually
|
||||
depends_on("py-wheel", type="build")
|
||||
|
||||
py_namespace: Optional[str] = None
|
||||
|
||||
homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
|
||||
url: ClassProperty[Optional[str]] = classproperty(_url)
|
||||
list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
|
||||
|
||||
@property
|
||||
def python_spec(self) -> Spec:
|
||||
"""Get python-venv if it exists or python otherwise."""
|
||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||
return python
|
||||
|
||||
@property
|
||||
def headers(self) -> HeaderList:
|
||||
"""Discover header files in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
name = self.spec.name[3:]
|
||||
|
||||
# Headers should only be in include or platlib, but no harm in checking purelib too
|
||||
include = self.prefix.join(self.spec["python"].package.include).join(name)
|
||||
python = self.python_spec
|
||||
platlib = self.prefix.join(python.package.platlib).join(name)
|
||||
purelib = self.prefix.join(python.package.purelib).join(name)
|
||||
|
||||
headers_list = map(find_all_headers, [include, platlib, purelib])
|
||||
headers = functools.reduce(operator.add, headers_list)
|
||||
|
||||
if headers:
|
||||
return headers
|
||||
|
||||
msg = "Unable to locate {} headers in {}, {}, or {}"
|
||||
raise NoHeadersError(msg.format(self.spec.name, include, platlib, purelib))
|
||||
|
||||
@property
|
||||
def libs(self) -> LibraryList:
|
||||
"""Discover libraries in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
name = self.spec.name[3:]
|
||||
|
||||
# Libraries should only be in platlib, but no harm in checking purelib too
|
||||
python = self.python_spec
|
||||
platlib = self.prefix.join(python.package.platlib).join(name)
|
||||
purelib = self.prefix.join(python.package.purelib).join(name)
|
||||
|
||||
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
|
||||
libs_list = map(find_all_libraries, [platlib, purelib])
|
||||
libs = functools.reduce(operator.add, libs_list)
|
||||
|
||||
if libs:
|
||||
return libs
|
||||
|
||||
msg = "Unable to recursively locate {} libraries in {} or {}"
|
||||
raise NoLibrariesError(msg.format(self.spec.name, platlib, purelib))
|
||||
|
||||
|
||||
@register_builder("python_pip")
|
||||
class PythonPipBuilder(BuilderWithDefaults):
|
||||
phases = ("install",)
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("test_imports",)
|
||||
|
||||
#: Same as legacy_methods, but the signature is different
|
||||
legacy_long_methods = ("install_options", "global_options", "config_settings")
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")
|
||||
|
||||
#: Callback names for install-time test
|
||||
install_time_test_callbacks = ["test_imports"]
|
||||
|
||||
@staticmethod
|
||||
def std_args(cls) -> List[str]:
|
||||
return [
|
||||
# Verbose
|
||||
"-vvv",
|
||||
# Disable prompting for input
|
||||
"--no-input",
|
||||
# Disable the cache
|
||||
"--no-cache-dir",
|
||||
# Don't check to see if pip is up-to-date
|
||||
"--disable-pip-version-check",
|
||||
# Install packages
|
||||
"install",
|
||||
# Don't install package dependencies
|
||||
"--no-deps",
|
||||
# Overwrite existing packages
|
||||
"--ignore-installed",
|
||||
# Use env vars like PYTHONPATH
|
||||
"--no-build-isolation",
|
||||
# Don't warn that prefix.bin is not in PATH
|
||||
"--no-warn-script-location",
|
||||
# Ignore the PyPI package index
|
||||
"--no-index",
|
||||
]
|
||||
|
||||
@property
|
||||
def build_directory(self) -> str:
|
||||
"""The root directory of the Python package.
|
||||
|
||||
This is usually the directory containing one of the following files:
|
||||
|
||||
* ``pyproject.toml``
|
||||
* ``setup.cfg``
|
||||
* ``setup.py``
|
||||
"""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
def config_settings(self, spec: Spec, prefix: Prefix) -> Mapping[str, object]:
|
||||
"""Configuration settings to be passed to the PEP 517 build backend.
|
||||
|
||||
Requires pip 22.1 or newer for keys that appear only a single time,
|
||||
or pip 23.1 or newer if the same key appears multiple times.
|
||||
|
||||
Args:
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
|
||||
Returns:
|
||||
Possibly nested dictionary of KEY, VALUE settings.
|
||||
"""
|
||||
return {}
|
||||
|
||||
def install_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
"""Extra arguments to be supplied to the setup.py install command.
|
||||
|
||||
Requires pip 23.0 or older.
|
||||
|
||||
Args:
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
|
||||
Returns:
|
||||
List of options.
|
||||
"""
|
||||
return []
|
||||
|
||||
def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
"""Extra global options to be supplied to the setup.py call before the install
|
||||
or bdist_wheel command.
|
||||
|
||||
Deprecated in pip 23.1.
|
||||
|
||||
Args:
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
|
||||
Returns:
|
||||
List of options.
|
||||
"""
|
||||
return []
|
||||
|
||||
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Install everything from build directory."""
|
||||
pip = spec["python"].command
|
||||
pip.add_default_arg("-m", "pip")
|
||||
|
||||
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
|
||||
|
||||
for setting in _flatten_dict(self.config_settings(spec, prefix)):
|
||||
args.append(f"--config-settings={setting}")
|
||||
for option in self.install_options(spec, prefix):
|
||||
args.append(f"--install-option={option}")
|
||||
for option in self.global_options(spec, prefix):
|
||||
args.append(f"--global-option={option}")
|
||||
|
||||
if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
|
||||
args.append(pkg.stage.archive_file)
|
||||
else:
|
||||
args.append(".")
|
||||
|
||||
with working_dir(self.build_directory):
|
||||
pip(*args)
|
||||
|
||||
run_after("install")(execute_install_time_tests)
|
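A typical recipe built on this class sets ``pypi`` (from which the homepage and URLs are derived by the classproperties above) and, when necessary, per-backend ``config_settings`` that the builder forwards as ``--config-settings=KEY=VALUE``. A minimal sketch with hypothetical names and settings:

.. code-block:: python

   from spack.package import *


   class PyFoo(PythonPackage):
       """Example pip-installed package (illustrative only)."""

       pypi = "foo/foo-1.0.tar.gz"  # homepage/url/list_url are derived from this

       version("1.0", sha256="0" * 64)  # placeholder checksum

       depends_on("py-setuptools", type="build")

       # Modules exercised by the install-time import test
       import_modules = ["foo"]

       def config_settings(self, spec, prefix):
           # Forwarded to the PEP 517 backend as --config-settings=KEY=VALUE
           return {"builddir": "build"}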
@@ -1,19 +1,21 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from llnl.util.filesystem import working_dir

import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.package import (
    PackageBase,
    Prefix,
    Spec,
    build_system,
    depends_on,
    register_builder,
    run_after,
    working_dir,
)

from ._checks import BuilderWithDefaults, execute_build_time_tests


class QMakePackage(spack.package_base.PackageBase):
class QMakePackage(PackageBase):
    """Specialized class for packages built using qmake.

    For more information on the qmake build system, see:
@@ -32,7 +34,7 @@ class QMakePackage(spack.package_base.PackageBase):
        depends_on("gmake", type="build")


@spack.builder.builder("qmake")
@register_builder("qmake")
class QMakeBuilder(BuilderWithDefaults):
    """The qmake builder provides three phases that can be overridden:

@@ -64,23 +66,17 @@ def qmake_args(self):
        """List of arguments passed to qmake."""
        return []

    def qmake(
        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def qmake(self, pkg: QMakePackage, spec: Spec, prefix: Prefix) -> None:
        """Run ``qmake`` to configure the project and generate a Makefile."""
        with working_dir(self.build_directory):
            pkg.module.qmake(*self.qmake_args())

    def build(
        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg: QMakePackage, spec: Spec, prefix: Prefix) -> None:
        """Make the build targets"""
        with working_dir(self.build_directory):
            pkg.module.make()

    def install(
        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: QMakePackage, spec: Spec, prefix: Prefix) -> None:
        """Make the install targets"""
        with working_dir(self.build_directory):
            pkg.module.make("install")

@@ -90,4 +86,4 @@ def check(self):
        with working_dir(self.build_directory):
            self.pkg._if_make_target_execute("check")

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    run_after("build")(execute_build_time_tests)
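The builder runs ``qmake`` with ``qmake_args()`` and then ``make``/``make install`` in the build directory, so recipes mostly just override ``qmake_args``. A rough sketch (the package name, dependency, and qmake variables are hypothetical):

.. code-block:: python

   from spack.package import *


   class QtFoo(QMakePackage):
       """Example qmake-built project (illustrative only)."""

       homepage = "https://example.com/qt-foo"
       url = "https://example.com/qt-foo-1.0.tar.gz"

       version("1.0", sha256="0" * 64)  # placeholder checksum

       depends_on("qt-base", type="build")  # provides qmake; name is illustrative

       def qmake_args(self):
           # Passed to `qmake` before make/make install run
           return ["CONFIG+=release", f"PREFIX={self.prefix}"]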
@@ -3,10 +3,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Optional, Tuple

from llnl.util.filesystem import mkdirp
from llnl.util.lang import ClassProperty, classproperty

from spack.directives import extends
from spack.package import extends, mkdirp

from .generic import GenericBuilder, Package
@@ -4,19 +4,25 @@
import os
from typing import Optional, Tuple

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import ClassProperty, classproperty

import spack.builder
import spack.spec
import spack.util.prefix
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.config import determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
from spack.package import (
    Builder,
    Executable,
    PackageBase,
    Prefix,
    ProcessError,
    Spec,
    build_system,
    determine_number_of_jobs,
    extends,
    maintainers,
    register_builder,
    tty,
    working_dir,
)
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError


def _homepage(cls: "RacketPackage") -> Optional[str]:
@@ -46,8 +52,8 @@ class RacketPackage(PackageBase):
    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)


@spack.builder.builder("racket")
class RacketBuilder(spack.builder.Builder):
@register_builder("racket")
class RacketBuilder(Builder):
    """The Racket builder provides an ``install`` phase that can be overridden."""

    phases = ("install",)
@@ -76,12 +82,10 @@ def build_directory(self):
            ret = os.path.join(ret, self.subdirectory)
        return ret

    def install(
        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg: RacketPackage, spec: Spec, prefix: Prefix) -> None:
        """Install everything from build directory."""
        raco = Executable("raco")
        with fs.working_dir(self.build_directory):
        with working_dir(self.build_directory):
            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
            name = pkg.racket_name
            assert name is not None, "Racket package name is not set"
Some files were not shown because too many files have changed in this diff.