Compare commits
129 Commits
hs/feature ... develop
SHA1
05ce2c7766
d8c819f3b8
4d563acd1b
7d27e11698
cac7684faa
8caba599af
b542f379d5
a8aeb17e37
8bcbe52b01
ecb02c1fc6
1f74ac5188
23ba489e06
5879724a2a
bfc52d6f50
0107792a9e
de6f07094e
63c60c18c7
f01a442ad4
cca0eb6873
c39725a9e6
e58351f421
2a028144ba
22a7419235
7e21357045
cf7e1643c3
158ddf72cf
9567b13b4f
f6da60b541
3caff2c5a0
091a8a4734
cfcee7d092
c6d04286c5
3c3e1c6d30
3669f0356b
dd72df89d6
445667adbe
3ec4797513
50e8f6395c
bf9426a48d
bf08b1e2c6
687137a057
6c4c9985d5
2927e708bc
a9d3bd8d7f
64fc66ab48
c86b2860aa
d991ebbe09
3f00eeabd2
b77d9b87f8
e46dae9eb6
760fc05da3
e07503e4ac
aba8d85b4d
546625cdb8
d951c4f112
ef2b596e3f
f07789febf
4316c4fb00
72871ebde8
3b2163c718
16067871e2
53ae44163d
02880a866c
a242e77e81
e9b822a86a
3ea16482a6
a13557ac94
69e9841262
65279dc6f3
3623d5d20e
50cc87500c
b4e039ad7b
6227bd7986
da9fa24d15
2929ea02a1
c99e654650
8ba4b3c103
240e669793
2bda9159d3
b12a64d687
70f4eef020
7a0c5671dc
45402d7850
31d48ba011
4d55fe6284
b8c31b22a5
9738f1c026
e3a7d5763f
e7e37899f4
6e98f88c51
600336eba5
56df6b414d
5e617be0ad
0f44e42a70
ff86b3acdd
b4dd42bed7
7f8c5bd4ca
ac08428f20
37abfc7541
f96def28cb
4b1f126de7
0d586695a0
f1ba23316b
3891305005
cf20d677a1
5c5b0d80d2
f8538a1b1c
1f0aaafc71
fb8d6e8ea0
756721c6dd
153c3f03c8
c4f51ff60d
da650aac0c
782b5c30d2
1e9be97a25
bd5f277e17
719fd6fb43
abcc641373
abcef565a8
00d65b75a1
de4a9e867d
56f40cc1c8
ae2c9d1b99
af24280c96
8daf4bc215
dabf7e9de8
9cfb973d69
e4f9e73671
ae9cffe55f
.github/workflows/import-check.yaml (vendored, 1 change)
@@ -6,6 +6,7 @@ on:
 jobs:
   # Check we don't make the situation with circular imports worse
   import-check:
+    continue-on-error: true
     runs-on: ubuntu-latest
     steps:
       - uses: julia-actions/setup-julia@v2
.github/workflows/sync-packages.yaml (vendored, 5 changes)
@@ -27,7 +27,10 @@ jobs:
       - name: Sync spack/spack-packages with spack/spack
        run: |
          cd spack-packages
-          git-filter-repo --quiet --source ../spack --subdirectory-filter var/spack/repos --refs develop
+          git-filter-repo --quiet --source ../spack \
+            --path var/spack/repos/ --path-rename var/spack/repos/:python/ \
+            --path share/spack/gitlab/cloud_pipelines/ --path-rename share/spack/gitlab/cloud_pipelines/:.ci/gitlab/ \
+            --refs develop
      - name: Push
        run: |
          cd spack-packages
@@ -276,7 +276,7 @@ remove dependent packages *before* removing their dependencies or use the
 
 Garbage collection
 ^^^^^^^^^^^^^^^^^^
 
-When Spack builds software from sources, if often installs tools that are needed
+When Spack builds software from sources, it often installs tools that are needed
 just to build or test other software. These are not necessary at runtime.
 To support cases where removing these tools can be a benefit Spack provides
 the ``spack gc`` ("garbage collector") command, which will uninstall all unneeded packages:
@@ -89,7 +89,7 @@ You can see that the mirror is added with ``spack mirror list`` as follows:
 
     spack-public https://spack-llnl-mirror.s3-us-west-2.amazonaws.com/
 
-At this point, you've create a buildcache, but spack hasn't indexed it, so if
+At this point, you've created a buildcache, but Spack hasn't indexed it, so if
 you run ``spack buildcache list`` you won't see any results. You need to index
 this new build cache as follows:
@@ -318,7 +318,7 @@ other system dependencies. However, they are still compatible with tools like
 ``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
 
 .. note::
-   The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
+   The Docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
    ``max depth exceeded`` error may be produced when pulling the image. There
    are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.
@@ -14,7 +14,7 @@ is an entire command dedicated to the management of every aspect of bootstrapping:
 
 .. command-output:: spack bootstrap --help
 
-Spack is configured to bootstrap its dependencies lazily by default; i.e. the first time they are needed and
+Spack is configured to bootstrap its dependencies lazily by default; i.e., the first time they are needed and
 can't be found. You can readily check if any prerequisite for using Spack is missing by running:
 
 .. code-block:: console

@@ -36,8 +36,8 @@ can't be found. You can readily check if any prerequisite for using Spack is missing
 
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. The return code of this command summarizes the results, if any
-dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
+they can be bootstrapped. The return code of this command summarizes the results; if any
+dependencies are missing, the return code is ``1``, otherwise ``0``. Running a command that
 concretizes a spec, like:
 
 .. code-block:: console
@@ -66,7 +66,7 @@ on these ideas for each distinct build system that Spack supports:
 
    build_systems/rocmpackage
    build_systems/sourceforgepackage
 
-For reference, the :py:mod:`Build System API docs <spack.build_systems>`
+For reference, the :py:mod:`Build System API docs <spack_repo.builtin.build_systems>`
 provide a list of build systems and methods/attributes that can be
 overridden. If you are curious about the implementation of a particular
 build system, you can view the source code by running:

@@ -90,7 +90,7 @@ packages. You can quickly find examples by running:
 
 You can then view these packages with ``spack edit``.
 
 This guide is intended to supplement the
-:py:mod:`Build System API docs <spack.build_systems>` with examples of
+:py:mod:`Build System API docs <spack_repo.builtin.build_systems>` with examples of
 how to override commonly used methods. It also provides rules of thumb
 and suggestions for package developers who are unfamiliar with a
 particular build system.
@@ -129,8 +129,8 @@ Adding flags to cmake
 
 To add additional flags to the ``cmake`` call, simply override the
 ``cmake_args`` function. The following example defines values for the flags
 ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
-and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
-:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
+and without the :meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define` and
+:meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
 
 .. code-block:: python
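The code block truncated above pairs the two styles roughly as follows; this is a sketch, with the variant names ``hdf5`` and ``threads`` assumed rather than taken from the diff:

.. code-block:: python

   def cmake_args(self):
       return [
           "-DWHATEVER:STRING=somevalue",                    # flag written by hand
           self.define("ENABLE_BROKEN_FEATURE", False),      # helper for a fixed value
           self.define_from_variant("DETECT_HDF5", "hdf5"),  # helper tied to a variant
           self.define_from_variant("THREADS", "threads"),
       ]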
@@ -35,8 +35,8 @@
 if not os.path.exists(link_name):
     os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring"))
 sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
 sys.path.append(os.path.abspath("_spack_root/var/spack/repos/"))
 
 # Add the Spack bin directory to the path so that we can use its output in docs.
 os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root")

@@ -76,11 +76,20 @@
     apidoc_args
     + [
         "_spack_root/lib/spack/spack",
+        "_spack_root/lib/spack/spack/package.py",  # sphinx struggles with os.chdir re-export.
         "_spack_root/lib/spack/spack/test/*.py",
         "_spack_root/lib/spack/spack/test/cmd/*.py",
     ]
 )
 sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])
+sphinx_apidoc(
+    apidoc_args
+    + [
+        "--implicit-namespaces",
+        "_spack_root/var/spack/repos/spack_repo",
+        "_spack_root/var/spack/repos/spack_repo/builtin/packages",
+    ]
+)
 
 # Enable todo items
 todo_include_todos = True

@@ -209,7 +218,7 @@ def setup(sphinx):
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
+    ("py:class", "spack_repo.builtin.build_systems._checks.BuilderWithDefaults"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),

@@ -219,7 +228,7 @@ def setup(sphinx):
     ("py:class", "spack.install_test.Pb"),
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
     ("py:class", "spack.traverse.EdgeAndDepth"),
-    ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
+    ("py:class", "_vendoring.archspec.cpu.microarchitecture.Microarchitecture"),
     ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
@@ -148,8 +148,8 @@ this can expose you to attacks. Use at your own risk.
 
 ``ssl_certs``
 --------------------
 
-Path to custom certificats for SSL verification. The value can be a
-filesytem path, or an environment variable that expands to an absolute file path.
+Path to custom certificates for SSL verification. The value can be a
+filesystem path, or an environment variable that expands to an absolute file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.
@@ -11,7 +11,7 @@ Container Images
 
 Spack :ref:`environments` can easily be turned into container images. This page
 outlines two ways in which this can be done:
 
-1. By installing the environment on the host system, and copying the installations
+1. By installing the environment on the host system and copying the installations
    into the container image. This approach does not require any tools like Docker
    or Singularity to be installed.
 2. By generating a Docker or Singularity recipe that can be used to build the

@@ -56,8 +56,8 @@ environment roots and its runtime dependencies.
 
 .. note::
 
-   When using registries like GHCR and Docker Hub, the ``--oci-password`` flag is not
-   the password for your account, but a personal access token you need to generate separately.
+   When using registries like GHCR and Docker Hub, the ``--oci-password`` flag specifies not
+   the password for your account, but rather a personal access token you need to generate separately.
 
 The specified ``--base-image`` should have a libc that is compatible with the host system.
 For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04``
@@ -20,7 +20,7 @@ be present on the machine where Spack is run:
 
    :header-rows: 1
 
 These requirements can be easily installed on most modern Linux systems;
-on macOS, the Command Line Tools package is required, and a full XCode suite
+on macOS, the Command Line Tools package is required, and a full Xcode suite
 may be necessary for some packages such as Qt and apple-gl. Spack is designed
 to run on HPC platforms like Cray. Not all packages should be expected
 to work on all platforms.
@@ -103,6 +103,7 @@ or refer to the full manual below.
 
    :caption: API Docs
 
    Spack API Docs <spack>
+   Spack Builtin Repo <spack_repo>
    LLNL API Docs <llnl>
 
 ==================
@@ -8,7 +8,7 @@
 Modules (modules.yaml)
 ======================
 
-The use of module systems to manage user environment in a controlled way
+The use of module systems to manage user environments in a controlled way
 is a common practice at HPC centers that is sometimes embraced also by
 individual programmers on their development machines. To support this
 common practice Spack integrates with `Environment Modules

@@ -490,7 +490,7 @@ that are already in the Lmod hierarchy.
 
 .. note::
-   Tcl and Lua modules also allow for explicit conflicts between modulefiles.
+   Tcl and Lua modules also allow for explicit conflicts between module files.
 
 .. code-block:: yaml

@@ -513,7 +513,7 @@ that are already in the Lmod hierarchy.
 
    :meth:`~spack.spec.Spec.format` method.
 
 For Lmod and Environment Modules versions prior 4.2, it is important to
-express the conflict on both modulefiles conflicting with each other.
+express the conflict on both module files conflicting with each other.
 
 .. note::

@@ -550,7 +550,7 @@ that are already in the Lmod hierarchy.
 
 .. warning::
    Consistency of Core packages
-   The user is responsible for maintining consistency among core packages, as ``core_specs``
+   The user is responsible for maintaining consistency among core packages, as ``core_specs``
    bypasses the hierarchy that allows Lmod to safely switch between coherent software stacks.
 
 .. warning::
@@ -69,7 +69,7 @@ An example for ``CMake`` is, for instance:
 
 The predefined steps for each build system are called "phases".
 In general, the name and order in which the phases will be executed can be
-obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or
+obtained by either reading the API docs at :py:mod:`~.spack_repo.builtin.build_systems`, or
 using the ``spack info`` command:
 
 .. code-block:: console

@@ -107,8 +107,6 @@ Since v0.19, Spack supports two ways of writing a package recipe. The most common
 
 .. code-block:: python
 
-    from spack.package import *
-
     class Openjpeg(CMakePackage):
         """OpenJPEG is an open-source JPEG 2000 codec written in C language"""

@@ -143,8 +141,6 @@ builder class explicitly. Using the same example as above, this reads:
 
 .. code-block:: python
 
-    from spack.package import *
-
     class Openjpeg(CMakePackage):
         """OpenJPEG is an open-source JPEG 2000 codec written in C language"""

@@ -162,7 +158,7 @@ builder class explicitly. Using the same example as above, this reads:
 
         url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
         return url_fmt.format(version)
 
-    class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+    class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
         def cmake_args(self):
             args = [
                 self.define_from_variant("BUILD_CODEC", "codec"),

@@ -183,7 +179,7 @@ Spack can be found at :ref:`package_class_structure`.
 
 .. code-block:: python
 
-    class Foo(CmakePackage):
+    class Foo(CMakePackage):
         def cmake_args(self):
             ...

@@ -260,7 +256,7 @@ for details):
 
    #
    # See the Spack documentation for more information on packaging.
    # ----------------------------------------------------------------------------
-   import spack.build_systems.autotools
+   import spack_repo.builtin.build_systems.autotools
    from spack.package import *
 
 
@@ -1216,7 +1212,7 @@ class-level tarball URL and VCS. For example:
 
    version("master", branch="master")
    version("12.12.1", md5="ecd4606fa332212433c98bf950a69cc7")
    version("12.10.1", md5="667333dbd7c0f031d47d7c5511fd0810")
-   version("12.8.1", "9f37f683ee2b427b5540db8a20ed6b15")
+   version("12.8.1", md5="9f37f683ee2b427b5540db8a20ed6b15")
 
 If a package contains both a ``url`` and ``git`` class-level attribute,
 Spack decides which to use based on the arguments to the ``version()``
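A sketch of that rule (the package name, URLs, and checksum are illustrative): versions declared with VCS arguments such as ``branch`` or ``tag`` fetch from ``git``, while versions declared with a checksum fetch from ``url``:

.. code-block:: python

   class Mylib(Package):
       url = "https://example.com/mylib-1.0.tar.gz"  # used by checksummed versions
       git = "https://example.com/mylib.git"         # used by branch/tag/commit versions

       version("main", branch="main")                          # fetched with git
       version("1.0", md5="0123456789abcdef0123456789abcdef")  # fetched from url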
@@ -1347,7 +1343,7 @@ Submodules
 
    version("1.0.1", tag="v1.0.1", submodules=True)
 
-If a package has needs more fine-grained control over submodules, define
+If a package needs more fine-grained control over submodules, define
 ``submodules`` to be a callable function that takes the package instance as
 its only argument. The function should return a list of submodules to be fetched.
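A sketch of such a callable (the submodule path and version range are illustrative, not from the diff):

.. code-block:: python

   def submodules(package):
       # Hypothetical rule: fetch the optional data submodule only for newer versions.
       result = []
       if package.spec.satisfies("@1.1:"):
           result.append("third_party/data")
       return result


   class MyPackage(Package):
       version("1.1.0", tag="v1.1.0", submodules=submodules)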
@@ -2257,17 +2253,139 @@ RPATHs in Spack are handled in one of three ways:
 
    set in standard variables like ``CC``, ``CXX``, ``F77``, and ``FC``,
    so most build systems (autotools and many gmake systems) pick them
    up and use them.
-#. CMake also respects Spack's compiler wrappers during the build, but
-   modifies them upon installation. If you inherit from ``CMakePackage``,
-   Spack will set the default ``cmake`` defines to ensure that RPATHs
-   are set correctly upon installation.
+#. CMake has its own RPATH handling, and distinguishes between build and
+   install RPATHs. By default, during the build it registers RPATHs to
+   all libraries it links to, so that just-built executables can be run
+   during the build itself. Upon installation, these RPATHs are cleared,
+   unless the user defines the install RPATHs. When inheriting from
+   ``CMakePackage``, Spack handles this automatically, and sets
+   ``CMAKE_INSTALL_RPATH_USE_LINK_PATH`` and ``CMAKE_INSTALL_RPATH``,
+   so that libraries of dependencies and the package's own libraries
+   can be found at runtime.
 #. If you need to modify the build to add your own RPATHs, you can
    use the ``self.rpath`` property of your package, which will
    return a list of all the RPATHs that Spack will use when it
    links. You can see this how this is used in the :ref:`PySide
    example <pyside-patch>` above.
 
+.. _attribute_parallel:
+
+---------------
+Parallel builds
+---------------
+
+Spack supports parallel builds on an individual package and at the
+installation level. Package-level parallelism is established by the
+``--jobs`` option and its configuration and package recipe equivalents.
+Installation-level parallelism is driven by the DAG(s) of the requested
+package or packages.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Package-level build parallelism
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+By default, Spack will invoke ``make()``, or any other similar tool,
+with a ``-j <njobs>`` argument, so those builds run in parallel.
+The parallelism is determined by the value of the ``build_jobs`` entry
+in ``config.yaml`` (see :ref:`here <build-jobs>` for more details on
+how this value is computed).
+
+If a package does not build properly in parallel, you can override
+this setting by adding ``parallel = False`` to your package. For
+example, OpenSSL's build does not work in parallel, so its package
+looks like this:
+
+.. code-block:: python
+   :emphasize-lines: 8
+   :linenos:
+
+   class Openssl(Package):
+       homepage = "http://www.openssl.org"
+       url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
+
+       version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
+       depends_on("zlib-api")
+
+       parallel = False
+
+You can also disable parallel builds only for specific make
+invocation:
+
+.. code-block:: python
+   :emphasize-lines: 5
+   :linenos:
+
+   class Libelf(Package):
+       ...
+
+       def install(self, spec, prefix):
+           make("install", parallel=False)
+
+Note that the ``--jobs`` option works out of the box for all standard
+build systems. If you are using a non-standard build system instead, you
+can use the variable ``make_jobs`` to extract the number of jobs specified
+by the ``--jobs`` option:
+
+.. code-block:: python
+   :emphasize-lines: 7, 11
+   :linenos:
+
+   class Xios(Package):
+       ...
+       def install(self, spec, prefix):
+           ...
+           options = [
+               ...
+               '--jobs', str(make_jobs),
+           ]
+           ...
+           make_xios = Executable("./make_xios")
+           make_xios(*options)
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Install-level build parallelism
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack supports the concurrent installation of packages within a Spack
+instance across multiple processes using file system locks. This
+parallelism is separate from the package-level achieved through build
+systems' use of the ``-j <njobs>`` option. With install-level parallelism,
+processes coordinate the installation of the dependencies of specs
+provided on the command line and as part of an environment build with
+only **one process** being allowed to install a given package at a time.
+Refer to :ref:`Dependencies` for more information on dependencies and
+:ref:`installing-environment` for how to install an environment.
+
+Concurrent processes may be any combination of interactive sessions and
+batch jobs. Which means a ``spack install`` can be running in a terminal
+window while a batch job is running ``spack install`` on the same or
+overlapping dependencies without any process trying to re-do the work of
+another.
+
+For example, if you are using Slurm, you could launch an installation
+of ``mpich`` using the following command:
+
+.. code-block:: console
+
+   $ srun -N 2 -n 8 spack install -j 4 mpich@3.3.2
+
+This will create eight concurrent, four-job installs on two different
+nodes.
+
+Alternatively, you could run the same installs on one node by entering
+the following at the command line of a bash shell:
+
+.. code-block:: console
+
+   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
+
+.. note::
+
+   The effective parallelism is based on the maximum number of packages
+   that can be installed at the same time, which is limited by the
+   number of packages with no (remaining) uninstalled dependencies.
+
 
 .. _dependencies:
 
 ------------
@@ -2377,7 +2495,7 @@ necessary when there are breaking changes in the dependency that the
 package cannot handle. In Spack we often add forward compatibility
 bounds only at the time a new, breaking version of a dependency is
 released. As with backward compatibility, it is typical to see a list
-of forward compatibility bounds in a package file as seperate lines:
+of forward compatibility bounds in a package file as separate lines:
 
 .. code-block:: python
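The code block that follows this sentence in the source shows such a list; a representative sketch (the dependency name and version ranges are illustrative):

.. code-block:: python

   # One forward compatibility bound per line, each added when the
   # corresponding breaking release of the dependency appeared.
   depends_on("dep@:14", when="@:2.3")
   depends_on("dep@:15", when="@:2.4")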
@@ -2812,45 +2930,14 @@ This means that the former should only be used if the environment variables depend on the
 package, whereas the latter should be used if the environment variables depend only on the package
 itself.
 
----------------------------------------------------------
-Setting and requesting Python variables with data classes
----------------------------------------------------------
+--------------------------------
+Setting package module variables
+--------------------------------
 
-Apart from environment variables, Spack also provides a way to set Python variables that are
-necessary for configuring or building a package. A package that requires certain Python variables
-for its build can declare them using a data class and an annotation in the package class:
-
-.. code-block:: python
-   :emphasize-lines: 1-2,5,10-11
-   :linenos:
-
-   class Data:
-       make: Executable
-
-   class MyPackage(Package):
-       data: Data
-
-       depends_on("gmake", type="build")
-
-       def install(self, spec, prefix):
-           self.data.make("mytarget")
-           self.data.make("install", parallel=False)
-
-The dependency ``gmake`` implements :meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
-to set the ``make`` variable so it can be used by the dependent package:
-
-.. code-block:: python
-   :linenos:
-
-   class Gmake(Package):
-       ...
-       def setup_dependent_package(self, module, dependent_spec):
-           module.make = MakeExecutable(
-               self.spec.prefix.bin.make,
-               jobs=determine_number_of_jobs(dependent_spec),
-           )
-
-Another example of this can be found in the ``Python`` package:
+Apart from modifying environment variables of the dependent package, you can also define Python
+variables to be used by the dependent. This is done by implementing
+:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
+example of this can be found in the ``Python`` package:
 
 .. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
    :pyobject: Python.setup_dependent_package
@@ -2860,181 +2947,14 @@ This allows Python packages to directly use these variables:
 
 .. code-block:: python
 
-    class Data:
-        python_platlib: str
-
-    class MyPythonPackage(Package):
-        data: Data
-
-        extends("python")
-
-        def install(self, spec, prefix):
-            ...
-            install("script.py", self.data.python_platlib)
-
-There are a few special variables that are set by Spack's build environment instead of by
-dependencies. These can be request in the data class as well. Among those are ``make_jobs: int``,
-``configure: Executable``, ``prefix: Prefix``, and ``dso_suffix: str``.
-
-Notice that type hints in data classes are not required and not enforced at runtime. They are only
-used for documentation purposes and to help IDEs with code completion.
-
--------------------------------
-Module level variables (legacy)
--------------------------------
-
-For packages that do not use the data class mechanism, Spack will still set global variables
-in the package module. This is an artifact of the legacy package system and is not recommended
-to be used in new packages.
-
-If we omit the data class in the previous example, we can still use the ``make`` variable as
-a global variable:
-
-.. code-block:: python
-
-    class MyPackage(Package):
-        def install(self, spec, prefix):
-            make("mytarget")  # not recommended, use data class instead
-            make("install", parallel=False)
-
-This is not recommended, because it is unclear where the ``make`` variable is set, and leads to
-issues with editors and type checkers.
-
-.. _attribute_parallel:
-
----------------
-Parallel builds
----------------
-
-Spack supports parallel builds on an individual package and at the
-installation level. Package-level parallelism is established by the
-``--jobs`` option and its configuration and package recipe equivalents.
-Installation-level parallelism is driven by the DAG(s) of the requested
-package or packages.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Package-level build parallelism
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-By default, build dependencies like ``gmake`` and ``ninja`` ensure that
-the ``-j <njobs>`` flag is passed whenever their ``make`` or ``ninja``
-executables are invoked, so that builds are done in parallel.
-
-The parallelism is determined by the value of the ``build_jobs`` entry
-in ``config.yaml`` (see :ref:`here <build-jobs>` for more details on
-how this value is computed).
-
-If a package does not build properly in parallel, you can override
-this setting by adding ``parallel = False`` to your package. For
-example, OpenSSL's build does not work in parallel, so its package
-looks like this:
-
-.. code-block:: python
-   :emphasize-lines: 9
-   :linenos:
-
-   class Openssl(Package):
-       homepage = "http://www.openssl.org"
-       url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
-
-       version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
-       depends_on("zlib-api")
-       depends_on("gmake", type="build")
-
-       parallel = False
-
-Similarly, you can disable parallel builds only for specific make
-commands, as ``libdwarf`` does:
-
-.. code-block:: python
-   :emphasize-lines: 14, 17
-   :linenos:
-
-   class Data:
-       configure: Executable
-       make: Executable
-
-   class Libelf(Package):
-       data: Data
-       ...
-
-       def install(self, spec, prefix):
-           self.data.configure("--prefix=" + prefix,
-                               "--enable-shared",
-                               "--disable-dependency-tracking",
-                               "--disable-debug")
-           self.data.make()
-
-           # The mkdir commands in libelf's install can fail in parallel
-           self.data.make("install", parallel=False)
-
-The first make will run in parallel here, but the second will not. If
-you set ``parallel`` to ``False`` at the package level, then each call
-to ``make()`` will be sequential by default, but packagers can call
-``make(parallel=True)`` to override it.
-
-Note that the ``--jobs`` option works out of the box for all standard
-build systems. If you are using a non-standard build system instead, you
-can use the variable ``make_jobs`` to extract the number of jobs specified
-by the ``--jobs`` option:
-
-.. code-block:: python
-   :emphasize-lines: 2, 10
-   :linenos:
-
-   class Data:
-       make_jobs: int
-
-   class Xios(Package):
-       data: Data
-       ...
-
-       def install(self, spec, prefix):
-           make_xios = Executable("./make_xios")
-           make_xios(..., "--jobs", str(self.pkg.make_jobs))
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Install-level build parallelism
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack supports the concurrent installation of packages within a Spack
-instance across multiple processes using file system locks. This
-parallelism is separate from the package-level achieved through build
-systems' use of the ``-j <njobs>`` option. With install-level parallelism,
-processes coordinate the installation of the dependencies of specs
-provided on the command line and as part of an environment build with
-only **one process** being allowed to install a given package at a time.
-Refer to :ref:`Dependencies` for more information on dependencies and
-:ref:`installing-environment` for how to install an environment.
-
-Concurrent processes may be any combination of interactive sessions and
-batch jobs. Which means a ``spack install`` can be running in a terminal
-window while a batch job is running ``spack install`` on the same or
-overlapping dependencies without any process trying to re-do the work of
-another.
-
-For example, if you are using Slurm, you could launch an installation
-of ``mpich`` using the following command:
-
-.. code-block:: console
-
-   $ srun -N 2 -n 8 spack install -j 4 mpich@3.3.2
-
-This will create eight concurrent, four-job installs on two different
-nodes.
-
-Alternatively, you could run the same installs on one node by entering
-the following at the command line of a bash shell:
-
-.. code-block:: console
-
-   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
+    install("script.py", python_platlib)
 
 .. note::
 
-   The effective parallelism is based on the maximum number of packages
-   that can be installed at the same time, which is limited by the
-   number of packages with no (remaining) uninstalled dependencies.
+   We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
+   global variables are coming from when editing a ``package.py`` file.
 
 -----
 Views
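For reference, the surviving mechanism is the ``setup_dependent_package`` hook shown in the removed ``Gmake`` example above; a minimal sketch of the same pattern (assuming, as that example does, that ``MakeExecutable`` and ``determine_number_of_jobs`` are in scope):

.. code-block:: python

   class Gmake(Package):
       def setup_dependent_package(self, module, dependent_spec):
           # Inject a ``make`` global into the dependent's package module.
           module.make = MakeExecutable(
               self.spec.prefix.bin.make,
               jobs=determine_number_of_jobs(dependent_spec),
           )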
@@ -3451,7 +3371,7 @@ the above attribute implementations:
 
         "/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so"
     ])
 
-    # baz library directories in the baz subdirectory of the foo porefix
+    # baz library directories in the baz subdirectory of the foo prefix
     >>> spec["baz"].libs.directories
     [
         "/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib"
@@ -3765,60 +3685,57 @@ the build system. The build systems currently supported by Spack are:
 
 +----------------------------------------------------------+----------------------------------+
 | **API docs**                                             | **Description**                  |
 +==========================================================+==================================+
-| :class:`~spack.build_systems.generic`                   | Generic build system without any |
+| :class:`~spack_repo.builtin.build_systems.generic`      | Generic build system without any |
 |                                                          | base implementation              |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.makefile`                  | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.makefile`     | Specialized build system for     |
 |                                                          | software built invoking          |
 |                                                          | hand-written Makefiles           |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.autotools`                 | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.autotools`    | Specialized build system for     |
 |                                                          | software built using             |
 |                                                          | GNU Autotools                    |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.cmake`                     | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.cmake`        | Specialized build system for     |
 |                                                          | software built using CMake       |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.maven`                     | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.maven`        | Specialized build system for     |
 |                                                          | software built using Maven       |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.meson`                     | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.meson`        | Specialized build system for     |
 |                                                          | software built using Meson       |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.nmake`                     | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.nmake`        | Specialized build system for     |
 |                                                          | software built using NMake       |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.qmake`                     | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.qmake`        | Specialized build system for     |
 |                                                          | software built using QMake       |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.scons`                     | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.scons`        | Specialized build system for     |
 |                                                          | software built using SCons       |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.waf`                       | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.waf`          | Specialized build system for     |
 |                                                          | software built using Waf         |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.r`                         | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.r`            | Specialized build system for     |
 |                                                          | R extensions                     |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.octave`                    | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.octave`       | Specialized build system for     |
 |                                                          | Octave packages                  |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.python`                    | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.python`       | Specialized build system for     |
 |                                                          | Python extensions                |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.perl`                      | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.perl`         | Specialized build system for     |
 |                                                          | Perl extensions                  |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.ruby`                      | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.ruby`         | Specialized build system for     |
 |                                                          | Ruby extensions                  |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.intel`                     | Specialized build system for     |
-|                                                          | licensed Intel software          |
-+----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.oneapi`                    | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.oneapi`       | Specialized build system for     |
 |                                                          | Intel oneAPI software            |
 +----------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.aspell_dict`               | Specialized build system for     |
+| :class:`~spack_repo.builtin.build_systems.aspell_dict`  | Specialized build system for     |
 |                                                          | Aspell dictionaries              |
 +----------------------------------------------------------+----------------------------------+
@@ -3830,7 +3747,7 @@ the build system. The build systems currently supported by Spack are:
 
 rare cases where manual intervention is needed we need to stress that a
 package base class depends on the *build system* being used, not the language of the package.
 For example, a Python extension installed with CMake would ``extends("python")`` and
-subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
+subclass from :class:`~spack_repo.builtin.build_systems.cmake.CMakePackage`.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 Overriding builder methods
@@ -3838,7 +3755,7 @@ Overriding builder methods
 
 Build-system "phases" have default implementations that fit most of the common cases:
 
-.. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/build_systems/autotools.py
    :pyobject: AutotoolsBuilder.configure
    :linenos:

@@ -3852,7 +3769,7 @@ configure arguments:
 
 Each specific build system has a list of attributes and methods that can be overridden to
 fine-tune the installation of a package without overriding an entire phase. To
-have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems`
+have more information on them the place to go is the API docs of the :py:mod:`~.spack_repo.builtin.build_systems`
 module.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -3894,7 +3811,7 @@ If the ``package.py`` has build instructions in a separate
 
 .. code-block:: python
 
-    class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+    class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
         def install(self, pkg, spec, prefix):
             ...
@@ -3907,31 +3824,32 @@ Mixin base classes
 
 Besides build systems, there are other cases where common metadata and behavior can be extracted
 and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm``, share
 common dependencies and constraints. To factor these attributes into a single place, Spack provides
-a few mixin classes in the ``spack.build_systems`` module:
+a few mixin classes in the ``spack_repo.builtin.build_systems`` module:
 
-+---------------------------------------------------------------+----------------------------------+
-| **API docs**                                                  | **Description**                  |
-+===============================================================+==================================+
-| :class:`~spack.build_systems.cuda.CudaPackage`                | A helper class for packages that |
-|                                                               | use CUDA                         |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.rocm.ROCmPackage`                | A helper class for packages that |
-|                                                               | use ROCm                         |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.gnu.GNUMirrorPackage`            | A helper class for GNU packages  |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.python.PythonExtension`          | A helper class for Python        |
-|                                                               | extensions                       |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.sourceforge.SourceforgePackage`  | A helper class for packages      |
-|                                                               | from sourceforge.org             |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.sourceware.SourcewarePackage`    | A helper class for packages      |
-|                                                               | from sourceware.org              |
-+---------------------------------------------------------------+----------------------------------+
-| :class:`~spack.build_systems.xorg.XorgPackage`                | A helper class for x.org         |
-|                                                               | packages                         |
-+---------------------------------------------------------------+----------------------------------+
++----------------------------------------------------------------------------+----------------------------------+
+| **API docs**                                                               | **Description**                  |
++============================================================================+==================================+
+| :class:`~spack_repo.builtin.build_systems.cuda.CudaPackage`                | A helper class for packages that |
+|                                                                            | use CUDA                         |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.rocm.ROCmPackage`                | A helper class for packages that |
+|                                                                            | use ROCm                         |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.gnu.GNUMirrorPackage`            | A helper class for GNU packages  |
+|                                                                            |                                  |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.python.PythonExtension`          | A helper class for Python        |
+|                                                                            | extensions                       |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.sourceforge.SourceforgePackage`  | A helper class for packages      |
+|                                                                            | from sourceforge.org             |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.sourceware.SourcewarePackage`    | A helper class for packages      |
+|                                                                            | from sourceware.org              |
++----------------------------------------------------------------------------+----------------------------------+
+| :class:`~spack_repo.builtin.build_systems.xorg.XorgPackage`                | A helper class for x.org         |
+|                                                                            | packages                         |
++----------------------------------------------------------------------------+----------------------------------+
 
 These classes should be used by adding them to the inheritance tree of the package that needs them,
 for instance:
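A sketch of that inheritance (the class name and the CMake wiring are illustrative, not from the diff):

.. code-block:: python

   class MySolver(CMakePackage, CudaPackage):
       # CudaPackage contributes the ``cuda`` variant, ``cuda_arch`` values,
       # and their associated dependencies and conflicts.
       def cmake_args(self):
           args = []
           if self.spec.satisfies("+cuda"):
               archs = self.spec.variants["cuda_arch"].value
               args.append(self.define("CMAKE_CUDA_ARCHITECTURES", ";".join(archs)))
           return args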
@@ -3975,13 +3893,13 @@ Additional build instructions are split into separate builder classes:
 
 .. code-block:: python
 
-    class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+    class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder):
         def cmake_args(self):
             return [
                 self.define_from_variant("MY_FEATURE", "my_feature")
             ]
 
-    class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
+    class AutotoolsBuilder(spack_repo.builtin.build_systems.autotools.AutotoolsBuilder):
         def configure_args(self):
             return self.with_or_without("my-feature", variant="my_feature")
@@ -4163,6 +4081,50 @@ condition is true. You can explicitly cause the build to fail from
 
     if spec.architecture.startswith("darwin"):
         raise InstallError("This package does not build on Mac OS X!")
 
+.. _shell-wrappers:
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Shell command functions
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Recall the install method from ``libelf``:
+
+.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
+   :pyobject: Libelf.install
+   :linenos:
+
+Normally in Python, you'd have to write something like this in order
+to execute shell commands:
+
+.. code-block:: python
+
+    import subprocess
+    subprocess.check_call("configure", "--prefix={0}".format(prefix))
+
+We've tried to make this a bit easier by providing callable wrapper
+objects for some shell commands. By default, ``configure``,
+``cmake``, and ``make`` wrappers are are provided, so you can call
+them more naturally in your package files.
+
+If you need other commands, you can use ``which`` to get them:
+
+.. code-block:: python
+
+    sed = which("sed")
+    sed("s/foo/bar/", filename)
+
+The ``which`` function will search the ``PATH`` for the application.
+
+Callable wrappers also allow spack to provide some special features.
+For example, in Spack, ``make`` is parallel by default, and Spack
+figures out the number of cores on your machine and passes an
+appropriate value for ``-j<numjobs>`` when it calls ``make`` (see the
+``parallel`` `package attribute <attribute_parallel>`). In
+a package file, you can supply a keyword argument, ``parallel=False``,
+to the ``make`` wrapper to disable parallel make. In the ``libelf``
+package, this allows us to avoid race conditions in the library's
+build system.
+
 ^^^^^^^^^^^^^^
 Compiler flags
 ^^^^^^^^^^^^^^
@@ -4337,7 +4299,12 @@ Prefix objects
 
 ^^^^^^^^^^^^^^^^^^^^^
 
 Spack passes the ``prefix`` parameter to the install method so that
-you can pass it to ``configure``, ``cmake``, or some other installer.
+you can pass it to ``configure``, ``cmake``, or some other installer,
+e.g.:
+
+.. code-block:: python
+
+    configure("--prefix={0}".format(prefix))
 
 For the most part, prefix objects behave exactly like strings. For
 packages that do not have their own install target, or for those that
@@ -4348,7 +4315,7 @@ yourself, e.g.:
 
 .. code-block:: python
 
-    def install(self, spec: Spec, prefix: Prefix) -> None:
+    def install(self, spec, prefix):
         mkdirp(prefix.bin)
         install("foo-tool", prefix.bin)
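Prefix objects turn attribute access into path joins, which is what makes ``prefix.bin`` above work; a small sketch (the import path is an assumption and may differ between Spack versions):

.. code-block:: python

   from spack.util.prefix import Prefix  # assumed location of Prefix

   prefix = Prefix("/opt/example/foo-1.0")
   print(prefix.bin)             # /opt/example/foo-1.0/bin
   print(prefix.share.man.man1)  # /opt/example/foo-1.0/share/man/man1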
@@ -4421,7 +4388,7 @@ do that, e.g.:
 
     if spec.satisfies("@1.2:1.4"):
         configure_args.append("CXXFLAGS='-DWITH_FEATURE'")
 
-    self.data.configure(*configure_args)
+    configure(*configure_args)
 
 This works for compilers, too:
@@ -4839,15 +4806,15 @@ of MPI builds:
 
    supply includes/libs/etc. This is fairly uncommon.
 
 2. Others really want the wrappers and assume you're using an MPI
    "compiler" -- i.e., they have no mechanism to add MPI
    includes/libraries/etc.
 
 3. CMake's ``FindMPI`` needs the compiler wrappers, but it uses them to
    extract ``-I`` / ``-L`` / ``-D`` arguments, then treats MPI like a
    regular library.
 
 Note that some CMake builds fall into case 2 because they either don't
 know about or don't like CMake's ``FindMPI`` support -- they just assume
 an MPI compiler. Also, some autotools builds fall into case 3 (e.g. `here
 is an autotools version of CMake's FindMPI
 <https://github.com/tgamblin/libra/blob/master/m4/lx_find_mpi.m4>`_).

@@ -4862,7 +4829,7 @@ Packaging Conventions
 
 As mentioned above, in the ``install()`` method, ``CC``, ``CXX``,
 ``F77``, and ``FC`` point to Spack's wrappers around the chosen compiler.
 Spack's wrappers are not the MPI compiler wrappers, though they do
 automatically add ``-I``, ``-L``, and ``-Wl,-rpath`` args for
 dependencies in a similar way. The MPI wrappers are a bit different in
 that they also add ``-l`` arguments for the MPI libraries, and some add
 special ``-D`` arguments to trigger build options in MPI programs.

@@ -4891,8 +4858,8 @@ there instead, e.g.:
 
 .. code-block:: python
 
     configure("-prefix=%s" % prefix,
               "-with-cc=%s" % spec["mpi"].mpicc)
 
 Now, you may think that doing this will lose the includes, library paths,
 and RPATHs that Spack's compiler wrapper get you, but we've actually set
@@ -5761,7 +5728,7 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
 
 .. note::
 
    The method name ``copy_test_files`` here is for illustration purposes.
-   You are free to use a name that is more suited to your package.
+   You are free to use a name that is better suited to your package.
 
 The key to copying files for stand-alone testing at build time is use
 of the ``run_after`` directive, which ensures the associated files are
@@ -6221,10 +6188,10 @@ Filtering functions
 
 .. code-block:: python
 
-    filter_file(r"^\s*CC\s*=.*", "CC = " + self.data.spack_cc, "Makefile")
-    filter_file(r"^\s*CXX\s*=.*", "CXX = " + self.data.spack_cxx, "Makefile")
-    filter_file(r"^\s*F77\s*=.*", "F77 = " + self.data.spack_f77, "Makefile")
-    filter_file(r"^\s*FC\s*=.*", "FC = " + self.data.spack_fc, "Makefile")
+    filter_file(r"^\s*CC\s*=.*", "CC = " + spack_cc, "Makefile")
+    filter_file(r"^\s*CXX\s*=.*", "CXX = " + spack_cxx, "Makefile")
+    filter_file(r"^\s*F77\s*=.*", "F77 = " + spack_f77, "Makefile")
+    filter_file(r"^\s*FC\s*=.*", "FC = " + spack_fc, "Makefile")
 
 #. Replacing ``#!/usr/bin/perl`` with ``#!/usr/bin/env perl`` in ``bib2xhtml``:
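The ``bib2xhtml`` item referenced above reduces to a one-line ``filter_file`` call along these lines (a sketch, not the verbatim block from the diff):

.. code-block:: python

   # Replace the hard-coded interpreter with an env lookup.
   filter_file("#!/usr/bin/perl", "#!/usr/bin/env perl", "bib2xhtml")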
@@ -6307,10 +6274,29 @@ File functions
 
    .. code-block:: python
 
      with working_dir("libdwarf"):
-         self.pkg.configure("--prefix=" + prefix, "--enable-shared")
-         self.pkg.make()
+         configure("--prefix=" + prefix, "--enable-shared")
+         make()
          install("libdwarf.a", prefix.lib)
 
+#. Many CMake builds require that you build "out of source", that
+   is, in a subdirectory. You can handle creating and ``cd``'ing to
+   the subdirectory like the LLVM package does:
+
+   .. code-block:: python
+
+      with working_dir("spack-build", create=True):
+          cmake("..",
+                "-DLLVM_REQUIRES_RTTI=1",
+                "-DPYTHON_EXECUTABLE=/usr/bin/python",
+                "-DPYTHON_INCLUDE_DIR=/usr/include/python2.6",
+                "-DPYTHON_LIBRARY=/usr/lib64/libpython2.6.so",
+                *std_cmake_args)
+          make()
+          make("install")
+
 The ``create=True`` keyword argument causes the command to create
 the directory if it does not exist.
 
 :py:func:`touch(path) <llnl.util.filesystem.touch>`
    Create an empty file at ``path``.
@@ -7251,7 +7237,7 @@ which are not, there is the `checked_by` parameter in the license directive:
 
     license("<license>", when="<when>", checked_by="<github username>")
 
-When you have validated a github license, either when doing so explicitly or
+When you have validated a package license, either when doing so explicitly or
 as part of packaging a new package, please set the `checked_by` parameter
 to your Github username to signal that the license has been manually
 verified.
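A filled-in directive might read as follows (the values are illustrative):

.. code-block:: python

   license("MIT", when="@2.0:", checked_by="example-user")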
@@ -214,7 +214,7 @@ package versions, simply run the following commands:
 
 Running ``spack mark -i --all`` tells Spack to mark all of the existing
 packages within an environment as "implicitly" installed. This tells
-spack's garbage collection system that these packages should be cleaned up.
+Spack's garbage collection system that these packages should be cleaned up.
 
 Don't worry however, this will not remove your entire environment.
 Running ``spack install`` will reexamine your spack environment after
@@ -1 +0,0 @@
from _pyrsistent_version import *

1 lib/spack/external/_vendoring/altgraph.pyi vendored
@@ -1 +0,0 @@
from altgraph import *

@@ -1,3 +1,3 @@
"""Init file to avoid namespace packages"""

__version__ = "0.2.4"
__version__ = "0.2.5"
@@ -9,8 +9,8 @@
import argparse
import typing

import archspec
import archspec.cpu
import _vendoring.archspec
import _vendoring.archspec.cpu


def _make_parser() -> argparse.ArgumentParser:

@@ -24,7 +24,7 @@ def _make_parser() -> argparse.ArgumentParser:
        "-V",
        help="Show the version and exit.",
        action="version",
        version=f"archspec, version {archspec.__version__}",
        version=f"archspec, version {_vendoring.archspec.__version__}",
    )
    parser.add_argument("--help", "-h", help="Show the help and exit.", action="help")

@@ -45,9 +45,9 @@


def cpu() -> int:
    """Run the `archspec cpu` subcommand."""
    """Run the `_vendoring.archspec.cpu` subcommand."""
    try:
        print(archspec.cpu.host())
        print(_vendoring.archspec.cpu.host())
    except FileNotFoundError as exc:
        print(exc)
        return 1
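The hunks above and below all apply the same mechanical rewrite: every import of a vendored module is prefixed with the ``_vendoring`` namespace package. A hypothetical helper showing the transformation (an illustration, not part of Spack's actual vendoring tooling):

```python
import re

# Top-level modules assumed to be vendored (illustrative list).
VENDORED = ("archspec", "jsonschema", "macholib", "altgraph", "ruamel")

def rewrite_imports(source: str) -> str:
    # Prefix `import X` / `from X ...` with the _vendoring namespace.
    pattern = re.compile(
        r"^(\s*)(import|from)\s+(%s)\b" % "|".join(VENDORED), re.MULTILINE
    )
    return pattern.sub(r"\1\2 _vendoring.\3", source)

print(rewrite_imports("import archspec.cpu\nfrom jsonschema import validate\n"))
# import _vendoring.archspec.cpu
# from _vendoring.jsonschema import validate
```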
@@ -8,9 +8,9 @@
import re
import warnings

import archspec
import archspec.cpu.alias
import archspec.cpu.schema
import _vendoring.archspec
import _vendoring.archspec.cpu.alias
import _vendoring.archspec.cpu.schema

from .alias import FEATURE_ALIASES
from .schema import LazyDictionary

@@ -384,7 +384,7 @@ def fill_target_from_dict(name, data, targets):
    )

    known_targets = {}
    data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
    data = _vendoring.archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
    for name in data:
        if name in known_targets:
            # name was already brought in as ancestor to a target
20 lib/spack/external/_vendoring/archspec/vendor/cpuid/LICENSE vendored Normal file

@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2014 Anders Høst

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
1 lib/spack/external/_vendoring/jsonschema.pyi vendored

@@ -1 +0,0 @@
from jsonschema import *

1 lib/spack/external/_vendoring/macholib.pyi vendored

@@ -1 +0,0 @@
from macholib import *
@@ -1,213 +0,0 @@
# flake8: noqa: E704
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
# Stubs for pyrsistent (Python 3.6)

from typing import Any
from typing import AnyStr
from typing import Callable
from typing import Iterable
from typing import Iterator
from typing import List
from typing import Optional
from typing import Mapping
from typing import MutableMapping
from typing import Sequence
from typing import Set
from typing import Union
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import overload

# see commit 08519aa for explanation of the re-export
from pyrsistent.typing import CheckedKeyTypeError as CheckedKeyTypeError
from pyrsistent.typing import CheckedPMap as CheckedPMap
from pyrsistent.typing import CheckedPSet as CheckedPSet
from pyrsistent.typing import CheckedPVector as CheckedPVector
from pyrsistent.typing import CheckedType as CheckedType
from pyrsistent.typing import CheckedValueTypeError as CheckedValueTypeError
from pyrsistent.typing import InvariantException as InvariantException
from pyrsistent.typing import PClass as PClass
from pyrsistent.typing import PBag as PBag
from pyrsistent.typing import PDeque as PDeque
from pyrsistent.typing import PList as PList
from pyrsistent.typing import PMap as PMap
from pyrsistent.typing import PMapEvolver as PMapEvolver
from pyrsistent.typing import PSet as PSet
from pyrsistent.typing import PSetEvolver as PSetEvolver
from pyrsistent.typing import PTypeError as PTypeError
from pyrsistent.typing import PVector as PVector
from pyrsistent.typing import PVectorEvolver as PVectorEvolver

T = TypeVar('T')
KT = TypeVar('KT')
VT = TypeVar('VT')

def pmap(initial: Union[Mapping[KT, VT], Iterable[Tuple[KT, VT]]] = {}, pre_size: int = 0) -> PMap[KT, VT]: ...
def m(**kwargs: VT) -> PMap[str, VT]: ...

def pvector(iterable: Iterable[T] = ...) -> PVector[T]: ...
def v(*iterable: T) -> PVector[T]: ...

def pset(iterable: Iterable[T] = (), pre_size: int = 8) -> PSet[T]: ...
def s(*iterable: T) -> PSet[T]: ...

# see class_test.py for use cases
Invariant = Tuple[bool, Optional[Union[str, Callable[[], str]]]]

@overload
def field(
    type: Union[Type[T], Sequence[Type[T]]] = ...,
    invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
    initial: Any = object(),
    mandatory: bool = False,
    factory: Callable[[Any], T] = lambda x: x,
    serializer: Callable[[Any, T], Any] = lambda _, value: value,
) -> T: ...
# The actual return value (_PField) is irrelevant after a PRecord has been instantiated,
# see https://github.com/tobgu/pyrsistent/blob/master/pyrsistent/_precord.py#L10
@overload
def field(
    type: Any = ...,
    invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
    initial: Any = object(),
    mandatory: bool = False,
    factory: Callable[[Any], Any] = lambda x: x,
    serializer: Callable[[Any, Any], Any] = lambda _, value: value,
) -> Any: ...

# Use precise types for the simplest use cases, but fall back to Any for
# everything else. See record_test.py for the wide range of possible types for
# item_type
@overload
def pset_field(
    item_type: Type[T],
    optional: bool = False,
    initial: Iterable[T] = ...,
) -> PSet[T]: ...
@overload
def pset_field(
    item_type: Any,
    optional: bool = False,
    initial: Any = (),
) -> PSet[Any]: ...

@overload
def pmap_field(
    key_type: Type[KT],
    value_type: Type[VT],
    optional: bool = False,
    invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[KT, VT]: ...
@overload
def pmap_field(
    key_type: Any,
    value_type: Any,
    optional: bool = False,
    invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[Any, Any]: ...

@overload
def pvector_field(
    item_type: Type[T],
    optional: bool = False,
    initial: Iterable[T] = ...,
) -> PVector[T]: ...
@overload
def pvector_field(
    item_type: Any,
    optional: bool = False,
    initial: Any = (),
) -> PVector[Any]: ...

def pbag(elements: Iterable[T]) -> PBag[T]: ...
def b(*elements: T) -> PBag[T]: ...

def plist(iterable: Iterable[T] = (), reverse: bool = False) -> PList[T]: ...
def l(*elements: T) -> PList[T]: ...

def pdeque(iterable: Optional[Iterable[T]] = None, maxlen: Optional[int] = None) -> PDeque[T]: ...
def dq(*iterable: T) -> PDeque[T]: ...

@overload
def optional(type: T) -> Tuple[T, Type[None]]: ...
@overload
def optional(*typs: Any) -> Tuple[Any, ...]: ...

T_PRecord = TypeVar('T_PRecord', bound='PRecord')
class PRecord(PMap[AnyStr, Any]):
    _precord_fields: Mapping
    _precord_initial_values: Mapping

    def __hash__(self) -> int: ...
    def __init__(self, **kwargs: Any) -> None: ...
    def __iter__(self) -> Iterator[Any]: ...
    def __len__(self) -> int: ...
    @classmethod
    def create(
        cls: Type[T_PRecord],
        kwargs: Mapping,
        _factory_fields: Optional[Iterable] = None,
        ignore_extra: bool = False,
    ) -> T_PRecord: ...
    # This is OK because T_PRecord is a concrete type
    def discard(self: T_PRecord, key: KT) -> T_PRecord: ...
    def remove(self: T_PRecord, key: KT) -> T_PRecord: ...

    def serialize(self, format: Optional[Any] = ...) -> MutableMapping: ...

    # From pyrsistent documentation:
    # This set function differs slightly from that in the PMap
    # class. First of all it accepts key-value pairs. Second it accepts multiple key-value
    # pairs to perform one, atomic, update of multiple fields.
    @overload
    def set(self, key: KT, val: VT) -> Any: ...
    @overload
    def set(self, **kwargs: VT) -> Any: ...

def immutable(
    members: Union[str, Iterable[str]] = '',
    name: str = 'Immutable',
    verbose: bool = False,
) -> Tuple: ...  # actually a namedtuple

# ignore mypy warning "Overloaded function signatures 1 and 5 overlap with
# incompatible return types"
@overload
def freeze(o: Mapping[KT, VT]) -> PMap[KT, VT]: ...  # type: ignore
@overload
def freeze(o: List[T]) -> PVector[T]: ...  # type: ignore
@overload
def freeze(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
@overload
def freeze(o: Set[T]) -> PSet[T]: ...  # type: ignore
@overload
def freeze(o: T) -> T: ...


@overload
def thaw(o: PMap[KT, VT]) -> MutableMapping[KT, VT]: ...  # type: ignore
@overload
def thaw(o: PVector[T]) -> List[T]: ...  # type: ignore
@overload
def thaw(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
# collections.abc.MutableSet is kind of garbage:
# https://stackoverflow.com/questions/24977898/why-does-collections-mutableset-not-bestow-an-update-method
@overload
def thaw(o: PSet[T]) -> Set[T]: ...  # type: ignore
@overload
def thaw(o: T) -> T: ...

def mutant(fn: Callable) -> Callable: ...

def inc(x: int) -> int: ...
@overload
def discard(evolver: PMapEvolver[KT, VT], key: KT) -> None: ...
@overload
def discard(evolver: PVectorEvolver[T], key: int) -> None: ...
@overload
def discard(evolver: PSetEvolver[T], key: T) -> None: ...
def rex(expr: str) -> Callable[[Any], bool]: ...
def ny(_: Any) -> bool: ...

def get_in(keys: Iterable, coll: Mapping, default: Optional[Any] = None, no_default: bool = False) -> Any: ...
1 lib/spack/external/_vendoring/ruamel.pyi vendored

@@ -1 +0,0 @@
from ruamel import *

@@ -1 +0,0 @@
from six import *

@@ -1 +0,0 @@
from six.moves import *

@@ -1 +0,0 @@
from six.moves.configparser import *
81 lib/spack/external/archspec/README.md vendored

@@ -1,81 +0,0 @@
[](https://github.com/archspec/archspec/actions)
[](https://codecov.io/gh/archspec/archspec)
[](https://archspec.readthedocs.io/en/latest/?badge=latest)


# Archspec (Python bindings)

Archspec aims at providing a standard set of human-understandable labels for
various aspects of a system architecture like CPU, network fabrics, etc. and
APIs to detect, query and compare them.

This project grew out of [Spack](https://spack.io/) and is currently under
active development. At present it supports APIs to detect and model
compatibility relationships among different CPU microarchitectures.

## Getting started with development

The `archspec` Python package needs [poetry](https://python-poetry.org/) to
be installed from VCS sources. The preferred method to install it is via
its custom installer outside of any virtual environment:
```console
$ curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python
```
You can refer to [Poetry's documentation](https://python-poetry.org/docs/#installation)
for further details or for other methods to install this tool. You'll also need `tox`
to run unit test:
```console
$ pip install --user tox
```
Finally you'll need to clone the repository:
```console
$ git clone --recursive https://github.com/archspec/archspec.git
```

### Running unit tests
Once you have your environment ready you can run `archspec` unit tests
using ``tox`` from the root of the repository:
```console
$ tox
  [ ... ]
  py27: commands succeeded
  py35: commands succeeded
  py36: commands succeeded
  py37: commands succeeded
  py38: commands succeeded
  pylint: commands succeeded
  flake8: commands succeeded
  black: commands succeeded
  congratulations :)
```

## Citing Archspec

If you are referencing `archspec` in a publication, please cite the following
paper:

* Massimiliano Culpo, Gregory Becker, Carlos Eduardo Arango Gutierrez, Kenneth
  Hoste, and Todd Gamblin.
  [**`archspec`: A library for detecting, labeling, and reasoning about
  microarchitectures**](https://tgamblin.github.io/pubs/archspec-canopie-hpc-2020.pdf).
  In *2nd International Workshop on Containers and New Orchestration Paradigms
  for Isolated Environments in HPC (CANOPIE-HPC'20)*, Online Event, November
  12, 2020.

## License

Archspec is distributed under the terms of both the MIT license and the
Apache License (Version 2.0). Users may choose either license, at their
option.

All new contributions must be made under both the MIT and Apache-2.0
licenses.

See [LICENSE-MIT](https://github.com/archspec/archspec/blob/master/LICENSE-MIT),
[LICENSE-APACHE](https://github.com/archspec/archspec/blob/master/LICENSE-APACHE),
[COPYRIGHT](https://github.com/archspec/archspec/blob/master/COPYRIGHT), and
[NOTICE](https://github.com/archspec/archspec/blob/master/NOTICE) for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

LLNL-CODE-811653
22 lib/spack/external/archspec/json/COPYRIGHT vendored

@@ -1,22 +0,0 @@
Intellectual Property Notice
------------------------------

Archspec is licensed under the Apache License, Version 2.0 (LICENSE-APACHE
or http://www.apache.org/licenses/LICENSE-2.0) or the MIT license,
(LICENSE-MIT or http://opensource.org/licenses/MIT), at your option.

Copyrights and patents in the Archspec project are retained by contributors.
No copyright assignment is required to contribute to Archspec.


SPDX usage
------------

Individual files contain SPDX tags instead of the full license text.
This enables machine processing of license information based on the SPDX
License Identifiers that are available here: https://spdx.org/licenses/

Files that are dual-licensed as Apache-2.0 OR MIT contain the following
text in the license header:

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
1 lib/spack/external/vendor.txt vendored

@@ -9,3 +9,4 @@ macholib==1.16.2
altgraph==0.17.3
ruamel.yaml==0.17.21
typing_extensions==4.1.1
archspec @ git+https://github.com/archspec/archspec.git@38ce485258ffc4fc6dd6688f8dc90cb269478c47
@@ -12,10 +12,9 @@
import warnings
from typing import Optional, Sequence, Union

import _vendoring.archspec.cpu
from _vendoring.typing_extensions import TypedDict

import archspec.cpu

import llnl.util.filesystem as fs
from llnl.util import tty

@@ -138,7 +137,7 @@ def _fix_ext_suffix(candidate_spec: "spack.spec.Spec"):
    }

    # If the current architecture is not problematic return
    generic_target = archspec.cpu.host().family
    generic_target = _vendoring.archspec.cpu.host().family
    if str(generic_target) not in _suffix_to_be_checked:
        return

@@ -235,7 +234,7 @@ def _root_spec(spec_str: str) -> str:
    platform = str(spack.platforms.host())

    spec_str += f" platform={platform}"
    target = archspec.cpu.host().family
    target = _vendoring.archspec.cpu.host().family
    spec_str += f" target={target}"

    tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}")
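The ``archspec.cpu.host().family`` calls being renamed here detect the host microarchitecture and walk up to its generic family; for example (the printed values are illustrative and depend on the machine):

```python
import archspec.cpu

host = archspec.cpu.host()           # microarchitecture of this machine
print(host.name, "->", host.family)  # e.g. "zen2 -> x86_64"
```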
@@ -13,7 +13,7 @@
import sys
from typing import Dict, Optional, Tuple

import archspec.cpu
import _vendoring.archspec.cpu

import spack.compilers.config
import spack.compilers.libraries

@@ -30,7 +30,7 @@ class ClingoBootstrapConcretizer:
    def __init__(self, configuration):
        self.host_platform = spack.platforms.host()
        self.host_os = self.host_platform.default_operating_system()
        self.host_target = archspec.cpu.host().family
        self.host_target = _vendoring.archspec.cpu.host().family
        self.host_architecture = spack.spec.ArchSpec.default_arch()
        self.host_architecture.target = str(self.host_target)
        self.host_compiler = self._valid_compiler_or_raise()
@@ -8,7 +8,7 @@
import sys
from typing import Iterable, List

import archspec.cpu
import _vendoring.archspec.cpu

from llnl.util import tty

@@ -51,7 +51,7 @@ def environment_root(cls) -> pathlib.Path:
    """Environment root directory"""
    bootstrap_root_path = root_path()
    python_part = spec_for_current_python().replace("@", "")
    arch_part = archspec.cpu.host().family
    arch_part = _vendoring.archspec.cpu.host().family
    interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5]
    environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
    return pathlib.Path(

@@ -112,7 +112,7 @@ def _write_spack_yaml_file(self) -> None:
    context = {
        "python_spec": spec_for_current_python(),
        "python_prefix": sys.exec_prefix,
        "architecture": archspec.cpu.host().family,
        "architecture": _vendoring.archspec.cpu.host().family,
        "environment_path": self.environment_root(),
        "environment_specs": self.spack_dev_requirements(),
        "store_path": store_path(),
@@ -59,7 +59,7 @@
    overload,
)

import archspec.cpu
import _vendoring.archspec.cpu

import llnl.util.tty as tty
from llnl.string import plural

@@ -68,9 +68,6 @@
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize

import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers.libraries
import spack.config

@@ -443,10 +440,12 @@ def optimization_flags(compiler, target):
    # Try to check if the current compiler comes with a version number or
    # has an unexpected suffix. If so, treat it as a compiler with a
    # custom spec.
    version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
    version_number, _ = _vendoring.archspec.cpu.version_components(
        compiler.version.dotted_numeric_string
    )
    try:
        result = target.optimization_flags(compiler.name, version_number)
    except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
    except (ValueError, _vendoring.archspec.cpu.UnsupportedMicroarchitecture):
        result = ""

    return result
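For context, ``target.optimization_flags`` asks an archspec microarchitecture for compiler flags tuned to it; the target and compiler version below are example values:

```python
import archspec.cpu

target = archspec.cpu.TARGETS["haswell"]
# Returns flags like "-march=haswell -mtune=haswell" (exact output may vary).
print(target.optimization_flags("gcc", "9.3.0"))
```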
@@ -567,9 +566,6 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):

    jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
    module.make_jobs = jobs
    if context == Context.BUILD:
        module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
        module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")

@@ -998,15 +994,6 @@ def set_all_package_py_globals(self):
            pkg.setup_dependent_package(dependent_module, spec)
            dependent_module.propagate_changes_to_mro()

        if self.context == Context.BUILD:
            pkg = self.specs[0].package
            module = ModuleChangePropagator(pkg)
            # std_cmake_args is not sufficiently static to be defined
            # in set_package_py_globals and is deprecated so its handled
            # here as a special case
            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
            module.propagate_changes_to_mro()

    def get_env_modifications(self) -> EnvironmentModifications:
        """Returns the environment variable modifications for the given input specs and context.

        Environment modifications include:
@@ -1573,31 +1560,9 @@ class ModuleChangePropagator:
    def __init__(self, package: spack.package_base.PackageBase) -> None:
        self._set_self_attributes("package", package)
        self._set_self_attributes("current_module", package.module)
        self._set_self_attributes("_set_attributes", {})

        #: Modules for the classes in the MRO up to PackageBase
        modules_in_mro = []

        # New API: the package class has a "data" annotation, which is a dataclass we instantiate
        # class MyPackage(Package):
        #
        #     class Data:
        #         foo: str
        #         bar: int
        #
        #     data: Data
        if hasattr(package, "data"):
            return

        for cls in package.__class__.__mro__:
            if not hasattr(cls, "__annotations__") or "data" not in cls.__annotations__:
                continue
            setattr(package, "data", cls.__annotations__["data"]())
            return

        # Old API: we define globals on the package module. This is deprecated, because modules
        # have a one to many relationship with package instances, and DAGs can contain multiple
        # instances of the same package.
        for cls in package.__class__.__mro__:
            module = getattr(cls, "module", None)

@@ -1609,6 +1574,7 @@ def __init__(self, package: spack.package_base.PackageBase) -> None:

            modules_in_mro.append(module)
        self._set_self_attributes("modules_in_mro", modules_in_mro)
        self._set_self_attributes("_set_attributes", {})

    def _set_self_attributes(self, key, value):
        super().__setattr__(key, value)

@@ -1625,11 +1591,5 @@ def __setattr__(self, key, value):
        self._set_attributes[key] = value

    def propagate_changes_to_mro(self):
        # New API: update the data class of the package instance
        if hasattr(self.package, "data"):
            self.package.data.__dict__.update(self._set_attributes)
            return

        # Old API: set globals for every module in the package's MRO
        for module_in_mro in self.modules_in_mro:
            module_in_mro.__dict__.update(self._set_attributes)
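A minimal sketch of the "new API" described in the comments above, where the package class carries a ``data`` annotation and attribute changes land on the dataclass instance rather than on module globals (class and field names are illustrative):

```python
from dataclasses import dataclass

class MyPackage:
    @dataclass
    class Data:
        foo: str = ""
        bar: int = 0

    data: "MyPackage.Data"

pkg = MyPackage()
pkg.data = MyPackage.Data()               # what __init__ instantiates
pkg.data.__dict__.update({"foo": "baz"})  # what propagate_changes_to_mro does
print(pkg.data.foo)                       # -> baz
```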
@@ -1,660 +0,0 @@
====================================
Development Notes on Intel Packages
====================================

These are notes for concepts and development of
lib/spack/spack/build_systems/intel.py .

For documentation on how to *use* ``IntelPackage``, see
lib/spack/docs/build_systems/intelpackage.rst .

-------------------------------------------------------------------------------
Installation and path handling as implemented in ./intel.py
-------------------------------------------------------------------------------


***************************************************************************
Prefix differences between Spack-external and Spack-internal installations
***************************************************************************


Problem summary
~~~~~~~~~~~~~~~~

For Intel packages that were installed external to Spack, ``self.prefix`` will
be a *component-specific* path (e.g. to an MKL-specific dir hierarchy), whereas
for a package installed by Spack itself, ``self.prefix`` will be a
*vendor-level* path that holds one or more components (or parts thereof), and
must be further qualified down to a particular desired component.

It is possible that a similar conceptual difference is inherent to other
package families that use a common vendor-style installer.


Description
~~~~~~~~~~~~

Spack makes packages available through two routes, let's call them A and B:

A. Packages pre-installed external to Spack and configured *for* Spack
B. Packages built and installed *by* Spack.

For a user who is interested in building end-user applications, it should not
matter through which route any of its dependent packages has been installed.
Most packages natively support a ``prefix`` concept which unifies the two
routes just fine.

Intel packages, however, are more complicated because they consist of a number
of components that are released as a suite of varying extent, like "Intel
Parallel Studio *Foo* Edition", or subsetted into products like "MKL" or "MPI",
each of which also contain libraries from other components like the compiler
runtime and multithreading libraries. For this reason, an Intel package is
"anchored" during installation at a directory level higher than just the
user-facing directory that has the conventional hierarchy of ``bin``, ``lib``,
and others relevant for the end-product.

As a result, internal to Spack, there is a conceptual difference in what
``self.prefix`` represents for the two routes.

For route A, consider MKL installed outside of Spack. It will likely be one
product component among other products, at one particular release among others
that are installed in sibling or cousin directories on the local system.
Therefore, the path given to Spack in ``packages.yaml`` should be a
*product-specific and fully version-specific* directory. E.g., for an
``intel-mkl`` package, ``self.prefix`` should look like::

   /opt/intel/compilers_and_libraries_2018.1.163/linux/mkl

In this route, the interaction point with the user is encapsulated in an
environment variable which will be (in pseudo-code)::

   MKLROOT := {self.prefix}

For route B, a Spack-based installation of MKL will be placed in the directory
given to the ``./install.sh`` script of Intel's package distribution. This
directory is taken to be the *vendor*-specific anchor directory, playing the
same role as the default ``/opt/intel``. In this case, ``self.prefix`` will
be::

   $SPACK_ROOT/opt/spack/linux-centos6-x86_64/gcc-4.9.3/intel-mkl-2018.1.163-<HASH>

However, now the environment variable will have to be constructed as *several
directory levels down*::

   MKLROOT := {self.prefix}/compilers_and_libraries_2018.1.163/linux/mkl

A recent post on the Spack mailing list illustrates the confusion when route A
was taken while route B was the only one that was coded in Spack:
https://groups.google.com/d/msg/spack/x28qlmqPAys/Ewx6220uAgAJ


Solution
~~~~~~~~~

Introduce a series of functions which will return the appropriate
directories, regardless of whether the Intel package has been installed
external or internal to Spack:

   ==========================  ==================================================
   Function                    Example return values
   --------------------------  --------------------------------------------------
   normalize_suite_dir()       Spack-external installation:
                               /opt/intel/compilers_and_libraries_2018.1.163
                               Spack-internal installation:
                               $SPACK_ROOT/...<HASH>/compilers_and_libraries_2018.1.163
   --------------------------  --------------------------------------------------
   normalize_path('mkl')       <suite_dir>/linux/mkl
   component_bin_dir()         <suite_dir>/linux/mkl/bin
   component_lib_dir()         <suite_dir>/linux/mkl/lib/intel64
   --------------------------  --------------------------------------------------
   normalize_path('mpi')       <suite_dir>/linux/mpi
   component_bin_dir('mpi')    <suite_dir>/linux/mpi/intel64/bin
   component_lib_dir('mpi')    <suite_dir>/linux/mpi/intel64/lib
   ==========================  ==================================================
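A hedged sketch of the idea behind ``normalize_suite_dir()`` (the real method in intel.py is considerably more involved): locate the versioned suite directory whether the prefix points inside it (route A) or above it (route B):

```python
import os

def normalize_suite_dir_sketch(prefix: str) -> str:
    # Illustrative only; not Spack's actual implementation.
    parts = prefix.rstrip(os.sep).split(os.sep)
    # Route A: the prefix is already inside a suite dir; walk up to it.
    for i, part in enumerate(parts):
        if part.startswith("compilers_and_libraries_"):
            return os.sep.join(parts[: i + 1])
    # Route B: a Spack prefix; the suite dir lives directly underneath.
    for entry in sorted(os.listdir(prefix)):
        if entry.startswith("compilers_and_libraries_"):
            return os.path.join(prefix, entry)
    return prefix
```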
*********************************
Analysis of directory layouts
*********************************

Let's look at some sample directory layouts, using ``ls -lF``,
but focusing on names and symlinks only.

Spack-born installation of ``intel-mkl@2018.1.163``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

::

   $ ls -l <prefix>

   bin/
      - compilervars.*sh (symlinked) ONLY

   compilers_and_libraries -> compilers_and_libraries_2018
      - generically-named entry point, stable across versions (one hopes)

   compilers_and_libraries_2018/
      - vaguely-versioned dirname, holding a stub hierarchy --ignorable

      $ ls -l compilers_and_libraries_2018/linux/
      bin            - actual compilervars.*sh (reg. files) ONLY
      documentation  -> ../../documentation_2018/
      lib            -> ../../compilers_and_libraries_2018.1.163/linux/compiler/lib/
      mkl            -> ../../compilers_and_libraries_2018.1.163/linux/mkl/
      pkg_bin        -> ../../compilers_and_libraries_2018.1.163/linux/bin/
      samples        -> ../../samples_2018/
      tbb            -> ../../compilers_and_libraries_2018.1.163/linux/tbb/

   compilers_and_libraries_2018.1.163/
      - Main "product" + a minimal set of libs from related products

      $ ls -l compilers_and_libraries_2018.1.163/linux/
      bin/       - compilervars.*sh, link_install*sh ONLY
      mkl/       - Main Product ==> to be assigned to MKLROOT
      compiler/  - lib/intel64_lin/libiomp5* ONLY
      tbb/       - tbb/lib/intel64_lin/gcc4.[147]/libtbb*.so* ONLY

   parallel_studio_xe_2018 -> parallel_studio_xe_2018.1.038/
   parallel_studio_xe_2018.1.038/
      - Alternate product packaging - ignorable

      $ ls -l parallel_studio_xe_2018.1.038/
      bin/   - actual psxevars.*sh (reg. files)
      compilers_and_libraries_2018 -> <full_path>/comp...aries_2018.1.163
      documentation_2018 -> <full_path_prefix>/documentation_2018
      samples_2018 -> <full_path_prefix>/samples_2018
      ...

   documentation_2018/
   samples_2018/
   lib -> compilers_and_libraries/linux/lib/
   mkl -> compilers_and_libraries/linux/mkl/
   tbb -> compilers_and_libraries/linux/tbb/
      - auxiliaries and convenience links

Spack-external installation of Intel-MPI 2018
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

For MPI, the layout is slightly different than MKL. The prefix will have to
include an architecture directory (typically ``intel64``), which then contains
bin/, lib/, ..., all without further architecture branching. The environment
variable ``I_MPI_ROOT`` from the API documentation, however, must be the
package's top directory, not including the architecture.

FIXME: For MANPATH, need the parent dir.

::

   $ ls -lF /opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/
   bin64 -> intel64/bin/
   etc64 -> intel64/etc/
   include64 -> intel64/include/
   lib64 -> intel64/lib/

   benchmarks/
   binding/
   intel64/
   man/
   test/

The package contains an MPI-2019 preview; curiously, its release notes contain
the tag: "File structure clean-up." I could not find further documentation on
this, however, so it is unclear what, if any, changes will make it to release.

https://software.intel.com/en-us/articles/restoring-legacy-path-structure-on-intel-mpi-library-2019

::

   $ ls -lF /opt/intel/compilers_and_libraries_2018.1.163/linux/mpi_2019/
   binding/
   doc/
   imb/
   intel64/
   man/
   test/

Spack-external installation of Intel Parallel Studio 2018
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This is the main product bundle that I actually downloaded and installed on my
system. Its nominal installation directory mostly holds merely symlinks
to components installed in sibling dirs::

   $ ls -lF /opt/intel/parallel_studio_xe_2018.1.038/
   advisor_2018 -> /opt/intel/advisor_2018/
   clck_2018 -> /opt/intel/clck/2018.1/
   compilers_and_libraries_2018 -> /opt/intel/comp....aries_2018.1.163/
   documentation_2018 -> /opt/intel/documentation_2018/
   ide_support_2018 -> /opt/intel/ide_support_2018/
   inspector_2018 -> /opt/intel/inspector_2018/
   itac_2018 -> /opt/intel/itac/2018.1.017/
   man -> /opt/intel/man/
   samples_2018 -> /opt/intel/samples_2018/
   vtune_amplifier_2018 -> /opt/intel/vtune_amplifier_2018/

   psxevars.csh -> ./bin/psxevars.csh*
   psxevars.sh -> ./bin/psxevars.sh*
   bin/   - *vars.*sh scripts + sshconnectivity.exp ONLY

   licensing/
   uninstall*

The only relevant regular files are ``*vars.*sh``, but those also just churn
through the subordinate vars files of the components.
Installation model
~~~~~~~~~~~~~~~~~~~~

Intel packages come with an ``install.sh`` script that is normally run
interactively (in either text or GUI mode) but can run unattended with a
``--silent <file>`` option, which is of course what Spack uses.

Format of configuration file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The configuration file is conventionally called ``silent.cfg`` and has a simple
``token=value`` syntax. Before using the configuration file, the installer
calls ``<staging_dir>/pset/check.awk`` to validate it. Example paths to the
validator are::

   .../l_mkl_2018.1.163/pset/check.awk .
   .../parallel_studio_xe_2018_update1_cluster_edition/pset/check.awk

The tokens that are accepted in the configuration file vary between packages.
Tokens not supported for a given package **will cause the installer to stop
and fail.** This is particularly relevant for license-related tokens, which are
accepted only for packages that actually require a license.

Reference: [Intel's documentation](https://software.intel.com/en-us/articles/configuration-file-format)

See also: https://software.intel.com/en-us/articles/silent-installation-guide-for-intel-parallel-studio-xe-composer-edition-for-os-x

The following is from ``.../parallel_studio_xe_2018_update1_cluster_edition/pset/check.awk``:

* Tokens valid for all packages encountered::

   ACCEPT_EULA                         {accept, decline}
   CONTINUE_WITH_OPTIONAL_ERROR        {yes, no}
   PSET_INSTALL_DIR                    {/opt/intel, , filepat}
   CONTINUE_WITH_INSTALLDIR_OVERWRITE  {yes, no}
   COMPONENTS                          {ALL, DEFAULTS, , anythingpat}
   PSET_MODE                           {install, repair, uninstall}
   NONRPM_DB_DIR                       {, filepat}

   SIGNING_ENABLED                     {yes, no}
   ARCH_SELECTED                       {IA32, INTEL64, ALL}

* Mentioned but unexplained in ``check.awk``::

   NO_VALIDATE                         (?!)

* Only for licensed packages::

   ACTIVATION_SERIAL_NUMBER            {, snpat}
   ACTIVATION_LICENSE_FILE             {, lspat, filepat}
   ACTIVATION_TYPE                     {exist_lic, license_server,
                                        license_file, trial_lic,
                                        serial_number}
   PHONEHOME_SEND_USAGE_DATA           {yes, no}

* Only for Amplifier (obviously)::

   AMPLIFIER_SAMPLING_DRIVER_INSTALL_TYPE  {build, kit}
   AMPLIFIER_DRIVER_ACCESS_GROUP           {, anythingpat, vtune}
   AMPLIFIER_DRIVER_PERMISSIONS            {, anythingpat, 666}
   AMPLIFIER_LOAD_DRIVER                   {yes, no}
   AMPLIFIER_C_COMPILER                    {, filepat, auto, none}
   AMPLIFIER_KERNEL_SRC_DIR                {, filepat, auto, none}
   AMPLIFIER_MAKE_COMMAND                  {, filepat, auto, none}
   AMPLIFIER_INSTALL_BOOT_SCRIPT           {yes, no}
   AMPLIFIER_DRIVER_PER_USER_MODE          {yes, no}

* Only for MKL and Studio::

   CLUSTER_INSTALL_REMOTE              {yes, no}
   CLUSTER_INSTALL_TEMP                {, filepat}
   CLUSTER_INSTALL_MACHINES_FILE       {, filepat}

* "backward compatibility" (?)::

   INSTALL_MODE                        {RPM, NONRPM}
   download_only                       {yes}
   download_dir                        {, filepat}
Details for licensing tokens
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Quoted from
https://software.intel.com/en-us/articles/configuration-file-format,
for reference:

[ed. note: As of 2018-05, the page incorrectly references ``ACTIVATION``, which
was used only until about 2012; this is corrected to ``ACTIVATION_TYPE`` here.]

...

``ACTIVATION_TYPE=exist_lic``
   This directive tells the install program to look for an existing
   license during the install process. This is the preferred method for
   silent installs. Take the time to register your serial number and get
   a license file (see below). Having a license file on the system
   simplifies the process. In addition, as an administrator it is good
   practice to know WHERE your licenses are saved on your system.
   License files are plain text files with a .lic extension. By default
   these are saved in /opt/intel/licenses which is searched by default.
   If you save your license elsewhere, perhaps under an NFS folder, set
   environment variable **INTEL_LICENSE_FILE** to the full path to your
   license file prior to starting the installation or use the
   configuration file directive ``ACTIVATION_LICENSE_FILE`` to specify the
   full pathname to the license file.

Options for ``ACTIVATION_TYPE`` are ``{ exist_lic, license_file, server_lic,
serial_number, trial_lic }``

``exist_lic``
   directs the installer to search for a valid license on the server.
   Searches will utilize the environment variable **INTEL_LICENSE_FILE**,
   search the default license directory /opt/intel/licenses, or use the
   ``ACTIVATION_LICENSE_FILE`` directive to find a valid license file.

``license_file``
   is similar to exist_lic but directs the installer to use
   ``ACTIVATION_LICENSE_FILE`` to find the license file.

``server_lic``
   is similar to exist_lic and exist_lic but directs the installer that
   this is a client installation and a floating license server will be
   contacted to active the product. This option will contact your
   floating license server on your network to retrieve the license
   information. BEFORE using this option make sure your client is
   correctly set up for your network including all networking, routing,
   name service, and firewall configuration. Insure that your client has
   direct access to your floating license server and that firewalls are
   set up to allow TCP/IP access for the 2 license server ports.
   server_lic will use **INTEL_LICENSE_FILE** containing a port@host format
   OR a client license file. The formats for these are described here
   https://software.intel.com/en-us/articles/licensing-setting-up-the-client-floating-license

``serial_number``
   directs the installer to use directive ``ACTIVATION_SERIAL_NUMBER`` for
   activation. This method will require the installer to contact an
   external Intel activation server over the Internet to confirm your
   serial number. Due to user and company firewalls, this method is more
   complex and hence error prone of the available activation methods. We
   highly recommend using a license file or license server for activation
   instead.

``trial_lic``
   is used only if you do not have an existing license and intend to
   temporarily evaluate the compiler. This method creates a temporary
   trial license in Trusted Storage on your system.

...
*******************
vars files
*******************

Intel's product packages contain a number of shell initialization files; let's call them vars files.

There are three kinds:

#. Component-specific vars files, such as `mklvars` or `tbbvars`.
#. Toplevel vars files such as "psxevars". They will scan for all
   component-specific vars files associated with the product, and source them
   if found.
#. Symbolic links to either of them. Links may appear under a different name
   for backward compatibility.

At present, the IntelPackage class is only concerned with the toplevel vars files,
generally found in the product's toplevel bin/ directory.

For reference, here is an overview of the names and locations of the vars files
in the 2018 product releases, as seen for a Spack-native installation. NB: May be
incomplete as some components may have been omitted during installation.

Names of vars files seen::

   $ cd opt/spack/linux-centos6-x86_64
   $ find intel* -name \*vars.sh -printf '%f\n' | sort -u | nl
        1  advixe-vars.sh
        2  amplxe-vars.sh
        3  apsvars.sh
        4  compilervars.sh
        5  daalvars.sh
        6  debuggervars.sh
        7  iccvars.sh
        8  ifortvars.sh
        9  inspxe-vars.sh
       10  ippvars.sh
       11  mklvars.sh
       12  mpivars.sh
       13  pstlvars.sh
       14  psxevars.sh
       15  sep_vars.sh
       16  tbbvars.sh

Names and locations of vars files, sorted by Spack package name::

   $ cd opt/spack/linux-centos6-x86_64
   $ find intel* -name \*vars.sh -printf '%y\t%-15f\t%h\n' \
       | cut -d/ -f1,4- \
       | sed '/iccvars\|ifortvars/d; s,/,\t\t,; s,\.sh,,; s, */\(intel[/-]\),\1,' \
       | sort -k3,3 -k2,2 \
       | nl \
       | awk '{printf "%6i %-2s %-16s %-24s %s\n", $1, $2, $3, $4, $5}'

   --------------------------------------------------------------------------------------------------------
   item no.
      file or link
         name of vars file
            Spack package name
               dir relative to Spack install dir
   --------------------------------------------------------------------------------------------------------

        1 f  mpivars          intel                    compilers_and_libraries_2018.1.163/linux/mpi/intel64/bin
        2 f  mpivars          intel                    compilers_and_libraries_2018.1.163/linux/mpirt/bin/ia32_lin
        3 f  tbbvars          intel                    compilers_and_libraries_2018.1.163/linux/tbb/bin
        4 f  pstlvars         intel                    compilers_and_libraries_2018.1.163/linux/pstl/bin
        5 f  compilervars     intel                    compilers_and_libraries_2018.1.163/linux/bin
        6 f  compilervars     intel                    compilers_and_libraries_2018/linux/bin
        7 l  compilervars     intel                    bin
        8 f  daalvars         intel-daal               compilers_and_libraries_2018.2.199/linux/daal/bin
        9 f  psxevars         intel-daal               parallel_studio_xe_2018.2.046/bin
       10 l  psxevars         intel-daal               parallel_studio_xe_2018.2.046
       11 f  compilervars     intel-daal               compilers_and_libraries_2018.2.199/linux/bin
       12 f  compilervars     intel-daal               compilers_and_libraries_2018/linux/bin
       13 l  compilervars     intel-daal               bin
       14 f  ippvars          intel-ipp                compilers_and_libraries_2018.2.199/linux/ipp/bin
       15 f  psxevars         intel-ipp                parallel_studio_xe_2018.2.046/bin
       16 l  psxevars         intel-ipp                parallel_studio_xe_2018.2.046
       17 f  compilervars     intel-ipp                compilers_and_libraries_2018.2.199/linux/bin
       18 f  compilervars     intel-ipp                compilers_and_libraries_2018/linux/bin
       19 l  compilervars     intel-ipp                bin
       20 f  mklvars          intel-mkl                compilers_and_libraries_2018.2.199/linux/mkl/bin
       21 f  psxevars         intel-mkl                parallel_studio_xe_2018.2.046/bin
       22 l  psxevars         intel-mkl                parallel_studio_xe_2018.2.046
       23 f  compilervars     intel-mkl                compilers_and_libraries_2018.2.199/linux/bin
       24 f  compilervars     intel-mkl                compilers_and_libraries_2018/linux/bin
       25 l  compilervars     intel-mkl                bin
       26 f  mpivars          intel-mpi                compilers_and_libraries_2018.2.199/linux/mpi_2019/intel64/bin
       27 f  mpivars          intel-mpi                compilers_and_libraries_2018.2.199/linux/mpi/intel64/bin
       28 f  psxevars         intel-mpi                parallel_studio_xe_2018.2.046/bin
       29 l  psxevars         intel-mpi                parallel_studio_xe_2018.2.046
       30 f  compilervars     intel-mpi                compilers_and_libraries_2018.2.199/linux/bin
       31 f  compilervars     intel-mpi                compilers_and_libraries_2018/linux/bin
       32 l  compilervars     intel-mpi                bin
       33 f  apsvars          intel-parallel-studio    vtune_amplifier_2018.1.0.535340
       34 l  apsvars          intel-parallel-studio    performance_snapshots_2018.1.0.535340
       35 f  ippvars          intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/ipp/bin
       36 f  ippvars          intel-parallel-studio    composer_xe_2015.6.233/ipp/bin
       37 f  mklvars          intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/mkl/bin
       38 f  mklvars          intel-parallel-studio    composer_xe_2015.6.233/mkl/bin
       39 f  mpivars          intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/mpi/intel64/bin
       40 f  mpivars          intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/mpirt/bin/ia32_lin
       41 f  tbbvars          intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/tbb/bin
       42 f  tbbvars          intel-parallel-studio    composer_xe_2015.6.233/tbb/bin
       43 f  daalvars         intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/daal/bin
       44 f  pstlvars         intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/pstl/bin
       45 f  psxevars         intel-parallel-studio    parallel_studio_xe_2018.1.038/bin
       46 l  psxevars         intel-parallel-studio    parallel_studio_xe_2018.1.038
       47 f  sep_vars         intel-parallel-studio    vtune_amplifier_2018.1.0.535340
       48 f  sep_vars         intel-parallel-studio    vtune_amplifier_2018.1.0.535340/target/android_v4.1_x86_64
       49 f  advixe-vars      intel-parallel-studio    advisor_2018.1.1.535164
       50 f  amplxe-vars      intel-parallel-studio    vtune_amplifier_2018.1.0.535340
       51 f  inspxe-vars      intel-parallel-studio    inspector_2018.1.1.535159
       52 f  compilervars     intel-parallel-studio    compilers_and_libraries_2018.1.163/linux/bin
       53 f  compilervars     intel-parallel-studio    compilers_and_libraries_2018/linux/bin
       54 l  compilervars     intel-parallel-studio    bin
       55 f  debuggervars     intel-parallel-studio    debugger_2018/bin
********************
MPI linkage
********************


Library selection
~~~~~~~~~~~~~~~~~~~~~

In the Spack code so far, the library selections for MPI are:

::

   libnames = ['libmpifort', 'libmpi']
   if 'cxx' in self.spec.last_query.extra_parameters:
       libnames = ['libmpicxx'] + libnames
   return find_libraries(libnames,
                         root=self.component_lib_dir('mpi'),
                         shared=True, recursive=False)

The problem is that there are multiple library versions under ``component_lib_dir``::

   $ cd $I_MPI_ROOT
   $ find . -name libmpi.so | sort
   ./intel64/lib/debug/libmpi.so
   ./intel64/lib/debug_mt/libmpi.so
   ./intel64/lib/libmpi.so
   ./intel64/lib/release/libmpi.so
   ./intel64/lib/release_mt/libmpi.so

"mt" refers to multi-threading, not in the explicit sense but in the sense of being thread-safe::

   $ mpiifort -help | grep mt
       -mt_mpi         link the thread safe version of the Intel(R) MPI Library

Well, why should we not inspect what the canonical script does? The wrapper
has its own hardcoded "prefix=..." and can thus tell us what it will do, from a
*wiped environment* no less!::

   $ env - intel64/bin/mpiicc -show hello.c | ld-unwrap-args
   icc 'hello.c' \
       -I/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/include \
       -L/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib/release_mt \
       -L/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib \
       -Xlinker --enable-new-dtags \
       -Xlinker -rpath=/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib/release_mt \
       -Xlinker -rpath=/opt/intel/compilers_and_libraries_2018.1.163/linux/mpi/intel64/lib \
       -Xlinker -rpath=/opt/intel/mpi-rt/2017.0.0/intel64/lib/release_mt \
       -Xlinker -rpath=/opt/intel/mpi-rt/2017.0.0/intel64/lib \
       -lmpifort \
       -lmpi \
       -lmpigi \
       -ldl \
       -lrt \
       -lpthread


MPI Wrapper options
~~~~~~~~~~~~~~~~~~~~~

For reference, here's the wrapper's builtin help output::

   $ mpiifort -help
   Simple script to compile and/or link MPI programs.
   Usage: mpiifort [options] <files>
   ----------------------------------------------------------------------------
   The following options are supported:
      -fc=<name> | -f90=<name>
                       specify a FORTRAN compiler name: i.e. -fc=ifort
      -echo            print the scripts during their execution
      -show            show command lines without real calling
      -config=<name>   specify a configuration file: i.e. -config=ifort for mpif90-ifort.conf file
      -v               print version info of mpiifort and its native compiler
      -profile=<name>  specify a profile configuration file (an MPI profiling
                       library): i.e. -profile=myprofile for the myprofile.cfg file.
                       As a special case, lib<name>.so or lib<name>.a may be used
                       if the library is found
      -check_mpi       link against the Intel(R) Trace Collector (-profile=vtmc).
      -static_mpi      link the Intel(R) MPI Library statically
      -mt_mpi          link the thread safe version of the Intel(R) MPI Library
      -ilp64           link the ILP64 support of the Intel(R) MPI Library
      -no_ilp64        disable ILP64 support explicitly
      -fast            the same as -static_mpi + pass -fast option to a compiler.
      -t or -trace
                       link against the Intel(R) Trace Collector
      -trace-imbalance
                       link against the Intel(R) Trace Collector imbalance library
                       (-profile=vtim)
      -dynamic_log     link against the Intel(R) Trace Collector dynamically
      -static          use static linkage method
      -nostrip         turn off the debug information stripping during static linking
      -O               enable optimization
      -link_mpi=<name>
                       link against the specified version of the Intel(R) MPI Library
   All other options will be passed to the compiler without changing.
   ----------------------------------------------------------------------------
   The following environment variables are used:
      I_MPI_ROOT      the Intel(R) MPI Library installation directory path
      I_MPI_F90 or MPICH_F90
                      the path/name of the underlying compiler to be used
      I_MPI_FC_PROFILE or I_MPI_F90_PROFILE or MPIF90_PROFILE
                      the name of profile file (without extension)
      I_MPI_COMPILER_CONFIG_DIR
                      the folder which contains configuration files *.conf
      I_MPI_TRACE_PROFILE
                      specify a default profile for the -trace option
      I_MPI_CHECK_PROFILE
                      specify a default profile for the -check_mpi option
      I_MPI_CHECK_COMPILER
                      enable compiler setup checks
      I_MPI_LINK      specify the version of the Intel(R) MPI Library
      I_MPI_DEBUG_INFO_STRIP
                      turn on/off the debug information stripping during static linking
      I_MPI_FCFLAGS
                      special flags needed for compilation
      I_MPI_LDFLAGS
                      special flags needed for linking
   ----------------------------------------------------------------------------


Side Note: MPI version divergence in 2015 release
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The package `intel-parallel-studio@cluster.2015.6` contains both a full MPI
development version in `$prefix/impi` and an MPI Runtime under the
`composer_xe*` suite directory. Curiously, these have *different versions*,
with a release date nearly 1 year apart::

   $ $SPACK_ROOT/...uaxaw7/impi/5.0.3.049/intel64/bin/mpiexec --version
   Intel(R) MPI Library for Linux* OS, Version 5.0 Update 3 Build 20150804 (build id: 12452)
   Copyright (C) 2003-2015, Intel Corporation. All rights reserved.

   $ $SPACK_ROOT/...uaxaw7/composer_xe_2015.6.233/mpirt/bin/intel64/mpiexec --version
   Intel(R) MPI Library for Linux* OS, Version 5.0 Update 1 Build 20140709
   Copyright (C) 2003-2014, Intel Corporation. All rights reserved.

I'm not sure what to make of it.


**************
macOS support
**************

- On macOS, the Spack methods here only include support to integrate an
  externally installed MKL.

- URLs in child packages will be Linux-specific; macOS download packages
  are located in differently numbered dirs and are named m_*.dmg.
File diff suppressed because it is too large
@@ -23,7 +23,7 @@
_BUILDERS: Dict[int, "Builder"] = {}


def builder(build_system_name: str):
def register_builder(build_system_name: str):
    """Class decorator used to register the default builder
    for a given build-system.
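The rename keeps the registry pattern intact; schematically (an illustration keyed by name here, whereas Spack's actual ``_BUILDERS`` is keyed by ``Dict[int, "Builder"]``):

```python
from typing import Callable, Dict, Type

_BUILDERS: Dict[str, Type] = {}

def register_builder(build_system_name: str) -> Callable[[Type], Type]:
    def _decorator(cls: Type) -> Type:
        _BUILDERS[build_system_name] = cls  # register the default builder
        return cls
    return _decorator

@register_builder("cmake")
class CMakeBuilder:
    pass

assert _BUILDERS["cmake"] is CMakeBuilder
```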
@@ -5,7 +5,7 @@
import collections
import warnings

import archspec.cpu
import _vendoring.archspec.cpu

import llnl.util.tty.colify as colify
import llnl.util.tty.color as color

@@ -92,11 +92,11 @@ def display_target_group(header, target_group):
def arch(parser, args):
    if args.generic_target:
        # TODO: add deprecation warning in 0.24
        print(archspec.cpu.host().generic)
        print(_vendoring.archspec.cpu.host().generic)
        return

    if args.known_targets:
        display_targets(archspec.cpu.TARGETS)
        display_targets(_vendoring.archspec.cpu.TARGETS)
        return

    if args.frontend:
@@ -52,6 +52,7 @@
 # See the Spack documentation for more information on packaging.
 # ----------------------------------------------------------------------------

+{package_class_import}
 from spack.package import *


@@ -85,6 +86,7 @@ class BundlePackageTemplate:
     """

     base_class_name = "BundlePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.bundle import BundlePackage"

     dependencies = """\
     # FIXME: Add dependencies if required.
@@ -114,6 +116,7 @@ def write(self, pkg_path):
             name=self.name,
             class_name=self.class_name,
             base_class_name=self.base_class_name,
+            package_class_import=self.package_class_import,
             url_def=self.url_def,
             versions=self.versions,
             dependencies="\n".join(all_deps),
@@ -126,6 +129,7 @@ class PackageTemplate(BundlePackageTemplate):
     """Provides the default values to be used for the package file template"""

     base_class_name = "Package"
+    package_class_import = "from spack_repo.builtin.build_systems.generic import Package"

     body_def = """\
     def install(self, spec, prefix):
@@ -146,6 +150,9 @@ class AutotoolsPackageTemplate(PackageTemplate):
     that *do* come with a ``configure`` script"""

     base_class_name = "AutotoolsPackage"
+    package_class_import = (
+        "from spack_repo.builtin.build_systems.autotools import AutotoolsPackage"
+    )

     body_def = """\
     def configure_args(self):
@@ -160,6 +167,9 @@ class AutoreconfPackageTemplate(PackageTemplate):
     that *do not* come with a ``configure`` script"""

     base_class_name = "AutotoolsPackage"
+    package_class_import = (
+        "from spack_repo.builtin.build_systems.autotools import AutotoolsPackage"
+    )

     dependencies = """\
     depends_on("autoconf", type="build")
@@ -186,6 +196,7 @@ class CargoPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for cargo-based packages"""

     base_class_name = "CargoPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.cargo import CargoPackage"

     body_def = ""

@@ -194,6 +205,7 @@ class CMakePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for CMake-based packages"""

     base_class_name = "CMakePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.cmake import CMakePackage"

     body_def = """\
     def cmake_args(self):
@@ -208,6 +220,7 @@ class GoPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Go-module-based packages"""

     base_class_name = "GoPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.go import GoPackage"

     body_def = ""

@@ -216,6 +229,7 @@ class LuaPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for LuaRocks-based packages"""

     base_class_name = "LuaPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.lua import LuaPackage"

     body_def = """\
     def luarocks_args(self):
@@ -237,6 +251,7 @@ class MesonPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for meson-based packages"""

     base_class_name = "MesonPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.meson import MesonPackage"

     body_def = """\
     def meson_args(self):
@@ -249,6 +264,7 @@ class QMakePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for QMake-based packages"""

     base_class_name = "QMakePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.qmake import QMakePackage"

     body_def = """\
     def qmake_args(self):
@@ -261,6 +277,7 @@ class MavenPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Maven-based packages"""

     base_class_name = "MavenPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.maven import MavenPackage"

     body_def = """\
     def build(self, spec, prefix):
@@ -272,6 +289,7 @@ class SconsPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for SCons-based packages"""

     base_class_name = "SConsPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.scons import SConsPackage"

     body_def = """\
     def build_args(self, spec, prefix):
@@ -285,6 +303,7 @@ class WafPackageTemplate(PackageTemplate):
     """Provides appropriate override for Waf-based packages"""

     base_class_name = "WafPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.waf import WafPackage"

     body_def = """\
     # FIXME: Override configure_args(), build_args(),
@@ -308,6 +327,7 @@ class RacketPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Racket extensions"""

     base_class_name = "RacketPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.racket import RacketPackage"

     url_line = """\
     # FIXME: set the proper location from which to fetch your package
@@ -345,6 +365,7 @@ class PythonPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for python extensions"""

     base_class_name = "PythonPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.python import PythonPackage"

     dependencies = """\
     # FIXME: Only add the python/pip/wheel dependencies if you need specific versions
@@ -432,6 +453,7 @@ class RPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for R extensions"""

     base_class_name = "RPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.r import RPackage"

     dependencies = """\
     # FIXME: Add dependencies if required.
@@ -472,6 +494,7 @@ class PerlmakePackageTemplate(PackageTemplate):
     that come with a Makefile.PL"""

     base_class_name = "PerlPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.perl import PerlPackage"

     dependencies = """\
     # FIXME: Add dependencies if required:
@@ -509,6 +532,7 @@ class OctavePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for octave packages"""

     base_class_name = "OctavePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.octave import OctavePackage"

     dependencies = """\
     extends("octave")
@@ -531,6 +555,7 @@ class RubyPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Ruby packages"""

     base_class_name = "RubyPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.ruby import RubyPackage"

     dependencies = """\
     # FIXME: Add dependencies if required. Only add the ruby dependency
@@ -559,6 +584,7 @@ class MakefilePackageTemplate(PackageTemplate):
     """Provides appropriate overrides for Makefile packages"""

     base_class_name = "MakefilePackage"
+    package_class_import = "from spack_repo.builtin.build_systems.makefile import MakefilePackage"

     body_def = """\
     def edit(self, spec, prefix):
@@ -573,6 +599,7 @@ class IntelPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for licensed Intel software"""

     base_class_name = "IntelOneApiPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.oneapi import IntelOneApiPackage"

     body_def = """\
     # FIXME: Override `setup_environment` if necessary."""
@@ -582,6 +609,7 @@ class SIPPackageTemplate(PackageTemplate):
     """Provides appropriate overrides for SIP packages."""

     base_class_name = "SIPPackage"
+    package_class_import = "from spack_repo.builtin.build_systems.sip import SIPPackage"

     body_def = """\
     def configure_args(self, spec, prefix):
@@ -28,7 +28,7 @@ def setup_parser(subparser):
         "--build-system",
         dest="path",
         action="store_const",
-        const=spack.paths.build_systems_path,
+        const=os.path.join(spack.repo.PATH.repos[0].root, "build_systems"),
         help="edit the build system with the supplied name",
     )
     excl_args.add_argument(
@@ -331,7 +331,8 @@ def env_activate(args):
         env = create_temp_env_directory()
         env_path = os.path.abspath(env)
         short_name = os.path.basename(env_path)
-        ev.create_in_dir(env).write(regenerate=False)
+        view = not args.without_view
+        ev.create_in_dir(env, with_view=view).write(regenerate=False)
         _tty_info(f"Created and activated temporary environment in {env_path}")

     # Managed environment
@@ -514,17 +514,18 @@ def extend_with_dependencies(specs):


 def concrete_specs_from_cli_or_file(args):
     tty.msg("Concretizing input specs")
     if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = spack.cmd.parse_specs(args.specs, concretize=False)
         if not specs:
             raise SpackError("unable to parse specs from command line")

     if args.file:
-        specs = specs_from_text_file(args.file, concretize=True)
+        specs = specs_from_text_file(args.file, concretize=False)
         if not specs:
             raise SpackError("unable to parse specs from file '{}'".format(args.file))
-    return specs
+
+    concrete_specs = spack.cmd.matching_specs_from_env(specs)
+    return concrete_specs


 class IncludeFilter:
@@ -607,11 +608,6 @@ def process_mirror_stats(present, mirrored, error):

 def mirror_create(args):
     """create a directory to be used as a spack mirror, and fill it with package archives"""
-    if args.specs and args.all:
-        raise SpackError(
-            "cannot specify specs on command line if you chose to mirror all specs with '--all'"
-        )
-
     if args.file and args.all:
         raise SpackError(
             "cannot specify specs with a file if you chose to mirror all specs with '--all'"
@@ -183,7 +183,7 @@ def pkg_grep(args, unknown_args):
     grep.add_default_arg("--color=auto")

     # determines number of files to grep at a time
-    grouper = lambda e: e[0] // 500
+    grouper = lambda e: e[0] // 100

     # set up iterator and save the first group to ensure we don't end up with a group of size 1
     groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
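The `grouper` above batches the enumerated package paths so each `grep` invocation receives a bounded argument list; a self-contained sketch of the same trick, with illustrative sizes::

    import itertools

    paths = [f"pkg{i}.py" for i in range(250)]
    groups = itertools.groupby(enumerate(paths), key=lambda e: e[0] // 100)
    batches = [[p for _, p in group] for _, group in groups]
    assert [len(b) for b in batches] == [100, 100, 50]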
@@ -3,11 +3,13 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
+import shlex
 import sys
-from typing import List
+from typing import Any, List, Optional

+import llnl.util.tty as tty
+
 import spack
 import spack.config
 import spack.repo
 import spack.util.path
@@ -65,6 +67,15 @@ def setup_parser(subparser):
         help="configuration scope to modify",
     )

+    # Migrate
+    migrate_parser = sp.add_parser("migrate", help=repo_migrate.__doc__)
+    migrate_parser.add_argument(
+        "namespace_or_path", help="path to a Spack package repository directory"
+    )
+    migrate_parser.add_argument(
+        "--fix", action="store_true", help="automatically fix the imports in the package files"
+    )
+

 def repo_create(args):
     """create a new package repository"""
@@ -155,12 +166,76 @@ def repo_list(args):
     print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")


+def _get_repo(name_or_path: str) -> Optional[spack.repo.Repo]:
+    try:
+        return spack.repo.from_path(name_or_path)
+    except spack.repo.RepoError:
+        pass
+
+    for repo in spack.config.get("repos"):
+        try:
+            r = spack.repo.from_path(repo)
+        except spack.repo.RepoError:
+            continue
+        if r.namespace == name_or_path:
+            return r
+    return None
+
+
+def repo_migrate(args: Any) -> int:
+    """migrate a package repository to the latest Package API"""
+    from spack.repo_migrate import migrate_v1_to_v2, migrate_v2_imports
+
+    repo = _get_repo(args.namespace_or_path)
+
+    if repo is None:
+        tty.die(f"No such repository: {args.namespace_or_path}")
+
+    if (1, 0) <= repo.package_api < (2, 0):
+        success, repo_v2 = migrate_v1_to_v2(repo, fix=args.fix)
+        exit_code = 0 if success else 1
+    elif (2, 0) <= repo.package_api < (3, 0):
+        repo_v2 = None
+        exit_code = 0 if migrate_v2_imports(repo.packages_path, repo.root, fix=args.fix) else 1
+    else:
+        repo_v2 = None
+        exit_code = 0
+
+    if not args.fix:
+        tty.error(
+            f"No changes were made to the repository {repo.root} with namespace "
+            f"'{repo.namespace}'. Run with --fix to apply the above changes."
+        )
+
+    elif exit_code == 1:
+        tty.error(
+            f"Repository '{repo.namespace}' could not be migrated to the latest Package API. "
+            "Please check the error messages above."
+        )
+
+    elif isinstance(repo_v2, spack.repo.Repo):
+        tty.info(
+            f"Repository '{repo_v2.namespace}' was successfully migrated from "
+            f"package API {repo.package_api_str} to {repo_v2.package_api_str}."
+        )
+        tty.warn(
+            "Remove the old repository from Spack's configuration and add the new one using:\n"
+            f"  spack repo remove {shlex.quote(repo.root)}\n"
+            f"  spack repo add {shlex.quote(repo_v2.root)}"
+        )
+
+    else:
+        tty.info(f"Repository '{repo.namespace}' was successfully migrated")
+
+    return exit_code
+
+
 def repo(parser, args):
-    action = {
+    return {
         "create": repo_create,
         "list": repo_list,
         "add": repo_add,
         "remove": repo_remove,
         "rm": repo_remove,
-    }
-    action[args.repo_command](args)
+        "migrate": repo_migrate,
+    }[args.repo_command](args)
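The reworked dispatch body also forwards the handler's return value, so `repo_migrate`'s exit code propagates out of the `repo` command; a minimal self-contained sketch of the pattern, with hypothetical handlers::

    def repo_create(args):
        return 0

    def repo_migrate(args):
        return 1  # e.g. the migration found problems

    def repo(parser, args):
        return {"create": repo_create, "migrate": repo_migrate}[args.repo_command](args)

    class _Args:
        repo_command = "migrate"

    assert repo(None, _Args()) == 1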
@@ -18,8 +18,8 @@
 import spack.repo
 import spack.util.git
 import spack.util.spack_yaml
-from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
-from spack.tokenize import Token
+from spack.spec_parser import NAME, VERSION_LIST, SpecTokens
+from spack.tokenize import Token, TokenBase, Tokenizer
 from spack.util.executable import Executable, which

 description = "runs source code style checks on spack"
@@ -206,8 +206,8 @@ def setup_parser(subparser):
         "--spec-strings",
         action="store_true",
         help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
-        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
-        "will be removed in Spack v1.0.",
+        "v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: must be "
+        "used only on specs from spack v0.X.",
     )

     subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
@@ -332,18 +332,8 @@ def process_files(file_list, is_args):

         rewrite_and_print_output(output, args, pat, replacement)

-    packages_isort_args = (
-        "--rm",
-        "spack.pkgkit",
-        "--rm",
-        "spack.package_defs",
-        "-a",
-        "from spack.package import *",
-    )
-    packages_isort_args = packages_isort_args + isort_args
-
     # packages
-    process_files(filter(is_package, file_list), packages_isort_args)
+    process_files(filter(is_package, file_list), isort_args)
     # non-packages
     process_files(filter(lambda f: not is_package(f), file_list), isort_args)

@@ -521,20 +511,52 @@ def _bootstrap_dev_dependencies():
 IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")


+class _LegacySpecTokens(TokenBase):
+    """Reconstructs the tokens for previous specs, so we can reuse code to rotate them"""
+
+    # Dependency
+    START_EDGE_PROPERTIES = r"(?:\^\[)"
+    END_EDGE_PROPERTIES = r"(?:\])"
+    DEPENDENCY = r"(?:\^)"
+    # Version
+    VERSION_HASH_PAIR = SpecTokens.VERSION_HASH_PAIR.regex
+    GIT_VERSION = SpecTokens.GIT_VERSION.regex
+    VERSION = SpecTokens.VERSION.regex
+    # Variants
+    PROPAGATED_BOOL_VARIANT = SpecTokens.PROPAGATED_BOOL_VARIANT.regex
+    BOOL_VARIANT = SpecTokens.BOOL_VARIANT.regex
+    PROPAGATED_KEY_VALUE_PAIR = SpecTokens.PROPAGATED_KEY_VALUE_PAIR.regex
+    KEY_VALUE_PAIR = SpecTokens.KEY_VALUE_PAIR.regex
+    # Compilers
+    COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
+    COMPILER = rf"(?:%\s*(?:{NAME}))"
+    # FILENAME
+    FILENAME = SpecTokens.FILENAME.regex
+    # Package name
+    FULLY_QUALIFIED_PACKAGE_NAME = SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME.regex
+    UNQUALIFIED_PACKAGE_NAME = SpecTokens.UNQUALIFIED_PACKAGE_NAME.regex
+    # DAG hash
+    DAG_HASH = SpecTokens.DAG_HASH.regex
+    # White spaces
+    WS = SpecTokens.WS.regex
+    # Unexpected character(s)
+    UNEXPECTED = SpecTokens.UNEXPECTED.regex
+
+
 def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
     # only move the compiler to the back if it exists and is not already at the end
     if not 0 <= idx < len(blocks) - 1:
         return
     # if there's only whitespace after the compiler, don't move it
-    if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
+    if all(token.kind == _LegacySpecTokens.WS for block in blocks[idx + 1 :] for token in block):
         return
     # rotate left and always add at least one WS token between compiler and previous token
     compiler_block = blocks.pop(idx)
-    if compiler_block[0].kind != SpecTokens.WS:
-        compiler_block.insert(0, Token(SpecTokens.WS, " "))
+    if compiler_block[0].kind != _LegacySpecTokens.WS:
+        compiler_block.insert(0, Token(_LegacySpecTokens.WS, " "))
     # delete the WS tokens from the new first block if it was at the very start, to prevent leading
     # WS tokens.
-    while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
+    while idx == 0 and blocks[0][0].kind == _LegacySpecTokens.WS:
         blocks[0].pop(0)
     blocks.append(compiler_block)
@@ -552,11 +574,13 @@ def _spec_str_format(spec_str: str) -> Optional[str]:
     compiler_block_idx = -1
     in_edge_attr = False

-    for token in SPEC_TOKENIZER.tokenize(spec_str):
-        if token.kind == SpecTokens.UNEXPECTED:
+    legacy_tokenizer = Tokenizer(_LegacySpecTokens)
+
+    for token in legacy_tokenizer.tokenize(spec_str):
+        if token.kind == _LegacySpecTokens.UNEXPECTED:
             # parsing error, we cannot fix this string.
             return None
-        elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
+        elif token.kind in (_LegacySpecTokens.COMPILER, _LegacySpecTokens.COMPILER_AND_VERSION):
             # multiple compilers are not supported in Spack v0.x, so early return
             if compiler_block_idx != -1:
                 return None
@@ -565,19 +589,19 @@ def _spec_str_format(spec_str: str) -> Optional[str]:
             current_block = []
             compiler_block_idx = len(blocks) - 1
         elif token.kind in (
-            SpecTokens.START_EDGE_PROPERTIES,
-            SpecTokens.DEPENDENCY,
-            SpecTokens.UNQUALIFIED_PACKAGE_NAME,
-            SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
+            _LegacySpecTokens.START_EDGE_PROPERTIES,
+            _LegacySpecTokens.DEPENDENCY,
+            _LegacySpecTokens.UNQUALIFIED_PACKAGE_NAME,
+            _LegacySpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
         ):
             _spec_str_reorder_compiler(compiler_block_idx, blocks)
             compiler_block_idx = -1
-            if token.kind == SpecTokens.START_EDGE_PROPERTIES:
+            if token.kind == _LegacySpecTokens.START_EDGE_PROPERTIES:
                 in_edge_attr = True
             current_block.append(token)
             blocks.append(current_block)
             current_block = []
-        elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
+        elif token.kind == _LegacySpecTokens.END_EDGE_PROPERTIES:
             in_edge_attr = False
             current_block.append(token)
             blocks.append(current_block)
@@ -585,19 +609,19 @@ def _spec_str_format(spec_str: str) -> Optional[str]:
         elif in_edge_attr:
             current_block.append(token)
         elif token.kind in (
-            SpecTokens.VERSION_HASH_PAIR,
-            SpecTokens.GIT_VERSION,
-            SpecTokens.VERSION,
-            SpecTokens.PROPAGATED_BOOL_VARIANT,
-            SpecTokens.BOOL_VARIANT,
-            SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
-            SpecTokens.KEY_VALUE_PAIR,
-            SpecTokens.DAG_HASH,
+            _LegacySpecTokens.VERSION_HASH_PAIR,
+            _LegacySpecTokens.GIT_VERSION,
+            _LegacySpecTokens.VERSION,
+            _LegacySpecTokens.PROPAGATED_BOOL_VARIANT,
+            _LegacySpecTokens.BOOL_VARIANT,
+            _LegacySpecTokens.PROPAGATED_KEY_VALUE_PAIR,
+            _LegacySpecTokens.KEY_VALUE_PAIR,
+            _LegacySpecTokens.DAG_HASH,
         ):
             current_block.append(token)
             blocks.append(current_block)
             current_block = []
-        elif token.kind == SpecTokens.WS:
+        elif token.kind == _LegacySpecTokens.WS:
             current_block.append(token)
         else:
             raise ValueError(f"unexpected token {token}")
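Boiled down, what the legacy tokenizer and `_spec_str_reorder_compiler` accomplish is rotating a v0.x `%compiler` token to the end of its anchor spec. A rough string-level approximation (the real code is token-based and handles many more cases)::

    import re

    def move_compiler_last(spec: str) -> str:
        m = re.search(r"%\S+", spec)
        if not m:
            return spec
        rest = " ".join((spec[: m.start()] + spec[m.end() :]).split())
        return f"{rest} {m.group(0)}"

    assert move_compiler_last("zlib %gcc@12 +shared") == "zlib +shared %gcc@12"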
@@ -10,7 +10,7 @@
 import warnings
 from typing import Any, Dict, List, Optional, Tuple

-import archspec.cpu
+import _vendoring.archspec.cpu

 import llnl.util.filesystem as fs
 import llnl.util.lang
@@ -316,7 +316,7 @@ def from_external_yaml(config: Dict[str, Any]) -> Optional[spack.spec.Spec]:
     @staticmethod
     def _finalize_external_concretization(abstract_spec):
         if CompilerFactory._GENERIC_TARGET is None:
-            CompilerFactory._GENERIC_TARGET = archspec.cpu.host().family
+            CompilerFactory._GENERIC_TARGET = _vendoring.archspec.cpu.host().family

         if abstract_spec.architecture:
             abstract_spec.architecture.complete_with_defaults()
@@ -5,6 +5,7 @@
 import collections.abc
 import contextlib
 import errno
+import glob
 import os
 import pathlib
 import re
@@ -2424,19 +2425,11 @@ def display_specs(specs):

 def make_repo_path(root):
     """Make a RepoPath from the repo subdirectories in an environment."""
-    path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
-
-    if os.path.isdir(root):
-        for repo_root in os.listdir(root):
-            repo_root = os.path.join(root, repo_root)
-
-            if not os.path.isdir(repo_root):
-                continue
-
-            repo = spack.repo.from_path(repo_root)
-            path.put_last(repo)
-
-    return path
+    repos = [
+        spack.repo.from_path(os.path.dirname(p))
+        for p in glob.glob(os.path.join(root, "**", "repo.yaml"), recursive=True)
+    ]
+    return spack.repo.RepoPath(*repos, cache=spack.caches.MISC_CACHE)
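The rewritten `make_repo_path` leans on `glob` with `recursive=True`, where `**` matches any number of intermediate directories, so repos are now discovered at any depth instead of only at the top level. A small standalone illustration of that semantics::

    import glob
    import os
    import tempfile

    with tempfile.TemporaryDirectory() as root:
        os.makedirs(os.path.join(root, "a", "b"))
        open(os.path.join(root, "a", "b", "repo.yaml"), "w").close()
        hits = glob.glob(os.path.join(root, "**", "repo.yaml"), recursive=True)
        assert [os.path.relpath(p, root) for p in hits] == [os.path.join("a", "b", "repo.yaml")]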


 def manifest_file(env_name_or_dir):
@@ -25,7 +25,7 @@
 import warnings
 from typing import List, Tuple

-import archspec.cpu
+import _vendoring.archspec.cpu

 import llnl.util.lang
 import llnl.util.tty as tty
@@ -734,7 +734,7 @@ def _compatible_sys_types():
     """
     host_platform = spack.platforms.host()
     host_os = str(host_platform.default_operating_system())
-    host_target = archspec.cpu.host()
+    host_target = _vendoring.archspec.cpu.host()
     compatible_targets = [host_target] + host_target.ancestors

     compatible_archs = [
@@ -794,7 +794,7 @@ def shell_set(var, value):
     # print environment module system if available. This can be expensive
     # on clusters, so skip it if not needed.
     if "modules" in info:
-        generic_arch = archspec.cpu.host().family
+        generic_arch = _vendoring.archspec.cpu.host().family
         module_spec = "environment-modules target={0}".format(generic_arch)
         specs = spack.store.STORE.db.query(module_spec)
         if specs:
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-# flake8: noqa: F401, E402
 """spack.package defines the public API for Spack packages, by re-exporting useful symbols from
 other modules. Packages should import this module, instead of importing from spack.* directly
 to ensure forward compatibility with future versions of Spack."""
@@ -13,17 +12,6 @@
 # import most common types used in packages
 from typing import Dict, List, Optional

-
-class tty:
-    import llnl.util.tty as _tty
-
-    debug = _tty.debug
-    error = _tty.error
-    info = _tty.info
-    msg = _tty.msg
-    warn = _tty.warn
-
-
 from llnl.util.filesystem import (
     FileFilter,
     FileList,
@@ -61,52 +49,7 @@ class tty:
 from llnl.util.symlink import symlink

 from spack.build_environment import MakeExecutable
-from spack.build_systems.aspell_dict import AspellDictPackage
-from spack.build_systems.autotools import AutotoolsPackage
-from spack.build_systems.bundle import BundlePackage
-from spack.build_systems.cached_cmake import (
-    CachedCMakePackage,
-    cmake_cache_filepath,
-    cmake_cache_option,
-    cmake_cache_path,
-    cmake_cache_string,
-)
-from spack.build_systems.cargo import CargoPackage
-from spack.build_systems.cmake import CMakePackage, generator
-from spack.build_systems.compiler import CompilerPackage
-from spack.build_systems.cuda import CudaPackage
-from spack.build_systems.generic import Package
-from spack.build_systems.gnu import GNUMirrorPackage
-from spack.build_systems.go import GoPackage
-from spack.build_systems.intel import IntelPackage
-from spack.build_systems.lua import LuaPackage
-from spack.build_systems.makefile import MakefilePackage
-from spack.build_systems.maven import MavenPackage
-from spack.build_systems.meson import MesonPackage
-from spack.build_systems.msbuild import MSBuildPackage
-from spack.build_systems.nmake import NMakePackage
-from spack.build_systems.octave import OctavePackage
-from spack.build_systems.oneapi import (
-    INTEL_MATH_LIBRARIES,
-    IntelOneApiLibraryPackage,
-    IntelOneApiLibraryPackageWithSdk,
-    IntelOneApiPackage,
-    IntelOneApiStaticLibraryList,
-)
-from spack.build_systems.perl import PerlPackage
-from spack.build_systems.python import PythonExtension, PythonPackage
-from spack.build_systems.qmake import QMakePackage
-from spack.build_systems.r import RPackage
-from spack.build_systems.racket import RacketPackage
-from spack.build_systems.rocm import ROCmPackage
-from spack.build_systems.ruby import RubyPackage
-from spack.build_systems.scons import SConsPackage
-from spack.build_systems.sip import SIPPackage
-from spack.build_systems.sourceforge import SourceforgePackage
-from spack.build_systems.sourceware import SourcewarePackage
-from spack.build_systems.waf import WafPackage
-from spack.build_systems.xorg import XorgPackage
-from spack.builder import BaseBuilder
+from spack.builder import BaseBuilder, Builder, register_builder
 from spack.config import determine_number_of_jobs
 from spack.deptypes import ALL_TYPES as all_deptypes
 from spack.directives import (
@@ -138,7 +81,13 @@ class tty:
 )
 from spack.mixins import filter_compiler_wrappers
 from spack.multimethod import default_args, when
-from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
+from spack.package_base import (
+    PackageBase,
+    build_system_flags,
+    env_flags,
+    inject_flags,
+    on_package_attributes,
+)
 from spack.package_completions import (
     bash_completion_path,
     fish_completion_path,
@@ -158,6 +107,126 @@ class tty:
 cd = chdir
 pwd = getcwd


+class tty:
+    import llnl.util.tty as _tty
+
+    debug = _tty.debug
+    error = _tty.error
+    info = _tty.info
+    msg = _tty.msg
+    warn = _tty.warn
+
+
+__all__ = [
+    "chdir",
+    "environ",
+    "getcwd",
+    "makedirs",
+    "mkdir",
+    "remove",
+    "removedirs",
+    "move",
+    "rmtree",
+    "Dict",
+    "List",
+    "Optional",
+    "FileFilter",
+    "FileList",
+    "HeaderList",
+    "LibraryList",
+    "ancestor",
+    "can_access",
+    "change_sed_delimiter",
+    "copy",
+    "copy_tree",
+    "filter_file",
+    "find",
+    "find_all_headers",
+    "find_first",
+    "find_headers",
+    "find_libraries",
+    "find_system_libraries",
+    "force_remove",
+    "force_symlink",
+    "install",
+    "install_tree",
+    "is_exe",
+    "join_path",
+    "keep_modification_time",
+    "library_extensions",
+    "mkdirp",
+    "remove_directory_contents",
+    "remove_linked_tree",
+    "rename",
+    "set_executable",
+    "set_install_permissions",
+    "touch",
+    "working_dir",
+    "symlink",
+    "MakeExecutable",
+    "BaseBuilder",
+    "determine_number_of_jobs",
+    "all_deptypes",
+    "build_system",
+    "can_splice",
+    "conditional",
+    "conflicts",
+    "depends_on",
+    "extends",
+    "license",
+    "maintainers",
+    "patch",
+    "provides",
+    "redistribute",
+    "requires",
+    "resource",
+    "variant",
+    "version",
+    "InstallError",
+    "NoHeadersError",
+    "NoLibrariesError",
+    "SkipTest",
+    "cache_extra_test_sources",
+    "check_outputs",
+    "find_required_file",
+    "get_escaped_text_output",
+    "install_test_root",
+    "test_part",
+    "filter_compiler_wrappers",
+    "default_args",
+    "when",
+    "build_system_flags",
+    "env_flags",
+    "inject_flags",
+    "on_package_attributes",
+    "bash_completion_path",
+    "fish_completion_path",
+    "zsh_completion_path",
+    "run_after",
+    "run_before",
+    "Spec",
+    "EnvironmentModifications",
+    "Executable",
+    "ProcessError",
+    "which",
+    "which_string",
+    "fix_darwin_install_name",
+    "Prefix",
+    "any_combination_of",
+    "auto_or_any_combination_of",
+    "disjoint_sets",
+    "Version",
+    "ver",
+    "env",
+    "cd",
+    "pwd",
+    "tty",
+    "Builder",
+    "PackageBase",
+    "register_builder",
+]
+
 # These are just here for editor support; they may be set when the build env is set up.
 configure: Executable
 make_jobs: int
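The relocated `class tty:` block is a namespacing trick: a class body may run an import and alias selected functions, giving packages a stable `tty.msg(...)`-style surface without a module-level import. A self-contained sketch of the same idea, using the standard library instead of `llnl.util.tty`::

    class tty:
        import logging as _log

        msg = _log.info
        warn = _log.warning
        error = _log.error

    import logging

    logging.basicConfig(level=logging.INFO)
    tty.msg("hello from the class-namespace alias")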
@@ -583,7 +583,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     like ``homepage`` and, for a code-based package, ``url``, or functions
     such as ``install()``.
     There are many custom ``Package`` subclasses in the
-    ``spack.build_systems`` package that make things even easier for
+    ``spack_repo.builtin.build_systems`` package that make things even easier for
     specific build systems.

     """
@@ -986,7 +986,9 @@ def url_for_version(self, version):
         """
         return self._implement_all_urls_for_version(version)[0]

-    def update_external_dependencies(self, extendee_spec=None):
+    def _update_external_dependencies(
+        self, extendee_spec: Optional[spack.spec.Spec] = None
+    ) -> None:
         """
         Method to override in package classes to handle external dependencies
         """
@@ -58,7 +58,7 @@
 repos_path = os.path.join(var_path, "repos")
 test_repos_path = os.path.join(var_path, "test_repos")
 packages_path = os.path.join(repos_path, "spack_repo", "builtin")
-mock_packages_path = os.path.join(test_repos_path, "builtin.mock")
+mock_packages_path = os.path.join(test_repos_path, "spack_repo", "builtin_mock")

 #
 # Writable things in $spack/var/spack
@@ -4,7 +4,7 @@
 import warnings
 from typing import Optional

-import archspec.cpu
+import _vendoring.archspec.cpu

 import llnl.util.lang

@@ -38,15 +38,15 @@ def __init__(self, name):
         self.name = name
         self._init_targets()

-    def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
+    def add_target(self, name: str, target: _vendoring.archspec.cpu.Microarchitecture) -> None:
         if name in Platform.reserved_targets:
             msg = f"{name} is a spack reserved alias and cannot be the name of a target"
             raise ValueError(msg)
         self.targets[name] = target

     def _init_targets(self):
-        self.default = archspec.cpu.host().name
-        for name, microarchitecture in archspec.cpu.TARGETS.items():
+        self.default = _vendoring.archspec.cpu.host().name
+        for name, microarchitecture in _vendoring.archspec.cpu.TARGETS.items():
             self.add_target(name, microarchitecture)

     def target(self, name):
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform

-import archspec.cpu
+import _vendoring.archspec.cpu

 import spack.operating_systems

@@ -28,7 +28,7 @@ def __init__(self, name=None):
     def _init_targets(self):
         targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2")
         for t in targets:
-            self.add_target(t, archspec.cpu.TARGETS[t])
+            self.add_target(t, _vendoring.archspec.cpu.TARGETS[t])

     @classmethod
     def detect(cls):
@@ -55,7 +55,9 @@

 def is_package_module(fullname: str) -> bool:
     """Check if the given module is a package module."""
-    return fullname.startswith(PKG_MODULE_PREFIX_V1) or fullname.startswith(PKG_MODULE_PREFIX_V2)
+    return fullname.startswith(PKG_MODULE_PREFIX_V1) or (
+        fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package")
+    )


 def namespace_from_fullname(fullname: str) -> str:
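The tightened predicate means that under the v2 prefix only `*.package` submodules count as package modules, so build-system modules inside a repo no longer do. A sketch with prefix values assumed from context (hypothetical constants; check `spack/repo.py` for the real ones)::

    PKG_MODULE_PREFIX_V1 = "spack.pkg."
    PKG_MODULE_PREFIX_V2 = "spack_repo."

    def is_package_module(fullname: str) -> bool:
        return fullname.startswith(PKG_MODULE_PREFIX_V1) or (
            fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package")
        )

    assert is_package_module("spack_repo.builtin.packages.zlib.package")
    assert not is_package_module("spack_repo.builtin.build_systems.cmake")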
@@ -77,6 +79,25 @@ def namespace_from_fullname(fullname: str) -> str:
     return fullname


+class _PrependFileLoader(importlib.machinery.SourceFileLoader):
+    def __init__(self, fullname: str, repo: "Repo", package_name: str) -> None:
+        self.repo = repo
+        self.package_name = package_name
+        path = repo.filename_for_package_name(package_name)
+        self.fullname = fullname
+        self.prepend = b"from spack_repo.builtin.build_systems._package_api_v1 import *\n"
+        super().__init__(self.fullname, path)
+
+    def path_stats(self, path):
+        stats = dict(super().path_stats(path))
+        stats["size"] += len(self.prepend)
+        return stats
+
+    def get_data(self, path):
+        data = super().get_data(path)
+        return self.prepend + data if path == self.path else data
+
+
 class SpackNamespaceLoader:
     def create_module(self, spec):
         return SpackNamespace(spec.name)
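`_PrependFileLoader` splices synthetic source in front of the file at import time; `path_stats` is padded so the bytecode cache sees a size consistent with what `get_data` returns. A generic, self-contained sketch of the same mechanism (the module path and injected header are illustrative)::

    import importlib.machinery
    import importlib.util

    class PrependLoader(importlib.machinery.SourceFileLoader):
        prepend = b"INJECTED = True\n"  # hypothetical header; Spack injects an import

        def path_stats(self, path):
            stats = dict(super().path_stats(path))
            stats["size"] += len(self.prepend)  # keep bytecode caching consistent
            return stats

        def get_data(self, path):
            data = super().get_data(path)
            return self.prepend + data if path == self.path else data

    # usage sketch:
    # spec = importlib.util.spec_from_loader("m", PrependLoader("m", "/path/to/m.py"))
    # mod = importlib.util.module_from_spec(spec)
    # spec.loader.exec_module(mod)  # mod.INJECTED is True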
@@ -123,8 +144,7 @@ def compute_loader(self, fullname: str):
         # With 2 nested conditionals we can call "repo.real_name" only once
         package_name = repo.real_name(module_name)
         if package_name:
-            module_path = repo.filename_for_package_name(package_name)
-            return importlib.machinery.SourceFileLoader(fullname, module_path)
+            return _PrependFileLoader(fullname, repo, package_name)

     # We are importing a full namespace like 'spack.pkg.builtin'
     if fullname == repo.full_namespace:
@@ -153,7 +173,7 @@ def compute_loader(self, fullname: str):
 def builtin_repo() -> "Repo":
     """Get the test repo if it is active, otherwise the builtin repo."""
     try:
-        return PATH.get_repo("builtin.mock")
+        return PATH.get_repo("builtin_mock")
     except UnknownNamespaceError:
         return PATH.get_repo("builtin")
lib/spack/spack/repo_migrate.py (new file, 428 lines)
@@ -0,0 +1,428 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import ast
import os
import re
import shutil
import sys
from typing import IO, Dict, List, Optional, Set, Tuple

import spack.repo
import spack.util.naming
import spack.util.spack_yaml


def _same_contents(f: str, g: str) -> bool:
    """Return True if the files have the same contents."""
    try:
        with open(f, "rb") as f1, open(g, "rb") as f2:
            while True:
                b1 = f1.read(4096)
                b2 = f2.read(4096)
                if b1 != b2:
                    return False
                if not b1 and not b2:
                    break
            return True
    except OSError:
        return False


def migrate_v1_to_v2(
    repo: spack.repo.Repo, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
) -> Tuple[bool, Optional[spack.repo.Repo]]:
    """To upgrade a repo from Package API v1 to v2 we need to:
    1. ensure ``spack_repo/<namespace>`` parent dirs to the ``repo.yaml`` file.
    2. rename ``<pkg dir>/package.py`` to ``<pkg module>/package.py``.
    3. bump the version in ``repo.yaml``.
    """
    if not (1, 0) <= repo.package_api < (2, 0):
        raise RuntimeError(f"Cannot upgrade from {repo.package_api_str} to v2.0")

    with open(os.path.join(repo.root, "repo.yaml"), encoding="utf-8") as f:
        updated_config = spack.util.spack_yaml.load(f)
    updated_config["repo"]["api"] = "v2.0"

    namespace = repo.namespace.split(".")

    if not all(
        spack.util.naming.valid_module_name(part, package_api=(2, 0)) for part in namespace
    ):
        print(
            f"Cannot upgrade from v1 to v2, because the namespace '{repo.namespace}' is not a "
            "valid Python module",
            file=err,
        )
        return False, None

    try:
        subdirectory = spack.repo._validate_and_normalize_subdir(
            repo.subdirectory, repo.root, package_api=(2, 0)
        )
    except spack.repo.BadRepoError:
        print(
            f"Cannot upgrade from v1 to v2, because the subdirectory '{repo.subdirectory}' is not "
            "a valid Python module",
            file=err,
        )
        return False, None

    new_root = os.path.join(repo.root, "spack_repo", *namespace)

    ino_to_relpath: Dict[int, str] = {}
    symlink_to_ino: Dict[str, int] = {}

    prefix_len = len(repo.root) + len(os.sep)

    rename: Dict[str, str] = {}
    dirs_to_create: List[str] = []
    files_to_copy: List[str] = []

    errors = False

    stack: List[Tuple[str, int]] = [(repo.root, 0)]
    while stack:
        path, depth = stack.pop()

        try:
            entries = os.scandir(path)
        except OSError:
            continue

        for entry in entries:
            rel_path = entry.path[prefix_len:]

            if depth == 0 and entry.name in ("spack_repo", "repo.yaml"):
                continue

            ino_to_relpath[entry.inode()] = entry.path[prefix_len:]

            if entry.is_symlink():
                try:
                    symlink_to_ino[rel_path] = entry.stat(follow_symlinks=True).st_ino
                except OSError:
                    symlink_to_ino[rel_path] = -1  # dangling or no access

                continue

            elif entry.is_dir(follow_symlinks=False):
                if entry.name == "__pycache__":
                    continue

                # check if this is a package
                if (
                    depth == 1
                    and rel_path.startswith(f"{subdirectory}{os.sep}")
                    and os.path.exists(os.path.join(entry.path, "package.py"))
                ):
                    if "_" in entry.name:
                        print(
                            f"Invalid package name '{entry.name}': underscores are not allowed in "
                            "package names, rename the package with hyphens as separators",
                            file=err,
                        )
                        errors = True
                        continue
                    pkg_dir = spack.util.naming.pkg_name_to_pkg_dir(entry.name, package_api=(2, 0))
                    if pkg_dir != entry.name:
                        rename[f"{subdirectory}{os.sep}{entry.name}"] = (
                            f"{subdirectory}{os.sep}{pkg_dir}"
                        )

                dirs_to_create.append(rel_path)

                stack.append((entry.path, depth + 1))
                continue

            files_to_copy.append(rel_path)

    if errors:
        return False, None

    rename_regex = re.compile("^(" + "|".join(re.escape(k) for k in rename.keys()) + ")")

    if fix:
        os.makedirs(new_root, exist_ok=True)

    def _relocate(rel_path: str) -> Tuple[str, str]:
        old = os.path.join(repo.root, rel_path)
        if rename:
            new_rel = rename_regex.sub(lambda m: rename[m.group(0)], rel_path)
        else:
            new_rel = rel_path
        new = os.path.join(new_root, new_rel)
        return old, new

    if not fix:
        print("The following directories, files and symlinks will be created:\n", file=out)

    for rel_path in dirs_to_create:
        _, new_path = _relocate(rel_path)
        if fix:
            try:
                os.mkdir(new_path)
            except FileExistsError:  # not an error if the directory already exists
                continue
        else:
            print(f"create directory {new_path}", file=out)

    for rel_path in files_to_copy:
        old_path, new_path = _relocate(rel_path)
        if os.path.lexists(new_path):
            # if we already copied this file, don't error.
            if not _same_contents(old_path, new_path):
                print(
                    f"Cannot upgrade from v1 to v2, because the file '{new_path}' already exists",
                    file=err,
                )
                return False, None
            continue
        if fix:
            shutil.copy2(old_path, new_path)
        else:
            print(f"copy {old_path} -> {new_path}", file=out)

    for rel_path, ino in symlink_to_ino.items():
        old_path, new_path = _relocate(rel_path)
        if ino in ino_to_relpath:
            # link by path relative to the new root
            _, new_target = _relocate(ino_to_relpath[ino])
            tgt = os.path.relpath(new_target, new_path)
        else:
            tgt = os.path.realpath(old_path)

        # no-op if the same, error if different
        if os.path.lexists(new_path):
            if not os.path.islink(new_path) or os.readlink(new_path) != tgt:
                print(
                    f"Cannot upgrade from v1 to v2, because the file '{new_path}' already exists",
                    file=err,
                )
                return False, None
            continue

        if fix:
            os.symlink(tgt, new_path)
        else:
            print(f"create symlink {new_path} -> {tgt}", file=out)

    if fix:
        with open(os.path.join(new_root, "repo.yaml"), "w", encoding="utf-8") as f:
            spack.util.spack_yaml.dump(updated_config, f)
        updated_repo = spack.repo.from_path(new_root)
    else:
        print(file=out)
        updated_repo = repo  # compute the import diff on the v1 repo since v2 doesn't exist yet

    result = migrate_v2_imports(
        updated_repo.packages_path, updated_repo.root, fix=fix, out=out, err=err
    )

    return result, (updated_repo if fix else None)


def migrate_v2_imports(
    packages_dir: str, root: str, fix: bool, out: IO[str] = sys.stdout, err: IO[str] = sys.stderr
) -> bool:
    """In Package API v2.0, packages need to explicitly import package classes and a few other
    symbols from the build_systems module. This function automatically adds the missing imports
    to each package.py file in the repository."""

    symbol_to_module = {
        "AspellDictPackage": "spack_repo.builtin.build_systems.aspell_dict",
        "AutotoolsPackage": "spack_repo.builtin.build_systems.autotools",
        "BundlePackage": "spack_repo.builtin.build_systems.bundle",
        "CachedCMakePackage": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_filepath": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_option": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_path": "spack_repo.builtin.build_systems.cached_cmake",
        "cmake_cache_string": "spack_repo.builtin.build_systems.cached_cmake",
        "CargoPackage": "spack_repo.builtin.build_systems.cargo",
        "CMakePackage": "spack_repo.builtin.build_systems.cmake",
        "generator": "spack_repo.builtin.build_systems.cmake",
        "CompilerPackage": "spack_repo.builtin.build_systems.compiler",
        "CudaPackage": "spack_repo.builtin.build_systems.cuda",
        "Package": "spack_repo.builtin.build_systems.generic",
        "GNUMirrorPackage": "spack_repo.builtin.build_systems.gnu",
        "GoPackage": "spack_repo.builtin.build_systems.go",
        "LuaPackage": "spack_repo.builtin.build_systems.lua",
        "MakefilePackage": "spack_repo.builtin.build_systems.makefile",
        "MavenPackage": "spack_repo.builtin.build_systems.maven",
        "MesonPackage": "spack_repo.builtin.build_systems.meson",
        "MSBuildPackage": "spack_repo.builtin.build_systems.msbuild",
        "NMakePackage": "spack_repo.builtin.build_systems.nmake",
        "OctavePackage": "spack_repo.builtin.build_systems.octave",
        "INTEL_MATH_LIBRARIES": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiLibraryPackage": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiLibraryPackageWithSdk": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiPackage": "spack_repo.builtin.build_systems.oneapi",
        "IntelOneApiStaticLibraryList": "spack_repo.builtin.build_systems.oneapi",
        "PerlPackage": "spack_repo.builtin.build_systems.perl",
        "PythonExtension": "spack_repo.builtin.build_systems.python",
        "PythonPackage": "spack_repo.builtin.build_systems.python",
        "QMakePackage": "spack_repo.builtin.build_systems.qmake",
        "RPackage": "spack_repo.builtin.build_systems.r",
        "RacketPackage": "spack_repo.builtin.build_systems.racket",
        "ROCmPackage": "spack_repo.builtin.build_systems.rocm",
        "RubyPackage": "spack_repo.builtin.build_systems.ruby",
        "SConsPackage": "spack_repo.builtin.build_systems.scons",
        "SIPPackage": "spack_repo.builtin.build_systems.sip",
        "SourceforgePackage": "spack_repo.builtin.build_systems.sourceforge",
        "SourcewarePackage": "spack_repo.builtin.build_systems.sourceware",
        "WafPackage": "spack_repo.builtin.build_systems.waf",
        "XorgPackage": "spack_repo.builtin.build_systems.xorg",
    }

    success = True

    for f in os.scandir(packages_dir):
        pkg_path = os.path.join(f.path, "package.py")
        if (
            f.name in ("__init__.py", "__pycache__")
            or not f.is_dir(follow_symlinks=False)
            or os.path.islink(pkg_path)
        ):
            print(f"Skipping {f.path}", file=err)
            continue
        try:
            with open(pkg_path, "rb") as file:
                tree = ast.parse(file.read())
        except (OSError, SyntaxError) as e:
            print(f"Skipping {pkg_path}: {e}", file=err)
            continue

        #: Symbols that are referenced in the package and may need to be imported.
        referenced_symbols: Set[str] = set()

        #: Set of symbols of interest that are already defined through imports, assignments, or
        #: function definitions.
        defined_symbols: Set[str] = set()

        best_line: Optional[int] = None

        seen_import = False

        for node in ast.walk(tree):
            # Get the last import statement from the first block of top-level imports
            if isinstance(node, ast.Module):
                for child in ast.iter_child_nodes(node):
                    # if we never encounter an import statement, the best line to add is right
                    # before the first node under the module
                    if best_line is None and isinstance(child, ast.stmt):
                        best_line = child.lineno

                    # prefer adding right before `from spack.package import ...`
                    if isinstance(child, ast.ImportFrom) and child.module == "spack.package":
                        seen_import = True
                        best_line = child.lineno  # add it right before spack.package
                        break

                    # otherwise put it right after the last import statement
                    is_import = isinstance(child, (ast.Import, ast.ImportFrom))

                    if is_import:
                        if isinstance(child, (ast.stmt, ast.expr)):
                            best_line = (child.end_lineno or child.lineno) + 1

                    if not seen_import and is_import:
                        seen_import = True
                    elif seen_import and not is_import:
                        break

            # Function definitions or assignments to variables whose name is a symbol of interest
            # are considered as redefinitions, so we skip them.
            elif isinstance(node, ast.FunctionDef):
                if node.name in symbol_to_module:
                    print(
                        f"{pkg_path}:{node.lineno}: redefinition of `{node.name}` skipped",
                        file=err,
                    )
                    defined_symbols.add(node.name)
            elif isinstance(node, ast.Assign):
                for target in node.targets:
                    if isinstance(target, ast.Name) and target.id in symbol_to_module:
                        print(
                            f"{pkg_path}:{target.lineno}: redefinition of `{target.id}` skipped",
                            file=err,
                        )
                        defined_symbols.add(target.id)

            # Register symbols that are not imported.
            elif isinstance(node, ast.Name) and node.id in symbol_to_module:
                referenced_symbols.add(node.id)

            # Register imported symbols to make this operation idempotent
            elif isinstance(node, ast.ImportFrom):
                for alias in node.names:
                    if alias.name in symbol_to_module:
                        defined_symbols.add(alias.name)
                        if node.module == "spack.package":
                            success = False
                            print(
                                f"{pkg_path}:{node.lineno}: `{alias.name}` is imported from "
                                "`spack.package`, which no longer provides this symbol",
                                file=err,
                            )

                    if alias.asname and alias.asname in symbol_to_module:
                        defined_symbols.add(alias.asname)

        # Remove imported symbols from the referenced symbols
        referenced_symbols.difference_update(defined_symbols)

        if not referenced_symbols:
            continue

        if best_line is None:
            print(f"{pkg_path}: failed to update imports", file=err)
            success = False
            continue

        # Add the missing imports right after the last import statement
        with open(pkg_path, "r", encoding="utf-8", newline="") as file:
            lines = file.readlines()

        # Group missing symbols by their module
        missing_imports_by_module: Dict[str, list] = {}
        for symbol in referenced_symbols:
            module = symbol_to_module[symbol]
            if module not in missing_imports_by_module:
                missing_imports_by_module[module] = []
            missing_imports_by_module[module].append(symbol)

        new_lines = [
            f"from {module} import {', '.join(sorted(symbols))}\n"
            for module, symbols in sorted(missing_imports_by_module.items())
        ]

        if not seen_import:
            new_lines.extend(("\n", "\n"))

        if not fix:  # only print the diff
            success = False  # packages need to be fixed, but we didn't do it
            diff_start, diff_end = max(1, best_line - 3), min(best_line + 2, len(lines))
            num_changed = diff_end - diff_start + 1
            num_added = num_changed + len(new_lines)
            rel_pkg_path = os.path.relpath(pkg_path, start=root)
            out.write(f"--- a/{rel_pkg_path}\n+++ b/{rel_pkg_path}\n")
            out.write(f"@@ -{diff_start},{num_changed} +{diff_start},{num_added} @@\n")
            for line in lines[diff_start - 1 : best_line - 1]:
                out.write(f" {line}")
            for line in new_lines:
                out.write(f"+{line}")
            for line in lines[best_line - 1 : diff_end]:
                out.write(f" {line}")
            continue

        lines[best_line - 1 : best_line - 1] = new_lines

        tmp_file = pkg_path + ".tmp"

        with open(tmp_file, "w", encoding="utf-8", newline="") as file:
            file.writelines(lines)

        os.replace(tmp_file, pkg_path)

    return success
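Concretely, for a package that refers to, say, `CMakePackage` and `CudaPackage`, `migrate_v2_imports` inserts grouped imports right before the `from spack.package import ...` line. A hypothetical before/after of a package.py (package name and symbols invented for illustration)::

    # before
    from spack.package import *

    class Example(CMakePackage, CudaPackage):
        ...

    # after --fix
    from spack_repo.builtin.build_systems.cmake import CMakePackage
    from spack_repo.builtin.build_systems.cuda import CudaPackage

    from spack.package import *

    class Example(CMakePackage, CudaPackage):
        ...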
@@ -34,7 +34,7 @@
     Union,
 )

-import archspec.cpu
+import _vendoring.archspec.cpu

 import llnl.util.lang
 import llnl.util.tty as tty
@@ -1617,7 +1617,7 @@ def target_ranges(self, spec, single_target_fn):
         target = spec.architecture.target

         # Check if the target is a concrete target
-        if str(target) in archspec.cpu.TARGETS:
+        if str(target) in _vendoring.archspec.cpu.TARGETS:
             return [single_target_fn(spec.name, target)]

         self.target_constraints.add(target)
@@ -2556,7 +2556,7 @@ def _spec_clauses(

         edges = spec.edges_from_dependents()
         virtuals = [x for x in itertools.chain.from_iterable([edge.virtuals for edge in edges])]
-        if not body:
+        if not body and not spec.concrete:
             for virtual in virtuals:
                 clauses.append(fn.attr("provider_set", spec.name, virtual))
                 clauses.append(fn.attr("virtual_node", virtual))
@@ -2753,7 +2753,7 @@ def _supported_targets(self, compiler_name, compiler_version, targets):
                     compiler_name, compiler_version.dotted_numeric_string
                 )
                 supported.append(target)
-            except archspec.cpu.UnsupportedMicroarchitecture:
+            except _vendoring.archspec.cpu.UnsupportedMicroarchitecture:
                 continue
             except ValueError:
                 continue
@@ -2818,7 +2818,7 @@ def target_defaults(self, specs):
             if not spec.architecture or not spec.architecture.target:
                 continue

-            target = archspec.cpu.TARGETS.get(spec.target.name)
+            target = _vendoring.archspec.cpu.TARGETS.get(spec.target.name)
             if not target:
                 self.target_ranges(spec, None)
                 continue
@@ -2830,7 +2830,7 @@ def target_defaults(self, specs):
                 candidate_targets.append(ancestor)

         platform = spack.platforms.host()
-        uarch = archspec.cpu.TARGETS.get(platform.default)
+        uarch = _vendoring.archspec.cpu.TARGETS.get(platform.default)
         best_targets = {uarch.family.name}
         for compiler in self.possible_compilers:
             supported = self._supported_targets(compiler.name, compiler.version, candidate_targets)
@@ -2938,7 +2938,7 @@ def _all_targets_satisfiying(single_constraint):
         return [single_constraint]

     t_min, _, t_max = single_constraint.partition(":")
-    for test_target in archspec.cpu.TARGETS.values():
+    for test_target in _vendoring.archspec.cpu.TARGETS.values():
         # Check lower bound
         if t_min and not t_min <= test_target:
             continue
@@ -3894,7 +3894,7 @@ def external_spec_selected(self, node, idx):

         if extendee_spec:
             extendee_node = SpecBuilder.make_node(pkg=extendee_spec.name)
-            package.update_external_dependencies(self._specs.get(extendee_node, None))
+            package._update_external_dependencies(self._specs.get(extendee_node))

     def depends_on(self, parent_node, dependency_node, type):
         dependency_spec = self._specs[dependency_node]
@@ -530,6 +530,32 @@ attr("concrete_variant_set", node(X, A1), Variant, Value, ID)
attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.

error(100, "Cannot satisfy the request on {0} to have {1}={2}", BuildDependency, Variant, Value)
:- attr("build_requirement", ParentNode, build_requirement("variant_set", BuildDependency, Variant, Value)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "variant_value", BuildDependency, Variant, Value).

error(100, "Cannot satisfy the request on {0} to have the target set to {1}", BuildDependency, Target)
:- attr("build_requirement", ParentNode, build_requirement("node_target_set", BuildDependency, Target)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "node_target", BuildDependency, Target).

error(100, "Cannot satisfy the request on {0} to have the os set to {1}", BuildDependency, NodeOS)
:- attr("build_requirement", ParentNode, build_requirement("node_os_set", BuildDependency, NodeOS)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "node_os", BuildDependency, NodeOS).

error(100, "Cannot satisfy the request on {0} to have the platform set to {1}", BuildDependency, Platform)
:- attr("build_requirement", ParentNode, build_requirement("node_platform_set", BuildDependency, Platform)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
not hash_attr(BuildDependencyHash, "node_platform", BuildDependency, Platform).

error(100, "Cannot satisfy the request on {0} to have the following hash {1}", BuildDependency, BuildHash)
:- attr("build_requirement", ParentNode, build_requirement("node_target_set", BuildDependency, Target)),
attr("concrete_build_dependency", ParentNode, BuildDependency, BuildDependencyHash),
attr("build_requirement", ParentNode, build_requirement("hash", BuildDependency, BuildHash)),
BuildHash != BuildDependencyHash.

% External nodes
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
external(ParentNode),

@@ -576,6 +602,32 @@ attr("node_version_satisfies", node(X, BuildDependency), Constraint) :-
attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
build_requirement(ParentNode, node(X, BuildDependency)).

% Account for properties on the build requirements
%
% root %gcc@12.0 <properties for gcc> ^dep
%
attr("variant_set", node(X, BuildDependency), Variant, Value) :-
attr("build_requirement", ParentNode, build_requirement("variant_set", BuildDependency, Variant, Value)),
build_requirement(ParentNode, node(X, BuildDependency)).

attr("depends_on", node(X, Parent), node(Y, BuildDependency), "build") :- build_requirement(node(X, Parent), node(Y, BuildDependency)).

attr("node_target_set", node(X, BuildDependency), Target) :-
attr("build_requirement", ParentNode, build_requirement("node_target_set", BuildDependency, Target)),
build_requirement(ParentNode, node(X, BuildDependency)).

attr("node_os_set", node(X, BuildDependency), NodeOS) :-
attr("build_requirement", ParentNode, build_requirement("node_os_set", BuildDependency, NodeOS)),
build_requirement(ParentNode, node(X, BuildDependency)).

attr("node_platform_set", node(X, BuildDependency), NodePlatform) :-
attr("build_requirement", ParentNode, build_requirement("node_platform_set", BuildDependency, NodePlatform)),
build_requirement(ParentNode, node(X, BuildDependency)).

attr("hash", node(X, BuildDependency), BuildHash) :-
attr("build_requirement", ParentNode, build_requirement("hash", BuildDependency, BuildHash)),
build_requirement(ParentNode, node(X, BuildDependency)).


1 { attr("provider_set", node(X, BuildDependency), node(0..Y-1, Virtual)) : max_dupes(Virtual, Y) } 1 :-
attr("build_requirement", ParentNode, build_requirement("provider_set", BuildDependency, Virtual)),
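These ASP rules consume `build_requirement` facts that the Python side of the solver emits. Purely illustrative sketch of that emission — the helper name below is hypothetical, though `fn.attr(...)` / `clauses.append(...)` appear in the `_spec_clauses` hunk earlier, and the nested `build_requirement(...)` term construction is an assumption:

```python
def emit_variant_requirement(clauses, fn, parent_node, build_dep, variant, value):
    # One fact per property requested on a direct build dependency,
    # matching attr("build_requirement", ParentNode, build_requirement(...)).
    clauses.append(
        fn.attr(
            "build_requirement",
            parent_node,
            fn.build_requirement("variant_set", build_dep, variant, value),
        )
    )
```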
@@ -5,7 +5,7 @@
import collections
from typing import Dict, List, NamedTuple, Set, Tuple, Union

-import archspec.cpu
+import _vendoring.archspec.cpu

from llnl.util import lang, tty

@@ -34,7 +34,7 @@ def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""
raise NotImplementedError

-def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
+def candidate_targets(self) -> List[_vendoring.archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
raise NotImplementedError

@@ -70,7 +70,7 @@ def __init__(self, *, configuration: spack.config.Configuration, repo: spack.rep
self.configuration = configuration
self.repo = repo
self._platform_condition = spack.spec.Spec(
-f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
+f"platform={spack.platforms.host()} target={_vendoring.archspec.cpu.host().family}:"
)

try:

@@ -110,10 +110,10 @@ def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""
return False

-def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
+def candidate_targets(self) -> List[_vendoring.archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
platform = spack.platforms.host()
-default_target = archspec.cpu.TARGETS[platform.default]
+default_target = _vendoring.archspec.cpu.TARGETS[platform.default]

# Construct the list of targets which are compatible with the host
candidate_targets = [default_target] + default_target.ancestors

@@ -125,7 +125,7 @@ def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
additional_targets_in_family = sorted(
[
t
-for t in archspec.cpu.TARGETS.values()
+for t in _vendoring.archspec.cpu.TARGETS.values()
if (t.family.name == default_target.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
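Pulling the pieces of `candidate_targets` together: start from the host platform's default target, add its ancestors, then fold in other targets from the same family ordered by generality. A compact sketch under the assumption that the vendored archspec is importable (the function name and parameter are stand-ins):

```python
import _vendoring.archspec.cpu as cpu

def candidate_targets(default_name: str):
    default_target = cpu.TARGETS[default_name]
    # The default target plus everything it can run code compiled for.
    candidates = [default_target] + default_target.ancestors
    # Other members of the same family, most generic first.
    extra = sorted(
        (t for t in cpu.TARGETS.values()
         if t.family.name == default_target.family.name and t not in candidates),
        key=lambda t: len(t.ancestors),
    )
    return candidates + extra
```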
@@ -74,10 +74,9 @@
overload,
)

+import _vendoring.archspec.cpu
from _vendoring.typing_extensions import Literal

-import archspec.cpu

import llnl.path
import llnl.string
import llnl.util.filesystem as fs

@@ -217,10 +216,12 @@ def ensure_modern_format_string(fmt: str) -> None:
)


-def _make_microarchitecture(name: str) -> archspec.cpu.Microarchitecture:
-if isinstance(name, archspec.cpu.Microarchitecture):
+def _make_microarchitecture(name: str) -> _vendoring.archspec.cpu.Microarchitecture:
+if isinstance(name, _vendoring.archspec.cpu.Microarchitecture):
return name
-return archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
+return _vendoring.archspec.cpu.TARGETS.get(
+name, _vendoring.archspec.cpu.generic_microarchitecture(name)
+)


@lang.lazy_lexicographic_ordering

@@ -364,7 +365,7 @@ def target(self, value):
# will assumed to be the host machine's platform.

def target_or_none(t):
-if isinstance(t, archspec.cpu.Microarchitecture):
+if isinstance(t, _vendoring.archspec.cpu.Microarchitecture):
return t
if t and t != "None":
return _make_microarchitecture(t)

@@ -2233,15 +2234,21 @@ def lookup_hash(self):
spec._dup(self._lookup_hash())
return spec

-# Get dependencies that need to be replaced
-for node in self.traverse(root=False):
-if node.abstract_hash:
-spec._add_dependency(node._lookup_hash(), depflag=0, virtuals=())
+# Map the dependencies that need to be replaced
+node_lookup = {
+id(node): node._lookup_hash()
+for node in self.traverse(root=False)
+if node.abstract_hash
+}

-# reattach nodes that were not otherwise satisfied by new dependencies
-for node in self.traverse(root=False):
-if not any(n.satisfies(node) for n in spec.traverse()):
-spec._add_dependency(node.copy(), depflag=0, virtuals=())
+# Reconstruct dependencies
+for edge in self.traverse_edges(root=False):
+key = edge.parent.name
+current_node = spec if key == spec.name else spec[key]
+child_node = node_lookup.get(id(edge.spec), edge.spec.copy())
+current_node._add_dependency(
+child_node, depflag=edge.depflag, virtuals=edge.virtuals, direct=edge.direct
+)

return spec
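The new `lookup_hash` builds a replacement map keyed by `id(node)` (identity is the safe key here, since spec nodes hash by content) and then walks every edge exactly once, attaching either the looked-up replacement or a copy of the original child. A toy illustration of the same strategy, with hypothetical `Node`/`rebuild` names:

```python
class Node:
    def __init__(self, name: str):
        self.name = name
        self.children = []

def rebuild(edges, replacements):
    """edges: (parent, child) pairs; replacements maps id(child) -> new node."""
    for parent, child in edges:
        # Fall back to the original child when no replacement was recorded.
        parent.children.append(replacements.get(id(child), child))

root, dep = Node("root"), Node("dep")
rebuild([(root, dep)], {id(dep): Node("dep-resolved")})
assert root.children[0].name == "dep-resolved"
```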
@@ -101,9 +101,6 @@

SPLIT_KVP = re.compile(rf"^({NAME})(:?==?)(.*)$")

-#: Regex with groups to use for splitting %[virtuals=...] tokens
-SPLIT_COMPILER_TOKEN = re.compile(rf"^%\[virtuals=({VALUE}|{QUOTED_VALUE})]\s*(.*)$")

#: A filename starts either with a "." or a "/" or a "{name}/, or on Windows, a drive letter
#: followed by a colon and "\" or "." or {name}\
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"

@@ -124,9 +121,9 @@ class SpecTokens(TokenBase):
"""

# Dependency
-START_EDGE_PROPERTIES = r"(?:\^\[)"
+START_EDGE_PROPERTIES = r"(?:[\^%]\[)"
END_EDGE_PROPERTIES = r"(?:\])"
-DEPENDENCY = r"(?:\^)"
+DEPENDENCY = r"(?:[\^\%])"
# Version
VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))"
GIT_VERSION = rf"@(?:{GIT_VERSION_PATTERN})"

@@ -136,14 +133,6 @@ class SpecTokens(TokenBase):
BOOL_VARIANT = rf"(?:[~+-]\s*{NAME})"
PROPAGATED_KEY_VALUE_PAIR = rf"(?:{NAME}:?==(?:{VALUE}|{QUOTED_VALUE}))"
KEY_VALUE_PAIR = rf"(?:{NAME}:?=(?:{VALUE}|{QUOTED_VALUE}))"
-# Compilers
-COMPILER_AND_VERSION = rf"(?:%\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
-COMPILER = rf"(?:%\s*(?:{NAME}))"
-COMPILER_AND_VERSION_WITH_VIRTUALS = (
-rf"(?:%\[virtuals=(?:{VALUE}|{QUOTED_VALUE})\]"
-rf"\s*(?:{NAME})(?:[\s]*)@\s*(?:{VERSION_LIST}))"
-)
-COMPILER_WITH_VIRTUALS = rf"(?:%\[virtuals=(?:{VALUE}|{QUOTED_VALUE})\]\s*(?:{NAME}))"
# FILENAME
FILENAME = rf"(?:{FILENAME})"
# Package name

@@ -275,25 +264,58 @@ def next_spec(
def add_dependency(dep, **edge_properties):
"""wrapper around root_spec._add_dependency"""
try:
-root_spec._add_dependency(dep, **edge_properties)
+target_spec._add_dependency(dep, **edge_properties)
except spack.error.SpecError as e:
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e

initial_spec = initial_spec or spack.spec.Spec()
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
+current_spec = root_spec
while True:
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
+is_direct = self.ctx.current_token.value[0] == "%"

edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
-edge_properties.setdefault("depflag", 0)
edge_properties.setdefault("virtuals", ())
+edge_properties["direct"] = is_direct

dependency, warnings = self._parse_node(root_spec)

+if is_direct:
+target_spec = current_spec
+edge_properties.setdefault("depflag", spack.deptypes.BUILD)
+if dependency.name in LEGACY_COMPILER_TO_BUILTIN:
+dependency.name = LEGACY_COMPILER_TO_BUILTIN[dependency.name]

+else:
+current_spec = dependency
+target_spec = root_spec
+edge_properties.setdefault("depflag", 0)

+# print(f"[{current_spec}], {target_spec}->{dependency} {is_direct}")
parser_warnings.extend(warnings)
add_dependency(dependency, **edge_properties)

elif self.ctx.accept(SpecTokens.DEPENDENCY):
+is_direct = self.ctx.current_token.value[0] == "%"
dependency, warnings = self._parse_node(root_spec)
+edge_properties = {}
+edge_properties["direct"] = is_direct
+edge_properties["virtuals"] = tuple()
+if is_direct:
+target_spec = current_spec
+edge_properties.setdefault("depflag", spack.deptypes.BUILD)
+if dependency.name in LEGACY_COMPILER_TO_BUILTIN:
+dependency.name = LEGACY_COMPILER_TO_BUILTIN[dependency.name]
+else:
+current_spec = dependency
+target_spec = root_spec
+edge_properties.setdefault("depflag", 0)

+# print(f"[{current_spec}], {target_spec}->{dependency} {is_direct}")

parser_warnings.extend(warnings)
-add_dependency(dependency, depflag=0, virtuals=())
+add_dependency(dependency, **edge_properties)

else:
break

@@ -384,34 +406,6 @@ def warn_if_after_compiler(token: str):

while True:
if (
-self.ctx.accept(SpecTokens.COMPILER)
-or self.ctx.accept(SpecTokens.COMPILER_AND_VERSION)
-or self.ctx.accept(SpecTokens.COMPILER_WITH_VIRTUALS)
-or self.ctx.accept(SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS)
-):
-current_token = self.ctx.current_token
-if current_token.kind in (
-SpecTokens.COMPILER_WITH_VIRTUALS,
-SpecTokens.COMPILER_AND_VERSION_WITH_VIRTUALS,
-):
-m = SPLIT_COMPILER_TOKEN.match(current_token.value)
-assert m, "SPLIT_COMPILER_TOKEN and COMPILER_* do not agree."
-virtuals_str, compiler_str = m.groups()
-virtuals = tuple(virtuals_str.strip("'\" ").split(","))
-else:
-virtuals = tuple()
-compiler_str = current_token.value[1:]

-build_dependency = spack.spec.Spec(compiler_str)
-if build_dependency.name in LEGACY_COMPILER_TO_BUILTIN:
-build_dependency.name = LEGACY_COMPILER_TO_BUILTIN[build_dependency.name]

-initial_spec._add_dependency(
-build_dependency, depflag=spack.deptypes.BUILD, virtuals=virtuals, direct=True
-)
-last_compiler = self.ctx.current_token.value

elif (
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
or self.ctx.accept(SpecTokens.GIT_VERSION)
or self.ctx.accept(SpecTokens.VERSION)
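The token changes above widen the dependency sigils: `DEPENDENCY` now matches both `^` (transitive) and `%` (direct build dependency), and `START_EDGE_PROPERTIES` accepts `%[` as well as `^[`. A quick self-contained check of exactly those two patterns:

```python
import re

DEPENDENCY = re.compile(r"(?:[\^\%])")
START_EDGE_PROPERTIES = re.compile(r"(?:[\^%]\[)")

# Both sigils now start a dependency token...
assert DEPENDENCY.match("^mpich")
assert DEPENDENCY.match("%gcc")
# ...and both can open an edge-properties block.
assert START_EDGE_PROPERTIES.match("^[virtuals=mpi] mpich")
assert START_EDGE_PROPERTIES.match("%[virtuals=c] gcc")
```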
@@ -3,10 +3,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform

+import _vendoring.archspec.cpu
import pytest

-import archspec.cpu

import spack.concretize
import spack.operating_systems
import spack.platforms

@@ -125,7 +124,8 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
)
@pytest.mark.usefixtures("mock_packages", "config")
@pytest.mark.skipif(
-str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
+str(_vendoring.archspec.cpu.host().family) != "x86_64",
+reason="tests are for x86_64 uarch ranges",
)
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
spec = spack.concretize.concretize_one(
@@ -28,9 +28,15 @@
(["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has a stand-alone test method in build-time callbacks
(["fail-test-audit"], ["PKG-PROPERTIES"]),
-# This package implements and uses several deprecated stand-alone
-# test methods
-(["fail-test-audit-deprecated"], ["PKG-DEPRECATED-ATTRIBUTES"]),
+# This package implements and uses several deprecated stand-alone test methods
+pytest.param(
+["fail-test-audit-deprecated"],
+["PKG-DEPRECATED-ATTRIBUTES"],
+marks=pytest.mark.xfail(
+reason="inspect.getsource() reads the source file, "
+"which misses an injected import line"
+),
+),
# This package has stand-alone test methods without non-trivial docstrings
(["fail-test-audit-docstring"], ["PKG-PROPERTIES"]),
# This package has a stand-alone test method without an implementation
@@ -241,13 +241,13 @@ def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
uninstall_cmd("-y", "--dependents", gspec.name)

# Test installing from build caches
-buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
+buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)

# This gives warning that spec is already installed
-buildcache_cmd("install", "-u", cspec.name)
+buildcache_cmd("install", "-uo", cspec.name)

# Test overwrite install
-buildcache_cmd("install", "-fu", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)

buildcache_cmd("keys", "-f")
buildcache_cmd("list")

@@ -273,10 +273,10 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):

# Install some packages with dependent packages
# test install in non-default install path scheme
-buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
+buildcache_cmd("install", "-uo", cspec.name, sy_spec.name)

# Test force install in non-default install path scheme
-buildcache_cmd("install", "-uf", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)


@pytest.mark.requires_executables(*required_executables)

@@ -298,19 +298,19 @@ def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
cspec = spack.concretize.concretize_one("corge")

# Install buildcache created with relativized rpaths
-buildcache_cmd("install", "-uf", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)

# This gives warning that spec is already installed
-buildcache_cmd("install", "-uf", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)

# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)

# Install build cache
-buildcache_cmd("install", "-uf", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)

# Test overwrite install
-buildcache_cmd("install", "-uf", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)


@pytest.mark.requires_executables(*required_executables)

@@ -327,7 +327,7 @@ def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
cspec = spack.concretize.concretize_one("corge")

# Test install in non-default install path scheme and relative path
-buildcache_cmd("install", "-uf", cspec.name)
+buildcache_cmd("install", "-ufo", cspec.name)


def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
@@ -10,6 +10,7 @@
import spack.compilers.config
import spack.config
import spack.environment
+import spack.paths
import spack.store
import spack.util.path

@@ -131,7 +132,9 @@ def test_bootstrap_disables_modulefile_generation(mutable_config):

@pytest.mark.regression("25992")
@pytest.mark.requires_executables("gcc")
-def test_bootstrap_search_for_compilers_with_no_environment(no_packages_yaml):
+def test_bootstrap_search_for_compilers_with_no_environment(no_packages_yaml, monkeypatch):
+monkeypatch.setattr(spack.paths, "packages_path", spack.paths.mock_packages_path)

assert not spack.compilers.config.all_compilers(init_config=False)
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.compilers.config.all_compilers(init_config=False)

@@ -141,8 +144,10 @@ def test_bootstrap_search_for_compilers_with_no_environment(no_packages_yaml):
@pytest.mark.regression("25992")
@pytest.mark.requires_executables("gcc")
def test_bootstrap_search_for_compilers_with_environment_active(
-no_packages_yaml, active_mock_environment
+no_packages_yaml, active_mock_environment, monkeypatch
):
+monkeypatch.setattr(spack.paths, "packages_path", spack.paths.mock_packages_path)

assert not spack.compilers.config.all_compilers(init_config=False)
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.compilers.config.all_compilers(init_config=False)
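The pattern recurring in these test changes is pytest's `monkeypatch` fixture, which patches attributes and environment variables and automatically restores them at teardown. A minimal runnable illustration (the `some_module` namespace is a stand-in for a real module such as `spack.paths`):

```python
import types

some_module = types.SimpleNamespace(packages_path="/real/packages")  # stand-in

def test_tool_on_path(monkeypatch, tmp_path):
    # Prepend to PATH rather than clobbering it; undone after the test.
    monkeypatch.setenv("PATH", str(tmp_path), prepend=":")
    # Redirect an attribute for the duration of the test only.
    monkeypatch.setattr(some_module, "packages_path", "/mock/packages")
    assert some_module.packages_path == "/mock/packages"
```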
@@ -8,10 +8,9 @@
import sys
from typing import Dict, Optional, Tuple

+import _vendoring.archspec.cpu
import pytest

-import archspec.cpu

from llnl.path import Path, convert_to_platform_path
from llnl.util.filesystem import HeaderList, LibraryList

@@ -691,36 +690,6 @@ def test_clear_compiler_related_runtime_variables_of_build_deps(default_mock_con
assert result["ANOTHER_VAR"] == "this-should-be-present"


-@pytest.mark.parametrize("context", [Context.BUILD, Context.RUN])
-def test_build_system_globals_only_set_on_root_during_build(default_mock_concretization, context):
-"""Test whether when setting up a build environment, the build related globals are set only
-in the top level spec.

-TODO: Since module instances are globals themselves, and Spack defines properties on them, they
-persist across tests. In principle this is not terrible, cause the variables are mostly static.
-But obviously it can lead to very hard to find bugs... We should get rid of those globals and
-define them instead as a property on the package instance.
-"""
-root = spack.concretize.concretize_one("mpileaks")
-build_variables = ("std_cmake_args", "std_meson_args", "std_pip_args")

-# See todo above, we clear out any properties that may have been set by the previous test.
-# Commenting this loop will make the test fail. I'm leaving it here as a reminder that those
-# globals were always a bad idea, and we should pass them to the package instance.
-for spec in root.traverse():
-for variable in build_variables:
-spec.package.module.__dict__.pop(variable, None)

-spack.build_environment.SetupContext(root, context=context).set_all_package_py_globals()

-# Excpect the globals to be set at the root in a build context only.
-should_be_set = lambda depth: context == Context.BUILD and depth == 0

-for depth, spec in root.traverse(depth=True, root=True):
-for variable in build_variables:
-assert hasattr(spec.package.module, variable) == should_be_set(depth)


def test_rpath_with_duplicate_link_deps():
"""If we have two instances of one package in the same link sub-dag, only the newest version is
rpath'ed. This is for runtime support without splicing."""

@@ -757,14 +726,15 @@ def test_rpath_with_duplicate_link_deps():
@pytest.mark.filterwarnings("ignore:microarchitecture specific")
@pytest.mark.not_on_windows("Windows doesn't support the compiler wrapper")
def test_optimization_flags(compiler_spec, target_name, expected_flags, compiler_factory):
-target = archspec.cpu.TARGETS[target_name]
+target = _vendoring.archspec.cpu.TARGETS[target_name]
compiler = spack.spec.parse_with_version_concrete(compiler_spec)
opt_flags = spack.build_environment.optimization_flags(compiler, target)
assert opt_flags == expected_flags


@pytest.mark.skipif(
-str(archspec.cpu.host().family) != "x86_64", reason="tests check specific x86_64 uarch flags"
+str(_vendoring.archspec.cpu.host().family) != "x86_64",
+reason="tests check specific x86_64 uarch flags",
)
@pytest.mark.not_on_windows("Windows doesn't support the compiler wrapper")
def test_optimization_flags_are_using_node_target(default_mock_concretization, monkeypatch):
@@ -5,15 +5,13 @@
import glob
import os

+import _vendoring.archspec.cpu
import py.path
import pytest

-import archspec.cpu

import llnl.util.filesystem as fs

-import spack.build_systems.autotools
-import spack.build_systems.cmake
+import spack
import spack.builder
import spack.concretize
import spack.environment

@@ -28,6 +26,8 @@

DATA_PATH = os.path.join(spack.paths.test_path, "data")

+pytestmark = pytest.mark.skip(reason="build_systems module is moved out of spack")


@pytest.fixture()
def concretize_and_setup(default_mock_concretization, monkeypatch):

@@ -216,7 +216,8 @@ def test_autotools_gnuconfig_replacement_disabled(self, mutable_database):

@pytest.mark.disable_clean_stage_check
@pytest.mark.skipif(
-str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
+str(_vendoring.archspec.cpu.host().family) != "x86_64",
+reason="test data is specific for x86_64",
)
def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database, monkeypatch):
"""
@@ -15,7 +15,7 @@

@pytest.fixture()
def builder_test_repository(config):
-builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
+builder_test_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "builder_test")
with spack.repo.use_repositories(builder_test_path) as mock_repo:
yield mock_repo
@@ -549,11 +549,10 @@ def test_url_buildcache_entry_v2_exists(
):
"""Test existence check for v2 buildcache entries"""
test_mirror_path = v2_buildcache_layout("unsigned")
-mirror_url = f"file://{test_mirror_path}"
+mirror_url = pathlib.Path(test_mirror_path).as_uri()
mirror("add", "v2mirror", mirror_url)

-with capsys.disabled():
-output = buildcache("list", "-a", "-l")
+output = buildcache("list", "-a", "-l")

assert "Fetching an index from a v2 binary mirror layout" in output
assert "is deprecated" in output
@@ -15,6 +15,8 @@

compiler = spack.main.SpackCommand("compiler")

+pytestmark = [pytest.mark.usefixtures("mock_packages")]


@pytest.fixture
def compilers_dir(mock_executable):

@@ -80,7 +82,7 @@ def test_compiler_find_without_paths(no_packages_yaml, working_env, mock_executa


@pytest.mark.regression("37996")
-def test_compiler_remove(mutable_config, mock_packages):
+def test_compiler_remove(mutable_config):
"""Tests that we can remove a compiler from configuration."""
assert any(
compiler.satisfies("gcc@=9.4.0") for compiler in spack.compilers.config.all_compilers()

@@ -93,7 +95,7 @@ def test_compiler_remove(mutable_config, mock_packages):

@pytest.mark.regression("37996")
-def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
+def test_removing_compilers_from_multiple_scopes(mutable_config):
# Duplicate "site" scope into "user" scope
site_config = spack.config.get("packages", scope="site")
spack.config.set("packages", site_config, scope="user")

@@ -189,12 +191,12 @@ def test_compiler_find_path_order(no_packages_yaml, working_env, compilers_dir):
}


-def test_compiler_list_empty(no_packages_yaml, working_env, compilers_dir):
+def test_compiler_list_empty(no_packages_yaml, compilers_dir, monkeypatch):
"""Spack should not automatically search for compilers when listing them and none are
available. And when stdout is not a tty like in tests, there should be no output and
no error exit code.
"""
-os.environ["PATH"] = str(compilers_dir)
+monkeypatch.setenv("PATH", str(compilers_dir), prepend=":")
out = compiler("list")
assert not out
assert compiler.returncode == 0
@@ -2,134 +2,39 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import pytest

import spack.cmd.diff
import spack.concretize
import spack.main
import spack.paths
import spack.repo
import spack.util.spack_json as sjson
-from spack.test.conftest import create_test_repo

install_cmd = spack.main.SpackCommand("install")
diff_cmd = spack.main.SpackCommand("diff")
find_cmd = spack.main.SpackCommand("find")


_p1 = (
"p1",
"""\
from spack.package import *

class P1(Package):
version("1.0")

variant("p1var", default=True)
variant("usev1", default=True)

depends_on("p2")
depends_on("v1", when="+usev1")
""",
)


_p2 = (
"p2",
"""\
from spack.package import *

class P2(Package):
version("1.0")

variant("p2var", default=True)

depends_on("p3")
""",
)


_p3 = (
"p3",
"""\
from spack.package import *

class P3(Package):
version("1.0")

variant("p3var", default=True)
""",
)

_i1 = (
"i1",
"""\
from spack.package import *

class I1(Package):
version("1.0")

provides("v1")

variant("i1var", default=True)

depends_on("p3")
depends_on("p4")
""",
)

_i2 = (
"i2",
"""\
from spack.package import *

class I2(Package):
version("1.0")

provides("v1")

variant("i2var", default=True)

depends_on("p3")
depends_on("p4")
""",
)


_p4 = (
"p4",
"""\
from spack.package import *

class P4(Package):
version("1.0")

variant("p4var", default=True)
""",
)


# Note that the hash of p1 will differ depending on the variant chosen
# we probably always want to omit that from diffs
@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
"""
-p1____
-| \
-p2 v1
-| ____/ |
-p3 p4
+# p1____
+# | \
+# p2 v1
+# | ____/ |
+# p3 p4

-i1 and i2 provide v1 (and both have the same dependencies)
+# i1 and i2 provide v1 (and both have the same dependencies)

-All packages have an associated variant
-"""
-yield create_test_repo(tmpdir, [_p1, _p2, _p3, _i1, _i2, _p4])
+# All packages have an associated variant


@pytest.fixture
-def test_repo(_create_test_repo, monkeypatch, mock_stage):
-with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
-yield mock_repo_path
+def test_repo(config):
+builder_test_path = os.path.join(spack.paths.test_repos_path, "spack_repo", "diff")
+with spack.repo.use_repositories(builder_test_path) as mock_repo:
+yield mock_repo


def test_diff_ignore(test_repo):
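The fixture pattern introduced here (load a pre-built mock repository from disk instead of generating packages inline with `create_test_repo`) recurs across these test files. Mirroring the fixture above as a standalone sketch:

```python
import os

import pytest

import spack.paths
import spack.repo

@pytest.fixture
def test_repo(config):
    # Point Spack at the on-disk mock repository for the duration of a test;
    # use_repositories restores the previous repository path on exit.
    path = os.path.join(spack.paths.test_repos_path, "spack_repo", "diff")
    with spack.repo.use_repositories(path) as mock_repo:
        yield mock_repo
```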
@@ -6,7 +6,6 @@

import spack.repo
import spack.util.editor
-from spack.build_systems import autotools, cmake
from spack.main import SpackCommand

edit = SpackCommand("edit")

@@ -29,13 +28,15 @@ def editor(*args: str, **kwargs):
assert called


-def test_edit_files(monkeypatch):
+def test_edit_files(monkeypatch, mock_packages):
"""Test spack edit --build-system autotools cmake"""
called = False

def editor(*args: str, **kwargs):
nonlocal called
called = True
+from spack_repo.builtin_mock.build_systems import autotools, cmake  # type: ignore

assert os.path.samefile(args[0], autotools.__file__)
assert os.path.samefile(args[1], cmake.__file__)
@@ -886,12 +886,12 @@ def test_env_activate_broken_view(
with spack.repo.use_repositories(mock_custom_repository):
wrong_repo = env("activate", "--sh", "test")
assert "Warning: could not load runtime environment" in wrong_repo
-assert "Unknown namespace: builtin.mock" in wrong_repo
+assert "Unknown namespace: builtin_mock" in wrong_repo

# test replacing repo fixes it
normal_repo = env("activate", "--sh", "test")
assert "Warning: could not load runtime environment" not in normal_repo
-assert "Unknown namespace: builtin.mock" not in normal_repo
+assert "Unknown namespace: builtin_mock" not in normal_repo


def test_to_lockfile_dict():

@@ -916,7 +916,7 @@ def test_env_repo():

pkg_cls = e.repo.get_pkg_class("mpileaks")
assert pkg_cls.name == "mpileaks"
-assert pkg_cls.namespace == "builtin.mock"
+assert pkg_cls.namespace == "builtin_mock"


def test_user_removed_spec(environment_from_manifest):

@@ -4286,7 +4286,7 @@ def test_env_include_packages_url(
"""Test inclusion of a (GitHub) URL."""
develop_url = "https://github.com/fake/fake/blob/develop/"
default_packages = develop_url + "etc/fake/defaults/packages.yaml"
-sha256 = "8b69d9c6e983dfb8bac2ddc3910a86265cffdd9c85f905c716d426ec5b0d9847"
+sha256 = "6a1b26c857ca7e5bcd7342092e2f218da43d64b78bd72771f603027ea3c8b4af"
spack_yaml = tmpdir.join("spack.yaml")
with spack_yaml.open("w") as f:
f.write(
@@ -18,6 +18,8 @@
from spack.main import SpackCommand
from spack.spec import Spec

+pytestmark = [pytest.mark.usefixtures("mock_packages")]


@pytest.fixture
def executables_found(monkeypatch):

@@ -36,40 +38,6 @@ def define_plat_exe(exe):
return exe


-def test_find_external_single_package(mock_executable):
-cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
-search_dir = cmake_path.parent.parent

-specs_by_package = spack.detection.by_path(["cmake"], path_hints=[str(search_dir)])

-assert len(specs_by_package) == 1 and "cmake" in specs_by_package
-detected_spec = specs_by_package["cmake"]
-assert len(detected_spec) == 1 and detected_spec[0] == Spec("cmake@1.foo")


-def test_find_external_two_instances_same_package(mock_executable):
-# Each of these cmake instances is created in a different prefix
-# In Windows, quoted strings are echo'd with quotes includes
-# we need to avoid that for proper regex.
-cmake1 = mock_executable("cmake", output="echo cmake version 1.foo", subdir=("base1", "bin"))
-cmake2 = mock_executable("cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin"))
-search_paths = [str(cmake1.parent.parent), str(cmake2.parent.parent)]

-finder = spack.detection.path.ExecutablesFinder()
-detected_specs = finder.find(
-pkg_name="cmake", initial_guess=search_paths, repository=spack.repo.PATH
-)

-assert len(detected_specs) == 2
-spec_to_path = {s: s.external_path for s in detected_specs}
-assert spec_to_path[Spec("cmake@1.foo")] == (
-spack.detection.executable_prefix(str(cmake1.parent))
-), spec_to_path
-assert spec_to_path[Spec("cmake@3.17.2")] == (
-spack.detection.executable_prefix(str(cmake2.parent))
-)


def test_find_external_update_config(mutable_config):
entries = [
Spec.from_detection("cmake@1.foo", external_path="/x/y1"),

@@ -101,13 +69,24 @@ def test_get_executables(working_env, mock_executable):
# TODO: this test should be made to work, but in the meantime it is
# causing intermittent (spurious) CI failures on all PRs
@pytest.mark.not_on_windows("Test fails intermittently on Windows")
-def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
+def test_find_external_cmd_not_buildable(
+mutable_config, working_env, mock_executable, monkeypatch
+):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
not buildable.
"""
-cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
-os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
+version = "1.foo"

+@classmethod
+def _determine_version(cls, exe):
+return version

+cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
+monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)

+cmake_path = mock_executable("cmake", output=f"echo cmake version {version}")
+os.environ["PATH"] = str(cmake_path.parent)
external("find", "--not-buildable", "cmake")
pkgs_cfg = spack.config.get("packages")
assert "cmake" in pkgs_cfg

@@ -123,37 +102,51 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
["detectable"],
[],
[
-"builtin.mock.find-externals1",
-"builtin.mock.gcc",
-"builtin.mock.llvm",
-"builtin.mock.intel-oneapi-compilers",
+"builtin_mock.cmake",
+"builtin_mock.find-externals1",
+"builtin_mock.gcc",
+"builtin_mock.intel-oneapi-compilers",
+"builtin_mock.llvm",
+"builtin_mock.mpich",
],
),
# find --all --exclude find-externals1
(
None,
["detectable"],
-["builtin.mock.find-externals1"],
-["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
+["builtin_mock.find-externals1"],
+[
+"builtin_mock.cmake",
+"builtin_mock.gcc",
+"builtin_mock.intel-oneapi-compilers",
+"builtin_mock.llvm",
+"builtin_mock.mpich",
+],
),
(
None,
["detectable"],
["find-externals1"],
-["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
+[
+"builtin_mock.cmake",
+"builtin_mock.gcc",
+"builtin_mock.intel-oneapi-compilers",
+"builtin_mock.llvm",
+"builtin_mock.mpich",
+],
),
# find cmake (and cmake is not detectable)
(["cmake"], ["detectable"], [], []),
+# find hwloc (and mock hwloc is not detectable)
+(["hwloc"], ["detectable"], [], []),
],
)
-def test_package_selection(names, tags, exclude, expected, mutable_mock_repo):
+def test_package_selection(names, tags, exclude, expected):
"""Tests various cases of selecting packages"""
# In the mock repo we only have 'find-externals1' that is detectable
result = spack.cmd.external.packages_to_search_for(names=names, tags=tags, exclude=exclude)
assert set(result) == set(expected)


-def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_repo, monkeypatch):
+def test_find_external_no_manifest(mutable_config, working_env, monkeypatch):
"""The user runs 'spack external find'; the default path for storing
manifest files does not exist. Ensure that the command does not
fail.

@@ -166,7 +159,7 @@ def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_rep


def test_find_external_empty_default_manifest_dir(
-mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
+mutable_config, working_env, tmpdir, monkeypatch
):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but is empty. Ensure that the command does not

@@ -181,7 +174,7 @@ def test_find_external_empty_default_manifest_dir(
@pytest.mark.not_on_windows("Can't chmod on Windows")
@pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_find_external_manifest_with_bad_permissions(
-mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
+mutable_config, working_env, tmpdir, monkeypatch
):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but with insufficient permissions. Check that

@@ -201,7 +194,7 @@ def test_find_external_manifest_with_bad_permissions(
os.chmod(test_manifest_file_path, 0o700)


-def test_find_external_manifest_failure(mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
+def test_find_external_manifest_failure(mutable_config, tmpdir, monkeypatch):
"""The user runs 'spack external find'; the manifest parsing fails with
some exception. Ensure that the command still succeeds (i.e. moves on
to other external detection mechanisms).

@@ -221,7 +214,7 @@ def fail():
assert "Skipping manifest and continuing" in output


-def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
+def test_find_external_merge(mutable_config, tmp_path):
"""Checks that 'spack find external' doesn't overwrite an existing spec in packages.yaml."""
pkgs_cfg_init = {
"find-externals1": {

@@ -247,7 +240,7 @@ def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
assert {"spec": "find-externals1@1.2", "prefix": "/x/y2"} in pkg_externals


-def test_list_detectable_packages(mutable_config, mutable_mock_repo):
+def test_list_detectable_packages(mutable_config):
external("list")
assert external.returncode == 0

@@ -293,13 +286,23 @@ def test_new_entries_are_reported_correctly(mock_executable, mutable_config, mon


@pytest.mark.parametrize("command_args", [("-t", "build-tools"), ("-t", "build-tools", "cmake")])
@pytest.mark.not_on_windows("the test uses bash scripts")
def test_use_tags_for_detection(command_args, mock_executable, mutable_config, monkeypatch):
+versions = {"cmake": "3.19.1", "openssl": "2.8.3"}

+@classmethod
+def _determine_version(cls, exe):
+return versions[os.path.basename(exe)]

+cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
+monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)

# Prepare an environment to detect a fake cmake
-cmake_exe = mock_executable("cmake", output="echo cmake version 3.19.1")
+cmake_exe = mock_executable("cmake", output=f"echo cmake version {versions['cmake']}")
prefix = os.path.dirname(cmake_exe)
monkeypatch.setenv("PATH", prefix)

-openssl_exe = mock_executable("openssl", output="OpenSSL 2.8.3")
+openssl_exe = mock_executable("openssl", output=f"OpenSSL {versions['openssl']}")
prefix = os.path.dirname(openssl_exe)
monkeypatch.setenv("PATH", prefix)

@@ -316,6 +319,16 @@ def test_failures_in_scanning_do_not_result_in_an_error(
mock_executable, monkeypatch, mutable_config
):
"""Tests that scanning paths with wrong permissions, won't cause `external find` to error."""
+versions = {"first": "3.19.1", "second": "3.23.3"}

+@classmethod
+def _determine_version(cls, exe):
+bin_parent = os.path.dirname(exe).split(os.sep)[-2]
+return versions[bin_parent]

+cmake_cls = spack.repo.PATH.get_pkg_class("cmake")
+monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)

cmake_exe1 = mock_executable(
"cmake", output="echo cmake version 3.19.1", subdir=("first", "bin")
)

@@ -333,21 +346,30 @@ def test_failures_in_scanning_do_not_result_in_an_error(
assert external.returncode == 0
assert "The following specs have been" in output
assert "cmake" in output
-assert "3.23.3" in output
-assert "3.19.1" not in output
+for vers in versions.values():
+assert vers in output


def test_detect_virtuals(mock_executable, mutable_config, monkeypatch):
"""Test whether external find --not-buildable sets virtuals as non-buildable (unless user
config sets them to buildable)"""
-mpich = mock_executable("mpichversion", output="echo MPICH Version: 4.0.2")
+version = "4.0.2"

+@classmethod
+def _determine_version(cls, exe):
+return version

+cmake_cls = spack.repo.PATH.get_pkg_class("mpich")
+monkeypatch.setattr(cmake_cls, "determine_version", _determine_version)

+mpich = mock_executable("mpichversion", output=f"echo MPICH Version: {version}")
prefix = os.path.dirname(mpich)
external("find", "--path", prefix, "--not-buildable", "mpich")

# Check that mpich was correctly detected
mpich = mutable_config.get("packages:mpich")
assert mpich["buildable"] is False
-assert Spec(mpich["externals"][0]["spec"]).satisfies("mpich@4.0.2")
+assert Spec(mpich["externals"][0]["spec"]).satisfies(f"mpich@{version}")

# Check that the virtual package mpi was marked as non-buildable
assert mutable_config.get("packages:mpi:buildable") is False
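A pattern repeated throughout these hunks is pinning a package's detection logic by monkeypatching its `determine_version` classmethod, so the reported version is deterministic regardless of what the mock executable prints. Factored into a hedged helper (the function name is hypothetical; the calls match the tests above):

```python
import spack.repo

def pin_detected_version(monkeypatch, pkg_name: str, version: str) -> None:
    # Replace the package class's detection hook with one that always
    # reports the given version; monkeypatch undoes this at teardown.
    @classmethod
    def _determine_version(cls, exe):
        return version

    pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
    monkeypatch.setattr(pkg_cls, "determine_version", _determine_version)
```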
@@ -14,12 +14,12 @@
import spack.cmd.find
import spack.concretize
import spack.environment as ev
+import spack.paths
import spack.repo
import spack.store
import spack.user_environment as uenv
from spack.enums import InstallRecordStatus
from spack.main import SpackCommand
-from spack.test.conftest import create_test_repo
from spack.test.utilities import SpackCommandArgs
from spack.util.pattern import Bunch

@@ -129,7 +129,7 @@ def test_tag2_tag3(parser, specs):
@pytest.mark.db
def test_namespaces_shown_correctly(args, with_namespace, database):
"""Test that --namespace(s) works. Old syntax is --namespace"""
-assert ("builtin.mock.zmpi" in find(*args)) == with_namespace
+assert ("builtin_mock.zmpi" in find(*args)) == with_namespace


@pytest.mark.db

@@ -462,89 +462,16 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path, mock_p
assert "zlib" in output


_pkga = (
"a0",
"""\
from spack.package import *

class A0(Package):
version("1.2")
version("1.1")

depends_on("b0")
depends_on("c0")
""",
)


_pkgb = (
"b0",
"""\
from spack.package import *

class B0(Package):
version("1.2")
version("1.1")
""",
)


_pkgc = (
"c0",
"""\
from spack.package import *

class C0(Package):
version("1.2")
version("1.1")

tags = ["tag0", "tag1"]
""",
)


_pkgd = (
"d0",
"""\
from spack.package import *

class D0(Package):
version("1.2")
version("1.1")

depends_on("c0")
depends_on("e0")
""",
)


_pkge = (
"e0",
"""\
from spack.package import *

class E0(Package):
tags = ["tag1", "tag2"]

version("1.2")
version("1.1")
""",
)
+# a0 d0
+# / \ / \
+# b0 c0 e0


@pytest.fixture
-def _create_test_repo(tmpdir, mutable_config):
-r"""
-a0 d0
-/ \ / \
-b0 c0 e0
-"""
-yield create_test_repo(tmpdir, [_pkga, _pkgb, _pkgc, _pkgd, _pkge])


-@pytest.fixture
-def test_repo(_create_test_repo, monkeypatch, mock_stage):
-with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
+def test_repo(mock_stage):
+with spack.repo.use_repositories(
+os.path.join(spack.paths.test_repos_path, "spack_repo", "find")
+) as mock_repo_path:
yield mock_repo_path
@@ -143,13 +143,13 @@ def test_list_count():

def test_list_repos():
with spack.repo.use_repositories(
-os.path.join(spack.paths.test_repos_path, "builtin.mock"),
-os.path.join(spack.paths.test_repos_path, "builder.test"),
+os.path.join(spack.paths.test_repos_path, "spack_repo", "builtin_mock"),
+os.path.join(spack.paths.test_repos_path, "spack_repo", "builder_test"),
):
total_pkgs = len(list().strip().split())
-mock_pkgs = len(list("-r", "builtin.mock").strip().split())
-builder_pkgs = len(list("-r", "builder.test").strip().split())
-both_repos = len(list("-r", "builtin.mock", "-r", "builder.test").strip().split())
+mock_pkgs = len(list("-r", "builtin_mock").strip().split())
+builder_pkgs = len(list("-r", "builder_test").strip().split())
+both_repos = len(list("-r", "builtin_mock", "-r", "builder_test").strip().split())

assert total_pkgs > mock_pkgs > builder_pkgs
assert both_repos == total_pkgs
@@ -61,6 +61,26 @@ def test_mirror_from_env(mutable_mock_env_path, tmp_path, mock_packages, mock_fe
assert mirror_res == expected


+# Test for command line-specified spec in concretized environment
+def test_mirror_spec_from_env(mutable_mock_env_path, tmp_path, mock_packages, mock_fetch):
+mirror_dir = str(tmp_path / "mirror-B")
+env_name = "test"

+env("create", env_name)
+with ev.read(env_name):
+add("simple-standalone-test@0.9")
+concretize()
+with spack.config.override("config:checksum", False):
+mirror("create", "-d", mirror_dir, "simple-standalone-test")

+e = ev.read(env_name)
+assert set(os.listdir(mirror_dir)) == set([s.name for s in e.user_specs])
+spec = e.concrete_roots()[0]
+mirror_res = os.listdir(os.path.join(mirror_dir, spec.name))
+expected = ["%s.tar.gz" % spec.format("{name}-{version}")]
+assert mirror_res == expected


@pytest.fixture
def source_for_pkg_with_hash(mock_packages, tmpdir):
s = spack.concretize.concretize_one("trivial-pkg-with-valid-hash")

@@ -401,8 +421,7 @@ def test_all_specs_with_all_versions_dont_concretize(self):
@pytest.mark.parametrize(
"cli_args,error_str",
[
# Passed more than one among -f --all and specs
({"specs": "hdf5", "file": None, "all": True}, "cannot specify specs on command line"),
# Passed more than one among -f --all
(
{"specs": None, "file": "input.txt", "all": True},
"cannot specify specs with a file if",
@@ -39,7 +39,9 @@ def install(self, spec, prefix):
def mock_pkg_git_repo(git, tmp_path_factory):
"""Copy the builtin.mock repo and make a mutable git repo inside it."""
root_dir = tmp_path_factory.mktemp("mock_pkg_git_repo")
-repo_dir = root_dir / "builtin.mock"
+# create spack_repo subdir
+(root_dir / "spack_repo").mkdir()
+repo_dir = root_dir / "spack_repo" / "builtin_mock"
shutil.copytree(spack.paths.mock_packages_path, str(repo_dir))

repo_cache = spack.util.file_cache.FileCache(root_dir / "cache")

@@ -57,25 +59,25 @@ def mock_pkg_git_repo(git, tmp_path_factory):
git("-c", "commit.gpgsign=false", "commit", "-m", "initial mock repo commit")

# add commit with mockpkg-a, mockpkg-b, mockpkg-c packages
-mkdirp("mockpkg-a", "mockpkg-b", "mockpkg-c")
-with open("mockpkg-a/package.py", "w", encoding="utf-8") as f:
+mkdirp("mockpkg_a", "mockpkg_b", "mockpkg_c")
+with open("mockpkg_a/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgA"))
-with open("mockpkg-b/package.py", "w", encoding="utf-8") as f:
+with open("mockpkg_b/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgB"))
-with open("mockpkg-c/package.py", "w", encoding="utf-8") as f:
+with open("mockpkg_c/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgC"))
-git("add", "mockpkg-a", "mockpkg-b", "mockpkg-c")
+git("add", "mockpkg_a", "mockpkg_b", "mockpkg_c")
git("-c", "commit.gpgsign=false", "commit", "-m", "add mockpkg-a, mockpkg-b, mockpkg-c")

# remove mockpkg-c, add mockpkg-d
-with open("mockpkg-b/package.py", "a", encoding="utf-8") as f:
+with open("mockpkg_b/package.py", "a", encoding="utf-8") as f:
f.write("\n# change mockpkg-b")
-git("add", "mockpkg-b")
-mkdirp("mockpkg-d")
-with open("mockpkg-d/package.py", "w", encoding="utf-8") as f:
+git("add", "mockpkg_b")
+mkdirp("mockpkg_d")
+with open("mockpkg_d/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgD"))
-git("add", "mockpkg-d")
-git("rm", "-rf", "mockpkg-c")
+git("add", "mockpkg_d")
+git("rm", "-rf", "mockpkg_c")
git(
"-c",
"commit.gpgsign=false",

@@ -90,7 +92,7 @@ def mock_pkg_git_repo(git, tmp_path_factory):

@pytest.fixture(scope="module")
def mock_pkg_names():
-repo = spack.repo.PATH.get_repo("builtin.mock")
+repo = spack.repo.PATH.get_repo("builtin_mock")

# Be sure to include virtual packages since packages with stand-alone
# tests may inherit additional tests from the virtuals they provide,

@@ -111,27 +113,28 @@ def split(output):
pkg = spack.main.SpackCommand("pkg")


+@pytest.mark.requires_builtin("builtin repository path must exist")
def test_builtin_repo():
assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin")


def test_mock_builtin_repo(mock_packages):
-assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin.mock")
+assert spack.repo.builtin_repo() is spack.repo.PATH.get_repo("builtin_mock")


def test_pkg_add(git, mock_pkg_git_repo):
with working_dir(mock_pkg_git_repo):
-mkdirp("mockpkg-e")
-with open("mockpkg-e/package.py", "w", encoding="utf-8") as f:
+mkdirp("mockpkg_e")
+with open("mockpkg_e/package.py", "w", encoding="utf-8") as f:
f.write(pkg_template.format(name="PkgE"))

pkg("add", "mockpkg-e")

with working_dir(mock_pkg_git_repo):
try:
-assert "A mockpkg-e/package.py" in git("status", "--short", output=str)
+assert "A mockpkg_e/package.py" in git("status", "--short", output=str)
finally:
-shutil.rmtree("mockpkg-e")
+shutil.rmtree("mockpkg_e")
# Removing a package mid-run disrupts Spack's caching
if spack.repo.PATH.repos[0]._fast_package_checker:
spack.repo.PATH.repos[0]._fast_package_checker.invalidate()
@@ -1,15 +1,21 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import io
 import os
+import pathlib

 import pytest

+from llnl.util.filesystem import working_dir
+
 import spack.config
 import spack.environment as ev
 import spack.main
 import spack.repo
+import spack.repo_migrate
 from spack.main import SpackCommand
+from spack.util.executable import Executable

 repo = spack.main.SpackCommand("repo")
 env = SpackCommand("env")
@@ -68,3 +74,101 @@ def test_env_repo_path_vars_substitution(
         with ev.read("test") as newenv:
             repos_specs = spack.config.get("repos", default={}, scope=newenv.scope_name)
             assert current_dir in repos_specs
+
+
+OLD_7ZIP = b"""\
+# some comment
+
+from spack.package import *
+
+class _7zip(Package):
+    pass
+"""
+
+NEW_7ZIP = b"""\
+# some comment
+
+from spack_repo.builtin.build_systems.generic import Package
+from spack.package import *
+
+class _7zip(Package):
+    pass
+"""
+
+OLD_NUMPY = b"""\
+# some comment
+
+from spack.package import *
+
+class PyNumpy(CMakePackage):
+    generator("ninja")
+"""
+
+NEW_NUMPY = b"""\
+# some comment
+
+from spack_repo.builtin.build_systems.cmake import CMakePackage, generator
+from spack.package import *
+
+class PyNumpy(CMakePackage):
+    generator("ninja")
+"""
+
+
+def test_repo_migrate(tmp_path: pathlib.Path, config):
+    old_root, _ = spack.repo.create_repo(str(tmp_path), "org.repo", package_api=(1, 0))
+    pkgs_path = pathlib.Path(spack.repo.from_path(old_root).packages_path)
+    new_root = pathlib.Path(old_root) / "spack_repo" / "org" / "repo"
+
+    pkg_7zip_old = pkgs_path / "7zip" / "package.py"
+    pkg_numpy_old = pkgs_path / "py-numpy" / "package.py"
+    pkg_py_7zip_new = new_root / "packages" / "_7zip" / "package.py"
+    pkg_py_numpy_new = new_root / "packages" / "py_numpy" / "package.py"
+
+    pkg_7zip_old.parent.mkdir(parents=True)
+    pkg_numpy_old.parent.mkdir(parents=True)
+
+    pkg_7zip_old.write_bytes(OLD_7ZIP)
+    pkg_numpy_old.write_bytes(OLD_NUMPY)
+
+    repo("migrate", "--fix", old_root)
+
+    # old files are not touched since they are moved
+    assert pkg_7zip_old.read_bytes() == OLD_7ZIP
+    assert pkg_numpy_old.read_bytes() == OLD_NUMPY
+
+    # new files are created and have updated contents
+    assert pkg_py_7zip_new.read_bytes() == NEW_7ZIP
+    assert pkg_py_numpy_new.read_bytes() == NEW_NUMPY
+
+
+@pytest.mark.not_on_windows("Known failure on windows")
+def test_migrate_diff(git: Executable, tmp_path: pathlib.Path):
+    root, _ = spack.repo.create_repo(str(tmp_path), "foo", package_api=(2, 0))
+    r = pathlib.Path(root)
+    pkg_7zip = r / "packages" / "_7zip" / "package.py"
+    pkg_py_numpy_new = r / "packages" / "py_numpy" / "package.py"
+    pkg_broken = r / "packages" / "broken" / "package.py"
+
+    pkg_7zip.parent.mkdir(parents=True)
+    pkg_py_numpy_new.parent.mkdir(parents=True)
+    pkg_broken.parent.mkdir(parents=True)
+    pkg_7zip.write_bytes(OLD_7ZIP)
+    pkg_py_numpy_new.write_bytes(OLD_NUMPY)
+    pkg_broken.write_bytes(b"syntax(error")
+
+    stderr = io.StringIO()
+
+    with open(tmp_path / "imports.patch", "w", encoding="utf-8") as stdout:
+        spack.repo_migrate.migrate_v2_imports(
+            str(r / "packages"), str(r), fix=False, out=stdout, err=stderr
+        )
+
+    assert f"Skipping {pkg_broken}" in stderr.getvalue()
+
+    # apply the patch and verify the changes
+    with working_dir(str(r)):
+        git("apply", str(tmp_path / "imports.patch"))
+
+    assert pkg_7zip.read_bytes() == NEW_7ZIP
+    assert pkg_py_numpy_new.read_bytes() == NEW_NUMPY
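For context on what these two tests exercise: spack repo migrate --fix rewrites a repository in place, while calling migrate_v2_imports with fix=False only emits a patch that can be applied later with git apply. A rough standalone sketch of the patch-generating workflow, assuming only the call signature shown in the test above (the repo path is illustrative):

import io
import pathlib

import spack.repo_migrate

root = pathlib.Path("/tmp/spack_repo/foo")  # hypothetical v2 repo root
err = io.StringIO()
with open("imports.patch", "w", encoding="utf-8") as out:
    # fix=False writes a unified diff to `out` instead of editing package.py files.
    spack.repo_migrate.migrate_v2_imports(
        str(root / "packages"), str(root), fix=False, out=out, err=err
    )
# Unparseable packages are skipped and reported on `err` rather than aborting
# the migration, as the "broken" package in test_migrate_diff demonstrates.
print(err.getvalue())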
@@ -48,11 +48,13 @@ def test_resource_list(mock_packages, capfd):
     assert "path:" in out

     assert (
-        os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
+        os.path.join(
+            "spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
+        )
         in out
     )
-    assert "applies to: builtin.mock.libelf" in out
-    assert "patched by: builtin.mock.patch-a-dependency" in out
+    assert "applies to: builtin_mock.libelf" in out
+    assert "patched by: builtin_mock.patch-a-dependency" in out


 def test_resource_list_only_hashes(mock_packages, capfd):
@@ -74,10 +76,12 @@ def test_resource_show(mock_packages, capfd):

     assert out.startswith(test_hash)
     assert (
-        os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
+        os.path.join(
+            "spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
+        )
         in out
     )
-    assert "applies to: builtin.mock.libelf" in out
-    assert "patched by: builtin.mock.patch-a-dependency" in out
+    assert "applies to: builtin_mock.libelf" in out
+    assert "patched by: builtin_mock.patch-a-dependency" in out

     assert len(out.strip().split("\n")) == 4
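The updated assertions distinguish two name forms: qualified package names remain "<namespace>.<package>" with the hyphenated package name (builtin_mock.patch-a-dependency), while on-disk paths use underscored directories under spack_repo/. A small illustrative sketch of that distinction (the hyphen-to-underscore conversion here is an assumption for illustration, not Spack's API):

import os

namespace, pkg_name = "builtin_mock", "patch-a-dependency"

# Qualified name as printed by `spack resource list/show`.
qualified = f"{namespace}.{pkg_name}"
# On-disk location of the patch file inside the v2 repo layout.
patch_path = os.path.join(
    "spack_repo", namespace, "packages", pkg_name.replace("-", "_"), "libelf.patch"
)

assert qualified == "builtin_mock.patch-a-dependency"
assert patch_path == os.path.join(
    "spack_repo", "builtin_mock", "packages", "patch_a_dependency", "libelf.patch"
)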
Some files were not shown because too many files have changed in this diff.