Compare commits: remove-dep ... features/r

20 commits:
6bcea7e9f4, 74263b2016, 8f480c5c13, 2129eca951, c920ad14e6,
13a0d3cdfa, c1665567a5, 7b5e468a5b, 5d7cf204f6, 4c3d9ca525,
7a174666e8, 37894de044, b5d9e5da63, fea922e92e, 39914cc5ef,
1a0a4e1237, 5a144722a0, f048fd2a18, ee8954ecec, bf23c83861
.github/workflows/audit.yaml (2 changes, vendored)

@@ -23,7 +23,7 @@ jobs:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
.github/workflows/bootstrap.yml (2 changes, vendored)

@@ -159,7 +159,7 @@ jobs:
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: "3.12"
- name: Bootstrap clingo
.github/workflows/build-containers.yml (2 changes, vendored)

@@ -57,7 +57,7 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2

- uses: docker/metadata-action@31cebacef4805868f9ce9a0cb03ee36c32df2ac4
- uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
id: docker_meta
with:
images: |
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
.github/workflows/style/requirements.txt (2 changes, vendored)

@@ -2,6 +2,6 @@ black==23.11.0
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.7.1
mypy==1.6.1
types-six==1.16.21.9
vermin==1.6.0
.github/workflows/unit_tests.yaml (8 changes, vendored)

@@ -54,7 +54,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages

@@ -101,7 +101,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages

@@ -159,7 +159,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages

@@ -194,7 +194,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
.github/workflows/valid-style.yml (4 changes, vendored)

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'

@@ -38,7 +38,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'
.github/workflows/windows_python.yml (6 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages

@@ -42,7 +42,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages

@@ -66,7 +66,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
@@ -66,11 +66,10 @@ Resources:
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
not just for discussions, but also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.

Contributing
------------------------
@@ -82,7 +82,7 @@ class already contains:

.. code-block:: python

depends_on("cmake", type="build")
depends_on('cmake', type='build')

If you need to specify a particular version requirement, you can

@@ -90,7 +90,7 @@ override this in your package:

.. code-block:: python

depends_on("cmake@2.8.12:", type="build")
depends_on('cmake@2.8.12:', type='build')

^^^^^^^^^^^^^^^^^^^

@@ -137,10 +137,10 @@ and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and

def cmake_args(self):
args = [
"-DWHATEVER:STRING=somevalue",
self.define("ENABLE_BROKEN_FEATURE", False),
self.define_from_variant("DETECT_HDF5", "hdf5"),
self.define_from_variant("THREADS"), # True if +threads
'-DWHATEVER:STRING=somevalue',
self.define('ENABLE_BROKEN_FEATURE', False),
self.define_from_variant('DETECT_HDF5', 'hdf5'),
self.define_from_variant('THREADS'), # True if +threads
]

return args

@@ -151,10 +151,10 @@ and CMake simply ignores the empty command line argument. For example the follow

.. code-block:: python

variant("example", default=True, when="@2.0:")
variant('example', default=True, when='@2.0:')

def cmake_args(self):
return [self.define_from_variant("EXAMPLE", "example")]
return [self.define_from_variant('EXAMPLE', 'example')]

will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.

@@ -193,9 +193,9 @@ a variant to control this:

.. code-block:: python

variant("build_type", default="RelWithDebInfo",
description="CMake build type",
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"))
variant('build_type', default='RelWithDebInfo',
description='CMake build type',
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are

@@ -205,9 +205,9 @@ package overrides the default variant with:

.. code-block:: python

variant("build_type", default="DebugRelease",
description="The build type to build",
values=("Debug", "Release", "DebugRelease"))
variant('build_type', default='DebugRelease',
description='The build type to build',
values=('Debug', 'Release', 'DebugRelease'))

For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html

@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

.. code-block:: python

generator = "Ninja"
generator = 'Ninja'

``CMakePackage`` defaults to "Unix Makefiles". If you switch to the

@@ -258,7 +258,7 @@ Ninja generator, make sure to add:

.. code-block:: python

depends_on("ninja", type="build")
depends_on('ninja', type='build')

to the package as well. Aside from that, you shouldn't need to do
anything else. Spack will automatically detect that you are using

@@ -288,7 +288,7 @@ like so:

.. code-block:: python

root_cmakelists_dir = "src"
root_cmakelists_dir = 'src'

Note that this path is relative to the root of the extracted tarball,

@@ -304,7 +304,7 @@ different sub-directory, simply override ``build_directory`` like so:

.. code-block:: python

build_directory = "my-build"
build_directory = 'my-build'

^^^^^^^^^^^^^^^^^^^^^^^^^
Build and install targets

@@ -324,8 +324,8 @@ library or build the documentation, you can add these like so:

.. code-block:: python

build_targets = ["all", "docs"]
install_targets = ["install", "docs"]
build_targets = ['all', 'docs']
install_targets = ['install', 'docs']

^^^^^^^
Testing
@@ -934,9 +934,9 @@ a *virtual* ``mkl`` package is declared in Spack.

.. code-block:: python

# Examples for absolute and conditional dependencies:
depends_on("mkl")
depends_on("mkl", when="+mkl")
depends_on("mkl", when="fftw=mkl")
depends_on('mkl')
depends_on('mkl', when='+mkl')
depends_on('mkl', when='fftw=mkl')

The ``MKLROOT`` environment variable (part of the documented API) will be set
during all stages of client package installation, and is available to both

@@ -972,8 +972,8 @@ a *virtual* ``mkl`` package is declared in Spack.

def configure_args(self):
args = []
...
args.append("--with-blas=%s" % self.spec["blas"].libs.ld_flags)
args.append("--with-lapack=%s" % self.spec["lapack"].libs.ld_flags)
args.append('--with-blas=%s' % self.spec['blas'].libs.ld_flags)
args.append('--with-lapack=%s' % self.spec['lapack'].libs.ld_flags)
...

.. tip::

@@ -989,13 +989,13 @@ a *virtual* ``mkl`` package is declared in Spack.

.. code-block:: python

self.spec["blas"].headers.include_flags
self.spec['blas'].headers.include_flags

and to generate linker options (``-L<dir> -llibname ...``), use the same as above,

.. code-block:: python

self.spec["blas"].libs.ld_flags
self.spec['blas'].libs.ld_flags

See
:ref:`MakefilePackage <makefilepackage>`
@@ -88,7 +88,7 @@ override the ``luarocks_args`` method like so:

.. code-block:: python

def luarocks_args(self):
return ["flag1", "flag2"]
return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:
@@ -48,8 +48,8 @@ class automatically adds the following dependencies:

.. code-block:: python

depends_on("java", type=("build", "run"))
depends_on("maven", type="build")
depends_on('java', type=('build', 'run'))
depends_on('maven', type='build')

In the ``pom.xml`` file, you may see sections like:

@@ -72,8 +72,8 @@ should add:

.. code-block:: python

depends_on("java@7:", type="build")
depends_on("maven@3.5.4:", type="build")
depends_on('java@7:', type='build')
depends_on('maven@3.5.4:', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -88,9 +88,9 @@ the build phase. For example:

def build_args(self):
return [
"-Pdist,native",
"-Dtar",
"-Dmaven.javadoc.skip=true"
'-Pdist,native',
'-Dtar',
'-Dmaven.javadoc.skip=true'
]
@@ -86,8 +86,8 @@ the ``MesonPackage`` base class already contains:

.. code-block:: python

depends_on("meson", type="build")
depends_on("ninja", type="build")
depends_on('meson', type='build')
depends_on('ninja', type='build')

If you need to specify a particular version requirement, you can

@@ -95,8 +95,8 @@ override this in your package:

.. code-block:: python

depends_on("meson@0.43.0:", type="build")
depends_on("ninja", type="build")
depends_on('meson@0.43.0:', type='build')
depends_on('ninja', type='build')

^^^^^^^^^^^^^^^^^^^

@@ -121,7 +121,7 @@ override the ``meson_args`` method like so:

.. code-block:: python

def meson_args(self):
return ["--warnlevel=3"]
return ['--warnlevel=3']

This method can be used to pass flags as well as variables.
@@ -118,7 +118,7 @@ so ``PerlPackage`` contains:

.. code-block:: python

extends("perl")
extends('perl')

If your package requires a specific version of Perl, you should

@@ -132,14 +132,14 @@ properly. If your package uses ``Makefile.PL`` to build, add:

.. code-block:: python

depends_on("perl-extutils-makemaker", type="build")
depends_on('perl-extutils-makemaker', type='build')

If your package uses ``Build.PL`` to build, add:

.. code-block:: python

depends_on("perl-module-build", type="build")
depends_on('perl-module-build', type='build')

^^^^^^^^^^^^^^^^^

@@ -165,11 +165,11 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding

.. code-block:: python

def configure_args(self):
expat = self.spec["expat"].prefix
expat = self.spec['expat'].prefix

return [
"EXPATLIBPATH={0}".format(expat.lib),
"EXPATINCPATH={0}".format(expat.include),
'EXPATLIBPATH={0}'.format(expat.lib),
'EXPATINCPATH={0}'.format(expat.include),
]
@@ -83,7 +83,7 @@ base class already contains:

.. code-block:: python

depends_on("qt", type="build")
depends_on('qt', type='build')

If you want to specify a particular version requirement, or need to

@@ -91,7 +91,7 @@ link to the ``qt`` libraries, you can override this in your package:

.. code-block:: python

depends_on("qt@5.6.0:")
depends_on('qt@5.6.0:')

^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to qmake

@@ -103,7 +103,7 @@ override the ``qmake_args`` method like so:

.. code-block:: python

def qmake_args(self):
return ["-recursive"]
return ['-recursive']

This method can be used to pass flags as well as variables.

@@ -118,7 +118,7 @@ sub-directory by adding the following to the package:

.. code-block:: python

build_directory = "src"
build_directory = 'src'

^^^^^^^^^^^^^^^^^^^^^^
@@ -163,28 +163,28 @@ attributes that can be used to set ``homepage``, ``url``, ``list_url``, and

.. code-block:: python

cran = "caret"
cran = 'caret'

is equivalent to:

.. code-block:: python

homepage = "https://cloud.r-project.org/package=caret"
url = "https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/caret"
homepage = 'https://cloud.r-project.org/package=caret'
url = 'https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz'
list_url = 'https://cloud.r-project.org/src/contrib/Archive/caret'

Likewise, the following ``bioc`` attribute:

.. code-block:: python

bioc = "BiocVersion"
bioc = 'BiocVersion'

is equivalent to:

.. code-block:: python

homepage = "https://bioconductor.org/packages/BiocVersion/"
git = "https://git.bioconductor.org/packages/BiocVersion"
homepage = 'https://bioconductor.org/packages/BiocVersion/'
git = 'https://git.bioconductor.org/packages/BiocVersion'

^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -200,7 +200,7 @@ base class contains:

.. code-block:: python

extends("r")
extends('r')

Take a close look at the homepage for ``caret``. If you look at the

@@ -209,7 +209,7 @@ You should add this to your package like so:

.. code-block:: python

depends_on("r@3.2.0:", type=("build", "run"))
depends_on('r@3.2.0:', type=('build', 'run'))

^^^^^^^^^^^^^^

@@ -227,7 +227,7 @@ and list all of their dependencies in the following sections:

* LinkingTo

As far as Spack is concerned, all 3 of these dependency types
correspond to ``type=("build", "run")``, so you don't have to worry
correspond to ``type=('build', 'run')``, so you don't have to worry
about the details. If you are curious what they mean,
https://github.com/spack/spack/issues/2951 has a pretty good summary:

@@ -330,7 +330,7 @@ the dependency:

.. code-block:: python

depends_on("r-lattice@0.20:", type=("build", "run"))
depends_on('r-lattice@0.20:', type=('build', 'run'))

^^^^^^^^^^^^^^^^^^

@@ -361,20 +361,20 @@ like so:

.. code-block:: python

def configure_args(self):
mpi_name = self.spec["mpi"].name
mpi_name = self.spec['mpi'].name

# The type of MPI. Supported values are:
# OPENMPI, LAM, MPICH, MPICH2, or CRAY
if mpi_name == "openmpi":
Rmpi_type = "OPENMPI"
elif mpi_name == "mpich":
Rmpi_type = "MPICH2"
if mpi_name == 'openmpi':
Rmpi_type = 'OPENMPI'
elif mpi_name == 'mpich':
Rmpi_type = 'MPICH2'
else:
raise InstallError("Unsupported MPI type")
raise InstallError('Unsupported MPI type')

return [
"--with-Rmpi-type={0}".format(Rmpi_type),
"--with-mpi={0}".format(spec["mpi"].prefix),
'--with-Rmpi-type={0}'.format(Rmpi_type),
'--with-mpi={0}'.format(spec['mpi'].prefix),
]
@@ -84,8 +84,8 @@ The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

summary = "An implementation of the AsciiDoc text processor and publishing toolchain"
description = "A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats."
summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'

Either of these can be used for the description of the Spack package.

@@ -98,7 +98,7 @@ The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

homepage = "https://asciidoctor.org"
homepage = 'https://asciidoctor.org'

This should be used as the official homepage of the Spack package.

@@ -112,21 +112,21 @@ the base class contains:

.. code-block:: python

extends("ruby")
extends('ruby')

The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

required_ruby_version = ">= 2.3.0"
required_ruby_version = '>= 2.3.0'

This can be added to the Spack package using:

.. code-block:: python

depends_on("ruby@2.3.0:", type=("build", "run"))
depends_on('ruby@2.3.0:', type=('build', 'run'))

^^^^^^^^^^^^^^^^^
@@ -124,7 +124,7 @@ are wrong, you can provide the names yourself by overriding

.. code-block:: python

import_modules = ["PyQt5"]
import_modules = ['PyQt5']

These tests often catch missing dependencies and non-RPATHed
@@ -63,8 +63,8 @@ run package-specific unit tests.

.. code-block:: python

def installtest(self):
with working_dir("test"):
pytest = which("py.test")
with working_dir('test'):
pytest = which('py.test')
pytest()

@@ -93,7 +93,7 @@ the following dependency automatically:

.. code-block:: python

depends_on("python@2.5:", type="build")
depends_on('python@2.5:', type='build')

Waf only supports Python 2.5 and up.

@@ -113,7 +113,7 @@ phase, you can use:

args = []

if self.run_tests:
args.append("--test")
args.append('--test')

return args
@@ -9,42 +9,46 @@

Custom Extensions
=================

*Spack extensions* allow you to extend Spack capabilities by deploying your
*Spack extensions* permit you to extend Spack capabilities by deploying your
own custom commands or logic in an arbitrary location on your filesystem.
This might be extremely useful e.g. to develop and maintain a command whose purpose is
too specific to be considered for reintegration into the mainline or to
evolve a command through its early stages before starting a discussion to merge
it upstream.

From Spack's point of view an extension is any path in your filesystem which
respects the following naming and layout for files:
respects a prescribed naming and layout for files:

.. code-block:: console

spack-scripting/ # The top level directory must match the format 'spack-{extension_name}'
├── pytest.ini # Optional file if the extension ships its own tests
├── scripting # Folder that may contain modules that are needed for the extension commands
│ ├── cmd # Folder containing extension commands
│ │ └── filter.py # A new command that will be available
│ └── functions.py # Module with internal details
└── tests # Tests for this extension
│ └── cmd # Folder containing extension commands
│ └── filter.py # A new command that will be available
├── tests # Tests for this extension
│ ├── conftest.py
│ └── test_filter.py
└── templates # Templates that may be needed by the extension

In the example above, the extension is named *scripting*. It adds an additional command
(``spack filter``) and unit tests to verify its behavior.
In the example above the extension named *scripting* adds an additional command (``filter``)
and unit tests to verify its behavior. The code for this example can be
obtained by cloning the corresponding git repository:

The extension can import any core Spack module in its implementation. When loaded by
the ``spack`` command, the extension itself is imported as a Python package in the
``spack.extensions`` namespace. In the example above, since the extension is named
"scripting", the corresponding Python module is ``spack.extensions.scripting``.
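A minimal sketch of what a command module inside the ``cmd`` folder might look
like (hypothetical content; it follows the usual convention of Spack command
modules, i.e. ``description``/``section``/``level`` attributes, a ``setup_parser``
function, and a function named after the command):

.. code-block:: python

   # scripting/cmd/filter.py (illustrative only; the real file lives in the repository cloned below)
   description = "filter a list of specs"
   section = "extensions"
   level = "long"


   def setup_parser(subparser):
       subparser.add_argument("specs", nargs="*", help="specs to filter")


   def filter(parser, args):
       # Spack invokes the function named after the command
       print(" ".join(args.specs))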
The code for this example extension can be obtained by cloning the corresponding git repository:

.. TODO: write an ad-hoc "hello world" extension and make it part of the spack organization

.. code-block:: console

$ git -C /tmp clone https://github.com/spack/spack-scripting.git
$ cd ~/
$ mkdir tmp && cd tmp
$ git clone https://github.com/alalazo/spack-scripting.git
Cloning into 'spack-scripting'...
remote: Counting objects: 11, done.
remote: Compressing objects: 100% (7/7), done.
remote: Total 11 (delta 0), reused 11 (delta 0), pack-reused 0
Receiving objects: 100% (11/11), done.

As you can see by inspecting the sources, Python modules that are part of the extension
can import any core Spack module.

---------------------------------
Configure Spack to Use Extensions

@@ -57,7 +61,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:

config:
extensions:
- /tmp/spack-scripting
- ~/tmp/spack-scripting

is part of your configuration file. Once this is setup any command that the extension provides
will be available from the command line:

@@ -82,32 +86,37 @@ will be available from the command line:

--implicit select specs that are not installed or were installed implicitly
--output OUTPUT where to dump the result

The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``:
The corresponding unit tests can be run giving the appropriate options
to ``spack unit-test``:

.. code-block:: console

$ spack unit-test --extension=scripting
========================================== test session starts ===========================================
platform linux -- Python 3.11.5, pytest-7.4.3, pluggy-1.3.0
rootdir: /home/culpo/github/spack-scripting
configfile: pytest.ini
testpaths: tests
plugins: xdist-3.5.0

============================================================== test session starts ===============================================================
platform linux2 -- Python 2.7.15rc1, pytest-3.2.5, py-1.4.34, pluggy-0.4.0
rootdir: /home/mculpo/tmp/spack-scripting, inifile: pytest.ini
collected 5 items

tests/test_filter.py ..... [100%]
tests/test_filter.py ...XX
============================================================ short test summary info =============================================================
XPASS tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
XPASS tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]

========================================== slowest 30 durations ==========================================
2.31s setup tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
0.57s call tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
0.56s call tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
0.54s call tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
0.54s call tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
0.48s call tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]

(5 durations < 0.005s hidden. Use -vv to show these durations.)
=========================================== 5 passed in 5.06s ============================================
=========================================================== slowest 20 test durations ============================================================
3.74s setup tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
0.17s call tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
0.16s call tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
0.15s call tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
0.13s call tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
0.08s call tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
0.04s teardown tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
0.00s setup tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
0.00s setup tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
0.00s setup tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
0.00s setup tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
====================================================== 3 passed, 2 xpassed in 4.51 seconds =======================================================
@@ -250,9 +250,10 @@ Compiler configuration

Spack has the ability to build packages with multiple compilers and
compiler versions. Compilers can be made available to Spack by
specifying them manually in ``compilers.yaml``, or automatically by
running ``spack compiler find``, but for convenience Spack will
automatically detect compilers the first time it needs them.
specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
or automatically by running ``spack compiler find``, but for
convenience Spack will automatically detect compilers the first time
it needs them.
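For reference, a hand-written entry in ``compilers.yaml`` generally has the
following shape (a minimal sketch; the compiler version, paths, and the
operating system/target values are illustrative, not taken from this diff):

.. code-block:: yaml

   compilers:
   - compiler:
       spec: gcc@12.2.0
       paths:
         cc: /usr/bin/gcc
         cxx: /usr/bin/g++
         f77: /usr/bin/gfortran
         fc: /usr/bin/gfortran
       operating_system: rhel8
       target: x86_64
       modules: []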
.. _cmd-spack-compilers:

@@ -457,6 +458,52 @@ specification. The operations available to modify the environment are ``set``, `

prepend_path: # Similar for append|remove_path
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh

.. note::

Spack is in the process of moving compilers from a separate
attribute to be handled like all other packages. As part of this
process, the ``compilers.yaml`` section will eventually be replaced
by configuration in the ``packages.yaml`` section. This new
configuration is now available, although it is not yet the default
behavior.

Compilers can also be configured as external packages in the
``packages.yaml`` config file. Any external package for a compiler
(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
assuming the paths to the compiler executables are determinable from
the prefix.

If the paths to the compiler executable are not determinable from the
prefix, you can add them to the ``extra_attributes`` field using the
``compilers`` key. The ``compilers`` key accepts compilers for ``c``,
``cxx``, ``fortran``, and ``f77``.

For all other fields from the ``compilers`` config, they can be added
to the ``extra_attributes`` field for an external representing a
compiler. These fields are used as-is in the internal representation
of the compiler config.

.. code-block:: yaml

packages:
gcc:
external:
- spec: gcc@12.2.0 arch=linux-rhel8-skylake
prefix: /usr
extra_attributes:
environment:
set:
GCC_ROOT: /usr
external:
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
prefix: /usr
extra_attributes:
compilers:
c: /usr/bin/clang-with-suffix
cxx: /usr/bin/clang++-with-extra-info
fortran: /usr/bin/gfortran
extra_rpaths:
- /usr/lib/llvm/

^^^^^^^^^^^^^^^^^^^^^^^
Build Your Own Compiler
@@ -111,28 +111,3 @@ CUDA is split into fewer components and is simpler to specify:

prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.

-----------------------------------
Using an External OpenGL API
-----------------------------------

Depending on whether we have a graphics card or not, we may choose to use OSMesa or GLX to implement the OpenGL API.

If a graphics card is unavailable, OSMesa is recommended and can typically be built with Spack.
However, if we prefer to utilize the system GLX tailored to our graphics card, we need to declare it as an external. Here's how to do it:

.. code-block:: yaml

packages:
libglx:
require: [opengl]
opengl:
buildable: false
externals:
- prefix: /usr/
spec: opengl@4.6

Note that the prefix has to be the root of both the libraries and the headers, i.e. /usr, not the path to the lib directory.
To find out which opengl spec is available, use ``cd /usr/include/GL && grep -Ri gl_version``.
@@ -1,13 +1,13 @@

sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
docutils==0.18.1
pygments==2.17.1
urllib3==2.1.0
pytest==7.4.3
isort==5.12.0
black==23.11.0
flake8==6.1.0
mypy==1.7.1
mypy==1.7.0
@@ -1047,9 +1047,9 @@ def __bool__(self):

"""Whether any exceptions were handled."""
return bool(self.exceptions)

def forward(self, context: str, base: type = BaseException) -> "GroupedExceptionForwarder":
def forward(self, context: str) -> "GroupedExceptionForwarder":
"""Return a contextmanager which extracts tracebacks and prefixes a message."""
return GroupedExceptionForwarder(context, self, base)
return GroupedExceptionForwarder(context, self)

def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
self.exceptions.append((context, exc, tb))

@@ -1072,18 +1072,15 @@ class GroupedExceptionForwarder:

"""A contextmanager to capture exceptions and forward them to a
GroupedExceptionHandler."""

def __init__(self, context: str, handler: GroupedExceptionHandler, base: type):
def __init__(self, context: str, handler: GroupedExceptionHandler):
self._context = context
self._handler = handler
self._base = base

def __enter__(self):
return None

def __exit__(self, exc_type, exc_value, tb):
if exc_value is not None:
if not issubclass(exc_type, self._base):
return False
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))

# Suppress any exception from being re-raised:
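A short usage sketch of the grouped-exception API touched above (the
``step_a``/``step_b`` callables are hypothetical; the overall shape mirrors the
bootstrap call sites later in this diff, which now pass ``Exception`` as the
``base`` argument to ``forward``):

.. code-block:: python

   handler = GroupedExceptionHandler()

   for name, step in [("source-a", step_a), ("source-b", step_b)]:
       # Only exceptions derived from `base` are captured and grouped; anything
       # else propagates, because __exit__ returns False for it.
       with handler.forward(name, Exception):
           step()

   if handler:  # truthy when at least one exception was captured
       for context, exc, tb in handler.exceptions:
           print(f"{context}: {exc}")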
@@ -726,37 +726,13 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
|
||||
|
||||
@package_directives
|
||||
def _issues_in_depends_on_directive(pkgs, error_cls):
|
||||
"""Reports issues with 'depends_on' directives.
|
||||
|
||||
Issues might be unknown dependencies, unknown variants or variant values, or declaration
|
||||
of nested dependencies.
|
||||
"""
|
||||
def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
"""Report unknown dependencies and wrong variants for dependencies"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
for dependency_name, dependency_data in pkg_cls.dependencies.items():
|
||||
# Check if there are nested dependencies declared. We don't want directives like:
|
||||
#
|
||||
# depends_on('foo+bar ^fee+baz')
|
||||
#
|
||||
# but we'd like to have two dependencies listed instead.
|
||||
for when, dependency_edge in dependency_data.items():
|
||||
dependency_spec = dependency_edge.spec
|
||||
nested_dependencies = dependency_spec.dependencies()
|
||||
if nested_dependencies:
|
||||
summary = (
|
||||
f"{pkg_name}: invalid nested dependency "
|
||||
f"declaration '{str(dependency_spec)}'"
|
||||
)
|
||||
details = [
|
||||
f"split depends_on('{str(dependency_spec)}', when='{str(when)}') "
|
||||
f"into {len(nested_dependencies) + 1} directives",
|
||||
f"in {filename}",
|
||||
]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
|
||||
# No need to analyze virtual packages
|
||||
if spack.repo.PATH.is_virtual(dependency_name):
|
||||
continue
|
||||
|
@@ -230,11 +230,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
)
|
||||
return
|
||||
|
||||
spec_list = [
|
||||
s
|
||||
for s in db.query_local(installed=any, in_buildcache=any)
|
||||
if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
|
||||
]
|
||||
spec_list = db.query_local(installed=False, in_buildcache=True)
|
||||
|
||||
for indexed_spec in spec_list:
|
||||
dag_hash = indexed_spec.dag_hash()
|
||||
|
@@ -16,7 +16,6 @@
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.platforms
|
||||
import spack.store
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
@@ -207,19 +206,17 @@ def _root_spec(spec_str: str) -> str:
|
||||
"""Add a proper compiler and target to a spec used during bootstrapping.
|
||||
|
||||
Args:
|
||||
spec_str: spec to be bootstrapped. Must be without compiler and target.
|
||||
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
|
||||
"""
|
||||
# Add a compiler requirement to the root spec.
|
||||
platform = str(spack.platforms.host())
|
||||
if platform == "darwin":
|
||||
# Add a proper compiler hint to the root spec. We use GCC for
|
||||
# everything but MacOS and Windows.
|
||||
if str(spack.platforms.host()) == "darwin":
|
||||
spec_str += " %apple-clang"
|
||||
elif platform == "windows":
|
||||
elif str(spack.platforms.host()) == "windows":
|
||||
# TODO (johnwparent): Remove version constraint when clingo patch is up
|
||||
spec_str += " %msvc@:19.37"
|
||||
elif platform == "linux":
|
||||
else:
|
||||
spec_str += " %gcc"
|
||||
elif platform == "freebsd":
|
||||
spec_str += " %clang"
|
||||
|
||||
target = archspec.cpu.host().family
|
||||
spec_str += f" target={target}"
|
||||
|
@@ -147,7 +147,7 @@ def _add_compilers_if_missing() -> None:
|
||||
mixed_toolchain=sys.platform == "darwin"
|
||||
)
|
||||
if new_compilers:
|
||||
spack.compilers.add_compilers_to_config(new_compilers, init_config=False)
|
||||
spack.compilers.add_compilers_to_config(new_compilers)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
|
@@ -386,7 +386,7 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
with exception_handler.forward(current_config["name"]):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_import(module, abstract_spec):
|
||||
@@ -441,7 +441,7 @@ def ensure_executables_in_path_or_raise(
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
with exception_handler.forward(current_config["name"]):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_search_path(executables, abstract_spec):
|
||||
|
@@ -19,6 +19,7 @@
|
||||
import spack.tengine
|
||||
import spack.util.cpus
|
||||
import spack.util.executable
|
||||
from spack.environment import depfile
|
||||
|
||||
from ._common import _root_spec
|
||||
from .config import root_path, spec_for_current_python, store_path
|
||||
@@ -85,9 +86,12 @@ def __init__(self) -> None:
|
||||
super().__init__(self.environment_root())
|
||||
|
||||
def update_installations(self) -> None:
|
||||
"""Update the installations of this environment."""
|
||||
log_enabled = tty.is_debug() or tty.is_verbose()
|
||||
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
|
||||
"""Update the installations of this environment.
|
||||
|
||||
The update is done using a depfile on Linux and macOS, and using the ``install_all``
|
||||
method of environments on Windows.
|
||||
"""
|
||||
with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
|
||||
specs = self.concretize()
|
||||
if specs:
|
||||
colorized_specs = [
|
||||
@@ -96,9 +100,11 @@ def update_installations(self) -> None:
|
||||
]
|
||||
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
|
||||
self.write(regenerate=False)
|
||||
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
|
||||
if sys.platform == "win32":
|
||||
self.install_all()
|
||||
self.write(regenerate=True)
|
||||
else:
|
||||
self._install_with_depfile()
|
||||
self.write(regenerate=True)
|
||||
|
||||
def update_syspath_and_environ(self) -> None:
|
||||
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
|
||||
@@ -116,6 +122,25 @@ def update_syspath_and_environ(self) -> None:
|
||||
+ [str(x) for x in self.pythonpaths()]
|
||||
)
|
||||
|
||||
def _install_with_depfile(self) -> None:
|
||||
model = depfile.MakefileModel.from_env(self)
|
||||
template = spack.tengine.make_environment().get_template(
|
||||
os.path.join("depfile", "Makefile")
|
||||
)
|
||||
makefile = self.environment_root() / "Makefile"
|
||||
makefile.write_text(template.render(model.to_dict()))
|
||||
make = spack.util.executable.which("make")
|
||||
kwargs = {}
|
||||
if not tty.is_debug():
|
||||
kwargs = {"output": os.devnull, "error": os.devnull}
|
||||
make(
|
||||
"-C",
|
||||
str(self.environment_root()),
|
||||
"-j",
|
||||
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
def _write_spack_yaml_file(self) -> None:
|
||||
tty.msg(
|
||||
"[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
|
||||
|
@@ -66,6 +66,7 @@ def _core_requirements() -> List[RequiredResponseType]:
|
||||
_core_system_exes = {
|
||||
"make": _missing("make", "required to build software from sources"),
|
||||
"patch": _missing("patch", "required to patch source code before building"),
|
||||
"bash": _missing("bash", "required for Spack compiler wrapper"),
|
||||
"tar": _missing("tar", "required to manage code archives"),
|
||||
"gzip": _missing("gzip", "required to compress/decompress code archives"),
|
||||
"unzip": _missing("unzip", "required to compress/decompress code archives"),
|
||||
|
@@ -9,7 +9,6 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
@@ -286,19 +285,6 @@ def initconfig_hardware_entries(self):
|
||||
def std_initconfig_entries(self):
|
||||
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
|
||||
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
|
||||
cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
|
||||
cmake_rpaths_path = ";".join(cmake_rpaths_env)
|
||||
complete_rpath_list = cmake_rpaths_path
|
||||
if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
|
||||
spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
|
||||
spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
|
||||
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
|
||||
|
||||
if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
|
||||
spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
|
||||
spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
|
||||
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
|
||||
|
||||
return [
|
||||
"#------------------{0}".format("-" * 60),
|
||||
"# !!!! This is a generated file, edit at own risk !!!!",
|
||||
@@ -306,9 +292,6 @@ def std_initconfig_entries(self):
|
||||
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
|
||||
cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
|
||||
cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
|
||||
cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
|
||||
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
|
||||
]
|
||||
|
||||
|
@@ -6,14 +6,13 @@
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
from typing import Iterable, List, Mapping, Optional
|
||||
from typing import Optional
|
||||
|
||||
import archspec
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import HeaderList, LibraryList
|
||||
|
||||
import spack.builder
|
||||
import spack.config
|
||||
@@ -26,18 +25,14 @@
|
||||
from spack.directives import build_system, depends_on, extends, maintainers
|
||||
from spack.error import NoHeadersError, NoLibrariesError
|
||||
from spack.install_test import test_part
|
||||
from spack.spec import Spec
|
||||
from spack.util.prefix import Prefix
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
|
||||
|
||||
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
|
||||
def _flatten_dict(dictionary):
|
||||
"""Iterable that yields KEY=VALUE paths through a dictionary.
|
||||
|
||||
Args:
|
||||
dictionary: Possibly nested dictionary of arbitrary keys and values.
|
||||
|
||||
Yields:
|
||||
A single path through the dictionary.
|
||||
"""
|
||||
@@ -55,7 +50,7 @@ class PythonExtension(spack.package_base.PackageBase):
|
||||
maintainers("adamjstewart")
|
||||
|
||||
@property
|
||||
def import_modules(self) -> Iterable[str]:
|
||||
def import_modules(self):
|
||||
"""Names of modules that the Python package provides.
|
||||
|
||||
These are used to test whether or not the installation succeeded.
|
||||
@@ -70,7 +65,7 @@ def import_modules(self) -> Iterable[str]:
|
||||
detected, this property can be overridden by the package.
|
||||
|
||||
Returns:
|
||||
List of strings of module names.
|
||||
list: list of strings of module names
|
||||
"""
|
||||
modules = []
|
||||
pkg = self.spec["python"].package
|
||||
@@ -107,14 +102,14 @@ def import_modules(self) -> Iterable[str]:
|
||||
return modules
|
||||
|
||||
@property
|
||||
def skip_modules(self) -> Iterable[str]:
|
||||
def skip_modules(self):
|
||||
"""Names of modules that should be skipped when running tests.
|
||||
|
||||
These are a subset of import_modules. If a module has submodules,
|
||||
they are skipped as well (meaning a.b is skipped if a is contained).
|
||||
|
||||
Returns:
|
||||
List of strings of module names.
|
||||
list: list of strings of module names
|
||||
"""
|
||||
return []
|
||||
|
||||
@@ -190,12 +185,12 @@ def remove_files_from_view(self, view, merge_map):
|
||||
|
||||
view.remove_files(to_remove)
|
||||
|
||||
def test_imports(self) -> None:
|
||||
def test_imports(self):
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = inspect.getmodule(self).python # type: ignore[union-attr]
|
||||
python = inspect.getmodule(self).python
|
||||
for module in self.import_modules:
|
||||
with test_part(
|
||||
self,
|
||||
@@ -320,27 +315,24 @@ class PythonPackage(PythonExtension):
|
||||
py_namespace: Optional[str] = None
|
||||
|
||||
@lang.classproperty
|
||||
def homepage(cls) -> Optional[str]: # type: ignore[override]
|
||||
def homepage(cls):
|
||||
if cls.pypi:
|
||||
name = cls.pypi.split("/")[0]
|
||||
return f"https://pypi.org/project/{name}/"
|
||||
return None
|
||||
return "https://pypi.org/project/" + name + "/"
|
||||
|
||||
@lang.classproperty
|
||||
def url(cls) -> Optional[str]:
|
||||
def url(cls):
|
||||
if cls.pypi:
|
||||
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
|
||||
return None
|
||||
return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
|
||||
|
||||
@lang.classproperty
|
||||
def list_url(cls) -> Optional[str]: # type: ignore[override]
|
||||
def list_url(cls):
|
||||
if cls.pypi:
|
||||
name = cls.pypi.split("/")[0]
|
||||
return f"https://pypi.org/simple/{name}/"
|
||||
return None
|
||||
return "https://pypi.org/simple/" + name + "/"
|
||||
|
||||
@property
|
||||
def headers(self) -> HeaderList:
|
||||
def headers(self):
|
||||
"""Discover header files in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
@@ -358,7 +350,7 @@ def headers(self) -> HeaderList:
|
||||
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
|
||||
|
||||
@property
|
||||
def libs(self) -> LibraryList:
|
||||
def libs(self):
|
||||
"""Discover libraries in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
@@ -392,7 +384,7 @@ class PythonPipBuilder(BaseBuilder):
|
||||
install_time_test_callbacks = ["test"]
|
||||
|
||||
@staticmethod
|
||||
def std_args(cls) -> List[str]:
|
||||
def std_args(cls):
|
||||
return [
|
||||
# Verbose
|
||||
"-vvv",
|
||||
@@ -417,7 +409,7 @@ def std_args(cls) -> List[str]:
|
||||
]
|
||||
|
||||
@property
|
||||
def build_directory(self) -> str:
|
||||
def build_directory(self):
|
||||
"""The root directory of the Python package.
|
||||
|
||||
This is usually the directory containing one of the following files:
|
||||
@@ -428,51 +420,51 @@ def build_directory(self) -> str:
|
||||
"""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
def config_settings(self, spec: Spec, prefix: Prefix) -> Mapping[str, object]:
|
||||
def config_settings(self, spec, prefix):
|
||||
"""Configuration settings to be passed to the PEP 517 build backend.
|
||||
|
||||
Requires pip 22.1 or newer for keys that appear only a single time,
|
||||
or pip 23.1 or newer if the same key appears multiple times.
|
||||
|
||||
Args:
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
spec (spack.spec.Spec): build spec
|
||||
prefix (spack.util.prefix.Prefix): installation prefix
|
||||
|
||||
Returns:
|
||||
Possibly nested dictionary of KEY, VALUE settings.
|
||||
dict: Possibly nested dictionary of KEY, VALUE settings
|
||||
"""
|
||||
return {}
|
||||
|
||||
def install_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
def install_options(self, spec, prefix):
|
||||
"""Extra arguments to be supplied to the setup.py install command.
|
||||
|
||||
Requires pip 23.0 or older.
|
||||
|
||||
Args:
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
spec (spack.spec.Spec): build spec
|
||||
prefix (spack.util.prefix.Prefix): installation prefix
|
||||
|
||||
Returns:
|
||||
List of options.
|
||||
list: list of options
|
||||
"""
|
||||
return []
|
||||
|
||||
def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
def global_options(self, spec, prefix):
|
||||
"""Extra global options to be supplied to the setup.py call before the install
|
||||
or bdist_wheel command.
|
||||
|
||||
Deprecated in pip 23.1.
|
||||
|
||||
Args:
|
||||
spec: Build spec.
|
||||
prefix: Installation prefix.
|
||||
spec (spack.spec.Spec): build spec
|
||||
prefix (spack.util.prefix.Prefix): installation prefix
|
||||
|
||||
Returns:
|
||||
List of options.
|
||||
list: list of options
|
||||
"""
|
||||
return []
|
||||
|
||||
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install everything from build directory."""
|
||||
|
||||
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
|
||||
|
@@ -6,7 +6,7 @@
import llnl.util.tty as tty

import spack.cmd
from spack.cmd.common import arguments
import spack.cmd.common.arguments as arguments

description = "add a spec to an environment"
section = "environments"

@@ -15,13 +15,13 @@
import spack.bootstrap
import spack.bootstrap.config
import spack.bootstrap.core
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path
from spack.cmd.common import arguments

description = "manage bootstrap configuration"
section = "system"

@@ -68,8 +68,12 @@
def _add_scope_option(parser):
scopes = spack.config.scopes()
parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)

@@ -102,7 +106,7 @@ def setup_parser(subparser):
disable.add_argument("name", help="name of the source to be disabled", nargs="?", default=None)

reset = sp.add_parser("reset", help="reset bootstrapping configuration to Spack defaults")
arguments.add_common_arguments(reset, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(reset, ["yes_to_all"])

root = sp.add_parser("root", help="get/set the root bootstrap directory")
_add_scope_option(root)

@@ -21,6 +21,7 @@
import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error

@@ -39,7 +40,6 @@
import spack.util.web as web_util
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import (
Digest,
ImageReference,

@@ -182,22 +182,23 @@ def setup_parser(subparser: argparse.ArgumentParser):
)

# used to construct scope arguments below
scopes = spack.config.scopes()

check.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)
# Unfortunately there are 3 ways to do the same thing here:
check_specs = check.add_mutually_exclusive_group()
check_specs.add_argument(
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
check_spec_or_specfile.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)
check_specs.add_argument(
check_spec_or_specfile.add_argument(
"--spec-file",
help="check single spec from json or yaml file instead of release specs file",
)
arguments.add_common_arguments(check, ["specs"])

check.set_defaults(func=check_fn)

@@ -815,24 +816,15 @@ def check_fn(args: argparse.Namespace):
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""
if args.spec_file:
specs_arg = (
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
)
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
f"Use `spack buildcache check {specs_arg}` instead."
"Use --spec instead."
)
elif args.spec:
specs_arg = args.spec
tty.warn(
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
f"Use `spack buildcache check {specs_arg}` instead."
)
else:
specs_arg = args.specs

if specs_arg:
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
specs = spack.cmd.parse_specs(args.spec or args.spec_file)

if specs:
specs = _matching_specs(specs)
else:
specs = spack.cmd.require_active_env("buildcache check").all_specs()

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.cmd
from spack.cmd.common import arguments
import spack.cmd.common.arguments as arguments

description = "change an existing spec in an environment"
section = "environments"

@@ -16,7 +16,6 @@
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
import spack.environment.depfile
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util

@@ -607,9 +606,7 @@ def ci_rebuild(args):
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
"{name}-{version}-{hash}"
)
ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
),
],
spack_cmd + ["install"] + root_install_args,

@@ -12,13 +12,13 @@
import spack.bootstrap
import spack.caches
import spack.cmd.common.arguments as arguments
import spack.cmd.test
import spack.config
import spack.repo
import spack.stage
import spack.store
import spack.util.path
from spack.cmd.common import arguments
from spack.paths import lib_path, var_path

description = "remove temporary build files and/or downloaded archives"

@@ -124,33 +124,6 @@ def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, deptype)

class ConfigScope(argparse.Action):
"""Pick the currently configured config scopes."""

def __init__(self, *args, **kwargs) -> None:
kwargs.setdefault("metavar", spack.config.SCOPES_METAVAR)
super().__init__(*args, **kwargs)

@property
def default(self):
return self._default() if callable(self._default) else self._default

@default.setter
def default(self, value):
self._default = value

@property
def choices(self):
return spack.config.scopes().keys()

@choices.setter
def choices(self, value):
pass

def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
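The ConfigScope action above centralizes the per-command choices=/metavar= boilerplate: choices come from spack.config.scopes() at parse time, and a callable default (for example lambda: spack.config.default_modify_scope()) is only evaluated when the value is read. A self-contained sketch of the same callable-default idea using plain argparse; the class and option names here are illustrative, not Spack API:

import argparse


class LazyDefaultAction(argparse.Action):
    """Simplified re-creation of the callable-default trick used by ConfigScope."""

    @property
    def default(self):
        # Resolve callable defaults lazily, when argparse actually reads them.
        return self._default() if callable(self._default) else self._default

    @default.setter
    def default(self, value):
        self._default = value

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)


parser = argparse.ArgumentParser()
parser.add_argument("--scope", action=LazyDefaultAction, default=lambda: "user")
print(parser.parse_args([]).scope)                   # "user" (lambda resolved lazily)
print(parser.parse_args(["--scope", "site"]).scope)  # "site"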
def _cdash_reporter(namespace):
"""Helper function to create a CDash reporter. This function gets an early reference to the
argparse namespace under construction, so it can later use it to create the object.

@@ -8,13 +8,13 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
import spack.store
from spack import build_environment, traverse
from spack.cmd.common import arguments
from spack.context import Context
from spack.util.environment import dump_environment, pickle_environment

@@ -14,7 +14,6 @@
import spack.compilers
import spack.config
import spack.spec
from spack.cmd.common import arguments

description = "manage compilers"
section = "system"

@@ -24,6 +23,8 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

scopes = spack.config.scopes()

# Find
find_parser = sp.add_parser(
"find",

@@ -46,8 +47,9 @@ def setup_parser(subparser):
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope("compilers"),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)

@@ -58,15 +60,20 @@ def setup_parser(subparser):
)
remove_parser.add_argument("compiler_spec")
remove_parser.add_argument(
"--scope", action=arguments.ConfigScope, default=None, help="configuration scope to modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=None,
help="configuration scope to modify",
)

# List
list_parser = sp.add_parser("list", help="list available compilers")
list_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_list_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)

@@ -75,8 +82,9 @@ def setup_parser(subparser):
info_parser.add_argument("compiler_spec")
info_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_list_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)

@@ -95,7 +103,7 @@ def compiler_find(args):
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
n = len(new_compilers)
s = "s" if n > 1 else ""

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.cmd.common import arguments
import spack.config
from spack.cmd.compiler import compiler_list

description = "list available compilers"

@@ -12,8 +12,13 @@
def setup_parser(subparser):
scopes = spack.config.scopes()

subparser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)

@@ -10,6 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.cmd.common.arguments
import spack.config
import spack.environment as ev
import spack.repo

@@ -17,7 +18,6 @@
import spack.schema.packages
import spack.store
import spack.util.spack_yaml as syaml
from spack.cmd.common import arguments
from spack.util.editor import editor

description = "get and set configuration options"

@@ -26,9 +26,14 @@
def setup_parser(subparser):
scopes = spack.config.scopes()

# User can only choose one
subparser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)

sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="config_command")

@@ -96,13 +101,13 @@ def setup_parser(subparser):
setup_parser.add_parser = add_parser

update = sp.add_parser("update", help="update configuration files to the latest format")
arguments.add_common_arguments(update, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(update, ["yes_to_all"])
update.add_argument("section", help="section to update")

revert = sp.add_parser(
"revert", help="revert configuration files to their state before update"
)
arguments.add_common_arguments(revert, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(revert, ["yes_to_all"])
revert.add_argument("section", help="section to update")

@@ -10,10 +10,10 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.common.confirmation as confirmation
import spack.environment as ev
import spack.spec
from spack.cmd.common import arguments

description = "remove specs from the concretized lockfile of an environment"
section = "environments"
@@ -9,11 +9,11 @@
from llnl.util.tty.colify import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.store
from spack.cmd.common import arguments

description = "show dependencies of a package"
section = "basic"

@@ -9,10 +9,10 @@
from llnl.util.tty.colify import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments

description = "show packages that depend on another"
section = "basic"

@@ -20,9 +20,9 @@
from llnl.util.symlink import symlink

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError

@@ -9,9 +9,9 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.repo
from spack.cmd.common import arguments

description = "developer build: build from code in current working directory"
section = "build"

@@ -8,10 +8,10 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.spec
import spack.util.path
import spack.version
from spack.cmd.common import arguments
from spack.error import SpackError

description = "add a spec to an environment's dev-build information"

@@ -10,11 +10,11 @@
from llnl.util.tty.color import cprint, get_color_when

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.solver.asp as asp
import spack.util.environment
import spack.util.spack_json as sjson
from spack.cmd.common import arguments

description = "compare two specs"
section = "basic"

@@ -20,6 +20,7 @@
import spack.cmd
import spack.cmd.common
import spack.cmd.common.arguments
import spack.cmd.common.arguments as arguments
import spack.cmd.install
import spack.cmd.modules
import spack.cmd.uninstall

@@ -30,7 +31,6 @@
import spack.schema.env
import spack.spec
import spack.tengine
from spack.cmd.common import arguments
from spack.util.environment import EnvironmentModifications

description = "manage virtual environments"

@@ -10,10 +10,10 @@
from llnl.util.tty.colify import colify

import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments

description = "list extensions for package"
section = "extensions"

@@ -14,12 +14,12 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.util.environment
from spack.cmd.common import arguments

description = "manage external packages in Spack configuration"
section = "config"

@@ -29,6 +29,8 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

scopes = spack.config.scopes()

find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
find_parser.add_argument(
"--not-buildable",

@@ -46,14 +48,15 @@ def setup_parser(subparser):
)
find_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope("packages"),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope("packages"),
help="configuration scope to modify",
)
find_parser.add_argument(
"--all", action="store_true", help="search for all packages that Spack knows about"
)
arguments.add_common_arguments(find_parser, ["tags", "jobs"])
spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
find_parser.add_argument("packages", nargs=argparse.REMAINDER)
find_parser.epilog = (
'The search is by default on packages tagged with the "build-tools" or '

@@ -6,11 +6,11 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.repo
import spack.traverse
from spack.cmd.common import arguments

description = "fetch archives for packages"
section = "build"

@@ -12,9 +12,9 @@
import spack.bootstrap
import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
from spack.cmd.common import arguments
from spack.database import InstallStatuses

description = "list and search installed packages"

@@ -7,11 +7,11 @@
import os

import spack.binary_distribution
import spack.cmd.common.arguments as arguments
import spack.mirror
import spack.paths
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments

description = "handle GPG actions for spack"
section = "packaging"

@@ -5,10 +5,10 @@
from llnl.util import tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.store
from spack.cmd.common import arguments
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot

description = "generate graphs of package dependency relationships"

@@ -11,13 +11,13 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
import spack.repo
import spack.spec
import spack.version
from spack.cmd.common import arguments
from spack.package_base import preferred_version

description = "get detailed information on a particular package"

@@ -327,7 +327,7 @@ def _variants_by_name_when(pkg):
"""Adaptor to get variants keyed by { name: { when: { [Variant...] } }."""
# TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged.
variants = {}
for name, (variant, whens) in sorted(pkg.variants.items()):
for name, (variant, whens) in pkg.variants.items():
for when in whens:
variants.setdefault(name, {}).setdefault(when, []).append(variant)
return variants
@@ -14,6 +14,7 @@
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.fetch_strategy

@@ -22,7 +23,6 @@
import spack.report
import spack.spec
import spack.store
from spack.cmd.common import arguments
from spack.error import SpackError
from spack.installer import PackageInstaller

@@ -15,9 +15,9 @@
import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.repo
from spack.cmd.common import arguments
from spack.version import VersionList

description = "list and search available packages"

@@ -8,12 +8,12 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
import spack.environment as ev
import spack.store
import spack.user_environment as uenv
import spack.util.environment
from spack.cmd.common import arguments

description = "add package to the user environment"
section = "user environment"

@@ -9,11 +9,11 @@
import spack.builder
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.paths
import spack.repo
import spack.stage
from spack.cmd.common import arguments

description = "print out locations of packages and spack directories"
section = "basic"

@@ -8,11 +8,11 @@
from llnl.util import tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.package_base
import spack.repo
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

description = "mark packages as explicitly or implicitly installed"

@@ -11,6 +11,7 @@
import spack.caches
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.concretize
import spack.config
import spack.environment as ev

@@ -19,7 +20,6 @@
import spack.spec
import spack.util.path
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.error import SpackError

description = "manage mirrors (source and binary)"

@@ -88,14 +88,18 @@ def setup_parser(subparser):
"--mirror-url", metavar="mirror_url", type=str, help="find mirror to destroy by url"
)

# used to construct scope arguments below
scopes = spack.config.scopes()

# Add
add_parser = sp.add_parser("add", help=mirror_add.__doc__)
add_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
add_parser.add_argument("url", help="url of mirror directory from 'spack mirror create'")
add_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
add_parser.add_argument(

@@ -113,8 +117,9 @@ def setup_parser(subparser):
remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
remove_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -131,8 +136,9 @@ def setup_parser(subparser):
)
set_url_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
arguments.add_connection_args(set_url_parser, False)

@@ -159,8 +165,9 @@ def setup_parser(subparser):
set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
set_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
arguments.add_connection_args(set_parser, False)

@@ -169,8 +176,9 @@ def setup_parser(subparser):
list_parser = sp.add_parser("list", help=mirror_list.__doc__)
list_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_list_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)
@@ -14,11 +14,11 @@
from llnl.util.tty import color

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.modules
import spack.modules.common
import spack.repo
from spack.cmd.common import arguments

description = "manipulate module files"
section = "environment"

@@ -6,12 +6,12 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.traverse
from spack.cmd.common import arguments

description = "patch expanded archive sources in preparation for install"
section = "build"

@@ -12,11 +12,11 @@
from llnl.util.tty.colify import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.paths
import spack.repo
import spack.util.executable as exe
import spack.util.package_hash as ph
from spack.cmd.common import arguments

description = "query packages associated with particular git revisions"
section = "developer"

@@ -6,7 +6,7 @@
import llnl.util.tty as tty

import spack.cmd
from spack.cmd.common import arguments
import spack.cmd.common.arguments as arguments

description = "remove specs from an environment"
section = "environments"

@@ -11,7 +11,6 @@
import spack.config
import spack.repo
import spack.util.path
from spack.cmd.common import arguments

description = "manage package source repositories"
section = "config"

@@ -20,6 +19,7 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
scopes = spack.config.scopes()

# Create
create_parser = sp.add_parser("create", help=repo_create.__doc__)

@@ -43,8 +43,9 @@ def setup_parser(subparser):
list_parser = sp.add_parser("list", help=repo_list.__doc__)
list_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_list_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)

@@ -53,8 +54,9 @@ def setup_parser(subparser):
add_parser.add_argument("path", help="path to a Spack package repository directory")
add_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -65,8 +67,9 @@ def setup_parser(subparser):
)
remove_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -6,8 +6,8 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.repo
from spack.cmd.common import arguments

description = "revert checked out package source code"
section = "build"

@@ -12,12 +12,12 @@
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment
import spack.hash_types as ht
import spack.package_base
import spack.solver.asp as asp
from spack.cmd.common import arguments

description = "concretize a specs using an ASP solver"
section = "developer"

@@ -10,11 +10,11 @@
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.hash_types as ht
import spack.spec
import spack.store
from spack.cmd.common import arguments

description = "show what would be installed, given a spec"
section = "build"

@@ -8,13 +8,13 @@
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.stage
import spack.traverse
from spack.cmd.common import arguments

description = "expand downloaded archive in preparation for install"
section = "build"

@@ -15,12 +15,12 @@
from llnl.util.tty import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.install_test
import spack.package_base
import spack.repo
import spack.report
from spack.cmd.common import arguments

description = "run spack's tests for an install"
section = "admin"

@@ -10,11 +10,11 @@
from llnl.util.filesystem import working_dir

import spack
import spack.cmd.common.arguments as arguments
import spack.config
import spack.paths
import spack.util.git
import spack.util.gpg
from spack.cmd.common import arguments
from spack.util.spack_yaml import syaml_dict

description = "set up spack for our tutorial (WARNING: modifies config!)"

@@ -6,7 +6,7 @@
import llnl.util.tty as tty

import spack.cmd
from spack.cmd.common import arguments
import spack.cmd.common.arguments as arguments

description = "remove specs from an environment"
section = "environments"

@@ -10,13 +10,13 @@
from llnl.util.tty.colify import colify

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.common.confirmation as confirmation
import spack.environment as ev
import spack.package_base
import spack.spec
import spack.store
import spack.traverse as traverse
from spack.cmd.common import arguments
from spack.database import InstallStatuses

description = "remove installed packages"

@@ -227,7 +227,9 @@ def unit_test(parser, args, unknown_args):
# has been used, then test that extension.
pytest_root = spack.paths.spack_root
if args.extension:
pytest_root = spack.extensions.load_extension(args.extension)
target = args.extension
extensions = spack.extensions.get_extension_paths()
pytest_root = spack.extensions.path_for_extension(target, *extensions)

# pytest.ini lives in the root of the spack repository.
with llnl.util.filesystem.working_dir(pytest_root):

@@ -7,10 +7,10 @@
import sys

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.user_environment as uenv
import spack.util.environment
from spack.cmd.common import arguments

description = "remove package from the user environment"
section = "user environment"

@@ -8,9 +8,9 @@
import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
from spack.cmd.common import arguments
from spack.version import infinity_versions, ver

description = "list available versions of a package"
@@ -334,6 +334,40 @@ def __init__(
# used for version checks for API, e.g. C++11 flag
self._real_version = None

def __eq__(self, other):
return (
self.cc == other.cc
and self.cxx == other.cxx
and self.fc == other.fc
and self.f77 == other.f77
and self.spec == other.spec
and self.operating_system == other.operating_system
and self.target == other.target
and self.flags == other.flags
and self.modules == other.modules
and self.environment == other.environment
and self.extra_rpaths == other.extra_rpaths
and self.enable_implicit_rpaths == other.enable_implicit_rpaths
)

def __hash__(self):
return hash(
(
self.cc,
self.cxx,
self.fc,
self.f77,
self.spec,
self.operating_system,
self.target,
str(self.flags),
str(self.modules),
str(self.environment),
str(self.extra_rpaths),
self.enable_implicit_rpaths,
)
)

def verify_executables(self):
"""Raise an error if any of the compiler executables is not valid.

@@ -109,7 +109,7 @@ def _to_dict(compiler):
return {"compiler": d}

def get_compiler_config(scope=None, init_config=True):
def get_compiler_config(scope=None, init_config=False):
"""Return the compiler configuration for the specified architecture."""

config = spack.config.get("compilers", scope=scope) or []

@@ -118,6 +118,8 @@ def get_compiler_config(scope=None, init_config=True):
merged_config = spack.config.get("compilers")
if merged_config:
# Config is empty for this scope
# Do not init config because there is a non-empty scope
return config

_init_compiler_config(scope=scope)

@@ -125,6 +127,105 @@ def get_compiler_config(scope=None, init_config=True):
return config

def get_compiler_config_from_packages(scope=None):
"""Return the compiler configuration from packages.yaml"""
config = spack.config.get("packages", scope=scope)
if not config:
return []

packages = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in config.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
packages.extend(_compiler_config_from_package_config(externals_config))

return packages

def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)

return compilers

def _compiler_config_from_external(config):
spec = spack.spec.parse_with_version_concrete(config["spec"])
# use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)

extra_attributes = config.get("extra_attributes", {})
prefix = config.get("prefix", None)

compiler_class = class_for_compiler_name(compiler_spec.name)
paths = extra_attributes.get("compilers", {})

# compilers format has cc/fc/f77, externals format has "c/fortran"
if "c" in paths:
paths["cc"] = paths.pop("c")
if "fortran" in paths:
fc = paths.pop("fortran")
paths["fc"] = fc
if "f77" not in paths:
paths["f77"] = fc

compiler_langs = ["cc", "cxx", "fc", "f77"]
for lang in compiler_langs:
if paths.setdefault(lang, None):
continue

if not prefix:
continue

# Check for files that satisfy the naming scheme for this compiler
bindir = os.path.join(prefix, "bin")
for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
if regex.match(f):
paths[lang] = os.path.join(bindir, f)

if all(v is None for v in paths.values()):
return None

if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.target
if not target:
host_platform = spack.platforms.host()
target = host_platform.target("default_target").microarchitecture

operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")

compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": config.get("modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
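For orientation, _compiler_config_from_external above consumes one entry of a packages.yaml "externals" list (as returned by spack.config.get("packages")) and emits a compilers.yaml-style dict. A sketch of the input shape it expects, written as a Python literal; the compiler version and paths are invented for illustration:

# Illustrative external entry; keys mirror the lookups in the function above
# (config["spec"], config.get("prefix"), config.get("modules"),
# extra_attributes.get("compilers"/"flags"/"environment"/"extra_rpaths")).
external_entry = {
    "spec": "gcc@12.3.0",
    "prefix": "/usr",
    "modules": [],
    "extra_attributes": {
        "compilers": {
            "c": "/usr/bin/gcc",             # renamed to "cc" by the helper
            "cxx": "/usr/bin/g++",
            "fortran": "/usr/bin/gfortran",  # reused for both "fc" and "f77"
        },
        "flags": {},
        "environment": {},
        "extra_rpaths": [],
    },
}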
def _init_compiler_config(*, scope):
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()

@@ -142,17 +243,20 @@ def compiler_config_files():
compiler_config = config.get("compilers", scope=name)
if compiler_config:
config_files.append(config.get_config_filename(name, "compilers"))
compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
if compiler_config_from_packages:
config_files.append(config.get_config_filename(name, "packages"))
return config_files

def add_compilers_to_config(compilers, scope=None, init_config=True):
def add_compilers_to_config(compilers, scope=None):
"""Add compilers to the config for the specified architecture.

Arguments:
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(scope, init_config)
compiler_config = get_compiler_config(scope, init_config=False)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")

@@ -184,6 +288,9 @@ def remove_compiler_from_config(compiler_spec, scope=None):
for current_scope in candidate_scopes:
removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)

msg = "`spack compiler remove` will not remove compilers defined in packages.yaml"
msg += "\nTo remove these compilers, either edit the config or use `spack external remove`"
tty.debug(msg)
return removal_happened

@@ -198,7 +305,7 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(scope)
compiler_config = get_compiler_config(scope, init_config=False)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config

@@ -221,7 +328,14 @@ def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
return get_compiler_config(scope, init_config)
from_packages_yaml = get_compiler_config_from_packages(scope)
if from_packages_yaml:
init_config = False
from_compilers_yaml = get_compiler_config(scope, init_config)

result = from_compilers_yaml + from_packages_yaml
key = lambda c: _compiler_from_config_entry(c["compiler"])
return list(llnl.util.lang.dedupe(result, key=key))

def all_compiler_specs(scope=None, init_config=True):

@@ -388,7 +502,7 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):

def all_compilers(scope=None, init_config=True):
config = get_compiler_config(scope, init_config=init_config)
config = all_compilers_config(scope, init_config=init_config)
compilers = list()
for items in config:
items = items["compiler"]

@@ -403,10 +517,7 @@ def compilers_for_spec(
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
if use_cache:
config = all_compilers_config(scope, init_config)
else:
config = get_compiler_config(scope, init_config)
config = all_compilers_config(scope, init_config)

matches = set(find(compiler_spec, scope, init_config))
compilers = []

@@ -582,9 +693,7 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
scope_to_compilers = {}
for scope in config.scopes:
compilers = compilers_for_spec(
compiler_spec, arch_spec=arch_spec, scope=scope, use_cache=False
)
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
if compilers:
scope_to_compilers[scope] = compilers

@@ -20,16 +20,16 @@ def __init__(self, *args, **kwargs):
self.version_argument = "-V"

# Subclasses use possible names of C compiler
cc_names = ["craycc"]
cc_names = ["craycc", "cc"]

# Subclasses use possible names of C++ compiler
cxx_names = ["crayCC"]
cxx_names = ["crayCC", "CC"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["crayftn"]
f77_names = ["crayftn", "ftn"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["crayftn"]
fc_names = ["crayftn", "ftn"]

# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
suffixes = [r"-mp-\d\.\d"]

@@ -227,7 +227,7 @@ def read(path, apply_updates):
if apply_updates and compilers:
for compiler in compilers:
try:
spack.compilers.add_compilers_to_config([compiler], init_config=False)
spack.compilers.add_compilers_to_config([compiler])
except Exception:
warnings.warn(
f"Could not add compiler {str(compiler.spec)}: "

@@ -309,14 +309,10 @@ def find_windows_kit_roots() -> List[str]:
return glob.glob(kit_base)

@staticmethod
def find_windows_kit_bin_paths(
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base, "Unexpectedly empty value for Windows kit base path"
if isinstance(kit_base, str):
kit_base = kit_base.split(";")
kit_paths = []
for kit in kit_base:
kit_bin = os.path.join(kit, "bin")

@@ -324,14 +320,10 @@ def find_windows_kit_bin_paths(
return kit_paths

@staticmethod
def find_windows_kit_lib_paths(
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base, "Unexpectedly empty value for Windows kit base path"
if isinstance(kit_base, str):
kit_base = kit_base.split(";")
kit_paths = []
for kit in kit_base:
kit_lib = os.path.join(kit, "Lib")

@@ -1554,7 +1554,7 @@ def _concretize_separately(self, tests=False):
# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.get_compiler_config()
_ = spack.compilers.get_compiler_config(init_config=True)

# Early return if there is nothing to do
if len(args) == 0:

@@ -6,13 +6,11 @@
for Spack's command extensions.
"""
import difflib
import glob
import importlib
import os
import re
import sys
import types
from typing import List

import llnl.util.lang

@@ -77,15 +75,6 @@ def load_command_extension(command, path):
if not os.path.exists(cmd_path):
return None

ensure_extension_loaded(extension, path=path)

module = importlib.import_module(module_name)
sys.modules[module_name] = module

return module

def ensure_extension_loaded(extension, *, path):
def ensure_package_creation(name):
package_name = "{0}.{1}".format(__name__, name)
if package_name in sys.modules:

@@ -111,22 +100,10 @@ def ensure_package_creation(name):
ensure_package_creation(extension)
ensure_package_creation(extension + ".cmd")

module = importlib.import_module(module_name)
sys.modules[module_name] = module

def load_extension(name: str) -> str:
"""Loads a single extension into the 'spack.extensions' package.

Args:
name: name of the extension
"""
extension_root = path_for_extension(name, paths=get_extension_paths())
ensure_extension_loaded(name, path=extension_root)
commands = glob.glob(
os.path.join(extension_root, extension_name(extension_root), "cmd", "*.py")
)
commands = [os.path.basename(x).rstrip(".py") for x in commands]
for command in commands:
load_command_extension(command, extension_root)
return extension_root
return module

def get_extension_paths():

@@ -148,7 +125,7 @@ def get_command_paths():
return command_paths

def path_for_extension(target_name: str, *, paths: List[str]) -> str:
def path_for_extension(target_name, *paths):
"""Return the test root dir for a given extension.

Args:

@@ -357,8 +357,7 @@ def _print_installed_pkg(message: str) -> None:
Args:
message (str): message to be output
"""
if tty.msg_enabled():
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))
print(colorize("@*g{[+]} ") + spack.util.path.debug_padded_filter(message))

def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None:

@@ -2008,9 +2007,7 @@ def install(self) -> None:
# Only enable the terminal status line when we're in a tty without debug info
# enabled, so that the output does not get cluttered.
term_status = TermStatusLine(
enabled=sys.stdout.isatty() and tty.msg_enabled() and not tty.is_debug()
)
term_status = TermStatusLine(enabled=sys.stdout.isatty() and not tty.is_debug())

while self.build_pq:
task = self._pop_task()
@@ -5,20 +5,11 @@
from ._operating_system import OperatingSystem
from .cray_backend import CrayBackend
from .cray_frontend import CrayFrontend
from .freebsd import FreeBSDOs
from .linux_distro import LinuxDistro
from .mac_os import MacOs
from .windows_os import WindowsOs

__all__ = [
"OperatingSystem",
"LinuxDistro",
"MacOs",
"CrayFrontend",
"CrayBackend",
"WindowsOs",
"FreeBSDOs",
]
__all__ = ["OperatingSystem", "LinuxDistro", "MacOs", "CrayFrontend", "CrayBackend", "WindowsOs"]

#: List of all the Operating Systems known to Spack
operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs, FreeBSDOs]
operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs]

@@ -1,15 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform as py_platform

from spack.version import Version

from ._operating_system import OperatingSystem

class FreeBSDOs(OperatingSystem):
def __init__(self):
release = py_platform.release().split("-", 1)[0]
super().__init__("freebsd", Version(release))

@@ -206,15 +206,11 @@ def tokenize(text: str) -> Iterator[Token]:
scanner = ALL_TOKENS.scanner(text) # type: ignore[attr-defined]
match: Optional[Match] = None
for match in iter(scanner.match, None):
# The following two assertions are to help mypy
msg = (
"unexpected value encountered during parsing. Please submit a bug report "
"at https://github.com/spack/spack/issues/new/choose"
)
assert match is not None, msg
assert match.lastgroup is not None, msg
yield Token(
TokenType.__members__[match.lastgroup], match.group(), match.start(), match.end()
TokenType.__members__[match.lastgroup], # type: ignore[attr-defined]
match.group(), # type: ignore[attr-defined]
match.start(), # type: ignore[attr-defined]
match.end(), # type: ignore[attr-defined]
)

if match is None and not text:
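The tokenize hunk above steps a compiled regex scanner through the input with iter(callable, sentinel), yielding one Token per named group. A small self-contained sketch of the same idiom; the token pattern below is invented and much simpler than the real spec lexer:

import re

# Toy token pattern: one named group per token kind.
TOKENS = re.compile(r"(?P<NAME>[a-z]+)|(?P<NUMBER>[0-9]+)|(?P<WS>\s+)")


def toy_tokenize(text):
    scanner = TOKENS.scanner(text)
    # iter(callable, sentinel): keep calling scanner.match() until it returns None.
    for match in iter(scanner.match, None):
        if match.lastgroup != "WS":
            yield match.lastgroup, match.group()


print(list(toy_tokenize("zlib 1213")))  # [('NAME', 'zlib'), ('NUMBER', '1213')]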
@@ -8,7 +8,6 @@
from ._platform import Platform
from .cray import Cray
from .darwin import Darwin
from .freebsd import FreeBSD
from .linux import Linux
from .test import Test
from .windows import Windows

@@ -18,7 +17,6 @@
"Cray",
"Darwin",
"Linux",
"FreeBSD",
"Test",
"Windows",
"platforms",

@@ -10,13 +10,12 @@
from .cray import Cray
from .darwin import Darwin
from .freebsd import FreeBSD
from .linux import Linux
from .test import Test
from .windows import Windows

#: List of all the platform classes known to Spack
platforms = [Cray, Darwin, Linux, Windows, FreeBSD, Test]
platforms = [Cray, Darwin, Linux, Windows, Test]

@llnl.util.lang.memoized

@@ -1,37 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform

import archspec.cpu

import spack.target
from spack.operating_systems.freebsd import FreeBSDOs

from ._platform import Platform

class FreeBSD(Platform):
priority = 102

def __init__(self):
super().__init__("freebsd")

for name in archspec.cpu.TARGETS:
self.add_target(name, spack.target.Target(name))

# Get specific default
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default

os = FreeBSDOs()
self.default_os = str(os)
self.front_os = self.default_os
self.back_os = self.default_os
self.add_operating_system(str(os), os)

@classmethod
def detect(cls):
return platform.system().lower() == "freebsd"
@@ -92,12 +92,11 @@ def __init__(self, configuration: CDashConfiguration):
self.osname = platform.system()
self.osrelease = platform.release()
self.target = spack.platforms.host().target("default_target")
self.starttime = int(time.time())
self.endtime = self.starttime
self.endtime = int(time.time())
self.buildstamp = (
configuration.buildstamp
if configuration.buildstamp
else build_stamp(configuration.track, self.starttime)
else build_stamp(configuration.track, self.endtime)
)
self.buildIds: Dict[str, str] = {}
self.revision = ""

@@ -126,7 +125,7 @@ def build_report_for_package(self, report_dir, package, duration):
report_data[phase] = {}
report_data[phase]["loglines"] = []
report_data[phase]["status"] = 0
report_data[phase]["starttime"] = self.starttime
report_data[phase]["endtime"] = self.endtime

# Track the phases we perform so we know what reports to create.
# We always report the update step because this is how we tell CDash

@@ -154,25 +153,6 @@ def build_report_for_package(self, report_dir, package, duration):
elif cdash_phase:
report_data[cdash_phase]["loglines"].append(xml.sax.saxutils.escape(line))

# something went wrong pre-cdash "configure" phase b/c we have an exception and only
# "update" was encounterd.
# dump the report in the configure line so teams can see what the issue is
if len(phases_encountered) == 1 and package["exception"]:
# TODO this mapping is not ideal since these are pre-configure errors
# we need to determine if a more appropriate cdash phase can be utilized
# for now we will add a message to the log explaining this
cdash_phase = "configure"
phases_encountered.append(cdash_phase)

log_message = (
"Pre-configure errors occured in Spack's process that terminated the "
"build process prematurely.\nSpack output::\n{0}".format(
xml.sax.saxutils.escape(package["exception"])
)
)

report_data[cdash_phase]["loglines"].append(log_message)

# Move the build phase to the front of the list if it occurred.
# This supports older versions of CDash that expect this phase
# to be reported before all others.

@@ -180,9 +160,9 @@ def build_report_for_package(self, report_dir, package, duration):
build_pos = phases_encountered.index("build")
phases_encountered.insert(0, phases_encountered.pop(build_pos))

self.endtime = self.starttime + duration
self.starttime = self.endtime - duration
for phase in phases_encountered:
report_data[phase]["endtime"] = self.endtime
report_data[phase]["starttime"] = self.starttime
report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
errors, warnings = parse_log_events(report_data[phase]["loglines"])

@@ -329,7 +309,7 @@ def test_report_for_package(self, report_dir, package, duration):
self.buildname = "{0}-{1}".format(self.current_package_name, package["id"])
else:
self.buildname = self.report_build_name(self.current_package_name)
self.endtime = self.starttime + duration
self.starttime = self.endtime - duration

report_data = self.initialize_report(report_dir)
report_data["hostname"] = socket.gethostname()

@@ -374,7 +354,7 @@ def concretization_report(self, report_dir, msg):
self.buildname = self.base_buildname
report_data = self.initialize_report(report_dir)
report_data["update"] = {}
report_data["update"]["starttime"] = self.starttime
report_data["update"]["starttime"] = self.endtime
report_data["update"]["endtime"] = self.endtime
report_data["update"]["revision"] = self.revision
report_data["update"]["log"] = msg

@@ -3134,25 +3134,6 @@ def _develop_specs_from_env(spec, env):
spec.constrain(dev_info["spec"])

def _is_reusable_external(packages, spec: spack.spec.Spec) -> bool:
"""Returns true iff spec is an external that can be reused.

Arguments:
packages: the packages configuration
spec: the spec to check
"""
for name in {spec.name, *(p.name for p in spec.package.provided)}:
for entry in packages.get(name, {}).get("externals", []):
if (
spec.satisfies(entry["spec"])
and spec.external_path == entry.get("prefix")
and spec.external_modules == entry.get("modules")
):
return True

return False

class Solver:
"""This is the main external interface class for solving.

@@ -3200,18 +3181,8 @@ def _reusable_specs(self, specs):
# Specs from buildcaches
try:
# Specs in a build cache that depend on externals are reusable as long as local
# config has matching externals. This should guard against picking up binaries
# linked against externals not available locally, while still supporting the use
# case of distributing binaries across machines with similar externals.
packages = spack.config.get("packages")
reusable_specs.extend(
[
s
for s in spack.binary_distribution.update_cache_and_get_specs()
if not s.external or _is_reusable_external(packages, s)
]
)
index = spack.binary_distribution.update_cache_and_get_specs()
reusable_specs.extend(index)
except (spack.binary_distribution.FetchCacheError, IndexError):
# this is raised when no mirrors had indices.
# TODO: update mirror configuration so it can indicate that the
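The _is_reusable_external helper and the _reusable_specs hunk above gate which buildcache specs may be reused: a spec marked external only counts when the local packages configuration has an entry matching its spec, prefix, and modules. A simplified stand-alone sketch of that check, with plain dicts standing in for Spack objects; the package name, version, and prefix are made up, and the real check uses Spec.satisfies() and also considers provided virtuals:

packages = {"cmake": {"externals": [{"spec": "cmake@3.27.0", "prefix": "/usr"}]}}


def is_reusable_external(packages, name, spec_str, prefix):
    # Simplified stand-in for the logic above: an external is reusable only if
    # some local packages entry matches both its spec and its install prefix.
    for entry in packages.get(name, {}).get("externals", []):
        if entry["spec"] == spec_str and entry.get("prefix") == prefix:
            return True
    return False


print(is_reusable_external(packages, "cmake", "cmake@3.27.0", "/usr"))  # True
print(is_reusable_external(packages, "cmake", "cmake@3.27.0", "/opt"))  # False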
@@ -20,7 +20,7 @@
% Integrity constraints on DAG nodes
:- attr("root", PackageNode), not attr("node", PackageNode).
:- attr("version", PackageNode, _), not attr("node", PackageNode), not attr("virtual_node", PackageNode).
:- attr("node_version_satisfies", PackageNode, _), not attr("node", PackageNode), not attr("virtual_node", PackageNode).
:- attr("node_version_satisfies", PackageNode), not attr("node", PackageNode).
:- attr("hash", PackageNode, _), not attr("node", PackageNode).
:- attr("node_platform", PackageNode, _), not attr("node", PackageNode).
:- attr("node_os", PackageNode, _), not attr("node", PackageNode).

@@ -30,8 +30,6 @@ def current_host_platform():
current_platform = spack.platforms.Darwin()
elif "Windows" in platform.system():
current_platform = spack.platforms.Windows()
elif "FreeBSD" in platform.system():
current_platform = spack.platforms.FreeBSD()
return current_platform
Some files were not shown because too many files have changed in this diff.