Compare commits

42 Commits:

ba6cb62df8, e1437349d1, c1eb3f965b, e0d246210f, 88b47d2714, 342520175d,
8a55299214, 174308ca3d, 506c176d25, 88171ff353, fb8f2d8301, 27beb100e1,
02613d778d, a6065334ad, 243ee7203a, 271106e5dd, 9fb9156e82, d88cfbd839,
4127a93a91, 5df2189e43, 667c1960d0, 4449058257, 1b03f1d5bc, cf4e9cb3b3,
d189b12050, 5134504fd8, ae5018ee09, 106ebeb502, ba89754ee1, e733eb0fd9,
07b344bf10, a71c65399e, d8b73331f6, 4741ea683c, f33c18290b, 5ea67e8882,
8ade071253, dca09e6e0f, 2776402c90, 3961a86f86, 39e594d096, 7a91bed5c9
.github/workflows/bootstrap.yml (vendored, 24 changed lines)
@@ -83,14 +83,12 @@ jobs:
    steps:
      - name: Setup macOS
        if: ${{ matrix.runner != 'ubuntu-latest' }}
        run: brew install tree gawk
      - name: Remove system executables
        run: |
          brew install tree
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Setup Ubuntu
        if: ${{ matrix.runner == 'ubuntu-latest' }}
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
            sudo rm $(command -v gpg gpg2 patchelf)
          done
      - name: Checkout
        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
        with:
@@ -112,14 +110,12 @@ jobs:
    steps:
      - name: Setup macOS
        if: ${{ matrix.runner != 'ubuntu-latest' }}
        run: brew install tree
      - name: Remove system executables
        run: |
          brew install tree
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Setup Ubuntu
        if: ${{ matrix.runner == 'ubuntu-latest' }}
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
            sudo rm $(command -v gpg gpg2 patchelf)
          done
      - name: Checkout
        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
        with:
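The Ubuntu step's `while` loop removes every copy of `gpg`, `gpg2`, and `patchelf` visible on `PATH`, since deleting the first hit can expose another copy further down the search path. A minimal Python sketch of the same idea, assuming the tools may be installed in several `PATH` entries (the helper name is made up):

```python
import os
import shutil

def remove_all_from_path(*tools: str) -> None:
    """Delete every copy of the given tools visible on PATH.

    One pass is not enough: removing the first hit can expose a second
    copy in a later PATH entry, so loop until nothing is found.
    """
    while True:
        found = [p for p in (shutil.which(t) for t in tools) if p]
        if not found:
            break
        for path in found:
            os.remove(path)  # the workflow uses `sudo rm` for system paths

# remove_all_from_path("gpg", "gpg2", "patchelf")
```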
@@ -14,3 +14,26 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt

 search:
   ranking:
     spack.html: -10
     spack.*.html: -10
     llnl.html: -10
     llnl.*.html: -10
     _modules/*: -10
     command_index.html: -9
     basic_usage.html: 5
     configuration.html: 5
     config_yaml.html: 5
     packages_yaml.html: 5
     build_settings.html: 5
     environments.html: 5
     containers.html: 5
     mirrors.html: 5
     module_file_support.html: 5
     repositories.html: 5
     binary_caches.html: 5
     chain.html: 5
     pipelines.html: 5
     packaging_guide.html: 5
CHANGELOG.md (29 changed lines)
@@ -1,3 +1,32 @@
# v0.22.3 (2024-11-18)

## Bugfixes
- Forward compatibility with Python 3.13 (#46775, #46983, #47035, #47175)
- `archspec` was updated to v0.2.5 (#46503, #46958)
- Fix path to Spack in `spack env depfile` makefile (#46966)
- Fix `glibc` detection in Chinese locales (#47434)
- Fix pickle round-trip of specs propagating variants (#47351)
- Fix a bug where concurrent spack install commands would not always update explicits correctly (#47358)
- Fix a bug where autopush would run before all post install hooks modifying the install prefix had run (#47329)
- Fix `spack find -u` (#47102)
- Fix a bug where sometimes the wrong Python interpreter was used for build dependencies such as `py-setuptools` (#46980)
- Fix default config errors found by `spack audit externals` (#47308)
- Fix duplicate printing of external roots in installer (#44917)
- Fix modules schema in `compilers.yaml` (#47197)
- Reduce the size of generated YAML for Gitlab CI (#44995)
- Handle missing metadata file gracefully in bootstrap (#47278)
- Show underlying errors on fetch failure (#45714)
- Recognize `.` and `..` as paths instead of names in buildcache commands (#47105)
- Documentation and style (#46991, #47107, #47110, #47111, #47346, #47307, #47309, #47328, #47160, #47402, #47557, #46709, #47080)
- Tests and CI fixes (#47165, #46711)

## Package updates
- ffmpeg: fix hash of patch (#45574)

# v0.22.2 (2024-09-21)

## Bugfixes
@@ -42,8 +42,8 @@ concretizer:
  # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
  # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
  strategy: minimal
  # Option to specify compatiblity between operating systems for reuse of compilers and packages
  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
  # Option to specify compatibility between operating systems for reuse of compilers and packages
  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
  # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
  os_compatible: {}
@@ -37,9 +37,9 @@ packages:
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
    libc: [glibc, musl]
    libgfortran: [ gcc-runtime ]
    libgfortran: [gcc-runtime]
    libglx: [mesa+glx]
    libifcore: [ intel-oneapi-runtime ]
    libifcore: [intel-oneapi-runtime]
    libllvm: [llvm]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]
@@ -184,7 +184,7 @@ Style Tests

Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
a series of style guides for Python that provide suggestions for everything
from variable naming to indentation. In order to limit the number of PRs that
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
@@ -716,27 +716,27 @@ Release branches
^^^^^^^^^^^^^^^^

There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
diagram of how Spack release branches work::

    o    branch: develop  (latest version, v0.19.0.dev0)
    o    branch: develop  (latest version, v0.23.0.dev0)
    |
    o
    | o  branch: releases/v0.18, tag: v0.18.1
    | o  branch: releases/v0.22, tag: v0.22.1
    o |
    | o  tag: v0.18.0
    | o  tag: v0.22.0
    o |
    | o
    |/
    o
    |
    o
    | o  branch: releases/v0.17, tag: v0.17.2
    | o  branch: releases/v0.21, tag: v0.21.2
    o |
    | o  tag: v0.17.1
    | o  tag: v0.21.1
    o |
    | o  tag: v0.17.0
    | o  tag: v0.21.0
    o |
    | o
    |/
@@ -747,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
version is that of the next **major** release with a ``.dev0`` suffix.

Each Spack release series also has a corresponding branch, e.g.
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
@@ -778,27 +778,40 @@ for more details.
Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We schedule work for releases by creating `GitHub projects
<https://github.com/spack/spack/projects>`_. At any time, there may be
several open release projects. For example, below are two releases (from
some past version of the page linked above):
We schedule work for **major releases** through `milestones
<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
<https://github.com/spack/spack/labels>`_.

.. image:: images/projects.png
There is only one milestone open at a time. Its name corresponds to the next major version, for
example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
core developers, so that they are not forgotten at the time of release. The milestone is closed
when the release is made, and a new milestone is created for the next major release.

This image shows one release in progress for ``0.15.1`` and another for
``0.16.0``. Each of these releases has a project board containing issues
and pull requests. GitHub shows a status bar with completed work in
green, work in progress in purple, and work not started yet in gray, so
it's fairly easy to see progress.
Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
Important issues should be assigned to the next milestone as well, so they appear at the top of
the project board.

Spack's project boards are not firm commitments so we move work between
releases frequently. If we need to make a release and some tasks are not
yet done, we will simply move them to the next minor or major release, rather
than delaying the release to complete them.
Spack's milestones are not firm commitments so we move work between releases frequently. If we
need to make a release and some tasks are not yet done, we will simply move them to the next major
release milestone, rather than delaying the release to complete them.

For more on using GitHub project boards, see `GitHub's documentation
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
^^^^^^^^^^^^^^^^^^^^^
Backporting bug fixes
^^^^^^^^^^^^^^^^^^^^^

When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
that the bug fix should be backported to.

Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
Typically there are one or two backport pull requests open at any given time.

.. _major-releases:
@@ -806,25 +819,21 @@ For more on using GitHub project boards, see `GitHub's documentation
Making major releases
^^^^^^^^^^^^^^^^^^^^^

Assuming a project board has already been created and all required work
completed, the steps to make the major release are:
Assuming all required work from the milestone is completed, the steps to make the major release
are:

#. Create two new project boards:
#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
   release.

   * One for the next major release
   * One for the next point release
#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.

#. Move any optional tasks that are not done to one of the new project boards.

   In general, small bugfixes should go to the next point release. Major
   features, refactors, and changes that could affect concretization should
   go in the next major release.
#. Move any optional tasks that are not done to the next milestone.

#. Create a branch for the release, based on ``develop``:

   .. code-block:: console

      $ git checkout -b releases/v0.15 develop
      $ git checkout -b releases/v0.23 develop

   For a version ``vX.Y.Z``, the branch's name should be
   ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -860,8 +869,8 @@ completed, the steps to make the major release are:

   Create a pull request targeting the ``develop`` branch, bumping the major
   version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
   For instance when you have just released ``v0.15.0``, set the version
   to ``(0, 16, 0, 'dev0')`` on ``develop``.
   For instance when you have just released ``v0.23.0``, set the version
   to ``(0, 24, 0, 'dev0')`` on ``develop``.

#. Follow the steps in :ref:`publishing-releases`.
@@ -870,82 +879,52 @@ completed, the steps to make the major release are:
#. Follow the steps in :ref:`announcing-releases`.


.. _point-releases:
.. _patch-releases:

^^^^^^^^^^^^^^^^^^^^^
Making point releases
Making patch releases
^^^^^^^^^^^^^^^^^^^^^

Assuming a project board has already been created and all required work
completed, the steps to make the point release are:
To make the patch release process both efficient and transparent, we use a *backports pull request*
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
changes are fresh in the mind of the developer.

#. Create a new project board for the next point release.
The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.

#. Move any optional tasks that are not done to the next project board.
Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
squashed), cherry-pick each associated commit individually. Never force push to the
``backports/vX.Y.Z`` branch.

#. Check out the release branch (it should already exist).
.. warning::

   For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
   For ``v0.15.1``, you would check out ``releases/v0.15``:
   Sometimes you may **still** get merge conflicts even if you have
   cherry-picked all the commits in order. This generally means there
   is some other intervening pull request that the one you're trying
   to pick depends on. In these cases, you'll need to make a judgment
   call regarding those pull requests. Consider the number of affected
   files and/or the resulting differences.

   .. code-block:: console
   1. If the changes are small, you might just cherry-pick it.

      $ git checkout releases/v0.15
   2. If the changes are large, then you may decide that this fix is not
      worth including in a patch release, in which case you should remove
      the label from the pull request. Remember that large, manual backports
      are seldom the right choice for a patch release.

#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
   in the project, create it. This pull request ought to be created as early as
   possible when working on a release project, so that we can build the release
   commits incrementally, and identify potential conflicts at an early stage.
When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
release as follows:

#. Cherry-pick each pull request in the ``Done`` column of the release
   project board onto the ``Backports vX.Y.Z`` pull request.
#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
   release.

   This is **usually** fairly simple since we squash the commits from the
   vast majority of pull requests. That means there is only one commit
   per pull request to cherry-pick. For example, `this pull request
   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
   they were squashed into a single commit on merge. You can see the
   commit that was created here:
#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.

   .. image:: images/pr-commit.png

   You can easily cherry pick it like this (assuming you already have the
   release branch checked out):

   .. code-block:: console

      $ git cherry-pick 7e46da7

   For pull requests that were rebased (or not squashed), you'll need to
   cherry-pick each associated commit individually.

.. warning::

   It is important to cherry-pick commits in the order they happened,
   otherwise you can get conflicts while cherry-picking. When
   cherry-picking look at the merge date,
   **not** the number of the pull request or the date it was opened.

   Sometimes you may **still** get merge conflicts even if you have
   cherry-picked all the commits in order. This generally means there
   is some other intervening pull request that the one you're trying
   to pick depends on. In these cases, you'll need to make a judgment
   call regarding those pull requests. Consider the number of affected
   files and or the resulting differences.

   1. If the dependency changes are small, you might just cherry-pick it,
      too. If you do this, add the task to the release board.

   2. If the changes are large, then you may decide that this fix is not
      worth including in a point release, in which case you should remove
      the task from the release project.

   3. You can always decide to manually back-port the fix to the release
      branch if neither of the above options makes sense, but this can
      require a lot of work. It's seldom the right choice.

#. When all the commits from the project board are cherry-picked into
   the ``Backports vX.Y.Z`` pull request, you can push a commit to:
#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:

   1. Bump the version in ``lib/spack/spack/__init__.py``.
   2. Update ``CHANGELOG.md`` with a list of the changes.
@@ -954,20 +933,22 @@ completed, the steps to make the point release are:
   release branch. See `the changelog from 0.14.1
   <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
   is needed to keep track in the release branch of all the commits that were
   cherry-picked.

#. Make sure CI passes on the release branch, including:
#. Make sure CI passes on the **backports pull request**, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

   If CI does not pass, you'll need to figure out why, and make changes
   to the release branch until it does. You can make more commits, modify
   or remove cherry-picked commits, or cherry-pick **more** from
   ``develop`` to make this happen.
#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
   is needed to keep track in the release branch of all the commits that were
   cherry-picked.

#. Make sure CI passes on the last commit of the **release branch**.

#. In the rare case you need to include additional commits in the patch release after the backports
   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
   repeat the process. Avoid repeated force pushes to the release branch.

#. Follow the steps in :ref:`publishing-releases`.
@@ -1042,25 +1023,31 @@ Updating `releases/latest`

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.15.3``:
release is currently ``0.22.3``:

* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
  it with ``releases/latest``, as these are higher than ``0.15.3``.
* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
  it with ``releases/latest``, as these are higher than ``0.22.3``.

* If you are making a new release of an **older** major version of
  Spack, e.g. ``0.14.4``, then you should not tag it as
  Spack, e.g. ``0.21.4``, then you should not tag it as
  ``releases/latest`` (as there are newer major versions).

To tag ``releases/latest``, do this:
To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

.. code-block:: console

   $ git checkout releases/vX.Y    # vX.Y is the new release's branch
   $ git tag --force releases/latest
   $ git push --force --tags
   $ git fetch --force git@github.com:spack/spack vX.Y.Z

The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
``releases/latest`` tag with the new one.
Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.

.. code-block:: console

   $ git tag --force releases/latest vX.Y.Z
   $ git push --force git@github.com:spack/spack releases/latest

The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
local tags.


.. _announcing-releases:
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
   .. code-block:: console

      apt update
      apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
      apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

.. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

      dnf install epel-release
      dnf group install "Development Tools"
      dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
      dnf install gcc-gfortran redhat-lsb-core python3 unzip

.. tab-item:: macOS Brew

   .. code-block:: console

      brew update
      brew install curl gcc git gnupg zip
      brew install gcc git zip

------------
Installation
Binary file not shown. (Before: 44 KiB)
Binary file not shown. (Before: 68 KiB)
@@ -12,10 +12,6 @@
Spack
===================

.. epigraph::

   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,
@@ -2442,15 +2442,14 @@ with. For example, suppose that in the ``libdwarf`` package you write:

   depends_on("libelf@0.8")

Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
You can also specify a requirement for a particular variant or for
specific compiler flags:
Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
restrictions, you can also specify variants if this package requires
optional features of the dependency.

.. code-block:: python

   depends_on("libelf@0.8+debug")
   depends_on("libelf debug=True")
   depends_on("libelf cppflags='-fPIC'")
   depends_on("libelf@0.8 +parser +pic")

Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the
@@ -2458,46 +2457,82 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.

^^^^^^^^^^^^^^
Version ranges
^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Specifying backward and forward compatibility
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Although some packages require a specific version for their dependencies,
most can be built with a range of versions. For example, if you are
writing a package for a legacy Python module that only works with Python
2.4 through 2.6, this would look like:
Packages are often compatible with a range of versions of their
dependencies. This is typically referred to as backward and forward
compatibility. Spack allows you to specify this in the ``depends_on``
directive using version ranges.

**Backwards compatibility** means that the package requires at least a
certain version of its dependency:

.. code-block:: python

   depends_on("python@2.4:2.6")
   depends_on("python@3.10:")

Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
you want to specify that a package works with any version of Python 3 (or
higher), this would look like:
In this case, the package requires Python 3.10 or newer.

Commonly, packages drop support for older versions of a dependency as
they release new versions. In Spack you can conveniently add every
backward compatibility rule as a separate line:

.. code-block:: python

   depends_on("python@3:")
   # backward compatibility with Python
   depends_on("python@3.8:")
   depends_on("python@3.9:", when="@1.2:")
   depends_on("python@3.10:", when="@1.4:")

Here we leave out the upper bound. If you want to say that a package
requires Python 2, you can similarly leave out the lower bound:
This means that in general we need Python 3.8 or newer; from version
1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
need Python 3.10 or newer. Notice that it's fine to have overlapping
ranges in the ``when`` clauses.

**Forward compatibility** means that the package requires at most a
certain version of its dependency. Forward compatibility rules are
necessary when there are breaking changes in the dependency that the
package cannot handle. In Spack we often add forward compatibility
bounds only at the time a new, breaking version of a dependency is
released. As with backward compatibility, it is typical to see a list
of forward compatibility bounds in a package file as separate lines:

.. code-block:: python

   depends_on("python@:2")
   # forward compatibility with Python
   depends_on("python@:3.12", when="@:1.10")
   depends_on("python@:3.13", when="@:1.12")

Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
``@:3`` means "up to and including any 3.x version".
Notice how the ``:`` now appears before the version number both in the
dependency and in the ``when`` clause. This tells Spack that in general
we need Python 3.13 or older up to version ``1.12.x``, and up to version
``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
with Python 3.13 was added in version 1.11, while version 1.13 added forward
compatibility with Python 3.14.

You can also simply write
Notice that a version range ``@:3.12`` includes *any* patch version
number ``3.12.x``, which is often useful when specifying forward compatibility
bounds.

So far we have seen open-ended version ranges, which is by far the most
common use case. It is also possible to specify both a lower and an upper bound
on the version of a dependency, like this:

.. code-block:: python

   depends_on("python@2.7")
   depends_on("python@3.10:3.12")

to tell Spack that the package needs Python 2.7.x. This is equivalent to
``@2.7:2.7``.
There is short syntax to specify that a package is compatible with say any
``3.x`` version:

.. code-block:: python

   depends_on("python@3")

The above is equivalent to ``depends_on("python@3:3")``, which means at least
Python version 3 and at most any version ``3.x.y``.

In very rare cases, you may need to specify an exact version, for example
if you need to distinguish between ``3.2`` and ``3.2.1``:
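Putting the new guidance together, a hypothetical ``package.py`` might combine backward and forward compatibility bounds like this (the package name, versions, and checksums are invented for illustration):

```python
from spack.package import *

class Example(Package):
    """Made-up package illustrating version-range bounds."""

    version("1.12", sha256="0" * 64)  # placeholder checksum
    version("1.4", sha256="1" * 64)   # placeholder checksum

    # backward compatibility: at least these Python versions
    depends_on("python@3.8:")
    depends_on("python@3.9:", when="@1.2:")
    depends_on("python@3.10:", when="@1.4:")

    # forward compatibility: at most these Python versions
    depends_on("python@:3.12", when="@:1.10")
    depends_on("python@:3.13", when="@:1.12")
```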
lib/spack/external/__init__.py (vendored, 2 changed lines)
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit cbb1fd5eb397a70d466e5160b393b87b0dbcc78f)
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)

astunparse
----------------
@@ -81,8 +81,13 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
        self.generation = generation
        # Only relevant for AArch64
        self.cpu_part = cpu_part
        # Cache the ancestor computation

        # Cache the "ancestor" computation
        self._ancestors = None
        # Cache the "generic" computation
        self._generic = None
        # Cache the "family" computation
        self._family = None

    @property
    def ancestors(self):
@@ -115,6 +120,9 @@ def __eq__(self, other):
            and self.cpu_part == other.cpu_part
        )

    def __hash__(self):
        return hash(self.name)

    @coerce_target_names
    def __ne__(self, other):
        return not self == other
@@ -171,18 +179,22 @@ def __contains__(self, feature):
    @property
    def family(self):
        """Returns the architecture family a given target belongs to"""
        roots = [x for x in [self] + self.ancestors if not x.ancestors]
        msg = "a target is expected to belong to just one architecture family"
        msg += f"[found {', '.join(str(x) for x in roots)}]"
        assert len(roots) == 1, msg
        if self._family is None:
            roots = [x for x in [self] + self.ancestors if not x.ancestors]
            msg = "a target is expected to belong to just one architecture family"
            msg += f"[found {', '.join(str(x) for x in roots)}]"
            assert len(roots) == 1, msg
            self._family = roots.pop()

        return roots.pop()
        return self._family

    @property
    def generic(self):
        """Returns the best generic architecture that is compatible with self"""
        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
        return max(generics, key=lambda x: len(x.ancestors))
        if self._generic is None:
            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
            self._generic = max(generics, key=lambda x: len(x.ancestors))
        return self._generic

    def to_dict(self):
        """Returns a dictionary representation of this object."""
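The ``family`` and ``generic`` changes apply the same lazy-caching pattern as the new ``_ancestors`` field: compute the derived value on first access, store it on the instance, and return the stored value afterwards. A stripped-down sketch of the pattern (class and field names are illustrative, not archspec's actual API):

```python
class Target:
    def __init__(self, name, ancestors):
        self.name = name
        self.ancestors = ancestors
        self._family = None  # cache for the "family" computation

    @property
    def family(self):
        # Compute once; later accesses return the cached value.
        if self._family is None:
            roots = [x for x in [self] + self.ancestors if not x.ancestors]
            assert len(roots) == 1, "expected exactly one architecture family"
            self._family = roots.pop()
        return self._family

x86_64 = Target("x86_64", [])
zen4 = Target("zen4", [x86_64])
print(zen4.family.name)  # computed on first access, cached afterwards
```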
@@ -1482,7 +1482,6 @@
        "cldemote",
        "movdir64b",
        "movdiri",
        "pdcm",
        "serialize",
        "waitpkg"
      ],
@@ -2237,6 +2236,84 @@
      ]
    }
  },
  "zen5": {
    "from": ["zen4"],
    "vendor": "AuthenticAMD",
    "features": [
      "abm",
      "aes",
      "avx",
      "avx2",
      "avx512_bf16",
      "avx512_bitalg",
      "avx512bw",
      "avx512cd",
      "avx512dq",
      "avx512f",
      "avx512ifma",
      "avx512vbmi",
      "avx512_vbmi2",
      "avx512vl",
      "avx512_vnni",
      "avx512_vp2intersect",
      "avx512_vpopcntdq",
      "avx_vnni",
      "bmi1",
      "bmi2",
      "clflushopt",
      "clwb",
      "clzero",
      "cppc",
      "cx16",
      "f16c",
      "flush_l1d",
      "fma",
      "fsgsbase",
      "gfni",
      "ibrs_enhanced",
      "mmx",
      "movbe",
      "movdir64b",
      "movdiri",
      "pclmulqdq",
      "popcnt",
      "rdseed",
      "sse",
      "sse2",
      "sse4_1",
      "sse4_2",
      "sse4a",
      "ssse3",
      "tsc_adjust",
      "vaes",
      "vpclmulqdq",
      "xsavec",
      "xsaveopt"
    ],
    "compilers": {
      "gcc": [
        {
          "versions": "14.1:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "aocc": [
        {
          "versions": "5.0:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "19.1:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
  "ppc64": {
    "from": [],
    "vendor": "generic",
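Each compiler entry in a record like ``zen5`` maps an open-ended version range to an optimization-flag template. A rough sketch of how such an entry can be resolved to flags (the range check below is a crude stand-in for archspec's real version logic):

```python
ZEN5_COMPILERS = {
    "gcc": [{"versions": "14.1:", "name": "znver5",
             "flags": "-march={name} -mtune={name}"}],
    "clang": [{"versions": "19.1:", "name": "znver5",
               "flags": "-march={name} -mtune={name}"}],
}

def flags_for(compiler: str, version: tuple) -> str:
    for entry in ZEN5_COMPILERS.get(compiler, []):
        # "14.1:" is an open-ended lower bound on the compiler version
        lower = tuple(int(x) for x in entry["versions"].rstrip(":").split("."))
        if version >= lower:
            return entry["flags"].format(name=entry["name"])
    raise ValueError(f"{compiler} {version} cannot target zen5")

print(flags_for("gcc", (14, 2)))  # -> -march=znver5 -mtune=znver5
```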
@@ -18,9 +18,10 @@
import threading
import traceback
from contextlib import contextmanager
from multiprocessing.connection import Connection
from threading import Thread
from types import ModuleType
from typing import Optional
from typing import Callable, Optional

import llnl.util.tty as tty

@@ -329,49 +330,6 @@ def close(self):
        self.file.close()


class MultiProcessFd:
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""

    def __init__(self, fd):
        self._connection = None
        self._fd = None
        if sys.version_info >= (3, 8):
            self._connection = multiprocessing.connection.Connection(fd)
        else:
            self._fd = fd

    @property
    def fd(self):
        if self._connection:
            return self._connection._handle
        else:
            return self._fd

    def close(self):
        if self._connection:
            self._connection.close()
        else:
            os.close(self._fd)


def close_connection_and_file(multiprocess_fd, file):
    # MultiprocessFd is intended to transmit a FD
    # to a child process, this FD is then opened to a Python File object
    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
    # multiprocessing.connection.Connection; Connection closes the FD
    # when it is deleted, and prints a warning about duplicate closure if
    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
    # simple FD; closing the FD here appears to conflict with
    # closure of the File object (in < 3.8 that is). Therefore this needs
    # to choose whether to close the File or the Connection.
    if sys.version_info >= (3, 8):
        multiprocess_fd.close()
    else:
        file.close()


@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.
@@ -529,22 +487,20 @@ def __enter__(self):
        # forcing debug output.
        self._saved_debug = tty._debug

        # OS-level pipe for redirecting output to logger
        read_fd, write_fd = os.pipe()
        # Pipe for redirecting output to logger
        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)

        read_multiprocess_fd = MultiProcessFd(read_fd)

        # Multiprocessing pipe for communication back from the daemon
        # Pipe for communication back from the daemon
        # Currently only used to save echo value between uses
        self.parent_pipe, child_pipe = multiprocessing.Pipe()
        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)

        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_multiprocess_fd = None
            input_fd = None
            try:
                if sys.stdin.isatty():
                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
                    input_fd = Connection(os.dup(sys.stdin.fileno()))
            except BaseException:
                # just don't forward input if this fails
                pass
@@ -553,9 +509,9 @@ def __enter__(self):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_multiprocess_fd,
                    read_multiprocess_fd,
                    write_fd,
                    input_fd,
                    read_fd,
                    self.write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
@@ -566,9 +522,9 @@ def __enter__(self):
            self.process.start()

        finally:
            if input_multiprocess_fd:
                input_multiprocess_fd.close()
            read_multiprocess_fd.close()
            if input_fd:
                input_fd.close()
            read_fd.close()

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
@@ -586,9 +542,9 @@ def __enter__(self):
            self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
            os.dup2(write_fd, sys.stdout.fileno())
            os.dup2(write_fd, sys.stderr.fileno())
            os.close(write_fd)
            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
            self.write_fd.close()

        else:
            # Handle I/O the Python way. This won't redirect lower-level
@@ -601,7 +557,7 @@ def __enter__(self):
            self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
            pipe_fd_out = os.fdopen(write_fd, "w")
            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out

@@ -637,6 +593,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr
            self.write_fd.close()

        # print log contents in parent if needed.
        if self.log_file.write_in_parent:
@@ -850,14 +807,14 @@ def force_echo(self):


def _writer_daemon(
    stdin_multiprocess_fd,
    read_multiprocess_fd,
    write_fd,
    echo,
    log_file_wrapper,
    control_pipe,
    filter_fn,
):
    stdin_fd: Optional[Connection],
    read_fd: Connection,
    write_fd: Connection,
    echo: bool,
    log_file_wrapper: FileWrapper,
    control_fd: Connection,
    filter_fn: Optional[Callable[[str], str]],
) -> None:
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both
@@ -894,43 +851,37 @@ def _writer_daemon(
    ``StringIO`` in the parent. This is mainly for testing.

    Arguments:
        stdin_multiprocess_fd (int): input from the terminal
        read_multiprocess_fd (int): pipe for reading from parent's redirected
            stdout
        echo (bool): initial echo setting -- controlled by user and
            preserved across multiple writer daemons
        log_file_wrapper (FileWrapper): file to log all output
        control_pipe (Pipe): multiprocessing pipe on which to send control
            information to the parent
        filter_fn (callable, optional): function to filter each line of output
        stdin_fd: optional input from the terminal
        read_fd: pipe for reading from parent's redirected stdout
        echo: initial echo setting -- controlled by user and preserved across multiple writer
            daemons
        log_file_wrapper: file to log all output
        control_pipe: multiprocessing pipe on which to send control information to the parent
        filter_fn: optional function to filter each line of output

    """
    # If this process was forked, then it will inherit file descriptors from
    # the parent process. This process depends on closing all instances of
    # write_fd to terminate the reading loop, so we close the file descriptor
    # here. Forking is the process spawning method everywhere except Mac OS
    # for Python >= 3.8 and on Windows
    if sys.version_info < (3, 8) or sys.platform != "darwin":
        os.close(write_fd)
    # This process depends on closing all instances of write_pipe to terminate the reading loop
    write_fd.close()

    # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
    # that prevents unbuffered text I/O.
    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
    # 3. closefd=False because Connection has "ownership"
    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)

    if stdin_multiprocess_fd:
        stdin = os.fdopen(stdin_multiprocess_fd.fd)
    if stdin_fd:
        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
    else:
        stdin = None
        stdin_file = None

    # list of streams to select from
    istreams = [in_pipe, stdin] if stdin else [in_pipe]
    istreams = [read_file, stdin_file] if stdin_file else [read_file]
    force_echo = False  # parent can force echo for certain output

    log_file = log_file_wrapper.unwrap()

    try:
        with keyboard_input(stdin) as kb:
        with keyboard_input(stdin_file) as kb:
            while True:
                # fix the terminal settings if we recently came to
                # the foreground
@@ -943,12 +894,12 @@ def _writer_daemon(
                # Allow user to toggle echo with 'v' key.
                # Currently ignores other chars.
                # only read stdin if we're in the foreground
                if stdin in rlist and not _is_background_tty(stdin):
                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                    # it's possible to be backgrounded between the above
                    # check and the read, so we ignore SIGTTIN here.
                    with ignore_signal(signal.SIGTTIN):
                        try:
                            if stdin.read(1) == "v":
                            if stdin_file.read(1) == "v":
                                echo = not echo
                        except IOError as e:
                            # If SIGTTIN is ignored, the system gives EIO
@@ -957,13 +908,13 @@ def _writer_daemon(
                            if e.errno != errno.EIO:
                                raise

                if in_pipe in rlist:
                if read_file in rlist:
                    line_count = 0
                    try:
                        while line_count < 100:
                            # Handle output from the calling process.
                            try:
                                line = _retry(in_pipe.readline)()
                                line = _retry(read_file.readline)()
                            except UnicodeDecodeError:
                                # installs like --test=root gpgme produce non-UTF8 logs
                                line = "<line lost: output was not encoded as UTF-8>\n"
@@ -992,7 +943,7 @@ def _writer_daemon(
                            if xoff in controls:
                                force_echo = False

                            if not _input_available(in_pipe):
                            if not _input_available(read_file):
                                break
                    finally:
                        if line_count > 0:
@@ -1007,14 +958,14 @@ def _writer_daemon(
    finally:
        # send written data back to parent if we used a StringIO
        if isinstance(log_file, io.StringIO):
            control_pipe.send(log_file.getvalue())
            control_fd.send(log_file.getvalue())
        log_file_wrapper.close()
        close_connection_and_file(read_multiprocess_fd, in_pipe)
        if stdin_multiprocess_fd:
            close_connection_and_file(stdin_multiprocess_fd, stdin)
        read_fd.close()
        if stdin_fd:
            stdin_fd.close()

        # send echo value back to the parent so it can be preserved.
        control_pipe.send(echo)
        control_fd.send(echo)


def _retry(function):
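The refactor above drops the homegrown ``MultiProcessFd`` wrapper in favor of ``multiprocessing.connection.Connection``, which owns its file descriptor and travels cleanly to child processes under both fork and spawn. A self-contained sketch of the pattern (not Spack's actual API): read raw data through a ``Connection``'s descriptor, and close every copy of the write end so the reader sees EOF:

```python
import multiprocessing
import os
from multiprocessing.connection import Connection

def reader(read_conn: Connection, write_conn: Connection) -> None:
    # Close the child's inherited copy of the write end first: the read
    # loop only terminates once *every* writer has closed its end.
    write_conn.close()
    # closefd=False because the Connection keeps ownership of the fd.
    with os.fdopen(read_conn.fileno(), "r", closefd=False) as f:
        for line in f:
            print("child read:", line.rstrip())
    read_conn.close()

if __name__ == "__main__":
    read_conn, write_conn = multiprocessing.Pipe(duplex=False)
    proc = multiprocessing.Process(target=reader, args=(read_conn, write_conn))
    proc.start()
    os.write(write_conn.fileno(), b"hello\n")
    write_conn.close()  # parent closes its copy so the child sees EOF
    read_conn.close()
    proc.join()
```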
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.22.2"
__version__ = "0.22.3"
spack_version = __version__
@@ -597,7 +597,10 @@ def bootstrapping_sources(scope: Optional[str] = None):
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
        try:
            with open(metadata_yaml, encoding="utf-8") as stream:
                current.update(spack.util.spack_yaml.load(stream))
            list_of_sources.append(current)
        except OSError:
            pass
    return list_of_sources
@@ -43,7 +43,8 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import Dict, List, Set, Tuple
from multiprocessing.connection import Connection
from typing import Callable, Dict, List, Optional, Set, Tuple

import llnl.util.tty as tty
from llnl.string import plural
@@ -51,7 +52,6 @@
from llnl.util.lang import dedupe, stable_partition
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd

import spack.build_systems.cmake
import spack.build_systems.meson
@@ -1145,18 +1145,60 @@ def get_cmake_prefix_path(pkg):


def _setup_pkg_and_run(
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
    function: Callable,
    kwargs: Dict,
    write_pipe: Connection,
    input_pipe: Optional[Connection],
    jsfd1: Optional[Connection],
    jsfd2: Optional[Connection],
):
    """Main entry point in the child process for Spack builds.

    ``_setup_pkg_and_run`` is called by the child process created in
    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).

    The child process is passed a ``write_pipe``, on which it's expected to send one of
    the following:

    * ``StopPhase``: error raised by a build process indicating it's stopping at a
      particular build phase.

    * ``BaseException``: any exception raised by a child build process, which will be
      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
      raised in the parent.

    * The return value of ``function()``, which can be anything (except an exception).
      This is returned to the caller.

    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
    does not close these file descriptors. Some ``multiprocessing`` backends will close
    them automatically in the child if they are not passed at process creation time.

    Arguments:
        serialized_pkg: Spack package install context object (serialized form of the
            package that we'll build in the child process).
        function: function to call in the child process; serialized_pkg is passed to
            this as the first argument.
        kwargs: additional keyword arguments to pass to ``function()``.
        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
            child *must* send a result (or an error) back to parent on.
        input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
        jsfd1: gmake Jobserver file descriptor 1.
        jsfd2: gmake Jobserver file descriptor 2.

    """

    context: str = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to
        # open(os.devnull) to prevent our process and its parent from
        # simultaneously reading from the original stdin. But, we assume
        # that the parent process is not going to read from it till we
        # are done with the child, so we undo Python's precaution.
        if input_multiprocess_fd is not None:
            sys.stdin = os.fdopen(input_multiprocess_fd.fd)
        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
        # process and its parent from simultaneously reading from the original stdin. But, we
        # assume that the parent process is not going to read from it till we are done with the
        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
        if input_pipe is not None:
            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)

        pkg = serialized_pkg.restore()

@@ -1172,13 +1214,14 @@ def _setup_pkg_and_run(
        # Do not create a full ChildError from this, it's not an error
        # it's a control statement.
        write_pipe.send(e)
    except BaseException:
    except BaseException as e:
        # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()

        # Need to unwind the traceback in the child because traceback
        # objects can't be sent to the parent.
        tb_string = traceback.format_exc()
        exc_type = type(e)
        tb = e.__traceback__
        tb_string = "".join(traceback.format_exception(exc_type, e, tb))

        # build up some context from the offending package so we can
        # show that, too.
@@ -1195,8 +1238,8 @@ def _setup_pkg_and_run(
        elif context == "test":
            logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

        error_msg = str(exc)
        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
        error_msg = str(e)
        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
            process = "test the installation" if context == "test" else "build from sources"
            error_msg = (
                "The '{}' package cannot find an attribute while trying to {}. "
@@ -1206,7 +1249,7 @@ def _setup_pkg_and_run(
                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
            ).format(pkg.name, process, context)
            error_msg = colorize("@*R{{{}}}".format(error_msg))
            error_msg = "{}\n\n{}".format(str(exc), error_msg)
            error_msg = "{}\n\n{}".format(str(e), error_msg)

        # make a pickleable exception to send to parent.
        msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1224,8 +1267,8 @@ def _setup_pkg_and_run(

    finally:
        write_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_pipe is not None:
            input_pipe.close()


def start_build_process(pkg, function, kwargs):
@@ -1252,23 +1295,9 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

    This uses `multiprocessing.Process` to create the child process. The
    mechanism used to create the process differs on different operating
    systems and for different versions of Python. In some cases "fork"
    is used (i.e. the "fork" system call) and some cases it starts an
    entirely new Python interpreter process (in the docs this is referred
    to as the "spawn" start method). Breaking it down by OS:

    - Linux always uses fork.
    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
    - Windows always uses the "spawn" start method.

    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    input_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None

@@ -1277,14 +1306,13 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
            input_fd = os.dup(sys.stdin.fileno())
            input_multiprocess_fd = MultiProcessFd(input_fd)
            input_fd = Connection(os.dup(sys.stdin.fileno()))
        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
                jobserver_fd2 = MultiProcessFd(int(m.group(2)))
                jobserver_fd1 = Connection(int(m.group(1)))
                jobserver_fd2 = Connection(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
@@ -1293,7 +1321,7 @@ def child_fun():
                function,
                kwargs,
                write_pipe,
                input_multiprocess_fd,
                input_fd,
                jobserver_fd1,
                jobserver_fd2,
            ),
@@ -1313,8 +1341,8 @@ def child_fun():

    finally:
        # Close the input stream in the parent process
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
        if input_fd is not None:
            input_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
@@ -22,6 +22,8 @@
|
||||
from urllib.parse import urlencode
|
||||
from urllib.request import HTTPHandler, Request, build_opener
|
||||
|
||||
import ruamel.yaml
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
@@ -1310,8 +1312,11 @@ def main_script_replacements(cmd):
|
||||
if not rebuild_everything:
|
||||
sys.exit(1)
|
||||
|
||||
with open(output_file, "w") as outf:
|
||||
outf.write(syaml.dump(sorted_output, default_flow_style=True))
|
||||
# Minimize yaml output size through use of anchors
|
||||
syaml.anchorify(sorted_output)
|
||||
|
||||
with open(output_file, "w") as f:
|
||||
ruamel.yaml.YAML().dump(sorted_output, f)
|
||||
|
||||
|
||||
def _url_encode_string(input_string):
|
||||
|
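The hunk above shrinks the generated CI YAML by exploiting anchors: after `syaml.anchorify` (a Spack helper) rewrites repeated sub-structures so they share a single object, `ruamel.yaml` serializes the shared object once as an anchor (`&id001`) and emits aliases (`*id001`) everywhere else. A minimal illustration of that ruamel behavior, with made-up data:

```python
import sys

import ruamel.yaml

# The same dict object referenced twice: ruamel emits one anchored
# mapping and one alias instead of writing the content out twice.
shared = {"tags": ["small"], "script": ["spack ci rebuild"]}
data = {"job-a": shared, "job-b": shared}

ruamel.yaml.YAML().dump(data, sys.stdout)
# job-a: &id001
#   ...
# job-b: *id001   (exact block/flow style may differ)
```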
@@ -661,34 +661,32 @@ def mirror_name_or_url(m):
# accidentally to a dir in the current working directory.

# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m:
if "/" in m or "\\" in m or m in (".", ".."):
return spack.mirror.Mirror(m)

# Otherwise, the named mirror is required to exist.
try:
return spack.mirror.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(
str(e) + ". Did you mean {}?".format(os.path.join(".", m))
)
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e

def mirror_url(url):
try:
return spack.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e

def mirror_directory(path):
try:
return spack.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e

def mirror_name(name):
try:
return spack.mirror.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e))
raise argparse.ArgumentTypeError(str(e)) from e
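A note on the pattern above: these functions are argparse `type=` converters, so raising `argparse.ArgumentTypeError` surfaces as a normal usage error, while chaining with `from e` preserves the original cause for debugging. A self-contained sketch of the same pattern (the mirror names here are illustrative, not Spack's):

```python
import argparse


def mirror_name(name: str) -> str:
    # Illustrative stand-in for spack.mirror.require_mirror_name.
    known = {"develop", "releases"}
    if name not in known:
        raise argparse.ArgumentTypeError(f'no mirror named "{name}"')
    return name


parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("mirror", type=mirror_name)
# Prints "develop"; an unknown name exits with "demo: error: ...".
print(parser.parse_args(["develop"]).mirror)
```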
@@ -169,9 +169,9 @@ def query_arguments(args):
if (args.missing or args.only_missing) and not args.only_deprecated:
installed.append(InstallStatuses.MISSING)

known = any
predicate_fn = None
if args.unknown:
known = False
predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)

explicit = any
if args.explicit:

@@ -179,7 +179,7 @@ def query_arguments(args):
if args.implicit:
explicit = False

q_args = {"installed": installed, "known": known, "explicit": explicit}
q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}

install_tree = args.install_tree
upstreams = spack.config.get("upstreams", {})

@@ -61,7 +61,6 @@ def install_kwargs_from_args(args):
"dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
"dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
"include_build_deps": args.include_build_deps,
"explicit": True, # Use true as a default for install command
"stop_at": args.until,
"unsigned": args.unsigned,
"install_deps": ("dependencies" in args.things_to_install),

@@ -473,6 +472,7 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
require_user_confirmation_for_overwrite(concrete_specs, args)
install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

installs = [(s.package, install_kwargs) for s in concrete_specs]
builder = PackageInstaller(installs)
installs = [s.package for s in concrete_specs]
install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
builder = PackageInstaller(installs, install_kwargs)
builder.install()

@@ -101,8 +101,9 @@ def do_mark(specs, explicit):
specs (list): list of specs to be marked
explicit (bool): whether to mark specs as explicitly installed
"""
for spec in specs:
spack.store.STORE.db.update_explicit(spec, explicit)
with spack.store.STORE.db.write_transaction():
for spec in specs:
spack.store.STORE.db.mark(spec, "explicit", explicit)

def mark_specs(args, specs):

@@ -377,7 +377,10 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
"refresh": {
"installed": True,
"predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
}
}
query_args = constraint_qualifiers.get(args.subparser_name, {})

@@ -290,7 +290,7 @@ def __init__(
operating_system,
target,
paths,
modules=None,
modules: Optional[List[str]] = None,
alias=None,
environment=None,
extra_rpaths=None,

@@ -283,12 +283,9 @@ def __reduce__(self):
database. If it is a spec, we'll evaluate
``spec.satisfies(query_spec)``

known (bool or None): Specs that are "known" are those
for which Spack can locate a ``package.py`` file -- i.e.,
Spack "knows" how to install them. Specs that are unknown may
represent packages that existed in a previous version of
Spack, but have since either changed their name or
been removed
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
whether that record is selected for the query. It can be used to craft criteria
that need some data for selection not provided by the Database itself.

installed (bool or InstallStatus or typing.Iterable or None):
if ``True``, includes only installed

@@ -588,6 +585,9 @@ def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
return self.dir / f"{spec.name}-{spec.dag_hash()}"

SelectType = Callable[[InstallRecord], bool]

class Database:
#: Fields written for each install record
record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -1367,7 +1367,7 @@ def _deprecate(self, spec, deprecator):
self._data[spec_key] = spec_rec

@_autospec
def mark(self, spec, key, value):
def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
"""Mark an arbitrary record on a spec."""
with self.write_transaction():
return self._mark(spec, key, value)

@@ -1516,7 +1516,7 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
def _query(
self,
query_spec=any,
known=any,
predicate_fn: Optional[SelectType] = None,
installed=True,
explicit=any,
start_date=None,

@@ -1524,7 +1524,7 @@ def _query(
hashes=None,
in_buildcache=any,
origin=None,
):
) -> List["spack.spec.Spec"]:
"""Run a query on the database."""

# TODO: Specs are a lot like queries. Should there be a

@@ -1570,7 +1570,7 @@ def _query(
if explicit is not any and rec.explicit != explicit:
continue

if known is not any and known(rec.spec.name):
if predicate_fn is not None and not predicate_fn(rec):
continue

if start_date or end_date:

@@ -1655,14 +1655,14 @@ def query(self, *args, **kwargs):
query.__doc__ = ""
query.__doc__ += _QUERY_DOCSTRING

def query_one(self, query_spec, known=any, installed=True):
def query_one(self, query_spec, predicate_fn=None, installed=True):
"""Query for exactly one spec that matches the query spec.

Raises an assertion error if more than one spec matches the
query. Returns None if no installed package matches.

"""
concrete_specs = self.query(query_spec, known=known, installed=installed)
concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
assert len(concrete_specs) <= 1
return concrete_specs[0] if concrete_specs else None
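To make the new query hook concrete, here is a toy reimplementation (not Spack's `Database`) showing the filtering order used in `_query` above: cheap attribute checks first, then the user-supplied predicate over the whole record.

```python
from typing import Callable, List, NamedTuple, Optional


class InstallRecord(NamedTuple):
    name: str
    explicit: bool
    installed: bool


def query(
    records: List[InstallRecord],
    predicate_fn: Optional[Callable[[InstallRecord], bool]] = None,
    installed: bool = True,
) -> List[InstallRecord]:
    selected = []
    for rec in records:
        if rec.installed != installed:
            continue
        # The predicate sees the full record, so it can use data that a
        # bare Spec does not carry (e.g. explicitness, install status).
        if predicate_fn is not None and not predicate_fn(rec):
            continue
        selected.append(rec)
    return selected


db = [InstallRecord("mpileaks", True, True), InstallRecord("zlib", False, True)]
print(query(db, predicate_fn=lambda rec: rec.name.startswith("mpil")))
```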
@@ -1709,24 +1709,6 @@ def root(key, record):
if id(rec.spec) not in needed and rec.installed
]

def update_explicit(self, spec, explicit):
"""
Update the spec's explicit state in the database.

Args:
spec (spack.spec.Spec): the spec whose install record is being updated
explicit (bool): ``True`` if the package was requested explicitly
by the user, ``False`` if it was pulled in as a dependency of
an explicit package.
"""
rec = self.get_record(spec)
if explicit != rec.explicit:
with self.write_transaction():
message = "{s.name}@{s.version} : marking the package {0}"
status = "explicit" if explicit else "implicit"
tty.debug(message.format(status, s=spec))
rec.explicit = explicit

class UpstreamDatabaseLockingError(SpackError):
"""Raised when an operation would need to lock an upstream database"""
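Call sites of the deleted `update_explicit` migrate to the generic `mark` API shown earlier; since `mark` opens its own write transaction, callers flagging many specs wrap the loop in a single transaction (as `spack mark` does above) so the database lock is taken once. A sketch of the migration, only meaningful inside a Spack session:

```python
import spack.store


def mark_explicit(specs):
    # Before: spack.store.STORE.db.update_explicit(spec, True) per spec,
    # each call opening its own transaction.
    # After: one write transaction around the generic mark() calls.
    with spack.store.STORE.db.write_transaction():
        for spec in specs:
            spack.store.STORE.db.mark(spec, "explicit", True)
```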
@@ -9,11 +9,13 @@

import os
import re
import shlex
from enum import Enum
from typing import List, Optional

import spack.deptypes as dt
import spack.environment.environment as ev
import spack.paths
import spack.spec
import spack.traverse as traverse

@@ -226,6 +228,7 @@ def to_dict(self):
"install_deps_target": self._target("install-deps"),
"any_hash_target": self._target("%"),
"jobserver_support": self.jobserver_support,
"spack_script": shlex.quote(spack.paths.spack_script),
"adjacency_list": self.make_adjacency_list,
"phony_convenience_targets": " ".join(self.phony_convenience_targets),
"pkg_ids_variable": self.pkg_identifier_variable,

@@ -1936,13 +1936,19 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
specs = specs if specs is not None else roots

# Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = (
install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
overwrite: Set[str] = set()
overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
install_args["overwrite"] = overwrite

explicit: Set[str] = set()
explicit.update(
install_args.get("explicit", []),
(s.dag_hash() for s in specs),
(s.dag_hash() for s in roots),
)
install_args["explicit"] = explicit

installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]

PackageInstaller(installs).install()
PackageInstaller([spec.package for spec in specs], install_args).install()

def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs"""

@@ -30,6 +30,7 @@
import shutil
import urllib.error
import urllib.parse
import urllib.request
from pathlib import PurePath
from typing import List, Optional

@@ -273,10 +274,7 @@ def __init__(self, url=None, checksum=None, **kwargs):
@property
def curl(self):
if not self._curl:
try:
self._curl = which("curl", required=True)
except CommandNotFoundError as exc:
tty.error(str(exc))
self._curl = web_util.require_curl()
return self._curl

def source_id(self):

@@ -297,27 +295,23 @@ def candidate_urls(self):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug("Already downloaded {0}".format(self.archive_file))
tty.debug(f"Already downloaded {self.archive_file}")
return

url = None
errors = []
errors: List[Exception] = []
for url in self.candidate_urls:
if not web_util.url_exists(url):
tty.debug("URL does not exist: " + url)
continue

try:
self._fetch_from_url(url)
break
except FailedDownloadError as e:
errors.append(str(e))

for msg in errors:
tty.debug(msg)
errors.extend(e.exceptions)
else:
raise FailedDownloadError(*errors)

if not self.archive_file:
raise FailedDownloadError(url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)

def _fetch_from_url(self, url):
if spack.config.get("config:url_fetch_method") == "curl":

@@ -336,19 +330,20 @@ def _check_headers(self, headers):
@_needs_stage
def _fetch_urllib(self, url):
save_file = self.stage.save_filename
tty.msg("Fetching {0}".format(url))

# Run urllib but grab the mime type from the http headers
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

try:
url, headers, response = web_util.read_from_url(url)
except web_util.SpackWebError as e:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if os.path.lexists(save_file):
os.remove(save_file)
msg = "urllib failed to fetch with error {0}".format(e)
raise FailedDownloadError(url, msg)
raise FailedDownloadError(e) from e

tty.msg(f"Fetching {url}")

if os.path.lexists(save_file):
os.remove(save_file)

@@ -356,7 +351,7 @@ def _fetch_urllib(self, url):
with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)

self._check_headers(str(headers))
self._check_headers(str(response.headers))

@_needs_stage
def _fetch_curl(self, url):

@@ -365,7 +360,7 @@ def _fetch_curl(self, url):
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + ".part"
tty.msg("Fetching {0}".format(url))
tty.msg(f"Fetching {url}")
if partial_file:
save_args = [
"-C",

@@ -405,8 +400,8 @@ def _fetch_curl(self, url):

try:
web_util.check_curl_code(curl.returncode)
except spack.error.FetchError as err:
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
except spack.error.FetchError as e:
raise FailedDownloadError(e) from e

self._check_headers(headers)

@@ -560,7 +555,7 @@ def fetch(self):
os.remove(self.archive_file)
if os.path.lexists(file):
os.remove(file)
raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
raise FailedDownloadError(e) from e

if os.path.lexists(file):
os.remove(file)

@@ -1312,35 +1307,41 @@ def __init__(self, *args, **kwargs):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug("Already downloaded {0}".format(self.archive_file))
tty.debug(f"Already downloaded {self.archive_file}")
return

parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

tty.debug("Fetching {0}".format(self.url))

basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)

with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e

tty.debug(f"Fetching {self.url}")

with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
shutil.copyfileobj(response, f)

content_type = web_util.get_header(headers, "Content-type")
content_type = web_util.get_header(response.headers, "Content-type")

if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")

if self.stage.save_filename:
llnl.util.filesystem.rename(
os.path.join(self.stage.path, basename), self.stage.save_filename
)
fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)

if not self.archive_file:
raise FailedDownloadError(self.url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)

@fetcher

@@ -1366,17 +1367,23 @@ def fetch(self):
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

tty.debug("Fetching {0}".format(self.url))

basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)

with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e

tty.debug(f"Fetching {self.url}")

with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
shutil.copyfileobj(response, f)

content_type = web_util.get_header(headers, "Content-type")
content_type = web_util.get_header(response.headers, "Content-type")

if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")

@@ -1385,7 +1392,9 @@ def fetch(self):
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)

if not self.archive_file:
raise FailedDownloadError(self.url)
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)

@fetcher

@@ -1722,9 +1731,9 @@ class NoCacheError(spack.error.FetchError):
class FailedDownloadError(spack.error.FetchError):
"""Raised when a download fails."""

def __init__(self, url, msg=""):
super().__init__("Failed to fetch file from URL: %s" % url, msg)
self.url = url
def __init__(self, *exceptions: Exception):
super().__init__("Failed to download")
self.exceptions = exceptions

class NoArchiveFileError(spack.error.FetchError):
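The constructor change above alters the exception's contract: instead of a URL/message pair, it now aggregates one underlying exception per attempted URL, which the fetch loop earlier in this file raises via `FailedDownloadError(*errors)` and callers unpack via `.exceptions`. A self-contained sketch of the new usage, with `FetchError` standing in for `spack.error.FetchError`:

```python
class FetchError(Exception):
    pass


class FailedDownloadError(FetchError):
    """Raised when a download fails; collects one error per attempted URL."""

    def __init__(self, *exceptions: Exception):
        super().__init__("Failed to download")
        self.exceptions = exceptions


# One exception per candidate URL, gathered during the fetch loop.
errors = [TimeoutError("mirror-1 timed out"), OSError("mirror-2 refused connection")]
try:
    raise FailedDownloadError(*errors)
except FailedDownloadError as e:
    for sub in e.exceptions:
        print(f"{type(sub).__name__}: {sub}")
```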
@@ -33,6 +33,7 @@
from llnl.util.tty.color import colorize

import spack.config
import spack.directory_layout
import spack.paths
import spack.projections
import spack.relocate

@@ -50,7 +51,7 @@
_projections_path = ".spack/projections.yaml"

LinkCallbackType = Callable[[str, str, "FilesystemView", Optional["spack.spec.Spec"]], None]
LinkCallbackType = Callable[[str, str, "FilesystemView", Optional[spack.spec.Spec]], None]

def view_symlink(src: str, dst: str, *args, **kwargs) -> None:

@@ -62,7 +63,7 @@ def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:

def view_copy(
src: str, dst: str, view: "FilesystemView", spec: Optional["spack.spec.Spec"] = None
src: str, dst: str, view: "FilesystemView", spec: Optional[spack.spec.Spec] = None
) -> None:
"""
Copy a file from src to dst.

@@ -158,7 +159,7 @@ class FilesystemView:
def __init__(
self,
root: str,
layout: "spack.directory_layout.DirectoryLayout",
layout: spack.directory_layout.DirectoryLayout,
*,
projections: Optional[Dict] = None,
ignore_conflicts: bool = False,

@@ -180,7 +181,10 @@ def __init__(

# Setup link function to include view
self.link_type = link_type
self.link = ft.partial(function_for_link_type(link_type), view=self)
self._link = function_for_link_type(link_type)

def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
self._link(src, dst, self, spec)

def add_specs(self, *specs, **kwargs):
"""

@@ -281,7 +285,7 @@ class YamlFilesystemView(FilesystemView):
def __init__(
self,
root: str,
layout: "spack.directory_layout.DirectoryLayout",
layout: spack.directory_layout.DirectoryLayout,
*,
projections: Optional[Dict] = None,
ignore_conflicts: bool = False,

@@ -41,8 +41,9 @@ def _populate_hooks(cls):

relative_names = list(list_modules(spack.paths.hooks_path))

# Ensure that write_install_manifest comes last
ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest")
# write_install_manifest should come after any mutation of the install prefix, and
# autopush should include the install manifest.
ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest", "autopush")

for name in relative_names:
module_name = __name__ + "." + name
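For readers unfamiliar with `ensure_last` (from `llnl.util.lang`): it stably moves the named entries to the tail of the list, in the order given, so the hooks above run last and in that relative order. A minimal reimplementation consistent with how it is used here (the real helper may differ in edge cases, e.g. missing entries):

```python
from typing import List


def ensure_last(lst: List[str], *elements: str) -> None:
    """Move each named element, if present, to the end of the list,
    preserving the order in which the elements are given."""
    for elt in elements:
        if elt in lst:
            lst.append(lst.pop(lst.index(elt)))


hooks = ["autopush", "module_file_generation", "write_install_manifest",
         "absolutify_elf_sonames"]
ensure_last(hooks, "absolutify_elf_sonames", "write_install_manifest", "autopush")
print(hooks)
# ['module_file_generation', 'absolutify_elf_sonames', 'write_install_manifest', 'autopush']
```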
@@ -440,7 +440,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
tty.debug(f"{pre} already registered in DB")
record = spack.store.STORE.db.get_record(spec)
if explicit and not record.explicit:
spack.store.STORE.db.update_explicit(spec, explicit)
spack.store.STORE.db.mark(spec, "explicit", True)

except KeyError:
# If not, register it and generate the module file.

@@ -761,12 +761,8 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
if not self.pkg.spec.concrete:
raise ValueError(f"{self.pkg.name} must have a concrete spec")

# Cache the package phase options with the explicit package,
# popping the options to ensure installation of associated
# dependencies is NOT affected by these options.

self.pkg.stop_before_phase = install_args.pop("stop_before", None) # type: ignore[attr-defined] # noqa: E501
self.pkg.last_phase = install_args.pop("stop_at", None) # type: ignore[attr-defined]
self.pkg.stop_before_phase = install_args.get("stop_before") # type: ignore[attr-defined] # noqa: E501
self.pkg.last_phase = install_args.get("stop_at") # type: ignore[attr-defined]

# Cache the package id for convenience
self.pkg_id = package_id(pkg.spec)

@@ -1076,19 +1072,17 @@ def flag_installed(self, installed: List[str]) -> None:

@property
def explicit(self) -> bool:
"""The package was explicitly requested by the user."""
return self.is_root and self.request.install_args.get("explicit", True)
return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])

@property
def is_root(self) -> bool:
"""The package was requested directly, but may or may not be explicit
in an environment."""
def is_build_request(self) -> bool:
"""The package was requested directly"""
return self.pkg == self.request.pkg

@property
def use_cache(self) -> bool:
_use_cache = True
if self.is_root:
if self.is_build_request:
return self.request.install_args.get("package_use_cache", _use_cache)
else:
return self.request.install_args.get("dependencies_use_cache", _use_cache)

@@ -1096,7 +1090,7 @@ def use_cache(self) -> bool:
@property
def cache_only(self) -> bool:
_cache_only = False
if self.is_root:
if self.is_build_request:
return self.request.install_args.get("package_cache_only", _cache_only)
else:
return self.request.install_args.get("dependencies_cache_only", _cache_only)

@@ -1122,24 +1116,17 @@ def priority(self):

class PackageInstaller:
"""
Class for managing the install process for a Spack instance based on a
bottom-up DAG approach.
Class for managing the install process for a Spack instance based on a bottom-up DAG approach.

This installer can coordinate concurrent batch and interactive, local
and distributed (on a shared file system) builds for the same Spack
instance.
This installer can coordinate concurrent batch and interactive, local and distributed (on a
shared file system) builds for the same Spack instance.
"""

def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
"""Initialize the installer.

Args:
installs (list): list of tuples, where each
tuple consists of a package (PackageBase) and its associated
install arguments (dict)
"""
def __init__(
self, packages: List["spack.package_base.PackageBase"], install_args: dict
) -> None:
# List of build requests
self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]

# Priority queue of build tasks
self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
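Put next to the command-level changes earlier in this diff, the constructor migration for callers looks roughly like this (a sketch, only meaningful with a real list of concrete specs in hand):

```python
# Before: per-package kwargs, explicitness implied by a boolean flag.
#     PackageInstaller([(s.package, install_kwargs) for s in specs]).install()

# After: one shared install_args dict; explicitness is tracked per spec
# as a collection of DAG hashes.
install_kwargs["explicit"] = [s.dag_hash() for s in specs]
PackageInstaller([s.package for s in specs], install_kwargs).install()
```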
@@ -1376,8 +1363,8 @@ def _prepare_for_install(self, task: BuildTask) -> None:
self._update_installed(task)

# Only update the explicit entry once for the explicit package
if task.explicit:
spack.store.STORE.db.update_explicit(task.pkg.spec, True)
if task.explicit and not rec.explicit:
spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)

def _cleanup_all_tasks(self) -> None:
"""Cleanup all build tasks to include releasing their locks."""

@@ -1557,17 +1544,6 @@ def _add_tasks(self, request: BuildRequest, all_deps):
tty.warn(f"Installation request refused: {str(err)}")
return

# Skip out early if the spec is not being installed locally (i.e., if
# external or upstream).
#
# External and upstream packages need to get flagged as installed to
# ensure proper status tracking for environment build.
explicit = request.install_args.get("explicit", True)
not_local = _handle_external_and_upstream(request.pkg, explicit)
if not_local:
self._flag_installed(request.pkg)
return

install_compilers = spack.config.get("config:install_missing_compilers", False)

install_deps = request.install_args.get("install_deps")

@@ -1683,10 +1659,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
if not pkg.unit_test_check():
return

# Injecting information to know if this installation request is the root one
# to determine in BuildProcessInstaller whether installation is explicit or not
install_args["is_root"] = task.is_root

try:
self._setup_install_dir(pkg)

@@ -1998,8 +1970,8 @@ def install(self) -> None:

self._init_queue()
fail_fast_err = "Terminating after first install failure"
single_explicit_spec = len(self.build_requests) == 1
failed_explicits = []
single_requested_spec = len(self.build_requests) == 1
failed_build_requests = []

install_status = InstallStatus(len(self.build_pq))

@@ -2048,11 +2020,10 @@ def install(self) -> None:
# Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since
# some package likely depends on it.
if not task.explicit:
if _handle_external_and_upstream(pkg, False):
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue
if _handle_external_and_upstream(pkg, task.explicit):
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue

# Flag a failed spec. Do not need an (install) prefix lock since
# assume using a separate (failed) prefix lock file.

@@ -2197,14 +2168,11 @@ def install(self) -> None:
if self.fail_fast:
raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)

# Terminate at this point if the single explicit spec has
# failed to install.
if single_explicit_spec and task.explicit:
raise

# Track explicit spec id and error to summarize when done
if task.explicit:
failed_explicits.append((pkg, pkg_id, str(exc)))
# Terminate when a single build request has failed, or summarize errors later.
if task.is_build_request:
if single_requested_spec:
raise
failed_build_requests.append((pkg, pkg_id, str(exc)))

finally:
# Remove the install prefix if anything went wrong during

@@ -2227,16 +2195,16 @@ def install(self) -> None:
if request.install_args.get("install_package") and request.pkg_id not in self.installed
]

if failed_explicits or missing:
for _, pkg_id, err in failed_explicits:
if failed_build_requests or missing:
for _, pkg_id, err in failed_build_requests:
tty.error(f"{pkg_id}: {err}")

for _, pkg_id in missing:
tty.error(f"{pkg_id}: Package was not installed")

if len(failed_explicits) > 0:
pkg = failed_explicits[0][0]
ids = [pkg_id for _, pkg_id, _ in failed_explicits]
if len(failed_build_requests) > 0:
pkg = failed_build_requests[0][0]
ids = [pkg_id for _, pkg_id, _ in failed_build_requests]
tty.debug(
"Associating installation failure with first failed "
f"explicit package ({ids[0]}) from {', '.join(ids)}"

@@ -2295,7 +2263,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
self.verbose = bool(install_args.get("verbose", False))

# whether installation was explicitly requested by the user
self.explicit = install_args.get("is_root", False) and install_args.get("explicit", True)
self.explicit = pkg.spec.dag_hash() in install_args.get("explicit", [])

# env before starting installation
self.unmodified_env = install_args.get("unmodified_env", {})

@@ -87,9 +87,8 @@ def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
'"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
url, ", ".join(supported_url_schemes)
)
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."
)
return Mirror(url)

@@ -734,7 +733,7 @@ def require_mirror_name(mirror_name):
"""Find a mirror by name and raise if it does not exist"""
mirror = spack.mirror.MirrorCollection().get(mirror_name)
if not mirror:
raise ValueError('no mirror named "{0}"'.format(mirror_name))
raise ValueError(f'no mirror named "{mirror_name}"')
return mirror

@@ -1876,7 +1876,10 @@ def do_install(self, **kwargs):
verbose (bool): Display verbose build output (by default,
suppresses it)
"""
PackageInstaller([(self, kwargs)]).install()
explicit = kwargs.get("explicit", True)
if isinstance(explicit, bool):
kwargs["explicit"] = {self.spec.dag_hash()} if explicit else set()
PackageInstaller([self], kwargs).install()

# TODO (post-34236): Update tests and all packages that use this as a
# TODO (post-34236): package method to the routine made available to

@@ -52,7 +52,10 @@
"target": {"type": "string"},
"alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"modules": {
"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
"anyOf": [
{"type": "null"},
{"type": "array", "items": {"type": "string"}},
]
},
"implicit_rpaths": {
"anyOf": [

@@ -2045,6 +2045,18 @@ def to_node_dict(self, hash=ht.dag_hash):
if params:
d["parameters"] = params

if params and not self.concrete:
flag_names = [
name
for name, flags in self.compiler_flags.items()
if any(x.propagate for x in flags)
]
d["propagate"] = sorted(
itertools.chain(
[v.name for v in self.variants.values() if v.propagate], flag_names
)
)

if self.external:
d["external"] = syaml.syaml_dict(
[

@@ -2217,16 +2229,10 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
spec is concrete, the full hash is added as well. If 'build' is in
the hash_type, the build hash is also added."""
node = self.to_node_dict(hash)
# All specs have at least a DAG hash
node[ht.dag_hash.name] = self.dag_hash()

# dag_hash is lazily computed -- but if we write a spec out, we want it
# to be included. This is effectively the last chance we get to compute
# it accurately.
if self.concrete:
# all specs have at least a DAG hash
node[ht.dag_hash.name] = self.dag_hash()

else:
if not self.concrete:
node["concrete"] = False

# we can also give them other hash types if we want

@@ -4999,13 +5005,17 @@ def from_node_dict(cls, node):
else:
spec.compiler = None

propagated_names = node.get("propagate", [])
for name, values in node.get("parameters", {}).items():
propagate = name in propagated_names
if name in _valid_compiler_flags:
spec.compiler_flags[name] = []
for val in values:
spec.compiler_flags.add_flag(name, val, False)
spec.compiler_flags.add_flag(name, val, propagate)
else:
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
name, values, propagate=propagate
)

spec.external_path = None
spec.external_modules = None
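Read together, these two spec.py hunks round-trip propagation through the serialized node dictionary. For a hypothetical non-concrete spec `foo +bar cflags==-O3` where both the variant and the flag propagate, the node would carry a fragment shaped roughly like this (field layout inferred from the code above, not copied from Spack's format documentation):

```python
# Hypothetical node-dict fragment for a non-concrete spec "foo +bar cflags==-O3".
node = {
    "name": "foo",
    "parameters": {"bar": True, "cflags": ["-O3"]},
    # New in this change: sorted names of variants and compiler flags whose
    # values propagate to dependencies; from_node_dict reads this list back.
    "propagate": ["bar", "cflags"],
}
```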
@@ -13,7 +13,7 @@
import stat
import sys
import tempfile
from typing import Callable, Dict, Iterable, Optional, Set
from typing import Callable, Dict, Iterable, List, Optional, Set

import llnl.string
import llnl.util.lang

@@ -40,6 +40,7 @@
import spack.resource
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.lock
import spack.util.path as sup
import spack.util.pattern as pattern

@@ -534,32 +535,29 @@ def generate_fetchers():
for fetcher in dynamic_fetchers:
yield fetcher

def print_errors(errors):
for msg in errors:
tty.debug(msg)

errors = []
errors: List[str] = []
for fetcher in generate_fetchers():
try:
fetcher.stage = self
self.fetcher = fetcher
self.fetcher.fetch()
break
except spack.fetch_strategy.NoCacheError:
except fs.NoCacheError:
# Don't bother reporting when something is not cached.
continue
except fs.FailedDownloadError as f:
errors.extend(f"{fetcher}: {e.__class__.__name__}: {e}" for e in f.exceptions)
continue
except spack.error.SpackError as e:
errors.append("Fetching from {0} failed.".format(fetcher))
tty.debug(e)
errors.append(f"{fetcher}: {e.__class__.__name__}: {e}")
continue
else:
print_errors(errors)

self.fetcher = self.default_fetcher
default_msg = "All fetchers failed for {0}".format(self.name)
raise spack.error.FetchError(err_msg or default_msg, None)

print_errors(errors)
if err_msg:
raise spack.error.FetchError(err_msg)
raise spack.error.FetchError(
f"All fetchers failed for {self.name}", "\n".join(f" {e}" for e in errors)
)

def steal_source(self, dest):
"""Copy the source_path directory in its entirety to directory dest

@@ -1188,7 +1186,7 @@ def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
# Checksum the archive and add it to the list
checksum = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
return checksum, None
except FailedDownloadError:
except fs.FailedDownloadError:
return None, f"[WORKER] Failed to fetch {url}"
except Exception as e:
return None, f"[WORKER] Something failed on {url}, skipping. ({e})"

@@ -1208,7 +1206,3 @@ class RestageError(StageError):

class VersionFetchError(StageError):
"""Raised when we can't determine a URL to fetch a package."""

# Keep this in namespace for convenience
FailedDownloadError = fs.FailedDownloadError

@@ -12,21 +12,21 @@

def test_build_task_errors(install_mockery):
with pytest.raises(ValueError, match="must be a package"):
inst.BuildTask("abc", None, False, 0, 0, 0, [])
inst.BuildTask("abc", None, False, 0, 0, 0, set())

spec = spack.spec.Spec("trivial-install-test-package")
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
with pytest.raises(ValueError, match="must have a concrete spec"):
inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, [])
inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, set())

spec.concretize()
assert spec.concrete
with pytest.raises(ValueError, match="must have a build request"):
inst.BuildTask(spec.package, None, False, 0, 0, 0, [])
inst.BuildTask(spec.package, None, False, 0, 0, 0, set())

request = inst.BuildRequest(spec.package, {})
with pytest.raises(inst.InstallError, match="Cannot create a build task"):
inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, [])
inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, set())

def test_build_task_basics(install_mockery):

@@ -36,8 +36,8 @@ def test_build_task_basics(install_mockery):

# Ensure key properties match expectations
request = inst.BuildRequest(spec.package, {})
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
assert task.explicit # package was "explicitly" requested
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())
assert not task.explicit
assert task.priority == len(task.uninstalled_deps)
assert task.key == (task.priority, task.sequence)

@@ -58,7 +58,7 @@ def test_build_task_strings(install_mockery):

# Ensure key properties match expectations
request = inst.BuildRequest(spec.package, {})
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())

# Cover __repr__
irep = task.__repr__()

@@ -3987,7 +3987,7 @@ def test_environment_depfile_makefile(depfile_flags, expected_installs, tmpdir,
)

# Do make dry run.
out = make("-n", "-f", makefile, output=str)
out = make("-n", "-f", makefile, "SPACK=spack", output=str)

specs_that_make_would_install = _parse_dry_run_package_installs(out)

@@ -4025,7 +4025,7 @@ def test_depfile_works_with_gitversions(tmpdir, mock_packages, monkeypatch):
env("depfile", "-o", makefile, "--make-disable-jobserver", "--make-prefix=prefix")

# Do a dry run on the generated depfile
out = make("-n", "-f", makefile, output=str)
out = make("-n", "-f", makefile, "SPACK=spack", output=str)

# Check that all specs are there (without duplicates)
specs_that_make_would_install = _parse_dry_run_package_installs(out)

@@ -4087,7 +4087,12 @@ def test_depfile_phony_convenience_targets(

# Phony install/* target should install picked package and all its deps
specs_that_make_would_install = _parse_dry_run_package_installs(
make("-n", picked_spec.format("install/{name}-{version}-{hash}"), output=str)
make(
"-n",
picked_spec.format("install/{name}-{version}-{hash}"),
"SPACK=spack",
output=str,
)
)

assert set(specs_that_make_would_install) == set(expected_installs)

@@ -4095,7 +4100,12 @@ def test_depfile_phony_convenience_targets(

# Phony install-deps/* target shouldn't install picked package
specs_that_make_would_install = _parse_dry_run_package_installs(
make("-n", picked_spec.format("install-deps/{name}-{version}-{hash}"), output=str)
make(
"-n",
picked_spec.format("install-deps/{name}-{version}-{hash}"),
"SPACK=spack",
output=str,
)
)

assert set(specs_that_make_would_install) == set(expected_installs) - {picked_package}

@@ -4155,7 +4165,7 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
make = Executable("make")

# Do dry run.
out = make("-n", "-C", str(tmpdir), output=str)
out = make("-n", "-C", str(tmpdir), "SPACK=spack", output=str)

# post-install: <hash> should've been executed
with ev.read("test") as test:

@@ -69,10 +69,10 @@ def test_query_arguments():

q_args = query_arguments(args)
assert "installed" in q_args
assert "known" in q_args
assert "predicate_fn" in q_args
assert "explicit" in q_args
assert q_args["installed"] == ["installed"]
assert q_args["known"] is any
assert q_args["predicate_fn"] is None
assert q_args["explicit"] is any
assert "start_date" in q_args
assert "end_date" not in q_args

@@ -18,6 +18,7 @@
import spack.config
import spack.directory_layout
import spack.environment as ev
import spack.fetch_strategy
import spack.main
import spack.package_base
import spack.paths

@@ -56,6 +56,7 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch

@@ -1817,12 +1818,7 @@ def __call__(self, *args, **kwargs):
tty.msg("curl: (22) The requested URL returned error: 404")
self.returncode = 22

def mock_curl(*args):
return MockCurl()

monkeypatch.setattr(spack.util.web, "_curl", mock_curl)

yield
monkeypatch.setattr(spack.util.web, "require_curl", MockCurl)

@pytest.fixture(scope="function")

@@ -1141,3 +1141,20 @@ def test_database_errors_with_just_a_version_key(tmp_path):

with pytest.raises(spack.database.InvalidDatabaseVersionError):
spack.database.Database(root).query_local()

@pytest.mark.regression("47101")
def test_query_with_predicate_fn(database):
all_specs = database.query()

# Name starts with a string
specs = database.query(predicate_fn=lambda x: x.spec.name.startswith("mpil"))
assert specs and all(x.name.startswith("mpil") for x in specs)
assert len(specs) < len(all_specs)

# Recipe is currently known/unknown
specs = database.query(predicate_fn=lambda x: spack.repo.PATH.exists(x.spec.name))
assert specs == all_specs

specs = database.query(predicate_fn=lambda x: not spack.repo.PATH.exists(x.spec.name))
assert not specs

@@ -7,6 +7,7 @@
import os
import shutil
import sys
from typing import List, Optional, Union

import py
import pytest

@@ -44,12 +45,10 @@ def _mock_repo(root, namespace):
repodir.ensure(spack.repo.packages_dir_name, dir=True)
yaml = repodir.join("repo.yaml")
yaml.write(
"""
f"""
repo:
namespace: {0}
""".format(
namespace
)
namespace: {namespace}
"""
)

@@ -73,53 +72,21 @@ def _true(*args, **kwargs):
return True

def create_build_task(pkg, install_args={}):
"""
Create a built task for the given (concretized) package

Args:
pkg (spack.package_base.PackageBase): concretized package associated with
the task
install_args (dict): dictionary of kwargs (or install args)

Return:
(BuildTask) A basic package build task
"""
request = inst.BuildRequest(pkg, install_args)
return inst.BuildTask(pkg, request, False, 0, 0, inst.STATUS_ADDED, [])
def create_build_task(
pkg: spack.package_base.PackageBase, install_args: Optional[dict] = None
) -> inst.BuildTask:
request = inst.BuildRequest(pkg, {} if install_args is None else install_args)
return inst.BuildTask(pkg, request, False, 0, 0, inst.STATUS_ADDED, set())

def create_installer(installer_args):
"""
Create an installer using the concretized spec for each arg

Args:
installer_args (list): the list of (spec name, kwargs) tuples

Return:
spack.installer.PackageInstaller: the associated package installer
"""
const_arg = [(spec.package, kwargs) for spec, kwargs in installer_args]
return inst.PackageInstaller(const_arg)

def installer_args(spec_names, kwargs={}):
"""Return a the installer argument with each spec paired with kwargs

Args:
spec_names (list): list of spec names
kwargs (dict or None): install arguments to apply to all of the specs

Returns:
list: list of (spec, kwargs), the installer constructor argument
"""
arg = []
for name in spec_names:
spec = spack.spec.Spec(name)
spec.concretize()
assert spec.concrete
arg.append((spec, kwargs))
return arg
def create_installer(
specs: Union[List[str], List[spack.spec.Spec]], install_args: Optional[dict] = None
) -> inst.PackageInstaller:
"""Create an installer instance for a list of specs or package names that will be
concretized."""
_specs = [spack.spec.Spec(s).concretized() if isinstance(s, str) else s for s in specs]
_install_args = {} if install_args is None else install_args
return inst.PackageInstaller([spec.package for spec in _specs], _install_args)

@pytest.mark.parametrize(

@@ -240,8 +207,7 @@ def test_try_install_from_binary_cache(install_mockery, mock_packages, monkeypat

def test_installer_repr(install_mockery):
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"])

irep = installer.__repr__()
assert irep.startswith(installer.__class__.__name__)

@@ -250,8 +216,7 @@ def test_installer_repr(install_mockery):

def test_installer_str(install_mockery):
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"])

istr = str(installer)
assert "#tasks=0" in istr

@@ -297,8 +262,7 @@ def _mock_installed(self):
builder.add_package("pkg-f")

with spack.repo.use_repositories(builder.root):
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])

installer._init_queue()

@@ -332,8 +296,7 @@ def test_check_last_phase_error(install_mockery):

def test_installer_ensure_ready_errors(install_mockery, monkeypatch):
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"])
spec = installer.build_requests[0].pkg.spec

fmt = r"cannot be installed locally.*{0}"

@@ -367,8 +330,7 @@ def test_ensure_locked_err(install_mockery, monkeypatch, tmpdir, capsys):
def _raise(lock, timeout=None):
raise RuntimeError(mock_err_msg)

const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"])
spec = installer.build_requests[0].pkg.spec

monkeypatch.setattr(ulk.Lock, "acquire_read", _raise)

@@ -383,8 +345,7 @@ def _raise(lock, timeout=None):

def test_ensure_locked_have(install_mockery, tmpdir, capsys):
"""Test _ensure_locked when already have lock."""
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"], {})
spec = installer.build_requests[0].pkg.spec
pkg_id = inst.package_id(spec)

@@ -419,8 +380,7 @@ def test_ensure_locked_have(install_mockery, tmpdir, capsys):

@pytest.mark.parametrize("lock_type,reads,writes", [("read", 1, 0), ("write", 0, 1)])
def test_ensure_locked_new_lock(install_mockery, tmpdir, lock_type, reads, writes):
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"], {})
spec = installer.build_requests[0].pkg.spec
with tmpdir.as_cwd():
ltype, lock = installer._ensure_locked(lock_type, spec.package)

@@ -438,8 +398,7 @@ def _pl(db, spec, timeout):
lock.default_timeout = 1e-9 if timeout is None else None
return lock

const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
spec = installer.build_requests[0].pkg.spec

monkeypatch.setattr(spack.database.SpecLocker, "lock", _pl)

@@ -509,7 +468,7 @@ def _conc_spec(compiler):

def test_update_tasks_for_compiler_packages_as_compiler(mock_packages, config, monkeypatch):
spec = spack.spec.Spec("trivial-install-test-package").concretized()
installer = inst.PackageInstaller([(spec.package, {})])
installer = inst.PackageInstaller([spec.package], {})

# Add a task to the queue
installer._add_init_task(spec.package, installer.build_requests[0], False, {})

@@ -693,8 +652,7 @@ def test_check_deps_status_install_failure(install_mockery):
for dep in s.traverse(root=False):
spack.store.STORE.failure_tracker.mark(dep)

const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
request = installer.build_requests[0]

with pytest.raises(inst.InstallError, match="install failure"):

@@ -702,8 +660,7 @@ def test_check_deps_status_install_failure(install_mockery):

def test_check_deps_status_write_locked(install_mockery, monkeypatch):
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
request = installer.build_requests[0]

# Ensure the lock is not acquired

@@ -714,8 +671,7 @@ def test_check_deps_status_write_locked(install_mockery, monkeypatch):

def test_check_deps_status_external(install_mockery, monkeypatch):
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
request = installer.build_requests[0]

# Mock the dependencies as external so assumed to be installed

@@ -727,8 +683,7 @@ def test_check_deps_status_external(install_mockery, monkeypatch):

def test_check_deps_status_upstream(install_mockery, monkeypatch):
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
request = installer.build_requests[0]

# Mock the known dependencies as installed upstream

@@ -746,8 +701,7 @@ def _pkgs(compiler, architecture, pkgs):
spec = spack.spec.Spec("mpi").concretized()
return [(spec.package, True)]

const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"], {})
request = installer.build_requests[0]
all_deps = defaultdict(set)

@@ -762,8 +716,7 @@ def _pkgs(compiler, architecture, pkgs):

def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
"""Test of _prepare_for_install's early return for installed task path."""
const_arg = installer_args(["dependent-install"], {})
installer = create_installer(const_arg)
installer = create_installer(["dependent-install"], {})
request = installer.build_requests[0]

install_args = {"keep_prefix": True, "keep_stage": True, "restage": False}

@@ -778,8 +731,7 @@ def test_installer_init_requests(install_mockery):
"""Test of installer initial requests."""
spec_name = "dependent-install"
with spack.config.override("config:install_missing_compilers", True):
const_arg = installer_args([spec_name], {})
installer = create_installer(const_arg)
installer = create_installer([spec_name], {})

# There is only one explicit request in this case
assert len(installer.build_requests) == 1

@@ -788,8 +740,7 @@ def test_installer_init_requests(install_mockery):

def test_install_task_use_cache(install_mockery, monkeypatch):
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"], {})
request = installer.build_requests[0]
task = create_build_task(request.pkg)

@@ -804,8 +755,7 @@ def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
def _add(_compilers):
tty.msg(config_msg)

const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
task = create_build_task(installer.build_requests[0].pkg)
task.compiler = True

@@ -824,8 +774,7 @@ def _add(_compilers):

def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
"""Test _release_lock for supposed write lock with exception."""
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"], {})

pkg_id = "test"
with tmpdir.as_cwd():

@@ -842,8 +791,7 @@ def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
@pytest.mark.parametrize("installed", [True, False])
def test_push_task_skip_processed(install_mockery, installed):
"""Test to ensure skip re-queueing a processed package."""
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
assert len(list(installer.build_tasks)) == 0

# Mark the package as installed OR failed

@@ -860,8 +808,7 @@ def test_push_task_skip_processed(install_mockery, installed):

def test_requeue_task(install_mockery, capfd):
"""Test to ensure cover _requeue_task."""
const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"])
task = create_build_task(installer.build_requests[0].pkg)

# temporarily set tty debug messages on so we can test output

@@ -891,8 +838,7 @@ def _mktask(pkg):
def _rmtask(installer, pkg_id):
raise RuntimeError("Raise an exception to test except path")

const_arg = installer_args(["pkg-a"], {})
installer = create_installer(const_arg)
installer = create_installer(["pkg-a"], {})
spec = installer.build_requests[0].pkg.spec

# Cover task removal happy path

@@ -921,8 +867,7 @@ def _chgrp(path, group, follow_symlinks=True):
monkeypatch.setattr(prefs, "get_package_group", _get_group)
monkeypatch.setattr(fs, "chgrp", _chgrp)

const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"], {})
spec = installer.build_requests[0].pkg.spec

fs.touchp(spec.prefix)

@@ -948,8 +893,7 @@ def test_cleanup_failed_err(install_mockery, tmpdir, monkeypatch, capsys):
def _raise_except(lock):
raise RuntimeError(msg)

const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
installer = create_installer(["trivial-install-test-package"], {})

monkeypatch.setattr(lk.Lock, "release_write", _raise_except)
pkg_id = "test"

@@ -965,8 +909,7 @@ def _raise_except(lock):

def test_update_failed_no_dependent_task(install_mockery):
"""Test _update_failed with missing dependent build tasks."""
const_arg = installer_args(["dependent-install"], {})
installer = create_installer(const_arg)
installer = create_installer(["dependent-install"], {})
spec = installer.build_requests[0].pkg.spec

for dep in spec.traverse(root=False):

@@ -977,8 +920,7 @@ def test_update_failed_no_dependent_task(install_mockery):

def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
"""Test install with uninstalled dependencies."""
const_arg = installer_args(["dependent-install"], {})
installer = create_installer(const_arg)
installer = create_installer(["dependent-install"], {})

# Skip the actual installation and any status updates
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _noop)
|
||||
@@ -995,8 +937,7 @@ def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
|
||||
|
||||
def test_install_failed(install_mockery, monkeypatch, capsys):
|
||||
"""Test install with failed install."""
|
||||
const_arg = installer_args(["pkg-b"], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-b"], {})
|
||||
|
||||
# Make sure the package is identified as failed
|
||||
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
|
||||
@@ -1011,8 +952,7 @@ def test_install_failed(install_mockery, monkeypatch, capsys):
|
||||
|
||||
def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
|
||||
"""Test install with failed install."""
|
||||
const_arg = installer_args(["pkg-a"], {"fail_fast": False})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-a"], {"fail_fast": False})
|
||||
|
||||
# Make sure the package is identified as failed
|
||||
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
|
||||
@@ -1036,8 +976,7 @@ def _interrupt(installer, task, install_status, **kwargs):
|
||||
else:
|
||||
installer.installed.add(task.pkg.name)
|
||||
|
||||
const_arg = installer_args([spec_name], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer([spec_name], {})
|
||||
|
||||
# Raise a KeyboardInterrupt error to trigger early termination
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _interrupt)
|
||||
@@ -1063,8 +1002,7 @@ def _install(installer, task, install_status, **kwargs):
|
||||
else:
|
||||
installer.installed.add(task.pkg.name)
|
||||
|
||||
const_arg = installer_args([spec_name], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer([spec_name], {})
|
||||
|
||||
# Raise a KeyboardInterrupt error to trigger early termination
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install)
|
||||
@@ -1090,8 +1028,7 @@ def _install(installer, task, install_status, **kwargs):
|
||||
else:
|
||||
installer.installed.add(task.pkg.name)
|
||||
|
||||
const_arg = installer_args([spec_name, "pkg-a"], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer([spec_name, "pkg-a"], {})
|
||||
|
||||
# Raise a KeyboardInterrupt error to trigger early termination
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install)
|
||||
@@ -1105,25 +1042,21 @@ def _install(installer, task, install_status, **kwargs):
|
||||
|
||||
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
|
||||
"""Test fail_fast install when an install failure is detected."""
|
||||
const_arg = installer_args(["pkg-b"], {"fail_fast": False})
|
||||
const_arg.extend(installer_args(["pkg-c"], {"fail_fast": True}))
|
||||
installer = create_installer(const_arg)
|
||||
pkg_ids = [inst.package_id(spec) for spec, _ in const_arg]
|
||||
b, c = spack.spec.Spec("pkg-b").concretized(), spack.spec.Spec("pkg-c").concretized()
|
||||
b_id, c_id = inst.package_id(b), inst.package_id(c)
|
||||
|
||||
installer = create_installer([b, c], {"fail_fast": True})
|
||||
|
||||
# Make sure all packages are identified as failed
|
||||
#
|
||||
# This will prevent b from installing, which will cause the build of a
|
||||
# to be skipped.
|
||||
# This will prevent b from installing, which will cause the build of c to be skipped.
|
||||
monkeypatch.setattr(spack.database.FailureTracker, "has_failed", _true)
|
||||
|
||||
with pytest.raises(inst.InstallError, match="after first install failure"):
|
||||
installer.install()
|
||||
|
||||
assert pkg_ids[0] in installer.failed, "Expected b to be marked as failed"
|
||||
assert pkg_ids[1] not in installer.failed, "Expected no attempt to install c"
|
||||
|
||||
out = capsys.readouterr()[1]
|
||||
assert "{0} failed to install".format(pkg_ids[0]) in out
|
||||
assert b_id in installer.failed, "Expected b to be marked as failed"
|
||||
assert c_id not in installer.failed, "Expected no attempt to install c"
|
||||
assert f"{b_id} failed to install" in capsys.readouterr().err
|
||||
|
||||
|
||||
def _test_install_fail_fast_on_except_patch(installer, **kwargs):
|
||||
@@ -1136,8 +1069,7 @@ def _test_install_fail_fast_on_except_patch(installer, **kwargs):
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
|
||||
"""Test fail_fast install when an install failure results from an error."""
|
||||
const_arg = installer_args(["pkg-a"], {"fail_fast": True})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-a"], {"fail_fast": True})
|
||||
|
||||
# Raise a non-KeyboardInterrupt exception to trigger fast failure.
|
||||
#
|
||||
@@ -1160,8 +1092,7 @@ def test_install_lock_failures(install_mockery, monkeypatch, capfd):
|
||||
def _requeued(installer, task, install_status):
|
||||
tty.msg("requeued {0}".format(task.pkg.spec.name))
|
||||
|
||||
const_arg = installer_args(["pkg-b"], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-b"], {})
|
||||
|
||||
# Ensure never acquire a lock
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
|
||||
@@ -1180,20 +1111,19 @@ def _requeued(installer, task, install_status):
|
||||
|
||||
def test_install_lock_installed_requeue(install_mockery, monkeypatch, capfd):
|
||||
"""Cover basic install handling for installed package."""
|
||||
const_arg = installer_args(["pkg-b"], {})
|
||||
b, _ = const_arg[0]
|
||||
installer = create_installer(const_arg)
|
||||
b = spack.spec.Spec("pkg-b").concretized()
|
||||
b_pkg_id = inst.package_id(b)
|
||||
installer = create_installer([b])
|
||||
|
||||
def _prep(installer, task):
|
||||
installer.installed.add(b_pkg_id)
|
||||
tty.msg("{0} is installed".format(b_pkg_id))
|
||||
tty.msg(f"{b_pkg_id} is installed")
|
||||
|
||||
# also do not allow the package to be locked again
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
|
||||
|
||||
def _requeued(installer, task, install_status):
|
||||
tty.msg("requeued {0}".format(inst.package_id(task.pkg.spec)))
|
||||
tty.msg(f"requeued {inst.package_id(task.pkg.spec)}")
|
||||
|
||||
# Flag the package as installed
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_prepare_for_install", _prep)
|
||||
@@ -1206,9 +1136,8 @@ def _requeued(installer, task, install_status):
|
||||
|
||||
assert b_pkg_id not in installer.installed
|
||||
|
||||
out = capfd.readouterr()[0]
|
||||
expected = ["is installed", "read locked", "requeued"]
|
||||
for exp, ln in zip(expected, out.split("\n")):
|
||||
for exp, ln in zip(expected, capfd.readouterr().out.splitlines()):
|
||||
assert exp in ln
|
||||
|
||||
|
||||
@@ -1236,8 +1165,7 @@ def _requeued(installer, task, install_status):
|
||||
# Ensure don't continually requeue the task
|
||||
monkeypatch.setattr(inst.PackageInstaller, "_requeue_task", _requeued)
|
||||
|
||||
const_arg = installer_args(["pkg-b"], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-b"], {})
|
||||
|
||||
with pytest.raises(inst.InstallError, match="request failed"):
|
||||
installer.install()
|
||||
@@ -1252,25 +1180,19 @@ def _requeued(installer, task, install_status):
|
||||
|
||||
def test_install_skip_patch(install_mockery, mock_fetch):
|
||||
"""Test the path skip_patch install path."""
|
||||
spec_name = "pkg-b"
|
||||
const_arg = installer_args([spec_name], {"fake": False, "skip_patch": True})
|
||||
installer = create_installer(const_arg)
|
||||
|
||||
installer = create_installer(["pkg-b"], {"fake": False, "skip_patch": True})
|
||||
installer.install()
|
||||
|
||||
spec, install_args = const_arg[0]
|
||||
assert inst.package_id(spec) in installer.installed
|
||||
assert inst.package_id(installer.build_requests[0].pkg.spec) in installer.installed
|
||||
|
||||
|
||||
def test_install_implicit(install_mockery, mock_fetch):
|
||||
"""Test the path skip_patch install path."""
|
||||
spec_name = "trivial-install-test-package"
|
||||
const_arg = installer_args([spec_name], {"fake": False})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer([spec_name], {"fake": False})
|
||||
pkg = installer.build_requests[0].pkg
|
||||
assert not create_build_task(pkg, {"explicit": False}).explicit
|
||||
assert create_build_task(pkg, {"explicit": True}).explicit
|
||||
assert create_build_task(pkg).explicit
|
||||
assert not create_build_task(pkg, {"explicit": []}).explicit
|
||||
assert create_build_task(pkg, {"explicit": [pkg.spec.dag_hash()]}).explicit
|
||||
assert not create_build_task(pkg).explicit
|
||||
|
||||
|
||||
def test_overwrite_install_backup_success(temporary_store, config, mock_packages, tmpdir):
|
||||
@@ -1279,8 +1201,7 @@ def test_overwrite_install_backup_success(temporary_store, config, mock_packages
|
||||
of the original prefix, and leave the original spec marked installed.
|
||||
"""
|
||||
# Get a build task. TODO: refactor this to avoid calling internal methods
|
||||
const_arg = installer_args(["pkg-b"], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-b"])
|
||||
installer._init_queue()
|
||||
task = installer._pop_task()
|
||||
|
||||
@@ -1340,8 +1261,7 @@ def remove(self, spec):
|
||||
self.called = True
|
||||
|
||||
# Get a build task. TODO: refactor this to avoid calling internal methods
|
||||
const_arg = installer_args(["pkg-b"], {})
|
||||
installer = create_installer(const_arg)
|
||||
installer = create_installer(["pkg-b"])
|
||||
installer._init_queue()
|
||||
task = installer._pop_task()
|
||||
|
||||
@@ -1374,22 +1294,20 @@ def test_term_status_line():
|
||||
x.clear()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"explicit_args,is_explicit",
|
||||
[({"explicit": False}, False), ({"explicit": True}, True), ({}, True)],
|
||||
)
|
||||
def test_single_external_implicit_install(install_mockery, explicit_args, is_explicit):
|
||||
@pytest.mark.parametrize("explicit", [True, False])
|
||||
def test_single_external_implicit_install(install_mockery, explicit):
|
||||
pkg = "trivial-install-test-package"
|
||||
s = spack.spec.Spec(pkg).concretized()
|
||||
s.external_path = "/usr"
|
||||
create_installer([(s, explicit_args)]).install()
|
||||
assert spack.store.STORE.db.get_record(pkg).explicit == is_explicit
|
||||
args = {"explicit": [s.dag_hash()] if explicit else []}
|
||||
create_installer([s], args).install()
|
||||
assert spack.store.STORE.db.get_record(pkg).explicit == explicit
|
||||
|
||||
|
||||
def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch):
|
||||
"""When overwrite installing something from sources, build deps should be installed."""
|
||||
s = spack.spec.Spec("dtrun3").concretized()
|
||||
create_installer([(s, {})]).install()
|
||||
create_installer([s]).install()
|
||||
|
||||
# Verify there is a pure build dep
|
||||
edge = s.edges_to_dependencies(name="dtbuild3").pop()
|
||||
@@ -1400,7 +1318,7 @@ def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch):
|
||||
build_dep.package.do_uninstall()
|
||||
|
||||
# Overwrite install the root dtrun3
|
||||
create_installer([(s, {"overwrite": [s.dag_hash()]})]).install()
|
||||
create_installer([s], {"overwrite": [s.dag_hash()]}).install()
|
||||
|
||||
# Verify that the build dep was also installed.
|
||||
assert build_dep.installed
|
||||
|
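Read together, the hunks above change the installer test helpers' calling convention: instead of building a list of (spec, install-args) pairs via installer_args, tests now pass create_installer a list of spec strings or concrete specs plus one shared install-argument dict, and explicitness is expressed as a list of DAG hashes rather than a boolean. A condensed before/after sketch, using only calls that appear in this diff:

    # old convention (removed above): per-spec (spec, install_args) pairs
    const_arg = installer_args(["pkg-b"], {"fail_fast": True})
    installer = create_installer(const_arg)

    # new convention: list of specs plus one shared install-argument dict
    installer = create_installer(["pkg-b"], {"fail_fast": True})
    s = spack.spec.Spec("pkg-b").concretized()
    installer = create_installer([s], {"explicit": [s.dag_hash()]})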
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import contextlib
-import multiprocessing
 import os
 import signal
 import sys
@@ -14,9 +13,7 @@

 import pytest

-import llnl.util.lang as lang
 import llnl.util.tty.log as log
 import llnl.util.tty.pty as pty

-from spack.util.executable import which

@@ -310,53 +307,6 @@ def no_termios():
     log.termios = saved


-@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
-@pytest.mark.skipif(not termios, reason="requires termios support")
-@pytest.mark.parametrize(
-    "test_fn,termios_on_or_off",
-    [
-        # tests with termios
-        (mock_shell_fg, lang.nullcontext),
-        (mock_shell_bg, lang.nullcontext),
-        (mock_shell_bg_fg, lang.nullcontext),
-        (mock_shell_fg_bg, lang.nullcontext),
-        (mock_shell_tstp_cont, lang.nullcontext),
-        (mock_shell_tstp_tstp_cont, lang.nullcontext),
-        (mock_shell_tstp_tstp_cont_cont, lang.nullcontext),
-        # tests without termios
-        (mock_shell_fg_no_termios, no_termios),
-        (mock_shell_bg, no_termios),
-        (mock_shell_bg_fg_no_termios, no_termios),
-        (mock_shell_fg_bg_no_termios, no_termios),
-        (mock_shell_tstp_cont, no_termios),
-        (mock_shell_tstp_tstp_cont, no_termios),
-        (mock_shell_tstp_tstp_cont_cont, no_termios),
-    ],
-)
-@pytest.mark.xfail(reason="Fails almost consistently when run with coverage and xdist")
-def test_foreground_background(test_fn, termios_on_or_off, tmpdir):
-    """Functional tests for foregrounding and backgrounding a logged process.
-
-    This ensures that things like SIGTTOU are not raised and that
-    terminal settings are corrected on foreground/background and on
-    process stop and start.
-
-    """
-    shell = pty.PseudoShell(test_fn, simple_logger)
-    log_path = str(tmpdir.join("log.txt"))
-
-    # run the shell test
-    with termios_on_or_off():
-        shell.start(log_path=log_path, debug=True)
-        exitcode = shell.join()
-
-    # processes completed successfully
-    assert exitcode == 0
-
-    # assert log was created
-    assert os.path.exists(log_path)
-
-
 def synchronized_logger(**kwargs):
     """Mock logger (minion) process for testing log.keyboard_input.

@@ -442,58 +392,3 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
         time.sleep(0.1)

     os.kill(proc.pid, signal.SIGUSR1)
-
-
-@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
-@pytest.mark.skipif(not termios, reason="requires termios support")
-@pytest.mark.parametrize(
-    "test_fn,termios_on_or_off",
-    [(mock_shell_v_v, lang.nullcontext), (mock_shell_v_v_no_termios, no_termios)],
-)
-@pytest.mark.xfail(reason="Fails almost consistently when run with coverage and xdist")
-def test_foreground_background_output(test_fn, capfd, termios_on_or_off, tmpdir):
-    """Tests hitting 'v' toggles output, and that force_echo works."""
-    if sys.version_info >= (3, 8) and sys.platform == "darwin" and termios_on_or_off == no_termios:
-        return
-
-    shell = pty.PseudoShell(test_fn, synchronized_logger)
-    log_path = str(tmpdir.join("log.txt"))
-
-    # Locks for synchronizing with minion
-    write_lock = multiprocessing.Lock()  # must be held by minion to write
-    v_lock = multiprocessing.Lock()  # held while controller is in v mode
-
-    with termios_on_or_off():
-        shell.start(write_lock=write_lock, v_lock=v_lock, debug=True, log_path=log_path)
-
-        exitcode = shell.join()
-    out, err = capfd.readouterr()
-    print(err)  # will be shown if something goes wrong
-    print(out)
-
-    # processes completed successfully
-    assert exitcode == 0
-
-    # split output into lines
-    output = out.strip().split("\n")
-
-    # also get lines of log file
-    assert os.path.exists(log_path)
-    with open(log_path) as logfile:
-        log_data = logfile.read().strip().split("\n")
-
-    # Controller and minion process coordinate with locks such that the
-    # minion writes "off" when echo is off, and "on" when echo is on. The
-    # output should contain mostly "on" lines, but may contain "off"
-    # lines if the controller is slow. The important thing to observe
-    # here is that we started seeing 'on' in the end.
-    assert ["forced output", "on"] == lang.uniq(output) or [
-        "forced output",
-        "off",
-        "on",
-    ] == lang.uniq(output)
-
-    # log should be off for a while, then on, then off
-    assert ["forced output", "off", "on", "off"] == lang.uniq(log_data) and log_data.count(
-        "off"
-    ) > 2  # ensure some "off" lines were omitted
@@ -8,6 +8,7 @@

 import pytest

+from llnl.util.filesystem import working_dir
 from llnl.util.symlink import resolve_link_target_relative_to_the_link

 import spack.mirror
@@ -15,6 +16,7 @@
 import spack.util.executable
 import spack.util.spack_json as sjson
 import spack.util.url as url_util
+from spack.cmd.common.arguments import mirror_name_or_url
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which
@@ -366,3 +368,12 @@ def test_update_connection_params(direction):
     assert m.get_access_token(direction) == "token"
     assert m.get_profile(direction) == "profile"
     assert m.get_endpoint_url(direction) == "https://example.com"
+
+
+def test_mirror_name_or_url_dir_parsing(tmp_path):
+    curdir = tmp_path / "mirror"
+    curdir.mkdir()
+
+    with working_dir(curdir):
+        assert mirror_name_or_url(".").fetch_url == curdir.as_uri()
+        assert mirror_name_or_url("..").fetch_url == tmp_path.as_uri()
@@ -11,6 +11,7 @@
 import pathlib
 import platform
 import shutil
+import urllib.error
 from collections import OrderedDict

 import pytest
@@ -21,6 +22,7 @@
 import spack.binary_distribution as bindist
 import spack.cmd.buildcache as buildcache
+import spack.error
 import spack.fetch_strategy
 import spack.package_base
 import spack.repo
 import spack.store
@@ -478,7 +480,7 @@ def test_macho_make_paths():


 @pytest.fixture()
-def mock_download():
+def mock_download(monkeypatch):
     """Mock a failing download strategy."""

     class FailedDownloadStrategy(spack.fetch_strategy.FetchStrategy):
@@ -487,19 +489,14 @@ def mirror_id(self):

         def fetch(self):
             raise spack.fetch_strategy.FailedDownloadError(
-                "<non-existent URL>", "This FetchStrategy always fails"
+                urllib.error.URLError("This FetchStrategy always fails")
             )

-    fetcher = FailedDownloadStrategy()
-
     @property
     def fake_fn(self):
-        return fetcher
+        return FailedDownloadStrategy()

-    orig_fn = spack.package_base.PackageBase.fetcher
-    spack.package_base.PackageBase.fetcher = fake_fn
-    yield
-    spack.package_base.PackageBase.fetcher = orig_fn
+    monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", fake_fn)


 @pytest.mark.parametrize(
@@ -13,10 +13,13 @@
 import collections.abc
 import gzip
 import inspect
+import io
 import json
 import os
+import pickle

 import pytest
+import ruamel.yaml

 import spack.hash_types as ht
 import spack.paths
@@ -505,3 +508,73 @@ def test_load_json_specfiles(specfile, expected_hash, reader_cls):
     # JSON or YAML file, not a list
     for edge in s2.traverse_edges():
         assert isinstance(edge.virtuals, tuple), edge
+
+
+def test_anchorify_1():
+    """Test that anchorify replaces duplicate values with references to a single instance, and
+    that that results in anchors in the output YAML."""
+    before = {"a": [1, 2, 3], "b": [1, 2, 3]}
+    after = {"a": [1, 2, 3], "b": [1, 2, 3]}
+    syaml.anchorify(after)
+    assert before == after
+    assert after["a"] is after["b"]
+
+    # Check if anchors are used
+    out = io.StringIO()
+    ruamel.yaml.YAML().dump(after, out)
+    assert (
+        out.getvalue()
+        == """\
+a: &id001
+- 1
+- 2
+- 3
+b: *id001
+"""
+    )
+
+
+def test_anchorify_2():
+    before = {"a": {"b": {"c": True}}, "d": {"b": {"c": True}}, "e": {"c": True}}
+    after = {"a": {"b": {"c": True}}, "d": {"b": {"c": True}}, "e": {"c": True}}
+    syaml.anchorify(after)
+    assert before == after
+    assert after["a"] is after["d"]
+    assert after["a"]["b"] is after["e"]
+
+    # Check if anchors are used
+    out = io.StringIO()
+    ruamel.yaml.YAML().dump(after, out)
+    assert (
+        out.getvalue()
+        == """\
+a: &id001
+  b: &id002
+    c: true
+d: *id001
+e: *id002
+"""
+    )
+
+
+@pytest.mark.parametrize(
+    "spec_str",
+    [
+        "hdf5 ++mpi",
+        "hdf5 cflags==-g",
+        "hdf5 foo==bar",
+        "hdf5~~mpi++shared",
+        "hdf5 cflags==-g foo==bar cxxflags==-O3",
+        "hdf5 cflags=-g foo==bar cxxflags==-O3",
+    ],
+)
+def test_pickle_roundtrip_for_abstract_specs(spec_str):
+    """Tests that abstract specs correctly round trip when pickled.
+
+    This test compares both spec objects and their string representation, due to some
+    inconsistencies in how `Spec.__eq__` is implemented.
+    """
+    s = spack.spec.Spec(spec_str)
+    t = pickle.loads(pickle.dumps(s))
+    assert s == t
+    assert str(s) == str(t)
@@ -18,6 +18,7 @@
 from llnl.util.symlink import readlink

+import spack.error
 import spack.fetch_strategy
 import spack.paths
 import spack.stage
 import spack.util.executable
@@ -323,17 +324,11 @@ def _mock():
     return _mock


-@pytest.fixture
-def failing_fetch_strategy():
-    """Returns a fetch strategy that fails."""
-
-    class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):
-        def fetch(self):
-            raise spack.fetch_strategy.FailedDownloadError(
-                "<non-existent URL>", "This implementation of FetchStrategy always fails"
-            )
-
-    return FailingFetchStrategy()
+class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):
+    def fetch(self):
+        raise spack.fetch_strategy.FailedDownloadError(
+            "<non-existent URL>", "This implementation of FetchStrategy always fails"
+        )


 @pytest.fixture
@@ -511,8 +506,8 @@ def test_no_search_if_default_succeeds(self, mock_stage_archive, failing_search_
         stage.fetch()
         check_destroy(stage, self.stage_name)

-    def test_no_search_mirror_only(self, failing_fetch_strategy, failing_search_fn):
-        stage = Stage(failing_fetch_strategy, name=self.stage_name, search_fn=failing_search_fn)
+    def test_no_search_mirror_only(self, failing_search_fn):
+        stage = Stage(FailingFetchStrategy(), name=self.stage_name, search_fn=failing_search_fn)
         with stage:
             try:
                 stage.fetch(mirror_only=True)
@@ -527,8 +522,8 @@ def test_no_search_mirror_only(self, failing_fetch_strategy, failing_search_fn):
             (None, "All fetchers failed"),
         ],
     )
-    def test_search_if_default_fails(self, failing_fetch_strategy, search_fn, err_msg, expected):
-        stage = Stage(failing_fetch_strategy, name=self.stage_name, search_fn=search_fn)
+    def test_search_if_default_fails(self, search_fn, err_msg, expected):
+        stage = Stage(FailingFetchStrategy(), name=self.stage_name, search_fn=search_fn)

        with stage:
            with pytest.raises(spack.error.FetchError, match=expected):
@@ -4,8 +4,10 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import collections
+import filecmp
 import os
 import sys
+import urllib.error

 import pytest
@@ -24,6 +26,14 @@
 from spack.util.executable import which


+@pytest.fixture
+def missing_curl(monkeypatch):
+    def require_curl():
+        raise spack.error.FetchError("curl is required but not found")
+
+    monkeypatch.setattr(web_util, "require_curl", require_curl)
+
+
 @pytest.fixture(params=list(crypto.hashes.keys()))
 def checksum_type(request):
     return request.param
@@ -66,66 +76,62 @@ def fn_urls(v):
     return factory


-@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_urlfetchstrategy_sans_url(_fetch_method):
+def test_urlfetchstrategy_sans_url():
     """Ensure constructor with no URL fails."""
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-        with pytest.raises(ValueError):
-            with fs.URLFetchStrategy(None):
-                pass
+    with pytest.raises(ValueError):
+        fs.URLFetchStrategy(None)


-@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_urlfetchstrategy_bad_url(tmpdir, _fetch_method):
+@pytest.mark.parametrize("method", ["curl", "urllib"])
+def test_urlfetchstrategy_bad_url(tmp_path, mutable_config, method):
     """Ensure fetch with bad URL fails as expected."""
-    testpath = str(tmpdir)
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-        with pytest.raises(fs.FailedDownloadError):
-            fetcher = fs.URLFetchStrategy(url="file:///does-not-exist")
-            assert fetcher is not None
-
-            with Stage(fetcher, path=testpath) as stage:
-                assert stage is not None
-                assert fetcher.archive_file is None
-                fetcher.fetch()
+    mutable_config.set("config:url_fetch_method", method)
+    fetcher = fs.URLFetchStrategy(url=(tmp_path / "does-not-exist").as_uri())
+
+    with Stage(fetcher, path=str(tmp_path / "stage")):
+        with pytest.raises(fs.FailedDownloadError) as exc:
+            fetcher.fetch()
+
+    assert len(exc.value.exceptions) == 1
+    exception = exc.value.exceptions[0]
+
+    if method == "curl":
+        assert isinstance(exception, spack.error.FetchError)
+        assert "Curl failed with error 37" in str(exception)  # FILE_COULDNT_READ_FILE
+    elif method == "urllib":
+        assert isinstance(exception, urllib.error.URLError)
+        assert isinstance(exception.reason, FileNotFoundError)


-def test_fetch_options(tmpdir, mock_archive):
-    testpath = str(tmpdir)
+def test_fetch_options(tmp_path, mock_archive):
     with spack.config.override("config:url_fetch_method", "curl"):
         fetcher = fs.URLFetchStrategy(
             url=mock_archive.url, fetch_options={"cookie": "True", "timeout": 10}
         )
-        assert fetcher is not None
-
-        with Stage(fetcher, path=testpath) as stage:
-            assert stage is not None
+        with Stage(fetcher, path=str(tmp_path)):
             assert fetcher.archive_file is None
             fetcher.fetch()
+            assert filecmp.cmp(fetcher.archive_file, mock_archive.archive_file)


 @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_archive_file_errors(tmpdir, mock_archive, _fetch_method):
+def test_archive_file_errors(tmp_path, mutable_config, mock_archive, _fetch_method):
     """Ensure FetchStrategy commands may only be used as intended"""
-    testpath = str(tmpdir)
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-        fetcher = fs.URLFetchStrategy(url=mock_archive.url)
-        assert fetcher is not None
-        with pytest.raises(fs.FailedDownloadError):
-            with Stage(fetcher, path=testpath) as stage:
-                assert stage is not None
-                assert fetcher.archive_file is None
-                with pytest.raises(fs.NoArchiveFileError):
-                    fetcher.archive(testpath)
-                with pytest.raises(fs.NoArchiveFileError):
-                    fetcher.expand()
-                with pytest.raises(fs.NoArchiveFileError):
-                    fetcher.reset()
-                stage.fetch()
-                with pytest.raises(fs.NoDigestError):
-                    fetcher.check()
-                assert fetcher.archive_file is not None
-                fetcher._fetch_from_url("file:///does-not-exist")
+    mutable_config.set("config:url_fetch_method", _fetch_method)
+    fetcher = fs.URLFetchStrategy(url=mock_archive.url)
+    with Stage(fetcher, path=str(tmp_path)) as stage:
+        assert fetcher.archive_file is None
+        with pytest.raises(fs.NoArchiveFileError):
+            fetcher.archive(str(tmp_path))
+        with pytest.raises(fs.NoArchiveFileError):
+            fetcher.expand()
+        with pytest.raises(fs.NoArchiveFileError):
+            fetcher.reset()
+        stage.fetch()
+        with pytest.raises(fs.NoDigestError):
+            fetcher.check()
+        assert filecmp.cmp(fetcher.archive_file, mock_archive.archive_file)


 files = [(".tar.gz", "z"), (".tgz", "z")]
@@ -275,16 +281,15 @@ def is_true():


 @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-def test_url_extra_fetch(tmpdir, mock_archive, _fetch_method):
+def test_url_extra_fetch(tmp_path, mutable_config, mock_archive, _fetch_method):
     """Ensure a fetch after downloading is effectively a no-op."""
-    with spack.config.override("config:url_fetch_method", _fetch_method):
-        testpath = str(tmpdir)
-        fetcher = fs.URLFetchStrategy(mock_archive.url)
-        with Stage(fetcher, path=testpath) as stage:
-            assert fetcher.archive_file is None
-            stage.fetch()
-            assert fetcher.archive_file is not None
-            fetcher.fetch()
+    mutable_config.set("config:url_fetch_method", _fetch_method)
+    fetcher = fs.URLFetchStrategy(mock_archive.url)
+    with Stage(fetcher, path=str(tmp_path)) as stage:
+        assert fetcher.archive_file is None
+        stage.fetch()
+        assert filecmp.cmp(fetcher.archive_file, mock_archive.archive_file)
+        fetcher.fetch()


 @pytest.mark.parametrize(
@@ -320,49 +325,25 @@ def test_candidate_urls(pkg_factory, url, urls, version, expected, _fetch_method


 @pytest.mark.regression("19673")
-def test_missing_curl(tmpdir, monkeypatch):
+def test_missing_curl(tmp_path, missing_curl, mutable_config, monkeypatch):
     """Ensure a fetch involving missing curl package reports the error."""
-    err_fmt = "No such command {0}"
-
-    def _which(*args, **kwargs):
-        err_msg = err_fmt.format(args[0])
-        raise spack.util.executable.CommandNotFoundError(err_msg)
-
-    # Patching the 'which' symbol imported by fetch_strategy needed due
-    # to 'from spack.util.executable import which' in this module.
-    monkeypatch.setattr(fs, "which", _which)
-
-    testpath = str(tmpdir)
-    url = "http://github.com/spack/spack"
-    with spack.config.override("config:url_fetch_method", "curl"):
-        fetcher = fs.URLFetchStrategy(url=url)
-        assert fetcher is not None
-        with pytest.raises(TypeError, match="object is not callable"):
-            with Stage(fetcher, path=testpath) as stage:
-                out = stage.fetch()
-            assert err_fmt.format("curl") in out
+    mutable_config.set("config:url_fetch_method", "curl")
+    fetcher = fs.URLFetchStrategy(url="http://example.com/file.tar.gz")
+    with pytest.raises(spack.error.FetchError, match="curl is required but not found"):
+        with Stage(fetcher, path=str(tmp_path)) as stage:
+            stage.fetch()


-def test_url_fetch_text_without_url(tmpdir):
+def test_url_fetch_text_without_url():
     with pytest.raises(spack.error.FetchError, match="URL is required"):
         web_util.fetch_url_text(None)


-def test_url_fetch_text_curl_failures(tmpdir, monkeypatch):
+def test_url_fetch_text_curl_failures(mutable_config, missing_curl, monkeypatch):
     """Check fetch_url_text if URL's curl is missing."""
-    err_fmt = "No such command {0}"
-
-    def _which(*args, **kwargs):
-        err_msg = err_fmt.format(args[0])
-        raise spack.util.executable.CommandNotFoundError(err_msg)
-
-    # Patching the 'which' symbol imported by spack.util.web needed due
-    # to 'from spack.util.executable import which' in this module.
-    monkeypatch.setattr(spack.util.web, "which", _which)
-
-    with spack.config.override("config:url_fetch_method", "curl"):
-        with pytest.raises(spack.error.FetchError, match="Missing required curl"):
-            web_util.fetch_url_text("https://github.com/")
+    mutable_config.set("config:url_fetch_method", "curl")
+    with pytest.raises(spack.error.FetchError, match="curl is required but not found"):
+        web_util.fetch_url_text("https://example.com/")


 def test_url_check_curl_errors():
@@ -376,24 +357,14 @@ def test_url_check_curl_errors():
         web_util.check_curl_code(60)


-def test_url_missing_curl(tmpdir, monkeypatch):
+def test_url_missing_curl(mutable_config, missing_curl, monkeypatch):
     """Check url_exists failures if URL's curl is missing."""
-    err_fmt = "No such command {0}"
-
-    def _which(*args, **kwargs):
-        err_msg = err_fmt.format(args[0])
-        raise spack.util.executable.CommandNotFoundError(err_msg)
-
-    # Patching the 'which' symbol imported by spack.util.web needed due
-    # to 'from spack.util.executable import which' in this module.
-    monkeypatch.setattr(spack.util.web, "which", _which)
-
-    with spack.config.override("config:url_fetch_method", "curl"):
-        with pytest.raises(spack.error.FetchError, match="Missing required curl"):
-            web_util.url_exists("https://github.com/")
+    mutable_config.set("config:url_fetch_method", "curl")
+    with pytest.raises(spack.error.FetchError, match="curl is required but not found"):
+        web_util.url_exists("https://example.com/")


-def test_url_fetch_text_urllib_bad_returncode(tmpdir, monkeypatch):
+def test_url_fetch_text_urllib_bad_returncode(mutable_config, monkeypatch):
     class response:
         def getcode(self):
             return 404
@@ -401,19 +372,19 @@ def getcode(self):
     def _read_from_url(*args, **kwargs):
         return None, None, response()

-    monkeypatch.setattr(spack.util.web, "read_from_url", _read_from_url)
+    monkeypatch.setattr(web_util, "read_from_url", _read_from_url)
+    mutable_config.set("config:url_fetch_method", "urllib")

-    with spack.config.override("config:url_fetch_method", "urllib"):
-        with pytest.raises(spack.error.FetchError, match="failed with error code"):
-            web_util.fetch_url_text("https://github.com/")
+    with pytest.raises(spack.error.FetchError, match="failed with error code"):
+        web_util.fetch_url_text("https://example.com/")


-def test_url_fetch_text_urllib_web_error(tmpdir, monkeypatch):
+def test_url_fetch_text_urllib_web_error(mutable_config, monkeypatch):
     def _raise_web_error(*args, **kwargs):
         raise web_util.SpackWebError("bad url")

-    monkeypatch.setattr(spack.util.web, "read_from_url", _raise_web_error)
+    monkeypatch.setattr(web_util, "read_from_url", _raise_web_error)
+    mutable_config.set("config:url_fetch_method", "urllib")

-    with spack.config.override("config:url_fetch_method", "urllib"):
-        with pytest.raises(spack.error.FetchError, match="fetch failed to verify"):
-            web_util.fetch_url_text("https://github.com/")
+    with pytest.raises(spack.error.FetchError, match="fetch failed to verify"):
+        web_util.fetch_url_text("https://example.com/")
@@ -432,7 +432,7 @@ def test_ssl_curl_cert_file(cert_exists, tmpdir, ssl_scrubbed_env, mutable_config
     if cert_exists:
         open(mock_cert, "w").close()
         assert os.path.isfile(mock_cert)
-    curl = spack.util.web._curl()
+    curl = spack.util.web.require_curl()

     # arbitrary call to query the run env
     dump_env = {}
@@ -9,20 +9,30 @@
 import shlex
 import sys
 from subprocess import PIPE, run
-from typing import Optional
+from typing import Dict, Optional

 import spack.spec
 import spack.util.elf

+#: Pattern to distinguish glibc from other libc implementations
+GLIBC_PATTERN = r"\b(?:Free Software Foundation|Roland McGrath|Ulrich Drepper)\b"
+
+
+def _env() -> Dict[str, str]:
+    """Currently only set LC_ALL=C without clearing further environment variables"""
+    return {**os.environ, "LC_ALL": "C"}
+
+
 def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]:
     try:
-        result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False)
+        result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False, env=_env())
         stdout = result.stdout.decode("utf-8")
     except Exception:
         return None

-    if not re.search(r"\bFree Software Foundation\b", stdout):
+    # The string "Free Software Foundation" is sometimes translated and not detected, but the
+    # names of the authors are typically present.
+    if not re.search(GLIBC_PATTERN, stdout):
         return None

     version_str = re.match(r".+\(.+\) (.+)", stdout)
@@ -58,7 +68,9 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
     # Now try to figure out if glibc or musl, which is the only ones we support.
     # In recent glibc we can simply execute the dynamic loader. In musl that's always the case.
     try:
-        result = run([dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False)
+        result = run(
+            [dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False, env=_env()
+        )
         stdout = result.stdout.decode("utf-8")
         stderr = result.stderr.decode("utf-8")
     except Exception:
@@ -75,7 +87,7 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
             return spec
         except Exception:
             return None
-    elif re.search(r"\bFree Software Foundation\b", stdout):
+    elif re.search(GLIBC_PATTERN, stdout):
         # output is like "ld.so (...) stable release version 2.33."
         match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
         if not match:
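The env=_env() change matters because glibc localizes its version banner, so matching only "Free Software Foundation" can fail on non-English systems; forcing LC_ALL=C and also accepting the (untranslated) author names makes detection robust. A minimal standalone sketch of the pattern the hunks above implement (the regexes are taken from the diff; running it assumes a glibc system with ldd on PATH):

    import os
    import re
    from subprocess import PIPE, run

    GLIBC_PATTERN = r"\b(?:Free Software Foundation|Roland McGrath|Ulrich Drepper)\b"

    env = {**os.environ, "LC_ALL": "C"}  # force an untranslated banner
    result = run(["ldd", "--version"], stdout=PIPE, stderr=PIPE, check=False, env=env)
    banner = result.stdout.decode("utf-8")

    if re.search(GLIBC_PATTERN, banner):
        match = re.match(r".+\(.+\) (.+)", banner)  # e.g. "ldd (GNU libc) 2.31"
        print("glibc", match.group(1) if match else "unknown")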
@@ -20,7 +20,7 @@
 import functools
 import io
 import re
-from typing import IO, List, Optional
+from typing import IO, Any, Callable, Dict, List, Optional, Union

 import ruamel.yaml
 from ruamel.yaml import comments, constructor, emitter, error, representer
@@ -493,6 +493,29 @@ def name_mark(name):
     return error.StringMark(name, None, None, None, None, None)


+def anchorify(data: Union[dict, list], identifier: Callable[[Any], str] = repr) -> None:
+    """Replace identical dict/list branches in tree with references to earlier instances. The
+    YAML serializer generates anchors for them, resulting in small yaml files."""
+    anchors: Dict[str, Union[dict, list]] = {}
+    queue: List[Union[dict, list]] = [data]
+
+    while queue:
+        item = queue.pop()
+
+        for key, value in item.items() if isinstance(item, dict) else enumerate(item):
+            if not isinstance(value, (dict, list)):
+                continue
+
+            id = identifier(value)
+            anchor = anchors.get(id)
+
+            if anchor is None:
+                anchors[id] = value
+                queue.append(value)
+            else:
+                item[key] = anchor  # replace with reference
+
+
 class SpackYAMLError(spack.error.SpackError):
     """Raised when there are issues with YAML parsing."""
@@ -28,10 +28,11 @@

 import spack.config
 import spack.error
+import spack.util.executable
 import spack.util.path
 import spack.util.url as url_util

-from .executable import CommandNotFoundError, Executable, which
+from .executable import CommandNotFoundError, Executable
 from .gcs import GCSBlob, GCSBucket, GCSHandler
 from .s3 import UrllibS3Handler, get_s3_session
@@ -198,7 +199,7 @@ def read_from_url(url, accept_content_type=None):
     try:
         response = urlopen(request)
     except (TimeoutError, URLError) as e:
-        raise SpackWebError(f"Download of {url.geturl()} failed: {e}")
+        raise SpackWebError(f"Download of {url.geturl()} failed: {e.__class__.__name__}: {e}")

     if accept_content_type:
         try:
@@ -307,45 +308,44 @@ def base_curl_fetch_args(url, timeout=0):
     return curl_args


-def check_curl_code(returncode):
+def check_curl_code(returncode: int) -> None:
     """Check standard return code failures for provided arguments.

     Arguments:
-        returncode (int): curl return code
+        returncode: curl return code

     Raises FetchError if the curl returncode indicates failure
     """
-    if returncode != 0:
-        if returncode == 22:
-            # This is a 404. Curl will print the error.
-            raise spack.error.FetchError("URL was not found!")
+    if returncode == 0:
+        return
+    elif returncode == 22:
+        # This is a 404. Curl will print the error.
+        raise spack.error.FetchError("URL was not found!")
+    elif returncode == 60:
+        # This is a certificate error. Suggest spack -k
+        raise spack.error.FetchError(
+            "Curl was unable to fetch due to invalid certificate. "
+            "This is either an attack, or your cluster's SSL "
+            "configuration is bad. If you believe your SSL "
+            "configuration is bad, you can try running spack -k, "
+            "which will not check SSL certificates."
+            "Use this at your own risk."
+        )

-        if returncode == 60:
-            # This is a certificate error. Suggest spack -k
-            raise spack.error.FetchError(
-                "Curl was unable to fetch due to invalid certificate. "
-                "This is either an attack, or your cluster's SSL "
-                "configuration is bad. If you believe your SSL "
-                "configuration is bad, you can try running spack -k, "
-                "which will not check SSL certificates."
-                "Use this at your own risk."
-            )
-
-        raise spack.error.FetchError("Curl failed with error {0}".format(returncode))
+    raise spack.error.FetchError(f"Curl failed with error {returncode}")


-def _curl(curl=None):
-    if not curl:
-        try:
-            curl = which("curl", required=True)
-        except CommandNotFoundError as exc:
-            tty.error(str(exc))
-            raise spack.error.FetchError("Missing required curl fetch method")
+def require_curl() -> Executable:
+    try:
+        path = spack.util.executable.which_string("curl", required=True)
+    except CommandNotFoundError as e:
+        raise spack.error.FetchError(f"curl is required but not found: {e}") from e
+    curl = spack.util.executable.Executable(path)
     set_curl_env_for_ssl_certs(curl)
     return curl


-def fetch_url_text(url, curl=None, dest_dir="."):
+def fetch_url_text(url, curl: Optional[Executable] = None, dest_dir="."):
     """Retrieves text-only URL content using the configured fetch method.
     It determines the fetch method from:

@@ -379,10 +379,7 @@ def fetch_url_text(url, curl=None, dest_dir="."):
     fetch_method = spack.config.get("config:url_fetch_method")
     tty.debug("Using '{0}' to fetch {1} into {2}".format(fetch_method, url, path))
     if fetch_method == "curl":
-        curl_exe = _curl(curl)
-        if not curl_exe:
-            raise spack.error.FetchError("Missing required fetch method (curl)")
-
+        curl_exe = curl or require_curl()
         curl_args = ["-O"]
         curl_args.extend(base_curl_fetch_args(url))

@@ -439,9 +436,7 @@ def url_exists(url, curl=None):
         "config:url_fetch_method", "urllib"
     ) == "curl" and url_result.scheme not in ("gs", "s3")
     if use_curl:
-        curl_exe = _curl(curl)
-        if not curl_exe:
-            return False
+        curl_exe = curl or require_curl()

         # Telling curl to fetch the first byte (-r 0-0) is supposed to be
         # portable.
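A sketch of the resulting calling convention, based on the hunks above and on Executable's standard fail_on_error/returncode behavior (the URL is illustrative): require_curl() either returns a curl Executable with the SSL-certificate environment already applied or raises FetchError, so callers can drop their None checks, and check_curl_code() maps curl's exit status to FetchError:

    import spack.error
    import spack.util.web as web_util

    try:
        curl = web_util.require_curl()  # raises FetchError when curl is missing
        curl("-f", "-O", "https://example.com/file.tar.gz", fail_on_error=False)
        web_util.check_curl_code(curl.returncode)  # e.g. 22 -> "URL was not found!"
    except spack.error.FetchError as e:
        print(e)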
@@ -12,6 +12,7 @@
 import io
 import itertools
 import re
+from typing import List, Union

 import llnl.util.lang as lang
 import llnl.util.tty.color
@@ -255,19 +256,21 @@ def __init__(self, name, value, propagate=False):
         self.value = value

     @staticmethod
-    def from_node_dict(name, value):
+    def from_node_dict(
+        name: str, value: Union[str, List[str]], *, propagate: bool = False
+    ) -> "AbstractVariant":
         """Reconstruct a variant from a node dict."""
         if isinstance(value, list):
             # read multi-value variants in and be faithful to the YAML
-            mvar = MultiValuedVariant(name, ())
+            mvar = MultiValuedVariant(name, (), propagate=propagate)
             mvar._value = tuple(value)
             mvar._original_value = mvar._value
             return mvar

         elif str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
-            return BoolValuedVariant(name, value)
+            return BoolValuedVariant(name, value, propagate=propagate)

-        return SingleValuedVariant(name, value)
+        return SingleValuedVariant(name, value, propagate=propagate)

     def yaml_entry(self):
         """Returns a key, value tuple suitable to be an entry in a yaml dict.
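Based on the branches above, from_node_dict now forwards a keyword-only propagate flag to all three variant types. A short sketch of the mapping (the variant names are illustrative, not from the diff):

    from spack.variant import AbstractVariant

    mv = AbstractVariant.from_node_dict("foo", ["bar", "baz"])             # list -> MultiValuedVariant
    bv = AbstractVariant.from_node_dict("shared", "True", propagate=True)  # "true"/"false" -> BoolValuedVariant
    sv = AbstractVariant.from_node_dict("build_type", "Release")           # anything else -> SingleValuedVariant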
@@ -1,4 +1,4 @@
-SPACK ?= spack -c config:install_status:false
+SPACK ?= {{ spack_script }} -c config:install_status:false
 SPACK_INSTALL_FLAGS ?=

 # This variable can be used to add post install hooks
@@ -142,7 +142,7 @@ class Quux
             "-o",
             "libquux.dylib",
             "-install_name",
-            "@rpath/libcorge.dylib",
+            "@rpath/libquux.dylib",
             "quux.cc.o",
             "-Wl,-rpath,%s" % prefix.lib64,
             "-Wl,-rpath,%s" % spec["garply"].prefix.lib64,
@@ -75,9 +75,18 @@ class Clingo(CMakePackage):
     patch("clingo_msc_1938_native_handle.patch", when="@:5.7.0 %msvc@19.38:")

     def patch(self):
+        # In bootstrap/prototypes/*.json we don't want to have specs that work for any python
+        # version, so this conditional patch lives here instead of being its own directive.
+        if self.spec.satisfies("@spack,5.3:5.4 ^python@3.9:"):
+            filter_file(
+                "if (!PyEval_ThreadsInitialized()) { PyEval_InitThreads(); }",
+                "",
+                "libpyclingo/pyclingo.cc",
+                string=True,
+            )
         # Doxygen is optional but can't be disabled with a -D, so patch
         # it out if it's really supposed to be disabled
-        if "+docs" not in self.spec:
+        if self.spec.satisfies("~docs"):
             filter_file(
                 r"find_package\(Doxygen\)",
                 'message("Doxygen disabled for Spack build.")',
@@ -123,7 +123,7 @@ class Ffmpeg(AutotoolsPackage):
     # Solve build failure against vulkan headers 1.3.279
     patch(
         "https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff_plain/fef22c87ada4",
-        sha256="856bdc2b6e2a7066cf683a235193b9025d4d73dd7686eda2fbcf83e7e65f8bf9",
+        sha256="5726e8e999e3fc7a5ae4c4c846c9151246e5846c54dc3b8ff8326ee31c59631a",
         when="@6.1.1",
     )

@@ -150,12 +150,12 @@ class Ffmpeg(AutotoolsPackage):
     # fix incompatibility with texinfo@7, especially @7.1:
     patch(
         "https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff_plain/f01fdedb69e4accb1d1555106d8f682ff1f1ddc7",
-        sha256="416751f41cfbf086c28b4bbf01ace4c08e5651e59911dca6240292bb1b5c6b53",
+        sha256="673813d13f5c37b75ff5bcb56790ccd6b16962fdb9bddcbbeeead979d47d31b3",
         when="@6.0",
     )
     patch(
         "https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff_plain/f01fdedb69e4accb1d1555106d8f682ff1f1ddc7",
-        sha256="416751f41cfbf086c28b4bbf01ace4c08e5651e59911dca6240292bb1b5c6b53",
+        sha256="673813d13f5c37b75ff5bcb56790ccd6b16962fdb9bddcbbeeead979d47d31b3",
         when="@5:5.1.3",
     )
@@ -26,7 +26,7 @@ paths:
       extra_attributes:
         compilers:
           c: ".*/bin/gcc"
-          cxx: ".*/bin/g++"
+          cxx: ".*/bin/g\\+\\+"

 # Mock a version < 7 of GCC that requires -dumpversion and
 # errors with -dumpfullversion
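The escaping fix in these detection-test YAML files is needed because + is a regex quantifier: the unescaped pattern ".*/bin/g++" is not a literal match for g++, and Python's re module rejects it outright as a "multiple repeat". The doubled backslashes are just YAML double-quoted-string escaping for a single regex backslash. A quick check:

    import re

    assert re.match(r".*/bin/g\+\+", "/usr/bin/g++")  # escaped: matches the literal "g++"
    try:
        re.compile(".*/bin/g++")
    except re.error as e:
        print("invalid pattern:", e)  # multiple repeat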
@@ -24,7 +24,7 @@ paths:
       extra_attributes:
         compilers:
           c: ".*/bin/nvc"
-          cxx: ".*/bin/nvc++"
+          cxx: ".*/bin/nvc\\+\\+"
          fortran: ".*/bin/nvfortran"
 - layout:
   - executables:
@@ -51,7 +51,7 @@ paths:
       extra_attributes:
         compilers:
           c: ".*/bin/nvc"
-          cxx: ".*/bin/nvc++"
+          cxx: ".*/bin/nvc\\+\\+"
           fortran: ".*/bin/nvfortran"
 - layout:
   - executables:
@@ -78,5 +78,5 @@ paths:
       extra_attributes:
         compilers:
           c: ".*/bin/nvc"
-          cxx: ".*/bin/nvc++"
+          cxx: ".*/bin/nvc\\+\\+"
           fortran: ".*/bin/nvfortran"
|
@@ -47,7 +47,7 @@ class Python(Package):
|
||||
url = "https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz"
|
||||
list_url = "https://www.python.org/ftp/python/"
|
||||
list_depth = 1
|
||||
tags = ["windows", "build-tools"]
|
||||
tags = ["windows"]
|
||||
|
||||
maintainers("skosukhin", "scheibelp")
|
||||
|
||||
|