Compare commits


1 Commit

Author: Satish Balay
SHA1: 84a398349b
Message: petsc, py-petsc4py add v3.21.0
Date: 2024-03-30 23:14:23 -05:00
624 changed files with 5049 additions and 12219 deletions

View File

@@ -1,4 +0,0 @@
{
"image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
"postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}

View File

@@ -1,20 +0,0 @@
#!/bin/bash
# Load spack environment at terminal startup
cat <<EOF >> /root/.bashrc
. /workspaces/spack/share/spack/setup-env.sh
EOF
# Load spack environment in this script
. /workspaces/spack/share/spack/setup-env.sh
# Ensure generic targets for maximum matching with buildcaches
spack config --scope site add "packages:all:require:[target=x86_64_v3]"
spack config --scope site add "concretizer:targets:granularity:generic"
# Find compiler and install gcc-runtime
spack compiler find --scope site
# Setup buildcaches
spack mirror add --scope site develop https://binaries.spack.io/develop
spack buildcache keys --install --trust

View File

@@ -22,8 +22,8 @@ jobs:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
@@ -43,9 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -159,7 +159,7 @@ jobs:
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: "3.12"
- name: Bootstrap clingo

View File

@@ -55,7 +55,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta
@@ -96,7 +96,7 @@ jobs:
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20
- name: Log in to GitHub Container Registry
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20

View File

@@ -18,7 +18,6 @@ jobs:
prechecks:
needs: [ changes ]
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}
all-prechecks:
@@ -36,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
@@ -71,17 +70,14 @@ jobs:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
needs: [ prechecks, changes ]
uses: ./.github/workflows/bootstrap.yml
secrets: inherit
unit-tests:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
needs: [ prechecks, changes ]
uses: ./.github/workflows/unit_tests.yaml
secrets: inherit
windows:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
needs: [ prechecks ]
uses: ./.github/workflows/windows_python.yml
secrets: inherit
all:
needs: [ windows, unit-tests, bootstrap ]
runs-on: ubuntu-latest

View File

@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: 3.9
- name: Install Python packages

View File

@@ -1,4 +1,4 @@
black==24.4.0
black==24.3.0
clingo==5.7.1
flake8==7.0.0
isort==5.13.2

View File

@@ -51,10 +51,10 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -91,19 +91,17 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Test shell integration
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -124,11 +122,9 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Test RHEL8 UBI with platform Python. This job is run
# only on PRs modifying core Spack
@@ -141,7 +137,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -160,10 +156,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -185,23 +181,20 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # @v2.1.0
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on MacOS
macos:
runs-on: ${{ matrix.os }}
runs-on: macos-latest
strategy:
matrix:
os: [macos-latest, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -223,8 +216,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: '3.11'
cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: '3.11'
cache: 'pip'
@@ -56,7 +56,6 @@ jobs:
share/spack/qa/run-style-tests
audit:
uses: ./.github/workflows/audit.yaml
secrets: inherit
with:
with_coverage: ${{ inputs.with_coverage }}
python_version: '3.11'
@@ -70,7 +69,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
- name: Setup repo and non-root user
run: |
git --version

View File

@@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: 3.9
- name: Install Python packages
@@ -33,18 +33,16 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: 3.9
- name: Install Python packages
@@ -59,18 +57,16 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
with:
python-version: 3.9
- name: Install Python packages

View File

@@ -88,7 +88,7 @@ Resources:
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.

View File

@@ -15,7 +15,7 @@ concretizer:
# as possible, rather than building. If `false`, we'll always give you a fresh
# concretization. If `dependencies`, we'll only reuse dependencies but
# give you a fresh concretization for your root specs.
reuse: true
reuse: dependencies
# Options that tune which targets are considered for concretization. The
# concretization process is very sensitive to the number targets, and the time
# needed to reach a solution increases noticeably with the number of targets
@@ -42,8 +42,3 @@ concretizer:
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the OS that is being targeted, and the list contains the OSes
# it can reuse. Note this is a directional compatibility, so mutual compatibility between two OSes
# requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
os_compatible: {}

View File

@@ -101,12 +101,6 @@ config:
verify_ssl: true
# This is where custom certs for proxy/firewall are stored.
# It can be a path or environment variable. To match ssl env configuration
# the default is the environment variable SSL_CERT_FILE
ssl_certs: $SSL_CERT_FILE
# Suppress gpg warnings from binary package verification
# Only suppresses warnings, gpg failure will still fail the install
# Potential rationale to set True: users have already explicitly trusted the

View File

@@ -19,6 +19,7 @@ packages:
- apple-clang
- clang
- gcc
- intel
providers:
elf: [libelf]
fuse: [macfuse]

View File

@@ -15,7 +15,7 @@
# -------------------------------------------------------------------------
packages:
all:
compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
providers:
awk: [gawk]
blas: [openblas, amdblis]
@@ -24,7 +24,6 @@ packages:
elf: [elfutils]
fftw-api: [fftw, amdfftw]
flame: [libflame, amdlibflame]
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
fuse: [libfuse]
gl: [glx, osmesa]
glu: [mesa-glu, openglu]
@@ -35,10 +34,7 @@ packages:
java: [openjdk, jdk, ibm-java]
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
libgfortran: [ gcc-runtime ]
libglx: [mesa+glx, mesa18+glx]
libifcore: [ intel-oneapi-runtime ]
libllvm: [llvm]
libosmesa: [mesa+osmesa, mesa18+osmesa]
lua-lang: [lua, lua-luajit-openresty, lua-luajit]

View File

@@ -1119,9 +1119,6 @@ and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
includes any version with major version ``3``.
Versions are ordered lexicographically by their components. For more details
on the order, see :ref:`the packaging guide <version-comparison>`.
Notice that you can distinguish between the specific version ``@=3.2`` and
the range ``@3.2``. This is useful for packages that follow a versioning
scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,

View File

@@ -220,40 +220,6 @@ section of the configuration:
.. _binary_caches_oci:
---------------------------------
Automatic push to a build cache
---------------------------------
Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting the autopush flag when adding a mirror:
.. code-block:: console
$ spack mirror add --autopush <name> <url or path>
Or the autopush flag can be set for an existing mirror:
.. code-block:: console
$ spack mirror set --autopush <name> # enable automatic push for an existing mirror
$ spack mirror set --no-autopush <name> # disable automatic push for an existing mirror
Then after installing a package it is automatically pushed to all mirrors with ``autopush: true``. The command
.. code-block:: console
$ spack install <package>
will have the same effect as
.. code-block:: console
$ spack install <package>
$ spack buildcache push <cache> <package> # for all caches with autopush: true
.. note::
Packages are automatically pushed to a build cache only if they are built from source.
-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------

View File

@@ -145,22 +145,6 @@ hosts when making ``ssl`` connections. Set to ``false`` to disable, and
tools like ``curl`` will use their ``--insecure`` options. Disabling
this can expose you to attacks. Use at your own risk.
--------------------
``ssl_certs``
--------------------
Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to a file path.
The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically
detect custom certificates.
When ``url_fetch_method:curl``, ``config:ssl_certs`` should resolve to
a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
in the subprocess calling ``curl``.
If ``url_fetch_method:urllib``, then both files and directories are supported, i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work.
--------------------
``checksum``
--------------------

View File

@@ -552,11 +552,11 @@ With either interpreter you can run a single command:
.. code-block:: console
$ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
...
$ spack python -c 'import distro; distro.linux_distribution()'
('Ubuntu', '18.04', 'Bionic Beaver')
$ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
Out[1]: ...
$ spack python -i ipython -c 'import distro; distro.linux_distribution()'
Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
or a file:
@@ -1071,9 +1071,9 @@ Announcing a release
We announce releases in all of the major Spack communication channels.
Publishing the release takes care of GitHub. The remaining channels are
X, Slack, and the mailing list. Here are the steps:
Twitter, Slack, and the mailing list. Here are the steps:
#. Announce the release on X.
#. Announce the release on Twitter.
* Compose the tweet on the ``@spackpm`` account per the
``spack-twitter`` slack channel.

View File

@@ -893,50 +893,26 @@ as an option to the ``version()`` directive. Example situations would be a
"snapshot"-like Version Control System (VCS) tag, a VCS branch such as
``v6-16-00-patches``, or a URL specifying a regularly updated snapshot tarball.
.. _version-comparison:
^^^^^^^^^^^^^^^^^^
Version comparison
^^^^^^^^^^^^^^^^^^
Spack imposes a generic total ordering on the set of versions,
independently from the package they are associated with.
Most Spack versions are numeric, a tuple of integers; for example,
``0.1``, ``6.96`` or ``1.2.3.1``. In this very basic case, version
comparison is lexicographical on the numeric components:
``1.2 < 1.2.1 < 1.2.2 < 1.10``.
``0.1``, ``6.96`` or ``1.2.3.1``. Spack knows how to compare and sort
numeric versions.
Spack also supports string components such as ``1.1.1a`` and
``1.y.0``. String components are considered less than numeric
components, so ``1.y.0 < 1.0``. This is for consistency with
`RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. String
components do not have to be separated by dots or any other delimiter.
So, the contrived version ``1y0`` is identical to ``1.y.0``.
Some Spack versions involve slight extensions of numeric syntax; for
example, ``py-sphinx-rtd-theme@=0.1.10a0``. In this case, numbers are
always considered to be "newer" than letters. This is for consistency
with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
Pre-release suffixes also contain string parts, but they are handled
in a special way. For example ``1.2.3alpha1`` is parsed as a pre-release
of the version ``1.2.3``. This allows Spack to order it before the
actual release: ``1.2.3alpha1 < 1.2.3``. Spack supports alpha, beta and
release candidate suffixes: ``1.2alpha1 < 1.2beta1 < 1.2rc1 < 1.2``. Any
suffix not recognized as a pre-release is treated as an ordinary
string component, so ``1.2 < 1.2-mysuffix``.
Spack versions may also be arbitrary non-numeric strings, for example
``develop``, ``master``, ``local``.
Finally, there are a few special string components that are considered
"infinity versions". They include ``develop``, ``main``, ``master``,
``head``, ``trunk``, and ``stable``. For example: ``1.2 < develop``.
These are useful for specifying the most recent development version of
a package (often a moving target like a git branch), without assigning
a specific version number. Infinity versions are not automatically used when determining the latest version of a package unless explicitly required by another package or user.
More formally, the order on versions is defined as follows. A version
string is split into a list of components based on delimiters such as
``.`` and ``-`` and string boundaries. The components are split into
the **release** and a possible **pre-release** (if the last component
is numeric and the second to last is a string ``alpha``, ``beta`` or ``rc``).
The release components are ordered lexicographically, with comparison
between different types of components as follows:
The order on versions is defined as follows. A version string is split
into a list of components based on delimiters such as ``.``, ``-`` etc.
Lists are then ordered lexicographically, where components are ordered
as follows:
#. The following special strings are considered larger than any other
numeric or non-numeric version component, and satisfy the following
@@ -949,9 +925,6 @@ between different types of components as follows:
#. All other non-numeric components are less than numeric components,
and are ordered alphabetically.
Finally, if the release components are equal, the pre-release components
are used to break the tie, in the obvious way.
The logic behind this sort order is two-fold:
#. Non-numeric versions are usually used for special cases while

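The ordering rules above can be checked interactively with Spack's version class. A minimal sketch, assuming a checkout new enough to have the pre-release handling described above (run it under ``spack python`` so the import resolves):

# Illustrative only: exercises the ordering rules described above.
from spack.version import Version

# Numeric components compare component-wise, not as strings:
assert Version("1.2") < Version("1.2.1") < Version("1.2.2") < Version("1.10")

# String components sort below numeric ones (RPM-style):
assert Version("1.y.0") < Version("1.0")

# Pre-release suffixes order before the actual release:
assert Version("1.2alpha1") < Version("1.2beta1") < Version("1.2rc1") < Version("1.2")

# "Infinity" versions such as develop/main/master sort above any numeric version:
assert Version("1.2") < Version("develop")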
View File

@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
python-levenshtein==0.25.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.1.1
isort==5.13.2
black==24.4.0
black==24.3.0
flake8==7.0.0
mypy==1.9.0

lib/spack/env/cc (vendored, 253 changed lines)
View File

@@ -47,8 +47,7 @@ SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
SPACK_MANAGED_DIRS"
SPACK_SYSTEM_DIRS"
# Optional parameters that aren't required to be set
@@ -174,6 +173,22 @@ preextend() {
unset IFS
}
# system_dir PATH
# test whether a path is a system directory
system_dir() {
IFS=':' # SPACK_SYSTEM_DIRS is colon-separated
path="$1"
for sd in $SPACK_SYSTEM_DIRS; do
if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
# success if path starts with a system prefix
unset IFS
return 0
fi
done
unset IFS
return 1 # fail if path does not start with a system prefix
}
# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -186,18 +201,6 @@ for param in $params; do
fi
done
# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
# moving the eval inside the function would eval it every call.
eval "\
path_order() {
case \"\$1\" in
$SPACK_MANAGED_DIRS) return 0 ;;
$SPACK_SYSTEM_DIRS) return 2 ;;
/*) return 1 ;;
esac
}
"
# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -245,7 +248,7 @@ case "$command" in
lang_flags=C
debug_flags="-g"
;;
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
command="$SPACK_CXX"
language="C++"
comp="CXX"
@@ -417,12 +420,11 @@ input_command="$*"
parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
path_order "$1"
case $? in
0) append return_spack_store_rpath_dirs_list "$1" ;;
1) append return_rpath_dirs_list "$1" ;;
2) append return_system_rpath_dirs_list "$1" ;;
esac
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else
case "$1" in
@@ -430,25 +432,21 @@ parse_Wl() {
arg="${1#-rpath=}"
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
;;
--rpath=*)
arg="${1#--rpath=}"
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
;;
-rpath|--rpath)
wl_expect_rpath=yes
@@ -475,20 +473,12 @@ categorize_arguments() {
return_other_args_list=""
return_isystem_was_used=""
return_isystem_spack_store_include_dirs_list=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""
return_spack_store_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""
return_spack_store_lib_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""
return_spack_store_rpath_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""
@@ -536,7 +526,7 @@ categorize_arguments() {
continue
fi
replaced="$after$stripped"
replaced="$after$stripped"
# it matched, remove it
shift
@@ -556,32 +546,29 @@ categorize_arguments() {
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
1) append return_isystem_include_dirs_list "$arg" ;;
2) append return_isystem_system_include_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_isystem_system_include_dirs_list "$arg"
else
append return_isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_spack_store_include_dirs_list "$arg" ;;
1) append return_include_dirs_list "$arg" ;;
2) append return_system_include_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_include_dirs_list "$arg"
else
append return_include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_spack_store_lib_dirs_list "$arg" ;;
1) append return_lib_dirs_list "$arg" ;;
2) append return_system_lib_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_lib_dirs_list "$arg"
else
append return_lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -614,32 +601,29 @@ categorize_arguments() {
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
path_order "$1"
case $? in
0) append return_spack_store_rpath_dirs_list "$1" ;;
1) append return_rpath_dirs_list "$1" ;;
2) append return_system_rpath_dirs_list "$1" ;;
esac
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
@@ -677,25 +661,16 @@ categorize_arguments() {
}
categorize_arguments "$@"
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
include_dirs_list="$return_include_dirs_list"
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
isystem_was_used="$return_isystem_was_used"
other_args_list="$return_other_args_list"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"
#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -763,25 +738,16 @@ esac
IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_other_args_list="$return_other_args_list"
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_other_args_list="$return_other_args_list"
# On macOS insert headerpad_max_install_names linker flag
@@ -801,13 +767,11 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
# Append RPATH directories. Note that in the case of the
# top-level package these directories may not exist yet. For dependencies
# it is assumed that paths have already been confirmed.
extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
extend rpath_dirs_list SPACK_RPATH_DIRS
fi
fi
if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
extend lib_dirs_list SPACK_LINK_DIRS
fi
@@ -834,50 +798,38 @@ case "$mode" in
;;
esac
case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
else
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
extend include_dirs_list SPACK_INCLUDE_DIRS
fi
;;
esac
#
# Finally, reassemble the command line.
#
args_list="$flags_list"
# Include search paths partitioned by (in store, non-system, system)
# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_spack_store_include_dirs_list -I
extend args_list spack_store_include_dirs_list -I
extend args_list spack_flags_include_dirs_list -I
extend args_list include_dirs_list -I
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_include_dirs_list "-I"
extend args_list include_dirs_list "-I"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"
case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
elif [ "$isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
else
extend args_list SPACK_INCLUDE_DIRS "-I"
fi
;;
esac
extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
# Library search paths partitioned by (in store, non-system, system)
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
extend args_list spack_store_lib_dirs_list "-L"
# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"
@@ -887,12 +839,8 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
extend args_list spack_store_rpath_dirs_list "$rpath"
extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
;;
@@ -900,12 +848,8 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
;;
@@ -969,3 +913,4 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
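Both variants shown above classify search paths before the command line is reassembled: the path_order form splits them three ways (Spack store, other, system), while the system_dir form only separates system paths from the rest. A small Python sketch of the three-way idea, with illustrative names and paths that are not the wrapper's actual variables:

# Illustrative only: the path-partitioning idea behind path_order above.
from typing import List, Tuple

def partition_paths(paths: List[str], store_prefixes: Tuple[str, ...],
                    system_dirs: Tuple[str, ...]) -> List[str]:
    store, other, system = [], [], []
    for p in paths:
        if p.startswith(store_prefixes):    # Spack-managed directories first
            store.append(p)
        elif p.rstrip("/") in system_dirs:  # system directories last
            system.append(p)
        else:                               # everything else in between
            other.append(p)
    return store + other + system

print(partition_paths(
    ["/usr/lib", "/opt/spack/store/gcc/lib", "/home/me/lib"],
    store_prefixes=("/opt/spack/store",),
    system_dirs=("/usr/lib", "/usr/local/lib"),
))
# -> ['/opt/spack/store/gcc/lib', '/home/me/lib', '/usr/lib']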

View File

@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
Tag.attrib, merge_attrib]:
if hasattr(self, a):
if memo is not None:
setattr(t, a, copy.deepcopy(getattr(self, a), memo))
setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
else:
setattr(t, a, getattr(self, a))
# fmt: on

View File

@@ -1,13 +0,0 @@
diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
Tag.attrib, merge_attrib]:
if hasattr(self, a):
if memo is not None:
- setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+ setattr(t, a, copy.deepcopy(getattr(self, a), memo))
else:
setattr(t, a, getattr(self, a))
# fmt: on
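The one-character difference above is easy to miss: in ``getattr(self, a, memo)`` the memo dict is only getattr's default value and ``copy.deepcopy`` runs without a memo, whereas ``copy.deepcopy(getattr(self, a), memo)`` threads the memo through so objects shared between attributes stay shared. A stdlib-only sketch of the distinction (class and attribute names are made up):

# Illustrative only: why the argument placement above matters.
import copy

class Node:
    pass

shared = Node()
obj = Node()
obj.a = shared
obj.b = shared

memo = {}
# Correct form: deepcopy(value, memo) preserves identity of objects shared
# between attributes copied with the same memo dict.
a_copy = copy.deepcopy(getattr(obj, "a"), memo)
b_copy = copy.deepcopy(getattr(obj, "b"), memo)
assert a_copy is b_copy

# Buggy form: getattr(obj, "a", memo) only uses memo as a default value,
# deepcopy gets no memo, and the shared structure is duplicated.
a_copy2 = copy.deepcopy(getattr(obj, "a", memo))
b_copy2 = copy.deepcopy(getattr(obj, "b", memo))
assert a_copy2 is not b_copy2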

View File

@@ -12,7 +12,7 @@
# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz", "whl")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
@@ -357,8 +357,10 @@ def strip_version_suffixes(path_or_url: str) -> str:
r"i[36]86",
r"ppc64(le)?",
r"armv?(7l|6l|64)?",
# PyPI wheels
r"-(?:py|cp)[23].*",
# PyPI
r"[._-]py[23].*\.whl",
r"[._-]cp[23].*\.whl",
r"[._-]win.*\.exe",
]
for regex in suffix_regexes:
@@ -401,7 +403,7 @@ def expand_contracted_extension_in_path(
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
"""Returns compression extension for a compressed archive"""
extension = expand_contracted_extension(extension)
for ext in EXTENSIONS:
for ext in [*EXTENSIONS]:
if ext in extension:
return ext
return None
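The wheel-related patterns above are ordinary regular expressions applied after the archive extension has been stripped. An illustrative check of what the combined pattern removes, using only the standard library and a hypothetical file name:

# Illustrative only: behavior of the PyPI wheel suffix pattern shown above,
# not the exact code path inside strip_version_suffixes.
import re

wheel_suffixes = [r"-(?:py|cp)[23].*"]

name = "torch-2.1.0-cp311-cp311-linux_x86_64"  # hypothetical, extension already stripped
for pattern in wheel_suffixes:
    name = re.sub(pattern, "", name)
print(name)  # -> torch-2.1.0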

View File

@@ -198,32 +198,15 @@ def getuid():
return os.getuid()
def _win_rename(src, dst):
# os.replace will still fail on Windows (but not POSIX) if the dst
# is a symlink to a directory (all other cases have parity Windows <-> Posix)
if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
if os.path.samefile(src, dst):
# src and dst are the same
# do nothing and exit early
return
# If dst exists and is a symlink to a directory
# we need to remove dst and then perform rename/replace
# this is safe to do as there's no chance src == dst now
os.remove(dst)
os.replace(src, dst)
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
# os.replace is the same as os.rename on POSIX and is MoveFileExW w/
# the MOVEFILE_REPLACE_EXISTING flag on Windows
# Windows invocation is abstracted behind additional logic handling
# remaining cases of divergent behavior across platforms
if sys.platform == "win32":
_win_rename(src, dst)
else:
os.replace(src, dst)
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or islink(dst):
os.remove(dst)
os.rename(src, dst)
@system_path_filter
@@ -1234,12 +1217,10 @@ def windows_sfn(path: os.PathLike):
import ctypes
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
# Method with null values returns size of short path name
sz = k32.GetShortPathNameW(path, None, 0)
# stub Windows types TCHAR[LENGTH]
TCHAR_arr = ctypes.c_wchar * sz
TCHAR_arr = ctypes.c_wchar * len(path)
ret_str = TCHAR_arr()
k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
k32.GetShortPathNameW(path, ret_str, len(path))
return ret_str.value
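The portability point behind ``rename``/``_win_rename`` above is standard-library behavior: ``os.rename`` refuses to overwrite an existing destination on Windows, while ``os.replace`` overwrites on every platform. A small self-contained sketch:

# Illustrative only: portable overwrite-on-rename using the stdlib.
import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    src = os.path.join(d, "new")
    dst = os.path.join(d, "old")
    for path, text in ((src, "new"), (dst, "old")):
        with open(path, "w") as f:
            f.write(text)
    # os.replace overwrites dst on POSIX and Windows alike;
    # plain os.rename would raise FileExistsError on Windows here.
    os.replace(src, dst)
    with open(dst) as f:
        assert f.read() == "new"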

View File

@@ -12,7 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import Any, NoReturn
from typing import NoReturn
if _platform != "win32":
import fcntl
@@ -158,22 +158,21 @@ def get_timestamp(force=False):
return ""
def msg(message: Any, *args: Any, newline: bool = True) -> None:
def msg(message, *args, **kwargs):
if not msg_enabled():
return
if isinstance(message, Exception):
message = f"{message.__class__.__name__}: {message}"
else:
message = str(message)
message = "%s: %s" % (message.__class__.__name__, str(message))
newline = kwargs.get("newline", True)
st_text = ""
if _stacktrace:
st_text = process_stacktrace(2)
nl = "\n" if newline else ""
cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")
if newline:
cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
else:
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
for arg in args:
print(indent + _output_filter(str(arg)))
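For context, ``msg`` is the routine behind Spack's usual ``==>`` console output; a hedged usage sketch (run under ``spack python`` so ``llnl.util.tty`` is importable):

# Illustrative only: how the msg() function shown above is typically called.
import llnl.util.tty as tty

tty.msg("Installing zlib")                  # prints a colored "==> Installing zlib"
tty.msg("Installing zlib", "from source")   # extra args print as indented follow-up lines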

View File

@@ -237,6 +237,7 @@ def transpose():
def colified(
elts: List[Any],
cols: int = 0,
output: Optional[IO] = None,
indent: int = 0,
padding: int = 2,
tty: Optional[bool] = None,
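``colified`` returns the columnized text as a string rather than printing it; a short usage sketch with made-up package names (assumes ``llnl.util.tty.colify`` is importable, e.g. under ``spack python``):

# Illustrative only: columnize a list of items with colified().
from llnl.util.tty.colify import colified

names = ["autoconf", "cmake", "gcc", "openmpi", "python", "zlib"]
print(colified(names, cols=3, indent=2))  # three columns, indented two spaces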

View File

@@ -62,7 +62,6 @@
import re
import sys
from contextlib import contextmanager
from typing import Optional
class ColorParseError(Exception):
@@ -96,7 +95,7 @@ def __init__(self, message):
} # white
# Regex to be used for color formatting
COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")
color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}
@@ -204,64 +203,77 @@ def color_when(value):
set_color_when(old_value)
def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
"""Returns a TTY escape sequence for a color"""
if color:
if zsh:
result = rf"\e[0;{s}m"
class match_to_ansi:
def __init__(self, color=True, enclose=False, zsh=False):
self.color = _color_when_value(color)
self.enclose = enclose
self.zsh = zsh
def escape(self, s):
"""Returns a TTY escape sequence for a color"""
if self.color:
if self.zsh:
result = rf"\e[0;{s}m"
else:
result = f"\033[{s}m"
if self.enclose:
result = rf"\[{result}\]"
return result
else:
result = f"\033[{s}m"
return ""
if enclose:
result = rf"\[{result}\]"
def __call__(self, match):
"""Convert a match object generated by ``color_re`` into an ansi
color code. This can be used as a handler in ``re.sub``.
"""
style, color, text = match.groups()
m = match.group(0)
return result
else:
return ""
if m == "@@":
return "@"
elif m == "@.":
return self.escape(0)
elif m == "@":
raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
string = styles[style]
if color:
if color not in colors:
raise ColorParseError(
"Invalid color specifier: '%s' in '%s'" % (color, match.string)
)
string += ";" + str(colors[color])
colored_text = ""
if text:
colored_text = text + self.escape(0)
return self.escape(string) + colored_text
def colorize(
string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
) -> str:
def colorize(string, **kwargs):
"""Replace all color expressions in a string with ANSI control codes.
Args:
string: The string to replace
string (str): The string to replace
Returns:
The filtered string
str: The filtered string
Keyword Arguments:
color: If False, output will be plain text without control codes, for output to
non-console devices (default: automatically choose color or not)
enclose: If True, enclose ansi color sequences with
color (bool): If False, output will be plain text without control
codes, for output to non-console devices.
enclose (bool): If True, enclose ansi color sequences with
square brackets to prevent misestimation of terminal width.
zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
"""
color = color if color is not None else get_color_when()
def match_to_ansi(match):
"""Convert a match object generated by ``COLOR_RE`` into an ansi
color code. This can be used as a handler in ``re.sub``.
"""
escaped_at, dot, style, color_code, text = match.groups()
if escaped_at:
return "@"
elif dot:
return _escape(0, color, enclose, zsh)
elif not (style or color_code):
raise ColorParseError(
f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
)
ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
if text:
return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
else:
return ansi_code
return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")
color = _color_when_value(kwargs.get("color", get_color_when()))
zsh = kwargs.get("zsh", False)
string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
string = string.replace("}}", "}")
return string
def clen(string):
@@ -293,7 +305,7 @@ def cprint(string, stream=None, color=None):
cwrite(string + "\n", stream, color)
def cescape(string: str) -> str:
def cescape(string):
"""Escapes special characters needed for color codes.
Replaces the following symbols with their equivalent literal forms:
@@ -309,7 +321,10 @@ def cescape(string: str) -> str:
Returns:
(str): the string with color codes escaped
"""
return string.replace("@", "@@").replace("}", "}}")
string = str(string)
string = string.replace("@", "@@")
string = string.replace("}", "}}")
return string
class ColorStream:

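The markup handled above uses ``@`` followed by an optional style and color letter, with the text in braces; ``cescape`` escapes literal ``@`` and ``}`` before formatting. A brief usage sketch (assumes the module is importable, e.g. under ``spack python``):

# Illustrative only: the color markup handled by colorize()/cescape() above.
from llnl.util.tty.color import colorize, cescape

styled = colorize("@*b{==>} installing @g{zlib}")  # bold blue "==>", green "zlib"
plain = colorize("@*b{==>} done", color=False)     # strip markup instead of emitting ANSI codes
safe = cescape("50% of builds use @develop")       # escape literal '@' (and '}') before formatting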
View File

@@ -17,6 +17,7 @@
import tarfile
import tempfile
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
@@ -110,6 +111,10 @@ def __init__(self, errors):
super().__init__(self.message)
class ListMirrorSpecsError(spack.error.SpackError):
"""Raised when unable to retrieve list of specs from the mirror"""
class BinaryCacheIndex:
"""
The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -536,6 +541,83 @@ def binary_index_location():
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore
class NoOverwriteException(spack.error.SpackError):
"""Raised when a file would be overwritten"""
def __init__(self, file_path):
super().__init__(f"Refusing to overwrite the following file: {file_path}")
class NoGpgException(spack.error.SpackError):
"""
Raised when gpg2 is not in PATH
"""
def __init__(self, msg):
super().__init__(msg)
class NoKeyException(spack.error.SpackError):
"""
Raised when gpg has no default key added.
"""
def __init__(self, msg):
super().__init__(msg)
class PickKeyException(spack.error.SpackError):
"""
Raised when multiple keys can be used to sign.
"""
def __init__(self, keys):
err_msg = "Multiple keys available for signing\n%s\n" % keys
err_msg += "Use spack buildcache create -k <key hash> to pick a key."
super().__init__(err_msg)
class NoVerifyException(spack.error.SpackError):
"""
Raised if file fails signature verification.
"""
pass
class NoChecksumException(spack.error.SpackError):
"""
Raised if file fails checksum verification.
"""
def __init__(self, path, size, contents, algorithm, expected, computed):
super().__init__(
f"{algorithm} checksum failed for {path}",
f"Expected {expected} but got {computed}. "
f"File size = {size} bytes. Contents = {contents!r}",
)
class NewLayoutException(spack.error.SpackError):
"""
Raised if directory layout is different from buildcache.
"""
def __init__(self, msg):
super().__init__(msg)
class InvalidMetadataFile(spack.error.SpackError):
pass
class UnsignedPackageException(spack.error.SpackError):
"""
Raised if installation of unsigned package is attempted without
the use of ``--no-check-signature``.
"""
def compute_hash(data):
if isinstance(data, str):
data = data.encode("utf-8")
@@ -910,10 +992,15 @@ def url_read_method(url):
if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
]
read_fn = url_read_method
except KeyError as inst:
msg = "No packages at {0}: {1}".format(cache_prefix, inst)
tty.warn(msg)
except Exception as err:
# If we got some kind of S3 (access denied or other connection error), the first non
# boto-specific class in the exception hierarchy is Exception. Just print a warning and return
tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")
# If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
tty.warn(msg)
return file_list, read_fn
@@ -960,10 +1047,11 @@ def generate_package_index(cache_prefix, concurrency=32):
"""
try:
file_list, read_fn = _spec_files_from_cache(cache_prefix)
except ListMirrorSpecsError as e:
raise GenerateIndexError(f"Unable to generate package index: {e}") from e
except ListMirrorSpecsError as err:
tty.error("Unable to generate package index, {0}".format(err))
return
tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
tmpdir = tempfile.mkdtemp()
@@ -973,22 +1061,27 @@ def generate_package_index(cache_prefix, concurrency=32):
try:
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
except Exception as e:
raise GenerateIndexError(
f"Encountered problem pushing package index to {cache_prefix}: {e}"
) from e
except Exception as err:
msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
tty.warn(msg)
tty.debug("\n" + traceback.format_exc())
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
shutil.rmtree(tmpdir)
def generate_key_index(key_prefix, tmpdir=None):
"""Create the key index page.
Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
contains an entry for each key (.pub) under key_prefix.
Creates (or replaces) the "index.json" page at the location given in
key_prefix. This page contains an entry for each key (.pub) under
key_prefix.
"""
tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")
tty.debug(
" ".join(
("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
)
)
try:
fingerprints = (
@@ -996,8 +1089,17 @@ def generate_key_index(key_prefix, tmpdir=None):
for entry in web_util.list_url(key_prefix, recursive=False)
if entry.endswith(".pub")
)
except Exception as e:
raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e
except KeyError as inst:
msg = "No keys at {0}: {1}".format(key_prefix, inst)
tty.warn(msg)
return
except Exception as err:
# If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
tty.warn(msg)
return
remove_tmpdir = False
@@ -1022,13 +1124,12 @@ def generate_key_index(key_prefix, tmpdir=None):
keep_original=False,
extra_args={"ContentType": "application/json"},
)
except Exception as e:
raise GenerateIndexError(
f"Encountered problem pushing key index to {key_prefix}: {e}"
) from e
except Exception as err:
msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
tty.warn(msg)
finally:
if remove_tmpdir:
shutil.rmtree(tmpdir, ignore_errors=True)
shutil.rmtree(tmpdir)
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1099,8 +1200,7 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
used at the mirror (following <tarball_directory_name>).
This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
when the tarball or spec.json file cannot be pushed to the buildcache.
spec.json file already exist in the buildcache.
"""
if not spec.concrete:
raise ValueError("spec must be concrete to build tarball")
@@ -1178,18 +1278,13 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
key = select_signing_key(options.key)
sign_specfile(key, options.force, specfile_path)
try:
# push tarball and signed spec json to remote mirror
web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
web_util.push_to_url(
signed_specfile_path if not options.unsigned else specfile_path,
remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
keep_original=False,
)
except Exception as e:
raise PushToBuildCacheError(
f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
) from e
# push tarball and signed spec json to remote mirror
web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
web_util.push_to_url(
signed_specfile_path if not options.unsigned else specfile_path,
remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
keep_original=False,
)
# push the key to the build cache's _pgp directory so it can be
# imported
@@ -1201,6 +1296,8 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
if options.regenerate_index:
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))
return None
class NotInstalledError(spack.error.SpackError):
"""Raised when a spec is not installed but picked to be packaged."""
@@ -1255,6 +1352,28 @@ def specs_to_be_packaged(
return [s for s in itertools.chain(roots, deps) if not s.external]
def push(spec: Spec, mirror_url: str, options: PushOptions):
"""Create and push binary package for a single spec to the specified
mirror url.
Args:
spec: Spec to package and push
mirror_url: Desired destination url for binary package
options:
Returns:
True if package was pushed, False otherwise.
"""
try:
push_or_raise(spec, mirror_url, options)
except NoOverwriteException as e:
warnings.warn(str(e))
return False
return True
def try_verify(specfile_path):
"""Utility function to attempt to verify a local file. Assumes the
file is a clearsigned signature file.
@@ -2587,96 +2706,3 @@ def conditional_fetch(self) -> FetchIndexResult:
raise FetchIndexError(f"Remote index {url_manifest} is invalid")
return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
class NoOverwriteException(spack.error.SpackError):
"""Raised when a file would be overwritten"""
def __init__(self, file_path):
super().__init__(f"Refusing to overwrite the following file: {file_path}")
class NoGpgException(spack.error.SpackError):
"""
Raised when gpg2 is not in PATH
"""
def __init__(self, msg):
super().__init__(msg)
class NoKeyException(spack.error.SpackError):
"""
Raised when gpg has no default key added.
"""
def __init__(self, msg):
super().__init__(msg)
class PickKeyException(spack.error.SpackError):
"""
Raised when multiple keys can be used to sign.
"""
def __init__(self, keys):
err_msg = "Multiple keys available for signing\n%s\n" % keys
err_msg += "Use spack buildcache create -k <key hash> to pick a key."
super().__init__(err_msg)
class NoVerifyException(spack.error.SpackError):
"""
Raised if file fails signature verification.
"""
pass
class NoChecksumException(spack.error.SpackError):
"""
Raised if file fails checksum verification.
"""
def __init__(self, path, size, contents, algorithm, expected, computed):
super().__init__(
f"{algorithm} checksum failed for {path}",
f"Expected {expected} but got {computed}. "
f"File size = {size} bytes. Contents = {contents!r}",
)
class NewLayoutException(spack.error.SpackError):
"""
Raised if directory layout is different from buildcache.
"""
def __init__(self, msg):
super().__init__(msg)
class InvalidMetadataFile(spack.error.SpackError):
pass
class UnsignedPackageException(spack.error.SpackError):
"""
Raised if installation of unsigned package is attempted without
the use of ``--no-check-signature``.
"""
class ListMirrorSpecsError(spack.error.SpackError):
"""Raised when unable to retrieve list of specs from the mirror"""
class GenerateIndexError(spack.error.SpackError):
"""Raised when unable to generate key or package index for mirror"""
class CannotListKeys(GenerateIndexError):
"""Raised when unable to list keys when generating key index"""
class PushToBuildCacheError(spack.error.SpackError):
"""Raised when unable to push objects to binary mirror"""

View File

@@ -173,14 +173,35 @@ def _read_metadata(self, package_name: str) -> Any:
return data
def _install_by_hash(
self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
self,
pkg_hash: str,
pkg_sha256: str,
index: List[spack.spec.Spec],
bincache_platform: spack.platforms.Platform,
) -> None:
index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
# Reconstruct the compiler that we need to use for bootstrapping
compiler_entry = {
"modules": [],
"operating_system": str(index_spec.os),
"paths": {
"cc": "/dev/null",
"cxx": "/dev/null",
"f77": "/dev/null",
"fc": "/dev/null",
},
"spec": str(index_spec.compiler),
"target": str(index_spec.target.family),
}
with spack.platforms.use_platform(bincache_platform):
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
spack.binary_distribution.install_root_node(
match, unsigned=True, force=True, sha256=pkg_sha256
)
with spack.config.override("compilers", [{"compiler": compiler_entry}]):
spec_str = "/" + pkg_hash
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
matches = spack.store.find([spec_str], multiple=False, query_fn=query)
for match in matches:
spack.binary_distribution.install_root_node(
match, unsigned=True, force=True, sha256=pkg_sha256
)
def _install_and_test(
self,
@@ -211,7 +232,7 @@ def _install_and_test(
continue
for _, pkg_hash, pkg_sha256 in item["binaries"]:
self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)
self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
info: ConfigDictionary = {}
if test_fn(query_spec=abstract_spec, query_info=info):

View File

@@ -43,7 +43,7 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Set, Tuple
from typing import List, Tuple
import llnl.util.tty as tty
from llnl.string import plural
@@ -57,10 +57,8 @@
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.error
import spack.main
import spack.package_base
import spack.paths
@@ -68,7 +66,6 @@
import spack.repo
import spack.schema.environment
import spack.spec
import spack.stage
import spack.store
import spack.subprocess_context
import spack.user_environment
@@ -81,7 +78,7 @@
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
SYSTEM_DIRS,
EnvironmentModifications,
env_flag,
filter_system_paths,
@@ -104,13 +101,9 @@
# Spack's compiler wrappers.
#
SPACK_ENV_PATH = "SPACK_ENV_PATH"
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
SPACK_PREFIX = "SPACK_PREFIX"
@@ -423,7 +416,7 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
compiler.setup_custom_environment(pkg, env)
@@ -551,26 +544,9 @@ def update_compiler_args_for_dep(dep):
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
# Spack managed directories include the stage, store and upstream stores. We extend this with
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
spack_managed_dirs: Set[str] = {
spack.stage.get_stage_root(),
spack.store.STORE.db.root,
*(db.root for db in spack.store.STORE.db.upstream_dbs),
}
spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])
env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))
env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def set_package_py_globals(pkg, context: Context = Context.BUILD):
@@ -607,22 +583,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't put it context == build)
link_dir = spack.paths.build_env_path
pkg_compiler = None
try:
pkg_compiler = pkg.compiler
except spack.compilers.NoCompilerForSpecError as e:
tty.debug(f"cannot set 'spack_cc': {str(e)}")
if pkg_compiler is not None:
module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
else:
module.spack_cc = None
module.spack_cxx = None
module.spack_f77 = None
module.spack_fc = None
module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
# Useful directories within the prefix are encapsulated in
# a Prefix object.
@@ -825,7 +789,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
for mod in ["cray-mpich", "cray-libsci"]:
module("unload", mod)
if target and target.module_name:
if target.module_name:
load_module(target.module_name)
load_external_modules(pkg)
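The SPACK_MANAGED_DIRS logic above splits search directories into Spack-managed and system ones while preserving order. A standalone sketch under assumed store/stage roots (Spack itself uses llnl.util.lang.stable_partition for this):

from typing import Callable, Iterable, List, Tuple

def stable_partition(items: Iterable[str], pred: Callable[[str], bool]) -> Tuple[List[str], List[str]]:
    # Keep relative order within each partition, like llnl.util.lang.stable_partition.
    true_items, false_items = [], []
    for item in items:
        (true_items if pred(item) else false_items).append(item)
    return true_items, false_items

spack_managed_dirs = {"/opt/spack/store/", "/tmp/spack-stage/"}  # example roots
is_spack_managed = lambda p: any(p.startswith(root) for root in spack_managed_dirs)

link_dirs = ["/usr/lib", "/opt/spack/store/gcc-13/lib", "/opt/spack/store/zlib/lib"]
link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
print(link_dirs_spack)   # ['/opt/spack/store/gcc-13/lib', '/opt/spack/store/zlib/lib']
print(link_dirs_system)  # ['/usr/lib']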

View File

@@ -434,6 +434,11 @@ def _do_patch_libtool(self):
r"crtendS\.o",
]:
x.filter(regex=(rehead + o), repl="")
elif self.pkg.compiler.name == "dpcpp":
# Hack to filter out spurious predep_objects when building with Intel dpcpp
# (see https://github.com/spack/spack/issues/32863):
x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
elif self.pkg.compiler.name == "nag":
for tag in ["fc", "f77"]:
marker = markers[tag]
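The dpcpp branch above strips spurious conftest objects from libtool's predep_objects line; a quick illustration of the same regex with re.sub (the conftest object name is invented):

import re

line = 'predep_objects="/usr/lib/crtbeginS.o /tmp/conftest-1A2B3C.o"'
cleaned = re.sub(r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", r"\1", line)
print(cleaned)  # the spurious /tmp/conftest-*.o entry is dropped from the list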

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import os
import re
from typing import Tuple
import llnl.util.filesystem as fs
@@ -16,12 +15,6 @@
from .cmake import CMakeBuilder, CMakePackage
def spec_uses_toolchain(spec):
gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
return using_toolchain
def cmake_cache_path(name, value, comment="", force=False):
"""Generate a string for a cmake cache variable"""
force_str = " FORCE" if force else ""
@@ -220,7 +213,7 @@ def initconfig_mpi_entries(self):
else:
# starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
# vs the older versions which expect MPIEXEC
if spec["cmake"].satisfies("@3.10:"):
if self.pkg.spec["cmake"].satisfies("@3.10:"):
entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
else:
entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -255,17 +248,12 @@ def initconfig_hardware_entries(self):
# Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
# CUDA_FLAGS
cuda_flags = []
if not spec.satisfies("cuda_arch=none"):
cuda_archs = ";".join(spec.variants["cuda_arch"].value)
entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))
if spec_uses_toolchain(spec):
cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))
entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))
archs = spec.variants["cuda_arch"].value
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
)
if "+rocm" in spec:
entries.append("#------------------{0}".format("-" * 30))
@@ -274,6 +262,9 @@ def initconfig_hardware_entries(self):
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
entries.append(
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -286,9 +277,11 @@ def initconfig_hardware_entries(self):
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
entries.append(
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
)
entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
return entries
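For reference, the cmake_cache_* helpers emit set(... CACHE ...) lines destined for a CMake initial-cache file; a rough approximation (the exact formatting in Spack's cached_cmake builder may differ):

def cmake_cache_string(name, value, comment="", force=False):
    # Approximate shape of a CMake initial-cache entry.
    force_str = " FORCE" if force else ""
    return f'set({name} "{value}" CACHE STRING "{comment}"{force_str})\n'

archs = ["80", "90"]  # e.g. values of the cuda_arch variant
print(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", ";".join(archs)))
# set(CMAKE_CUDA_ARCHITECTURES "80;90" CACHE STRING "")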

View File

@@ -16,7 +16,7 @@
class CargoPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using cargo."""
"""Specialized class for packages built using a Makefiles."""
#: This attribute is used in UI queries that need to know the build
#: system base class

View File

@@ -21,7 +21,7 @@
class MakefilePackage(spack.package_base.PackageBase):
"""Specialized class for packages built using Makefiles."""
"""Specialized class for packages built using a Makefiles."""
#: This attribute is used in UI queries that need to know the build
#: system base class

View File

@@ -14,7 +14,7 @@
from llnl.util.link_tree import LinkTree
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, variant
from spack.directives import conflicts, variant
from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -26,7 +26,6 @@ class IntelOneApiPackage(Package):
"""Base class for Intel oneAPI packages."""
homepage = "https://software.intel.com/oneapi"
license("https://intel.ly/393CijO")
# oneAPI license does not allow mirroring outside of the
# organization (e.g. University/Company).

View File

@@ -75,12 +75,9 @@
# does not like its directory structure.
#
import os
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package_base import PackageBase
from spack.util.environment import EnvironmentModifications
class ROCmPackage(PackageBase):
@@ -157,25 +154,6 @@ def hip_flags(amdgpu_target):
archs = ",".join(amdgpu_target)
return "--amdgpu-target={0}".format(archs)
def asan_on(self, env: EnvironmentModifications):
llvm_path = self.spec["llvm-amdgpu"].prefix
env.set("CC", llvm_path + "/bin/clang")
env.set("CXX", llvm_path + "/bin/clang++")
env.set("ASAN_OPTIONS", "detect_leaks=0")
for root, _, files in os.walk(llvm_path):
if "libclang_rt.asan-x86_64.so" in files:
asan_lib_path = root
env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
if "rhel" in self.spec.os or "sles" in self.spec.os:
SET_DWARF_VERSION_4 = "-gdwarf-5"
else:
SET_DWARF_VERSION_4 = ""
env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
# HIP version vs Architecture
# TODO: add a bunch of lines like:

File diff suppressed because it is too large

View File

@@ -334,7 +334,8 @@ def display_specs(specs, args=None, **kwargs):
variants (bool): Show variants with specs
indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True)
decorator (typing.Callable): function to call to decorate specs
decorators (dict): dictionary mapping specs to decorators
header_callback (typing.Callable): called at start of arch/compiler groups
all_headers (bool): show headers even when arch/compiler aren't defined
output (typing.IO): A file object to write to. Default is ``sys.stdout``
@@ -383,13 +384,15 @@ def get_arg(name, default=None):
vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + ffmt + vfmt
transform = {"package": decorator, "fullpackage": decorator}
def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if hashes:
string += gray_hash(s, hlen) + " "
string += depth * " "
string += decorator(s, s.cformat(format_string))
string += s.cformat(format_string, transform=transform)
return string
def format_list(specs):
@@ -448,7 +451,7 @@ def filter_loaded_specs(specs):
return [x for x in specs if x.dag_hash() in hashes]
def print_how_many_pkgs(specs, pkg_type="", suffix=""):
def print_how_many_pkgs(specs, pkg_type=""):
"""Given a list of specs, this will print a message about how many
specs are in that list.
@@ -459,7 +462,7 @@ def print_how_many_pkgs(specs, pkg_type="", suffix=""):
category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
def spack_is_git_repo():

View File

@@ -133,11 +133,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
help="when pushing to an OCI registry, tag an image containing all root specs and their "
"runtime dependencies",
)
push.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(push, ["specs", "jobs"])
push.set_defaults(func=push_fn)
@@ -280,37 +275,23 @@ def setup_parser(subparser: argparse.ArgumentParser):
# Sync buildcache entries from one mirror to another
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
sync_manifest_source = sync.add_argument_group(
"Manifest Source",
"Specify a list of build cache objects to sync using manifest file(s)."
'This option takes the place of the "source mirror" for synchronization'
'and optionally takes a "destination mirror" ',
sync.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
)
sync_manifest_source.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying CI rebuild manifest files"
)
sync_source_mirror = sync.add_argument_group(
"Named Source",
"Specify a single registered source mirror to synchronize from. This option requires"
"the specification of a destination mirror.",
)
sync_source_mirror.add_argument(
sync.add_argument(
"src_mirror",
metavar="source mirror",
nargs="?",
type=arguments.mirror_name_or_url,
nargs="?",
help="source mirror name, path, or URL",
)
sync.add_argument(
"dest_mirror",
metavar="destination mirror",
nargs="?",
type=arguments.mirror_name_or_url,
nargs="?",
help="destination mirror name, path, or URL",
)
sync.set_defaults(func=sync_fn)
# Update buildcache index without copying any additional packages
@@ -372,25 +353,6 @@ def _make_pool() -> MaybePool:
return NoPool()
def _skip_no_redistribute_for_public(specs):
remaining_specs = list()
removed_specs = list()
for spec in specs:
if spec.package.redistribute_binary:
remaining_specs.append(spec)
else:
removed_specs.append(spec)
if removed_specs:
colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
tty.debug(
"The following specs will not be added to the binary cache"
" because they cannot be redistributed:\n"
f"{colified_output}\n"
"You can use `--private` to include them."
)
return remaining_specs
def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
@@ -441,8 +403,6 @@ def push_fn(args):
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
if not args.private:
specs = _skip_no_redistribute_for_public(specs)
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
@@ -1110,17 +1070,7 @@ def sync_fn(args):
requires an active environment in order to know which specs to sync
"""
if args.manifest_glob:
# Passing the args.src_mirror here because it is not possible to
# have the destination be required when specifying a named source
# mirror and optional for the --manifest-glob argument. In the case
# of manifest glob sync, the source mirror positional argument is the
# destination mirror if it is specified. If there are two mirrors
# specified, the second is ignored and the first is the override
# destination.
if args.dest_mirror:
tty.warn(f"Ignoring unused argument: {args.dest_mirror.name}")
manifest_copy(glob.glob(args.manifest_glob), args.src_mirror)
manifest_copy(glob.glob(args.manifest_glob))
return 0
if args.src_mirror is None or args.dest_mirror is None:
@@ -1171,7 +1121,7 @@ def sync_fn(args):
shutil.rmtree(tmpdir)
def manifest_copy(manifest_file_list, dest_mirror=None):
def manifest_copy(manifest_file_list):
"""Read manifest files containing information about specific specs to copy
from source to destination, remove duplicates since any binary package for
a given hash should be the same as any other, and copy all files specified
@@ -1185,17 +1135,10 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
# Last duplicate hash wins
deduped_manifest[spec_hash] = copy_list
build_cache_dir = bindist.build_cache_relative_path()
for spec_hash, copy_list in deduped_manifest.items():
for copy_file in copy_list:
dest = copy_file["dest"]
if dest_mirror:
src_relative_path = os.path.join(
build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
)
dest = url_util.join(dest_mirror.push_url, src_relative_path)
tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
copy_buildcache_file(copy_file["src"], dest)
tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
copy_buildcache_file(copy_file["src"], copy_file["dest"])
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
@@ -1222,18 +1165,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
)
try:
bindist.generate_key_index(keys_url)
except bindist.CannotListKeys as e:
# Do not error out if listing keys went wrong. This usually means that the _gpg path
# does not exist. TODO: distinguish between this and other errors.
tty.warn(f"did not update the key index: {e}")
bindist.generate_key_index(keys_url)
def update_index_fn(args):
"""update a buildcache index"""
return update_index(args.mirror, update_keys=args.keys)
update_index(args.mirror, update_keys=args.keys)
def buildcache(parser, args):
return args.func(args)
if args.func:
args.func(args)

View File

@@ -14,7 +14,6 @@
import spack.binary_distribution as bindist
import spack.ci as spack_ci
import spack.cmd
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
@@ -33,7 +32,6 @@
SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
def deindent(desc):
@@ -707,9 +705,11 @@ def ci_rebuild(args):
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
cdash_handler.copy_test_results(reports_dir, job_test_dir)
# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
# outside of the pipeline environment.
if install_exit_code == 0:
# If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
# will result in a non-zero exit code. Pushing is best-effort.
mirror_urls = [buildcache_mirror_url]
# TODO: Remove this block in Spack 0.23
@@ -721,12 +721,13 @@ def ci_rebuild(args):
destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(),
):
if not result.success:
install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
(tty.msg if result.success else tty.error)(
f'{"Pushed" if result.success else "Failed to push"} '
f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
f"to {result.url}"
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
)
)
# If this is a develop pipeline, check if the spec that we just built is
@@ -747,22 +748,22 @@ def ci_rebuild(args):
tty.warn(msg.format(broken_spec_path, err))
else:
# If the install did not succeed, print out some instructions on how to reproduce this
# build failure outside of the pipeline environment.
tty.debug("spack install exited non-zero, will not create buildcache")
api_root_url = os.environ.get("CI_API_V4_URL")
ci_project_id = os.environ.get("CI_PROJECT_ID")
ci_job_id = os.environ.get("CI_JOB_ID")
repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"
repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
api_root_url, ci_project_id, ci_job_id
)
# Control characters cause this to be printed in blue so it stands out
print(
f"""
reproduce_msg = """
\033[34mTo reproduce this build locally, run:
spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]
spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
If this project does not have public pipelines, you will need to first:
@@ -770,9 +771,12 @@ def ci_rebuild(args):
... then follow the printed instructions.\033[0;0m
"""
""".format(
repro_job_url
)
print(reproduce_msg)
rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:

View File

@@ -9,7 +9,6 @@
import shutil
import sys
import tempfile
from pathlib import Path
from typing import Optional
import llnl.string as string
@@ -45,7 +44,6 @@
"deactivate",
"create",
["remove", "rm"],
["rename", "mv"],
["list", "ls"],
["status", "st"],
"loads",
@@ -474,82 +472,11 @@ def env_remove(args):
tty.msg(f"Successfully removed environment '{bad_env_name}'")
#
# env rename
#
def env_rename_setup_parser(subparser):
"""rename an existing environment"""
subparser.add_argument(
"mv_from", metavar="from", help="name (or path) of existing environment"
)
subparser.add_argument(
"mv_to", metavar="to", help="new name (or path) for existing environment"
)
subparser.add_argument(
"-d",
"--dir",
action="store_true",
help="the specified arguments correspond to directory paths",
)
subparser.add_argument(
"-f", "--force", action="store_true", help="allow overwriting of an existing environment"
)
def env_rename(args):
"""Rename an environment.
This renames a managed environment or moves an anonymous environment.
"""
# Directory option has been specified
if args.dir:
if not ev.is_env_dir(args.mv_from):
tty.die("The specified path does not correspond to a valid spack environment")
from_path = Path(args.mv_from)
if not args.force:
if ev.is_env_dir(args.mv_to):
tty.die(
"The new path corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
if Path(args.mv_to).exists():
tty.die("The new path already exists; specify the --force flag to overwrite it.")
to_path = Path(args.mv_to)
# Name option being used
elif ev.exists(args.mv_from):
from_path = ev.environment.environment_dir_from_name(args.mv_from)
if not args.force and ev.exists(args.mv_to):
tty.die(
"The new name corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
to_path = ev.environment.root(args.mv_to)
# Neither
else:
tty.die("The specified name does not correspond to a managed spack environment")
# Guard against renaming from or to an active environment
active_env = ev.active_environment()
if active_env:
from_env = ev.Environment(from_path)
if from_env.path == active_env.path:
tty.die("Cannot rename active environment")
if to_path == active_env.path:
tty.die(f"{args.mv_to} is an active environment")
shutil.rmtree(to_path, ignore_errors=True)
fs.rename(from_path, to_path)
tty.msg(f"Successfully renamed environment {args.mv_from} to {args.mv_to}")
#
# env list
#
def env_list_setup_parser(subparser):
"""list managed environments"""
"""list available environments"""
def env_list(args):

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import sys
import llnl.util.lang
@@ -13,7 +14,6 @@
import spack.cmd as cmd
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
@@ -69,12 +69,6 @@ def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])
subparser.add_argument(
"-r",
"--only-roots",
action="store_true",
help="don't show full list of installed specs in an environment",
)
subparser.add_argument(
"-c",
"--show-concretized",
@@ -146,12 +140,6 @@ def setup_parser(subparser):
subparser.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--install-tree",
action="store",
default="all",
help="Install trees to query: 'all' (default), 'local', 'upstream', upstream name or path",
)
subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -180,12 +168,6 @@ def query_arguments(args):
q_args = {"installed": installed, "known": known, "explicit": explicit}
install_tree = args.install_tree
upstreams = spack.config.get("upstreams", {})
if install_tree in upstreams.keys():
install_tree = upstreams[install_tree]["install_tree"]
q_args["install_tree"] = install_tree
# Time window of installation
for attribute in ("start_date", "end_date"):
date = getattr(args, attribute)
@@ -195,22 +177,26 @@ def query_arguments(args):
return q_args
def make_env_decorator(env):
def setup_env(env):
"""Create a function for decorating specs when in an environment."""
roots = set(env.roots())
removed = set(env.removed_specs())
def strip_build(seq):
return set(s.copy(deps=("link", "run")) for s in seq)
added = set(strip_build(env.added_specs()))
roots = set(strip_build(env.roots()))
removed = set(strip_build(env.removed_specs()))
def decorator(spec, fmt):
# add +/-/* to show added/removed/root specs
if any(spec.dag_hash() == r.dag_hash() for r in roots):
return color.colorize(f"@*{{{fmt}}}")
return color.colorize("@*{%s}" % fmt)
elif spec in removed:
return color.colorize(f"@K{{{fmt}}}")
return color.colorize("@K{%s}" % fmt)
else:
return fmt
return "%s" % fmt
return decorator
return decorator, added, roots, removed
def display_env(env, args, decorator, results):
@@ -225,51 +211,28 @@ def display_env(env, args, decorator, results):
"""
tty.msg("In environment %s" % env.name)
num_roots = len(env.user_specs) or "No"
tty.msg(f"{num_roots} root specs")
if not env.user_specs:
tty.msg("No root specs")
else:
tty.msg("Root specs")
concrete_specs = {
root: concrete_root
for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
}
# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes
root_args = copy.copy(args)
root_args.paths = False
def root_decorator(spec, string):
"""Decorate root specs with their install status if needed"""
concrete = concrete_specs.get(spec)
if concrete:
status = color.colorize(concrete.install_status().value)
hash = concrete.dag_hash()
else:
status = color.colorize(spack.spec.InstallStatus.absent.value)
hash = "-" * 32
# TODO: status has two extra spaces on the end of it, but fixing this and other spec
# TODO: space format idiosyncrasies is complicated. Fix this eventually
status = status[:-2]
if args.long or args.very_long:
hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
return f"{status} {hash} {string}"
else:
return f"{status} {string}"
with spack.store.STORE.db.read_transaction():
# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.user_specs,
args,
# these are overrides of CLI args
paths=False,
long=False,
very_long=False,
# these enforce details in the root specs to show what the user asked for
root_args,
decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespaces=True,
show_flags=True,
show_full_compiler=True,
decorator=root_decorator,
variants=True,
)
print()
print()
if args.show_concretized:
tty.msg("Concretized roots")
@@ -279,7 +242,7 @@ def root_decorator(spec, string):
# Display a header for the installed packages section IF there are installed
# packages. If there aren't any, we'll just end up printing "0 installed packages"
# later.
if results and not args.only_roots:
if results:
tty.msg("Installed packages")
@@ -288,10 +251,9 @@ def find(parser, args):
results = args.specs(**q_args)
env = ev.active_environment()
if not env and args.only_roots:
tty.die("-r / --only-roots requires an active environment")
decorator = make_env_decorator(env) if env else lambda s, f: f
decorator = lambda s, f: f
if env:
decorator, _, roots, _ = setup_env(env)
# use groups by default except with format.
if args.groups is None:
@@ -318,12 +280,9 @@ def find(parser, args):
if env:
display_env(env, args, decorator, results)
count_suffix = " (not shown)"
if not args.only_roots:
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = ""
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
# print number of installed packages last (as the list may be long)
if sys.stdout.isatty() and args.groups:
pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
spack.cmd.print_how_many_pkgs(results, pkg_type)

View File

@@ -263,8 +263,8 @@ def _fmt_name_and_default(variant):
return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")
def _fmt_when(when: "spack.spec.Spec", indent: int):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(str(when))}")
def _fmt_when(when, indent):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
def _fmt_variant_description(variant, width, indent):
@@ -441,7 +441,7 @@ def get_url(version):
return "No URL"
url = get_url(preferred) if pkg.has_code else ""
line = version(" {0}".format(pad(preferred))) + color.cescape(str(url))
line = version(" {0}".format(pad(preferred))) + color.cescape(url)
color.cwrite(line)
print()
@@ -464,7 +464,7 @@ def get_url(version):
continue
for v, url in vers:
line = version(" {0}".format(pad(v))) + color.cescape(str(url))
line = version(" {0}".format(pad(v))) + color.cescape(url)
color.cprint(line)
@@ -475,7 +475,10 @@ def print_virtuals(pkg, args):
color.cprint(section_title("Virtual Packages: "))
if pkg.provided:
for when, specs in reversed(sorted(pkg.provided.items())):
line = " %s provides %s" % (when.cformat(), ", ".join(s.cformat() for s in specs))
line = " %s provides %s" % (
when.colorized(),
", ".join(s.colorized() for s in specs),
)
print(line)
else:
@@ -494,9 +497,7 @@ def print_licenses(pkg, args):
pad = padder(pkg.licenses, 4)
for when_spec in pkg.licenses:
license_identifier = pkg.licenses[when_spec]
line = license(" {0}".format(pad(license_identifier))) + color.cescape(
str(when_spec)
)
line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
color.cprint(line)

View File

@@ -420,9 +420,10 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
finally:
if env.views:
with env.write_transaction():
env.write(regenerate=True)
# TODO: this is doing way too much to trigger
# views and modules to be generated.
with env.write_transaction():
env.write(regenerate=True)
def concrete_specs_from_cli(args, install_kwargs):

View File

@@ -5,6 +5,8 @@
import sys
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.find
import spack.environment as ev
@@ -68,6 +70,16 @@ def setup_parser(subparser):
help="load the first match if multiple packages match the spec",
)
subparser.add_argument(
"--only",
default="package,dependencies",
dest="things_to_load",
choices=["package", "dependencies"],
help="select whether to load the package and its dependencies\n\n"
"the default is to load the package and all dependencies. alternatively, "
"one can decide to load only the package or only the dependencies",
)
subparser.add_argument(
"--list",
action="store_true",
@@ -98,6 +110,11 @@ def load(parser, args):
)
return 1
if args.things_to_load != "package,dependencies":
tty.warn(
"The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
)
with spack.store.STORE.db.read_transaction():
env_mod = uenv.environment_modifications_for_specs(*specs)
for spec in specs:

View File

@@ -71,11 +71,6 @@ def setup_parser(subparser):
help="the number of versions to fetch for each spec, choose 'all' to"
" retrieve all versions of each package",
)
create_parser.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(create_parser, ["specs"])
arguments.add_concretizer_args(create_parser)
@@ -113,11 +108,6 @@ def setup_parser(subparser):
"and source use `--type binary --type source` (default)"
),
)
add_parser.add_argument(
"--autopush",
action="store_true",
help=("set mirror to push automatically after installation"),
)
add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
add_parser_signed.add_argument(
"--unsigned",
@@ -185,21 +175,6 @@ def setup_parser(subparser):
),
)
set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
set_parser_autopush.add_argument(
"--autopush",
help="set mirror to push automatically after installation",
action="store_true",
default=None,
dest="autopush",
)
set_parser_autopush.add_argument(
"--no-autopush",
help="set mirror to not push automatically after installation",
action="store_false",
default=None,
dest="autopush",
)
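The --autopush/--no-autopush pair above is the usual argparse idiom for a tri-state option (True, False, or None when the user touches neither flag); a minimal standalone version:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--autopush", dest="autopush", action="store_true", default=None)
group.add_argument("--no-autopush", dest="autopush", action="store_false", default=None)

print(parser.parse_args([]).autopush)                 # None -> leave the mirror setting unchanged
print(parser.parse_args(["--autopush"]).autopush)     # True
print(parser.parse_args(["--no-autopush"]).autopush)  # False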
set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
set_parser_unsigned.add_argument(
"--unsigned",
@@ -243,7 +218,6 @@ def mirror_add(args):
or args.type
or args.oci_username
or args.oci_password
or args.autopush
or args.signed is not None
):
connection = {"url": args.url}
@@ -260,8 +234,6 @@ def mirror_add(args):
if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
if args.autopush:
connection["autopush"] = args.autopush
if args.signed is not None:
connection["signed"] = args.signed
mirror = spack.mirror.Mirror(connection, name=args.name)
@@ -298,8 +270,6 @@ def _configure_mirror(args):
changes["access_pair"] = [args.oci_username, args.oci_password]
if getattr(args, "signed", None) is not None:
changes["signed"] = args.signed
if getattr(args, "autopush", None) is not None:
changes["autopush"] = args.autopush
# argparse cannot distinguish between --binary and --no-binary when same dest :(
# notice that set-url does not have these args, so getattr
@@ -364,6 +334,7 @@ def concrete_specs_from_user(args):
specs = filter_externals(specs)
specs = list(set(specs))
specs.sort(key=lambda s: (s.name, s.version))
specs, _ = lang.stable_partition(specs, predicate_fn=not_excluded_fn(args))
return specs
@@ -408,50 +379,36 @@ def concrete_specs_from_cli_or_file(args):
return specs
class IncludeFilter:
def __init__(self, args):
self.exclude_specs = []
if args.exclude_file:
self.exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
if args.exclude_specs:
self.exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
self.private = args.private
def not_excluded_fn(args):
"""Return a predicate that evaluate to True if a spec was not explicitly
excluded by the user.
"""
exclude_specs = []
if args.exclude_file:
exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
if args.exclude_specs:
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
def __call__(self, x):
return all([self._not_license_excluded(x), self._not_cmdline_excluded(x)])
def not_excluded(x):
return not any(x.satisfies(y) for y in exclude_specs)
def _not_license_excluded(self, x):
"""True if the spec is for a private mirror, or as long as the
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(
"Skip adding {0} to mirror: the package.py file"
" indicates that a public mirror should not contain"
" it.".format(x.name)
)
return False
def _not_cmdline_excluded(self, x):
"""True if a spec was not explicitly excluded by the user."""
return not any(x.satisfies(y) for y in self.exclude_specs)
return not_excluded
def concrete_specs_from_environment():
def concrete_specs_from_environment(selection_fn):
env = ev.active_environment()
assert env, "an active environment is required"
mirror_specs = env.all_specs()
mirror_specs = filter_externals(mirror_specs)
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
return mirror_specs
def all_specs_with_all_versions():
def all_specs_with_all_versions(selection_fn):
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs.sort(key=lambda s: (s.name, s.version))
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
return mirror_specs
@@ -472,6 +429,12 @@ def versions_per_spec(args):
return num_versions
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
def process_mirror_stats(present, mirrored, error):
p, m, e = len(present), len(mirrored), len(error)
tty.msg(
@@ -517,28 +480,30 @@ def mirror_create(args):
# When no directory is provided, the source dir is used
path = args.directory or spack.caches.fetch_cache_location()
mirror_specs, mirror_fn = _specs_and_action(args)
mirror_fn(mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions)
def _specs_and_action(args):
include_fn = IncludeFilter(args)
if args.all and not ev.active_environment():
mirror_specs = all_specs_with_all_versions()
mirror_fn = create_mirror_for_all_specs
elif args.all and ev.active_environment():
mirror_specs = concrete_specs_from_environment()
mirror_fn = create_mirror_for_individual_specs
else:
mirror_specs = concrete_specs_from_user(args)
mirror_fn = create_mirror_for_individual_specs
create_mirror_for_all_specs(
path=path,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
return
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=include_fn)
return mirror_specs, mirror_fn
if args.all and ev.active_environment():
create_mirror_for_all_specs_inside_environment(
path=path,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
return
mirror_specs = concrete_specs_from_user(args)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
)
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
)
@@ -550,10 +515,11 @@ def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
process_mirror_stats(*mirror_stats.stats())
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
)
def mirror_destroy(args):

View File

@@ -91,6 +91,7 @@ def setup_parser(subparser):
def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)

View File

@@ -8,7 +8,6 @@
import os
import platform
import re
import shlex
import shutil
import sys
import tempfile
@@ -23,7 +22,6 @@
import spack.error
import spack.spec
import spack.util.executable
import spack.util.libc
import spack.util.module_cmd
import spack.version
from spack.util.environment import filter_system_paths
@@ -109,6 +107,7 @@ def _parse_link_paths(string):
"""
lib_search_paths = False
raw_link_dirs = []
tty.debug("parsing implicit link info")
for line in string.splitlines():
if lib_search_paths:
if line.startswith("\t"):
@@ -123,7 +122,7 @@ def _parse_link_paths(string):
continue
if _LINKER_LINE_IGNORE.match(line):
continue
tty.debug(f"implicit link dirs: link line: {line}")
tty.debug("linker line: %s" % line)
next_arg = False
for arg in line.split():
@@ -139,12 +138,15 @@ def _parse_link_paths(string):
link_dir_arg = _LINK_DIR_ARG.match(arg)
if link_dir_arg:
link_dir = link_dir_arg.group("dir")
tty.debug("linkdir: %s" % link_dir)
raw_link_dirs.append(link_dir)
link_dir_arg = _LIBPATH_ARG.match(arg)
if link_dir_arg:
link_dir = link_dir_arg.group("dir")
tty.debug("libpath: %s", link_dir)
raw_link_dirs.append(link_dir)
tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))
implicit_link_dirs = list()
visited = set()
@@ -154,7 +156,7 @@ def _parse_link_paths(string):
implicit_link_dirs.append(normalized_path)
visited.add(normalized_path)
tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
return implicit_link_dirs
@@ -182,21 +184,6 @@ def _parse_non_system_link_dirs(string: str) -> List[str]:
return list(p for p in link_dirs if not in_system_subdirectory(p))
def _parse_dynamic_linker(output: str):
"""Parse -dynamic-linker /path/to/ld.so from compiler output"""
for line in reversed(output.splitlines()):
if "-dynamic-linker" not in line:
continue
args = shlex.split(line)
for idx in reversed(range(1, len(args))):
arg = args[idx]
if arg == "-dynamic-linker" or arg == "--dynamic-linker":
return args[idx + 1]
elif arg.startswith("--dynamic-linker=") or arg.startswith("-dynamic-linker="):
return arg.split("=", 1)[1]
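A hedged example of the -dynamic-linker parsing above, run on a shortened line resembling gcc -v link output (the loader path is just a common glibc default):

import shlex

line = "collect2 ... -dynamic-linker /lib64/ld-linux-x86-64.so.2 /usr/lib/crt1.o main.o"
args = shlex.split(line)
for idx, arg in enumerate(args):
    if arg == "-dynamic-linker":
        print(args[idx + 1])  # /lib64/ld-linux-x86-64.so.2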
def in_system_subdirectory(path):
system_dirs = [
"/lib/",
@@ -430,35 +417,17 @@ def real_version(self):
self._real_version = self.version
return self._real_version
def implicit_rpaths(self) -> List[str]:
def implicit_rpaths(self):
if self.enable_implicit_rpaths is False:
return []
output = self.compiler_verbose_output
if not output:
return []
link_dirs = _parse_non_system_link_dirs(output)
# Put CXX first since it has the most linking issues
# And because it has flags that affect linking
link_dirs = self._get_compiler_link_paths()
all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
return list(paths_containing_libs(link_dirs, all_required_libs))
@property
def default_libc(self) -> Optional["spack.spec.Spec"]:
"""Determine libc targeted by the compiler from link line"""
output = self.compiler_verbose_output
if not output:
return None
dynamic_linker = _parse_dynamic_linker(output)
if not dynamic_linker:
return None
return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
@property
def required_libs(self):
"""For executables created with this compiler, the compiler libraries
@@ -467,18 +436,17 @@ def required_libs(self):
# By default every compiler returns the empty list
return []
@property
def compiler_verbose_output(self) -> Optional[str]:
"""Verbose output from compiling a dummy C source file. Output is cached."""
if not hasattr(self, "_compile_c_source_output"):
self._compile_c_source_output = self._compile_dummy_c_source()
print(self._compile_c_source_output)
return self._compile_c_source_output
def _compile_dummy_c_source(self) -> Optional[str]:
def _get_compiler_link_paths(self):
cc = self.cc if self.cc else self.cxx
if not cc or not self.verbose_flag:
return None
# Cannot determine implicit link paths without a compiler / verbose flag
return []
# What flag types apply to first_compiler, in what order
if cc == self.cc:
flags = ["cflags", "cppflags", "ldflags"]
else:
flags = ["cxxflags", "cppflags", "ldflags"]
try:
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
@@ -490,16 +458,20 @@ def _compile_dummy_c_source(self) -> Optional[str]:
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
)
cc_exe = spack.util.executable.Executable(cc)
for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
for flag_type in flags:
cc_exe.add_default_arg(*self.flags.get(flag_type, []))
with self.compiler_environment():
return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
return _parse_non_system_link_dirs(output)
except spack.util.executable.ProcessError as pe:
tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
return []
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
@property
def verbose_flag(self) -> Optional[str]:
def verbose_flag(self):
"""
This property should be overridden in the compiler subclass if a
verbose flag is available.

View File

@@ -10,7 +10,6 @@
import itertools
import multiprocessing.pool
import os
import warnings
from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -110,33 +109,27 @@ def _to_dict(compiler):
return {"compiler": d}
def get_compiler_config(
configuration: "spack.config.Configuration",
*,
scope: Optional[str] = None,
init_config: bool = False,
) -> List[Dict]:
def get_compiler_config(scope=None, init_config=False):
"""Return the compiler configuration for the specified architecture."""
config = configuration.get("compilers", scope=scope) or []
config = spack.config.get("compilers", scope=scope) or []
if config or not init_config:
return config
merged_config = configuration.get("compilers")
merged_config = spack.config.get("compilers")
if merged_config:
# Config is empty for this scope
# Do not init config because there is a non-empty scope
return config
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
_init_compiler_config(scope=scope)
config = spack.config.get("compilers", scope=scope)
return config
def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
def get_compiler_config_from_packages(scope=None):
"""Return the compiler configuration from packages.yaml"""
config = configuration.get("packages", scope=scope)
config = spack.config.get("packages", scope=scope)
if not config:
return []
@@ -223,15 +216,13 @@ def _compiler_config_from_external(config):
return compiler_entry
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
def _init_compiler_config(*, scope):
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)
spack.config.set("compilers", compilers_dict, scope=scope)
def compiler_config_files():
@@ -242,7 +233,7 @@ def compiler_config_files():
compiler_config = config.get("compilers", scope=name)
if compiler_config:
config_files.append(config.get_config_filename(name, "compilers"))
compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
if compiler_config_from_packages:
config_files.append(config.get_config_filename(name, "packages"))
return config_files
@@ -255,9 +246,7 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
compiler_config = get_compiler_config(scope, init_config=False)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -306,9 +295,7 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
compiler_config = get_compiler_config(scope, init_config=False)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -323,28 +310,21 @@ def _remove_compiler_from_scope(compiler_spec, scope):
# We need to preserve the YAML type for comments, hence we are copying the
# items in the list that has just been retrieved
compiler_config[:] = filtered_compiler_config
spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
spack.config.set("compilers", compiler_config, scope=scope)
return True
def all_compilers_config(
configuration: "spack.config.Configuration",
*,
scope: Optional[str] = None,
init_config: bool = True,
) -> List["spack.compiler.Compiler"]:
def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
from_packages_yaml = get_compiler_config_from_packages(scope)
if from_packages_yaml:
init_config = False
from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)
from_compilers_yaml = get_compiler_config(scope, init_config)
result = from_compilers_yaml + from_packages_yaml
# Dedupe entries by the compiler they represent
# If the entry is invalid, treat it as unique for deduplication
key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
key = lambda c: _compiler_from_config_entry(c["compiler"]) or id(c)
return list(llnl.util.lang.dedupe(result, key=key))
@@ -352,7 +332,7 @@ def all_compiler_specs(scope=None, init_config=True):
# Return compiler specs from the merged config.
return [
spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
for s in all_compilers_config(scope, init_config)
]
@@ -512,20 +492,11 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
def all_compilers(scope=None, init_config=True):
return all_compilers_from(
configuration=spack.config.CONFIG, scope=scope, init_config=init_config
)
def all_compilers_from(configuration, scope=None, init_config=True):
compilers = []
for items in all_compilers_config(
configuration=configuration, scope=scope, init_config=init_config
):
config = all_compilers_config(scope, init_config=init_config)
compilers = list()
for items in config:
items = items["compiler"]
compiler = _compiler_from_config_entry(items) # can be None in error case
if compiler:
compilers.append(compiler)
compilers.append(_compiler_from_config_entry(items))
return compilers
@@ -536,7 +507,7 @@ def compilers_for_spec(
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
config = all_compilers_config(scope, init_config)
matches = set(find(compiler_spec, scope, init_config))
compilers = []
@@ -546,7 +517,7 @@ def compilers_for_spec(
def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope)
config = all_compilers_config(scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -632,10 +603,7 @@ def _compiler_from_config_entry(items):
compiler = _compiler_cache.get(config_id, None)
if compiler is None:
try:
compiler = compiler_from_dict(items)
except UnknownCompilerError as e:
warnings.warn(e.message)
compiler = compiler_from_dict(items)
_compiler_cache[config_id] = compiler
return compiler
@@ -688,9 +656,7 @@ def get_compilers(config, cspec=None, arch_spec=None):
raise ValueError(msg)
continue
compiler = _compiler_from_config_entry(items)
if compiler:
compilers.append(compiler)
compilers.append(_compiler_from_config_entry(items))
return compilers
@@ -967,11 +933,10 @@ def _default_make_compilers(cmp_id, paths):
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers: List["spack.compiler.Compiler"] = []
compilers = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
candidates = make_compilers(compiler_id, compiler)
compilers.extend(x for x in candidates if x.cc is not None)
compilers.extend(make_compilers(compiler_id, compiler))
return compilers

View File

@@ -38,10 +38,10 @@ class Clang(Compiler):
cxx_names = ["clang++"]
# Subclasses use possible names of Fortran 77 compiler
f77_names = ["flang-new", "flang"]
f77_names = ["flang"]
# Subclasses use possible names of Fortran 90 compiler
fc_names = ["flang-new", "flang"]
fc_names = ["flang"]
version_argument = "--version"
@@ -171,11 +171,10 @@ def extract_version_from_output(cls, output):
match = re.search(
# Normal clang compiler versions are left as-is
r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
# Don't include hyphenated patch numbers in the version
# (see https://github.com/spack/spack/pull/14365 for details)
r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
r"(?:clang|flang-new) version ([^ )\n]+)",
r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
output,
)
if match:
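To see what the combined pattern extracts, a small standalone check against a few representative version lines (the sample strings are invented but typical):

import re

CLANG_VERSION = re.compile(
    r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
    r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
    r"(?:clang|flang-new) version ([^ )\n]+)"
)

for line in [
    "clang version 17.0.6 (Fedora 17.0.6-1.fc39)",
    "Ubuntu clang version 14.0.0-1ubuntu1",
    "flang-new version 18.1.2",
]:
    m = CLANG_VERSION.search(line)
    print(next(g for g in m.groups() if g))  # 17.0.6, 14.0.0, 18.1.2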

View File

@@ -0,0 +1,34 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import spack.compilers.oneapi
class Dpcpp(spack.compilers.oneapi.Oneapi):
"""This is the same as the oneAPI compiler but uses dpcpp instead of
icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
detect it as dpcpp.
Ideally we could switch out icpx for dpcpp where needed in the oneAPI
compiler definition, but two things are needed for that: (a) a way to
tell the compiler that it should be using dpcpp and (b) a way to
customize the link_paths
See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
"""
# Subclasses use possible names of C++ compiler
cxx_names = ["dpcpp"]
# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("oneapi", "icx"),
"cxx": os.path.join("oneapi", "dpcpp"),
"f77": os.path.join("oneapi", "ifx"),
"fc": os.path.join("oneapi", "ifx"),
}

View File

@@ -8,7 +8,7 @@
import subprocess
import sys
import tempfile
from typing import Dict, List
from typing import Dict, List, Set
import archspec.cpu
@@ -20,7 +20,15 @@
from spack.error import SpackError
from spack.version import Version, VersionRange
FC_PATH: Dict[str, str] = dict()
avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()
fortran_mapping = {
"2021.3.0": "19.29.30133",
"2021.2.1": "19.28.29913",
"2021.2.0": "19.28.29334",
"2021.1.0": "19.28.29333",
}
class CmdCall:
@@ -107,13 +115,15 @@ def command_str(self):
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
def get_valid_fortran_pth():
"""Assign maximum available fortran compiler version"""
# TODO (johnwparent): validate compatibility w/ try compiler
# functionality when added
def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
sort_fn = lambda fc_ver: Version(fc_ver)
sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
for ver in sort_fc_ver:
if ver in fortran_mapping:
if Version(cl_ver) <= Version(fortran_mapping[ver]):
return fc_path[ver]
return None
class Msvc(Compiler):
@@ -157,9 +167,11 @@ def __init__(self, *args, **kwargs):
# This positional argument "paths" is later parsed and process by the base class
# via the call to `super` later in this method
paths = args[3]
latest_fc = get_valid_fortran_pth()
new_pth = [pth if pth else latest_fc for pth in paths[2:]]
paths[2:] = new_pth
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
# Initialize, deferring to base class but then adding the vcvarsallfile
# file based on compiler executable path.
super().__init__(*args, **kwargs)
@@ -171,7 +183,7 @@ def __init__(self, *args, **kwargs):
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
compiler_root = os.path.join(self.cc, "../../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
@@ -186,34 +198,11 @@ def __init__(self, *args, **kwargs):
# paths[2] refers to the fc path and is a generic check
# for a fortran compiler
if paths[2]:
def get_oneapi_root(pth: str):
"""From within a prefix known to be a oneAPI path
determine the oneAPI root path from arbitrary point
under root
Args:
pth: path prefixed within oneAPI root
"""
if not pth:
return ""
while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
pth = os.path.dirname(pth)
return pth
# If this is found, it sets all the vars
oneapi_root = get_oneapi_root(self.fc)
if not oneapi_root:
raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
# some oneAPI exes return a version more precise than their
# install paths specify, so we determine the path from
# the install path rather than the fc executable itself
numver = r"\d+\.\d+(?:\.\d+)?"
pattern = f"((?:{numver})|(?:latest))"
version_from_path = re.search(pattern, self.fc).group(1)
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", version_from_path, "env", "vars.bat"
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
@@ -325,19 +314,23 @@ def setup_custom_environment(self, pkg, env):
@classmethod
def fc_version(cls, fc):
# We're using intel for the Fortran compilers, which exist if
# ONEAPI_ROOT is a meaningful variable
if not sys.platform == "win32":
return "unknown"
fc_ver = cls.default_version(fc)
FC_PATH[fc_ver] = fc
try:
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
except AttributeError:
raise SpackError(
"Windows compiler search paths not established, "
"please report this behavior to github.com/spack/spack"
)
clp = spack.util.executable.which_string("cl", path=sps)
return cls.default_version(clp) if clp else fc_ver
avail_fc_version.add(fc_ver)
fc_path[fc_ver] = fc
if os.getenv("ONEAPI_ROOT"):
try:
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
except AttributeError:
raise SpackError("Windows compiler search paths not established")
clp = spack.util.executable.which_string("cl", path=sps)
ver = cls.default_version(clp)
else:
ver = fc_ver
return ver
@classmethod
def f77_version(cls, f77):
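
Note: the reworked get_valid_fortran_pth above pairs an installed oneAPI Fortran compiler with the cl version it supports via fortran_mapping. A minimal standalone sketch of that selection logic, with plain tuples standing in for Spack's Version class and made-up paths:

fortran_mapping = {
    "2021.3.0": "19.29.30133",
    "2021.2.1": "19.28.29913",
    "2021.2.0": "19.28.29334",
    "2021.1.0": "19.28.29333",
}

def _ver(s):
    return tuple(int(p) for p in s.split("."))

def pick_fortran(cl_ver, fc_paths):
    # Walk known Fortran releases from oldest to newest and return the first whose
    # paired cl version is at least the requested one.
    for fc_ver in sorted(fc_paths, key=_ver):
        if fc_ver in fortran_mapping and _ver(cl_ver) <= _ver(fortran_mapping[fc_ver]):
            return fc_paths[fc_ver]
    return None

print(pick_fortran("19.28.29913", {"2021.2.1": r"C:\oneAPI\compiler\2021.2.1\bin\ifx.exe"}))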

View File

@@ -64,7 +64,7 @@ def verbose_flag(self):
#
# This way, we at least enable the implicit rpath detection, which is
# based on compilation of a C file (see method
# spack.compiler._compile_dummy_c_source): in the case of a mixed
# spack.compiler._get_compiler_link_paths): in the case of a mixed
# NAG/GCC toolchain, the flag will be passed to g++ (e.g.
# 'g++ -Wl,-v ./main.c'), otherwise, the flag will be passed to nagfor
# (e.g. 'nagfor -Wl,-v ./main.c' - note that nagfor recognizes '.c'

View File

@@ -749,6 +749,7 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
result = solver.solve(
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
)
result.raise_if_unsat()
return [s.copy() for s in result.specs]

View File

@@ -107,7 +107,7 @@
#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
SCOPES_METAVAR = "{defaults,system,site,user,command_line}[/PLATFORM] or env:ENVIRONMENT"
SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
#: Base name for the (internal) overrides scope.
_OVERRIDES_BASE_NAME = "overrides-"
@@ -1562,9 +1562,8 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
def use_configuration(
*scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the context manager.
This function invalidates caches, and is therefore very slow.
"""Use the configuration scopes passed as arguments within the
context manager.
Args:
*scopes_or_paths: scope objects or paths to be used

View File

@@ -25,7 +25,6 @@
import socket
import sys
import time
from json import JSONDecoder
from typing import (
Any,
Callable,
@@ -819,8 +818,7 @@ def _read_from_file(self, filename):
"""
try:
with open(filename, "r") as f:
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
fdata, _ = JSONDecoder().raw_decode(f.read())
fdata = sjson.load(f)
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e)) from e
@@ -835,24 +833,27 @@ def check(cond, msg):
# High-level file checks
db = fdata["database"]
check("installs" in db, "no 'installs' in JSON DB.")
check("version" in db, "no 'version' in JSON DB.")
installs = db["installs"]
# TODO: better version checking semantics.
version = vn.Version(db["version"])
if version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
elif version < _DB_VERSION and not any(
old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX
):
tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")
elif version < _DB_VERSION:
if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
tty.warn(
"Spack database version changed from %s to %s. Upgrading."
% (version, _DB_VERSION)
)
self.reindex(spack.store.STORE.layout)
installs = dict(
(k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
)
else:
check("installs" in db, "no 'installs' in JSON DB.")
installs = db["installs"]
self.reindex(spack.store.STORE.layout)
installs = dict(
(k, v.to_dict(include_fields=self._record_fields))
for k, v in self._data.items()
)
spec_reader = reader(version)
@@ -1620,32 +1621,15 @@ def query_local(self, *args, **kwargs):
query_local.__doc__ += _QUERY_DOCSTRING
def query(self, *args, **kwargs):
"""Query the Spack database including all upstream databases.
Additional Arguments:
install_tree (str): query 'all' (default), 'local', 'upstream', or upstream path
"""
install_tree = kwargs.pop("install_tree", "all")
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
if install_tree not in valid_trees:
msg = "Invalid install_tree argument to Database.query()\n"
msg += f"Try one of {', '.join(valid_trees)}"
tty.error(msg)
return []
"""Query the Spack database including all upstream databases."""
upstream_results = []
upstreams = self.upstream_dbs
if install_tree not in ("all", "upstream"):
upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
for upstream_db in upstreams:
for upstream_db in self.upstream_dbs:
# queries for upstream DBs need to *not* lock - we may not
# have permissions to do this and the upstream DBs won't know about
# us anyway (so e.g. they should never uninstall specs)
upstream_results.extend(upstream_db._query(*args, **kwargs) or [])
local_results = []
if install_tree in ("all", "local") or self.root == install_tree:
local_results = set(self.query_local(*args, **kwargs))
local_results = set(self.query_local(*args, **kwargs))
results = list(local_results) + list(x for x in upstream_results if x not in local_results)
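
Note: the longer form of query() above narrows which databases are searched based on the install_tree argument. A standalone sketch of just that selection step (plain string roots stand in for Database objects):

def select_databases(install_tree, local_root, upstream_roots):
    valid = ["all", "upstream", "local", local_root] + list(upstream_roots)
    if install_tree not in valid:
        raise ValueError(f"Invalid install_tree, try one of {', '.join(valid)}")
    upstreams = list(upstream_roots)
    if install_tree not in ("all", "upstream"):
        upstreams = [r for r in upstream_roots if r == install_tree]
    query_local = install_tree in ("all", "local") or install_tree == local_root
    return query_local, upstreams

print(select_databases("upstream", "/opt/spack", ["/shared/spack"]))  # (False, ['/shared/spack'])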

View File

@@ -27,7 +27,6 @@ class OpenMpi(Package):
* ``variant``
* ``version``
* ``requires``
* ``redistribute``
"""
import collections
@@ -64,7 +63,6 @@ class OpenMpi(Package):
__all__ = [
"DirectiveError",
"DirectiveMeta",
"DisableRedistribute",
"version",
"conflicts",
"depends_on",
@@ -77,7 +75,6 @@ class OpenMpi(Package):
"resource",
"build_system",
"requires",
"redistribute",
]
#: These are variant names used by Spack internally; packages can't use them
@@ -97,9 +94,6 @@ class OpenMpi(Package):
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]
SUPPORTED_LANGUAGES = ("fortran", "cxx")
def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
"""Create a ``Spec`` that indicates when a directive should be applied.
@@ -591,9 +585,6 @@ def depends_on(
@see The section "Dependency specs" in the Spack Packaging Guide.
"""
if spack.spec.Spec(spec).name in SUPPORTED_LANGUAGES:
assert type == "build", "languages must be of 'build' type"
return _language(lang_spec_str=spec, when=when)
def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
_depends_on(pkg, spec, when=when, type=type, patches=patches)
@@ -601,64 +592,6 @@ def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
return _execute_depends_on
#: Store whether a given Spec source/binary should not be redistributed.
class DisableRedistribute:
def __init__(self, source, binary):
self.source = source
self.binary = binary
@directive("disable_redistribute")
def redistribute(source=None, binary=None, when: WhenType = None):
"""Can be used inside a Package definition to declare that
the package source and/or compiled binaries should not be
redistributed.
By default, Packages allow source/binary distribution (i.e. in
mirrors). Because of this, and because overlapping enable/
disable specs are not allowed, this directive only allows users
to explicitly disable redistribution for specs.
"""
return lambda pkg: _execute_redistribute(pkg, source, binary, when)
def _execute_redistribute(
pkg: "spack.package_base.PackageBase", source=None, binary=None, when: WhenType = None
):
if source is None and binary is None:
return
elif (source is True) or (binary is True):
raise DirectiveError(
"Source/binary distribution are true by default, they can only "
"be explicitly disabled."
)
if source is None:
source = True
if binary is None:
binary = True
when_spec = _make_when_spec(when)
if not when_spec:
return
if source is False:
max_constraint = spack.spec.Spec(f"{pkg.name}@{when_spec.versions}")
if not max_constraint.satisfies(when_spec):
raise DirectiveError("Source distribution can only be disabled for versions")
if when_spec in pkg.disable_redistribute:
disable = pkg.disable_redistribute[when_spec]
if not source:
disable.source = True
if not binary:
disable.binary = True
else:
pkg.disable_redistribute[when_spec] = DisableRedistribute(
source=not source, binary=not binary
)
@directive(("extendees", "dependencies"))
def extends(spec, when=None, type=("build", "run"), patches=None):
"""Same as depends_on, but also adds this package to the extendee list.
@@ -1034,6 +967,7 @@ def license(
checked_by: string or list of strings indicating which github user checked the
license (if any).
when: A spec specifying when the license applies.
when: A spec specifying when the license applies.
"""
return lambda pkg: _execute_license(pkg, license_identifier, when)
@@ -1080,21 +1014,6 @@ def _execute_requires(pkg: "spack.package_base.PackageBase"):
return _execute_requires
@directive("languages")
def _language(lang_spec_str: str, *, when: Optional[Union[str, bool]] = None):
"""Temporary implementation of language virtuals, until compilers are proper dependencies."""
def _execute_languages(pkg: "spack.package_base.PackageBase"):
when_spec = _make_when_spec(when)
if not when_spec:
return
languages = pkg.languages.setdefault(when_spec, set())
languages.add(lang_spec_str)
return _execute_languages
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -106,16 +106,17 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
return path_str
def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
def check_disallowed_env_config_mods(scopes):
for scope in scopes:
config = scope.get_section("config")
if config and "environments_root" in config["config"]:
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)
with spack.config.use_configuration(scope):
if spack.config.get("config:environments_root"):
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)
return scopes
def default_manifest_yaml():
@@ -1426,7 +1427,7 @@ def _concretize_separately(self, tests=False):
# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
_ = spack.compilers.get_compiler_config(init_config=True)
# Early return if there is nothing to do
if len(args) == 0:
@@ -2462,10 +2463,6 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.scope_name = f"env:{environment_name(self.manifest_dir)}"
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
#: Configuration scopes associated with this environment. Note that these are not
#: invalidated by a re-read of the manifest file.
self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
if not self.manifest_file.exists():
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
raise SpackEnvironmentError(msg)
@@ -2811,19 +2808,16 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None:
return self._config_scopes
scopes: List[spack.config.ConfigScope] = [
*self.included_config_scopes,
spack.config.SingleFileScope(
self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
),
]
ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
return scopes
"""A list of all configuration scopes for the environment manifest.
Returns: All configuration scopes associated with the environment
"""
config_name = self.scope_name
env_scope = spack.config.SingleFileScope(
config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
)
return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""

View File

@@ -662,6 +662,9 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
return
# Drop externals
for s in specs:
if s.external:
tty.warn("Skipping external package: " + s.short_spec)
specs = [s for s in specs if not s.external]
self._sanity_check_view_projection(specs)

View File

@@ -1,27 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import spack.binary_distribution as bindist
import spack.mirror
def post_install(spec, explicit):
# Push package to all buildcaches with autopush==True
# Do nothing if package was not installed from source
pkg = spec.package
if pkg.installed_from_binary_cache:
return
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
bindist.push_or_raise(
spec,
mirror.push_url,
bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")

View File

@@ -119,7 +119,7 @@ def __init__(self, pkg_count: int):
self.pkg_ids: Set[str] = set()
def next_pkg(self, pkg: "spack.package_base.PackageBase"):
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
if pkg_id not in self.pkg_ids:
self.pkg_num += 1
@@ -221,12 +221,12 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg.spec)})")
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})")
return True
if pkg.spec.installed_upstream:
tty.verbose(
f"{package_id(pkg.spec)} is installed in an upstream Spack instance at "
f"{package_id(pkg)} is installed in an upstream Spack instance at "
f"{pkg.spec.prefix}"
)
_print_installed_pkg(pkg.prefix)
@@ -403,7 +403,7 @@ def _install_from_cache(
return False
t.stop()
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
tty.debug(f"Successfully extracted {pkg_id} from binary cache")
_write_timer_json(pkg, t, True)
@@ -484,7 +484,7 @@ def _process_binary_cache_tarball(
if download_result is None:
return False
tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache")
tty.msg(f"Extracting {package_id(pkg)} from binary cache")
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
@@ -513,7 +513,7 @@ def _try_install_from_binary_cache(
if not spack.mirror.MirrorCollection(binary=True):
return False
tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")
tty.debug(f"Searching for binary cache of {package_id(pkg)}")
with timer.measure("search"):
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
@@ -610,7 +610,7 @@ def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
Returns: list of package ids
"""
return [package_id(d) for d in spec.dependents()]
return [package_id(d.package) for d in spec.dependents()]
def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
@@ -720,7 +720,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
dump_packages(pkg.spec, packages_dir)
def package_id(spec: "spack.spec.Spec") -> str:
def package_id(pkg: "spack.package_base.PackageBase") -> str:
"""A "unique" package identifier for installation purposes
The identifier is used to track build tasks, locks, install, and
@@ -732,10 +732,10 @@ def package_id(spec: "spack.spec.Spec") -> str:
Args:
pkg: the package from which the identifier is derived
"""
if not spec.concrete:
if not pkg.spec.concrete:
raise ValueError("Cannot provide a unique, readable id when the spec is not concretized.")
return f"{spec.name}-{spec.version}-{spec.dag_hash()}"
return f"{pkg.name}-{pkg.version}-{pkg.spec.dag_hash()}"
class BuildRequest:
@@ -765,7 +765,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
self.pkg.last_phase = install_args.pop("stop_at", None) # type: ignore[attr-defined]
# Cache the package id for convenience
self.pkg_id = package_id(pkg.spec)
self.pkg_id = package_id(pkg)
# Save off the original install arguments plus standard defaults
# since they apply to the requested package *and* dependencies.
@@ -780,9 +780,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# are not able to return full dependents for all packages across
# environment specs.
self.dependencies = set(
package_id(d)
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
if package_id(d) != self.pkg_id
if package_id(d.package) != self.pkg_id
)
def __repr__(self) -> str:
@@ -832,7 +832,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
depflag = dt.LINK | dt.RUN
include_build_deps = self.install_args.get("include_build_deps")
if self.pkg_id == package_id(pkg.spec):
if self.pkg_id == package_id(pkg):
cache_only = self.install_args.get("package_cache_only")
else:
cache_only = self.install_args.get("dependencies_cache_only")
@@ -927,7 +927,7 @@ def __init__(
raise ValueError(f"{self.pkg.name} must have a concrete spec")
# The "unique" identifier for the task's package
self.pkg_id = package_id(self.pkg.spec)
self.pkg_id = package_id(self.pkg)
# The explicit build request associated with the package
if not isinstance(request, BuildRequest):
@@ -965,9 +965,9 @@ def __init__(
# if use traverse for transitive dependencies, then must remove
# transitive dependents on failure.
self.dependencies = set(
package_id(d)
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
if package_id(d) != self.pkg_id
if package_id(d.package) != self.pkg_id
)
# Handle bootstrapped compiler
@@ -976,18 +976,14 @@ def __init__(
# a dependency of the build task. Here we add it to self.dependencies
compiler_spec = self.pkg.spec.compiler
arch_spec = self.pkg.spec.architecture
strict = spack.concretize.Concretizer().check_for_compiler_existence
if (
not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
and not strict
):
if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
# The compiler is in the queue, identify it as dependency
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
dep.constrain(f"platform={str(arch_spec.platform)}")
dep.constrain(f"os={str(arch_spec.os)}")
dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
dep.concretize()
dep_id = package_id(dep)
dep_id = package_id(dep.package)
self.dependencies.add(dep_id)
# List of uninstalled dependencies, which is used to establish
@@ -1198,7 +1194,7 @@ def _add_bootstrap_compilers(
"""
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
for comp_pkg, is_compiler in packages:
pkgid = package_id(comp_pkg.spec)
pkgid = package_id(comp_pkg)
if pkgid not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
elif is_compiler:
@@ -1245,7 +1241,7 @@ def _add_init_task(
"""
task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg.spec))
all_deps[dep_id].add(package_id(pkg))
self._push_task(task)
@@ -1280,7 +1276,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
err = "Cannot proceed with {0}: {1}"
for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep)
dep_id = package_id(dep_pkg)
# Check for failure since a prefix lock is not required
if spack.store.STORE.failure_tracker.has_failed(dep):
@@ -1413,7 +1409,7 @@ def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package being installed
"""
self._remove_task(package_id(pkg.spec))
self._remove_task(package_id(pkg))
# Ensure we have a read lock to prevent others from uninstalling the
# spec during our installation.
@@ -1427,7 +1423,7 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package being locally installed
"""
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
pre = f"{pkg_id} cannot be installed locally:"
# External packages cannot be installed locally.
@@ -1469,7 +1465,7 @@ def _ensure_locked(
"write",
], f'"{lock_type}" is not a supported package management lock type'
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
ltype, lock = self.locks.get(pkg_id, (lock_type, None))
if lock and ltype == lock_type:
return ltype, lock
@@ -1605,7 +1601,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep)
dep_id = package_id(dep_pkg)
if dep_id not in self.build_tasks:
self._add_init_task(dep_pkg, request, False, all_deps)
@@ -1917,7 +1913,7 @@ def _flag_installed(
dependent_ids: set of the package's dependent ids, or None if the dependent ids are
limited to those maintained in the package (dependency DAG)
"""
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
if pkg_id in self.installed:
# Already determined the package has been installed
@@ -2278,15 +2274,11 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# whether to install source code with the package
self.install_source = install_args.get("install_source", False)
is_develop = pkg.spec.is_develop
# whether to keep the build stage after installation
# Note: user commands do not have an explicit choice to disable
# keeping stages (i.e., we have a --keep-stage option, but not
# a --destroy-stage option), so we can override a default choice
# to destroy
self.keep_stage = is_develop or install_args.get("keep_stage", False)
self.keep_stage = install_args.get("keep_stage", False)
# whether to restage
self.restage = (not is_develop) and install_args.get("restage", False)
self.restage = install_args.get("restage", False)
# whether to skip the patch phase
self.skip_patch = install_args.get("skip_patch", False)
@@ -2313,7 +2305,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# info/debug information
self.pre = _log_prefix(pkg.name)
self.pkg_id = package_id(pkg.spec)
self.pkg_id = package_id(pkg)
def run(self) -> bool:
"""Main entry point from ``build_process`` to kick off install in child."""

View File

@@ -137,12 +137,6 @@ def source(self):
def signed(self) -> bool:
return isinstance(self._data, str) or self._data.get("signed", True)
@property
def autopush(self) -> bool:
if isinstance(self._data, str):
return False
return self._data.get("autopush", False)
@property
def fetch_url(self):
"""Get the valid, canonicalized fetch URL"""
@@ -156,7 +150,7 @@ def push_url(self):
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
if top_level:
keys += ["binary", "source", "signed", "autopush"]
keys += ["binary", "source", "signed"]
changed = False
for key in keys:
if key in new_data and current_data.get(key) != new_data[key]:
@@ -292,7 +286,6 @@ def __init__(
scope=None,
binary: Optional[bool] = None,
source: Optional[bool] = None,
autopush: Optional[bool] = None,
):
"""Initialize a mirror collection.
@@ -304,27 +297,21 @@ def __init__(
If None, do not filter on binary mirrors.
source: If True, only include source mirrors.
If False, omit source mirrors.
If None, do not filter on source mirrors.
autopush: If True, only include mirrors that have autopush enabled.
If False, omit mirrors that have autopush enabled.
If None, do not filter on autopush."""
mirrors_data = (
mirrors.items()
if mirrors is not None
else spack.config.get("mirrors", scope=scope).items()
)
mirrors = (Mirror(data=mirror, name=name) for name, mirror in mirrors_data)
If None, do not filter on source mirrors."""
self._mirrors = {
name: Mirror(data=mirror, name=name)
for name, mirror in (
mirrors.items()
if mirrors is not None
else spack.config.get("mirrors", scope=scope).items()
)
}
def _filter(m: Mirror):
if source is not None and m.source != source:
return False
if binary is not None and m.binary != binary:
return False
if autopush is not None and m.autopush != autopush:
return False
return True
if source is not None:
self._mirrors = {k: v for k, v in self._mirrors.items() if v.source == source}
self._mirrors = {m.name: m for m in mirrors if _filter(m)}
if binary is not None:
self._mirrors = {k: v for k, v in self._mirrors.items() if v.binary == binary}
def __eq__(self, other):
return self._mirrors == other._mirrors
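
Note: a standalone sketch (not Spack code) of the predicate-based filtering the newer MirrorCollection constructor applies above; the dataclass is a made-up stand-in for Mirror with only the three filterable flags:

from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeMirror:
    name: str
    source: bool = True
    binary: bool = True
    autopush: bool = False


def filter_mirrors(mirrors, source: Optional[bool] = None, binary: Optional[bool] = None,
                   autopush: Optional[bool] = None):
    def keep(m: FakeMirror) -> bool:
        if source is not None and m.source != source:
            return False
        if binary is not None and m.binary != binary:
            return False
        if autopush is not None and m.autopush != autopush:
            return False
        return True

    return {m.name: m for m in mirrors if keep(m)}


mirrors = [FakeMirror("develop", autopush=True), FakeMirror("local-cache")]
print(filter_mirrors(mirrors, binary=True, autopush=True))  # keeps only "develop"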

View File

@@ -83,17 +83,6 @@ def configuration(module_set_name):
)
_FORMAT_STRING_RE = re.compile(r"({[^}]*})")
def _format_env_var_name(spec, var_name_fmt):
"""Format the variable name, but uppercase any formatted fields."""
fmt_parts = _FORMAT_STRING_RE.split(var_name_fmt)
return "".join(
spec.format(part).upper() if _FORMAT_STRING_RE.match(part) else part for part in fmt_parts
)
def _check_tokens_are_valid(format_string, message):
"""Checks that the tokens used in the format string are valid in
the context of module file and environment variable naming.
@@ -748,12 +737,20 @@ def environment_modifications(self):
exclude = self.conf.exclude_env_vars
# We may have tokens to substitute in environment commands
# Prepare a suitable transformation dictionary for the names
# of the environment variables. This means turning the valid
# tokens uppercase.
transform = {}
for token in _valid_tokens:
transform[token] = lambda s, string: str.upper(string)
for x in env:
# Ensure all the tokens are valid in this context
msg = "some tokens cannot be expanded in an environment variable name"
_check_tokens_are_valid(x.name, message=msg)
x.name = _format_env_var_name(self.spec, x.name)
# Transform them
x.name = self.spec.format(x.name, transform=transform)
if self.modification_needs_formatting(x):
try:
# Not every command has a value
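
Note: a standalone sketch (not Spack code) of what _format_env_var_name does above: split the variable-name template on {token} fields, expand and uppercase only those fields, and leave literal text untouched. A plain dict lookup stands in for Spec.format:

import re

_FORMAT_STRING_RE = re.compile(r"({[^}]*})")


def format_env_var_name(values: dict, template: str) -> str:
    parts = _FORMAT_STRING_RE.split(template)
    return "".join(
        values[p[1:-1]].upper() if _FORMAT_STRING_RE.match(p) else p for p in parts
    )


print(format_env_var_name({"name": "hdf5", "version": "1.14.3"}, "{name}_{version}_root"))
# -> HDF5_1.14.3_root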

View File

@@ -73,24 +73,17 @@ def vs_install_paths(self):
def msvc_paths(self):
return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]
@property
def oneapi_root(self):
root = os.environ.get("ONEAPI_ROOT", "") or os.path.join(
os.environ.get("ProgramFiles(x86)", ""), "Intel", "oneAPI"
)
if os.path.exists(root):
return root
@property
def compiler_search_paths(self):
# First Strategy: Find MSVC directories using vswhere
_compiler_search_paths = []
for p in self.msvc_paths:
_compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
oneapi_root = self.oneapi_root
if oneapi_root:
if os.getenv("ONEAPI_ROOT"):
_compiler_search_paths.extend(
glob.glob(os.path.join(oneapi_root, "compiler", "**", "bin"), recursive=True)
glob.glob(
os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
)
)
# Second strategy: Find MSVC via the registry

View File

@@ -468,41 +468,7 @@ def _names(when_indexed_dictionary):
return sorted(all_names)
class RedistributionMixin:
"""Logic for determining whether a Package is source/binary
redistributable.
"""
#: Store whether a given Spec source/binary should not be
#: redistributed.
disable_redistribute: Dict["spack.spec.Spec", "spack.directives.DisableRedistribute"]
# Source redistribution must be determined before concretization
# (because source mirrors work with un-concretized Specs).
@classmethod
def redistribute_source(cls, spec):
"""Whether it should be possible to add the source of this
package to a Spack mirror.
"""
for when_spec, disable_redistribute in cls.disable_redistribute.items():
if disable_redistribute.source and spec.satisfies(when_spec):
return False
return True
@property
def redistribute_binary(self):
"""Whether it should be possible to create a binary out of an
installed instance of this package.
"""
for when_spec, disable_redistribute in self.__class__.disable_redistribute.items():
if disable_redistribute.binary and self.spec.satisfies(when_spec):
return False
return True
class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass=PackageMeta):
class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
"""This is the superclass for all spack packages.
***The Package class***
@@ -601,7 +567,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
provided_together: Dict["spack.spec.Spec", List[Set[str]]]
patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
variants: Dict[str, Tuple["spack.variant.Variant", "spack.spec.Spec"]]
languages: Dict["spack.spec.Spec", Set[str]]
#: By default, packages are not virtual
#: Virtual packages override this attribute
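
Note: a standalone sketch (not Spack code) of the lookup redistribute_source performs in the mixin above: scan the stored when-spec to disable-flag mapping and refuse source mirroring for any matching spec. Plain strings and a caller-supplied match function stand in for real Specs:

from dataclasses import dataclass
from typing import Callable, Dict


@dataclass
class DisableRedistribute:
    source: bool
    binary: bool


def redistribute_source(rules: Dict[str, DisableRedistribute], spec: str,
                        satisfies: Callable[[str, str], bool]) -> bool:
    for when_spec, disable in rules.items():
        if disable.source and satisfies(spec, when_spec):
            return False
    return True


rules = {"mypkg@2.0:": DisableRedistribute(source=True, binary=False)}
exact = lambda s, w: s == w  # toy matcher; real Specs do constraint matching
print(redistribute_source(rules, "mypkg@2.0:", exact))  # False
print(redistribute_source(rules, "mypkg@1.0", exact))   # True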

View File

@@ -726,14 +726,14 @@ def first_repo(self):
"""Get the first repo in precedence order."""
return self.repos[0] if self.repos else None
@llnl.util.lang.memoized
def _all_package_names_set(self, include_virtuals):
return {name for repo in self.repos for name in repo.all_package_names(include_virtuals)}
@llnl.util.lang.memoized
def _all_package_names(self, include_virtuals):
"""Return all unique package names in all repositories."""
return sorted(self._all_package_names_set(include_virtuals), key=lambda n: n.lower())
all_pkgs = set()
for repo in self.repos:
for name in repo.all_package_names(include_virtuals):
all_pkgs.add(name)
return sorted(all_pkgs, key=lambda n: n.lower())
def all_package_names(self, include_virtuals=False):
return self._all_package_names(include_virtuals)
@@ -794,11 +794,7 @@ def patch_index(self):
@autospec
def providers_for(self, vpkg_spec):
providers = [
spec
for spec in self.provider_index.providers_for(vpkg_spec)
if spec.name in self._all_package_names_set(include_virtuals=False)
]
providers = self.provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.fullname)
return providers

View File

@@ -14,7 +14,7 @@
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
from urllib.request import HTTPSHandler, Request, build_opener
from urllib.request import HTTPHandler, Request, build_opener
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@@ -27,7 +27,6 @@
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import urllib_ssl_cert_handler
from .base import Reporter
from .extract import extract_test_parts
@@ -428,7 +427,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
opener = build_opener(HTTPHandler)
with open(filename, "rb") as f:
params_dict = {
"build": self.buildname,

View File

@@ -34,7 +34,6 @@
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},
},
}
}

View File

@@ -73,7 +73,6 @@
"environments_root": {"type": "string"},
"connect_timeout": {"type": "integer", "minimum": 0},
"verify_ssl": {"type": "boolean"},
"ssl_certs": {"type": "string"},
"suppress_gpg_warnings": {"type": "boolean"},
"install_missing_compilers": {"type": "boolean"},
"debug": {"type": "boolean"},

View File

@@ -46,7 +46,6 @@
"signed": {"type": "boolean"},
"fetch": fetch_and_push,
"push": fetch_and_push,
"autopush": {"type": "boolean"},
**connection, # type: ignore
},
}

View File

@@ -15,7 +15,6 @@
import types
import typing
import warnings
from contextlib import contextmanager
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
import archspec.cpu
@@ -41,8 +40,6 @@
import spack.spec
import spack.store
import spack.util.crypto
import spack.util.elf
import spack.util.libc
import spack.util.path
import spack.util.timer
import spack.variant
@@ -122,17 +119,6 @@ def __str__(self):
return f"{self._name_.lower()}"
@contextmanager
def spec_with_name(spec, name):
"""Context manager to temporarily set the name of a spec"""
old_name = spec.name
spec.name = name
try:
yield spec
finally:
spec.name = old_name
class RequirementKind(enum.Enum):
"""Purpose / provenance of a requirement"""
@@ -281,36 +267,8 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
return NoDuplicatesCounter(specs, tests=tests)
def all_compilers_in_config(configuration):
return spack.compilers.all_compilers_from(configuration)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
libcs = {
c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
}
if libcs:
return libcs
libc = spack.util.libc.libc_from_current_python_process()
return {libc} if libc else set()
def libc_is_compatible(lhs: spack.spec.Spec, rhs: spack.spec.Spec) -> bool:
return (
lhs.name == rhs.name
and lhs.external_path == rhs.external_path
and lhs.version >= rhs.version
)
def using_libc_compatibility() -> bool:
"""Returns True if we are currently using libc compatibility"""
return spack.platforms.host().name == "linux"
def all_compilers_in_config():
return spack.compilers.all_compilers()
def extend_flag_list(flag_list, new_flags):
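
Note: a standalone sketch (not Spack code) of the libc compatibility rule introduced above: a libc can stand in for another only if it is the same implementation, lives in the same prefix, and its version is at least the required one. Tuples stand in for Spack's Version type:

from dataclasses import dataclass


@dataclass
class FakeLibc:
    name: str
    external_path: str
    version: tuple


def libc_is_compatible(lhs: FakeLibc, rhs: FakeLibc) -> bool:
    return (
        lhs.name == rhs.name
        and lhs.external_path == rhs.external_path
        and lhs.version >= rhs.version
    )


host = FakeLibc("glibc", "/usr", (2, 35))
needed = FakeLibc("glibc", "/usr", (2, 31))
print(libc_is_compatible(host, needed))  # True
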
@@ -583,7 +541,6 @@ def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict
info.get("preferred", False),
not info.get("deprecated", False),
not version.isdevelop(),
not version.is_prerelease(),
version,
)
@@ -596,23 +553,6 @@ def _spec_with_default_name(spec_str, name):
return spec
def _external_config_with_implicit_externals(configuration):
# Read packages.yaml and normalize it, so that it will not contain entries referring to
# virtual packages.
packages_yaml = _normalize_packages_yaml(configuration.get("packages"))
# Add externals for libc from compilers on Linux
if not using_libc_compatibility():
return packages_yaml
for compiler in all_compilers_in_config(configuration):
libc = compiler.default_libc
if libc:
entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
packages_yaml.setdefault(libc.name, {}).setdefault("externals", []).append(entry)
return packages_yaml
class ErrorHandler:
def __init__(self, model):
self.model = model
@@ -747,9 +687,8 @@ def on_model(model):
raise UnsatisfiableSpecError(msg)
#: Data class to collect information on a requirement
class RequirementRule(NamedTuple):
"""Data class to collect information on a requirement"""
pkg_name: str
policy: str
requirements: List["spack.spec.Spec"]
@@ -758,27 +697,6 @@ class RequirementRule(NamedTuple):
message: Optional[str]
class KnownCompiler(NamedTuple):
"""Data class to collect information on compilers"""
spec: "spack.spec.Spec"
os: str
target: str
available: bool
compiler_obj: Optional["spack.compiler.Compiler"]
def _key(self):
return self.spec, self.os, self.target
def __eq__(self, other: object):
if not isinstance(other, KnownCompiler):
return NotImplemented
return self._key() == other._key()
def __hash__(self):
return hash(self._key())
class PyclingoDriver:
def __init__(self, cores=True):
"""Driver for the Python clingo interface.
@@ -831,16 +749,10 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
if not setup.concretize_everything:
self.control.load(os.path.join(parent_dir, "when_possible.lp"))
# Binary compatibility is based on libc on Linux, and on the os tag elsewhere
if using_libc_compatibility():
self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
else:
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
timer.stop("load")
# Grounding is the first step in the solve -- it turns our facts
@@ -850,6 +762,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
timer.stop("ground")
# With a grounded program, we can run the solve.
result = Result(specs)
models = [] # stable models if things go well
cores = [] # unsatisfiable cores if they do not
@@ -870,7 +783,6 @@ def on_model(model):
timer.stop("solve")
# once done, construct the solve result
result = Result(specs)
result.satisfiable = solve_result.satisfiable
if result.satisfiable:
@@ -911,8 +823,6 @@ def on_model(model):
print("Statistics:")
pprint.pprint(self.control.statistics)
result.raise_if_unsat()
if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
@@ -1037,9 +947,6 @@ def __init__(self, tests: bool = False):
self.pkgs: Set[str] = set()
self.explicitly_required_namespaces: Dict[str, str] = {}
# list of unique libc specs targeted by compilers (or an educated guess if no compiler)
self.libcs: List[spack.spec.Spec] = []
def pkg_version_rules(self, pkg):
"""Output declared versions of a package.
@@ -1132,52 +1039,41 @@ def conflict_rules(self, pkg):
)
self.gen.newline()
def package_languages(self, pkg):
for when_spec, languages in pkg.languages.items():
condition_msg = f"{pkg.name} needs the {', '.join(sorted(languages))} language"
if when_spec != spack.spec.Spec():
condition_msg += f" when {when_spec}"
condition_id = self.condition(when_spec, name=pkg.name, msg=condition_msg)
for language in sorted(languages):
self.gen.fact(fn.pkg_fact(pkg.name, fn.language(condition_id, language)))
self.gen.newline()
def config_compatible_os(self):
"""Facts about compatible os's specified in configs"""
self.gen.h2("Compatible OS from concretizer config file")
os_data = spack.config.get("concretizer:os_compatible", {})
for recent, reusable in os_data.items():
for old in reusable:
self.gen.fact(fn.os_compatible(recent, old))
self.gen.newline()
def compiler_facts(self):
"""Facts about available compilers."""
self.gen.h2("Available compilers")
for compiler_id, compiler in enumerate(self.possible_compilers):
indexed_possible_compilers = list(enumerate(self.possible_compilers))
for compiler_id, compiler in indexed_possible_compilers:
self.gen.fact(fn.compiler_id(compiler_id))
self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name))
self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version))
if compiler.os:
self.gen.fact(fn.compiler_os(compiler_id, compiler.os))
if compiler.operating_system:
self.gen.fact(fn.compiler_os(compiler_id, compiler.operating_system))
if compiler.target == "any":
compiler.target = None
if compiler.target is not None:
self.gen.fact(fn.compiler_target(compiler_id, compiler.target))
if compiler.compiler_obj is not None:
c = compiler.compiler_obj
for flag_type, flags in c.flags.items():
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
for flag_type, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
if compiler.available:
self.gen.fact(fn.compiler_available(compiler_id))
self.gen.fact(fn.compiler_weight(compiler_id, compiler_id))
self.gen.newline()
# Set compiler defaults, given a list of possible compilers
self.gen.h2("Default compiler preferences (CompilerID, Weight)")
ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))
for weight, (compiler_id, cspec) in enumerate(matches):
f = fn.compiler_weight(compiler_id, weight)
self.gen.fact(f)
def package_requirement_rules(self, pkg):
parser = RequirementParser(spack.config.CONFIG)
self.emit_facts_from_requirement_rules(parser.rules(pkg))
@@ -1192,9 +1088,6 @@ def pkg_rules(self, pkg, tests):
self.pkg_version_rules(pkg)
self.gen.newline()
# languages
self.package_languages(pkg)
# variants
self.variant_rules(pkg)
@@ -1391,39 +1284,34 @@ def condition(
Returns:
int: id of the condition created by this function
"""
name = required_spec.name or name
if not name:
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
named_cond = required_spec.copy()
named_cond.name = named_cond.name or name
if not named_cond.name:
raise ValueError(f"Must provide a name for anonymous condition: '{named_cond}'")
with spec_with_name(required_spec, name):
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the caller,
# we won't emit partial facts.
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the
# caller, we won't emit partial facts.
condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
trigger_id = self._get_condition_id(
required_spec, cache=self._trigger_cache, body=True, transform=transform_required
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
)
if not imposed_spec:
return condition_id
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
)
trigger_id = self._get_condition_id(
named_cond, cache=self._trigger_cache, body=True, transform=transform_required
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
if not imposed_spec:
return condition_id
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
return condition_id
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
for pred in imposed_constraints:
@@ -1610,8 +1498,12 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
requirement_weight += 1
def external_packages(self):
"""Facts on external packages, from packages.yaml and implicit externals."""
packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
"""Facts on external packages, as read from packages.yaml"""
# Read packages.yaml and normalize it, so that it
# will not contain entries referring to virtual
# packages.
packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
self.gen.h1("External packages")
for pkg_name, data in packages_yaml.items():
@@ -1662,7 +1554,6 @@ def external_imposition(input_spec, requirements):
self.gen.newline()
self.trigger_rules()
self.effect_rules()
def preferred_variants(self, pkg_name):
"""Facts on concretization preferences, as read from packages.yaml"""
@@ -1708,6 +1599,23 @@ def target_preferences(self):
for i, preferred in enumerate(package_targets):
self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))
def flag_defaults(self):
self.gen.h2("Compiler flag defaults")
# types of flags that can be on specs
for flag in spack.spec.FlagMap.valid_compiler_flags():
self.gen.fact(fn.flag_type(flag))
self.gen.newline()
# flags from compilers.yaml
compilers = all_compilers_in_config()
for compiler in compilers:
for name, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(
fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
)
def spec_clauses(
self,
spec: spack.spec.Spec,
@@ -1884,16 +1792,6 @@ def _spec_clauses(
if dep.name == "gcc-runtime":
continue
# libc is also solved again by clingo, but in this case the compatibility
# is not encoded in the parent node - so we need to emit explicit facts
if "libc" in dspec.virtuals:
for libc in self.libcs:
if libc_is_compatible(libc, dep):
clauses.append(
fn.attr("compatible_libc", spec.name, libc.name, libc.version)
)
continue
# We know dependencies are real for concrete specs. For abstract
# specs they just mean the dep is somehow in the DAG.
for dtype in dt.ALL_FLAGS:
@@ -2123,16 +2021,9 @@ def target_defaults(self, specs):
candidate_targets.append(ancestor)
best_targets = {uarch.family.name}
for compiler_id, known_compiler in enumerate(self.possible_compilers):
if not known_compiler.available:
continue
compiler = known_compiler.compiler_obj
for compiler_id, compiler in enumerate(self.possible_compilers):
# Stub support for cross-compilation, to be expanded later
if known_compiler.target is not None and compiler.target not in (
str(uarch.family),
"any",
):
if compiler.target is not None and compiler.target != str(uarch.family):
self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target))
self.gen.newline()
continue
@@ -2188,6 +2079,58 @@ def virtual_providers(self):
self.gen.fact(fn.virtual(vspec))
self.gen.newline()
def generate_possible_compilers(self, specs):
compilers = all_compilers_in_config()
# Search for compilers which differ only by aspects that are
# not selectable by users using the spec syntax
seen, sanitized_list = set(), []
for compiler in compilers:
key = compiler.spec, compiler.operating_system, compiler.target
if key in seen:
warnings.warn(
f"duplicate found for {compiler.spec} on "
f"{compiler.operating_system}/{compiler.target}. "
f"Edit your compilers.yaml configuration to remove it."
)
continue
sanitized_list.append(compiler)
seen.add(key)
cspecs = set([c.spec for c in compilers])
# add compiler specs from the input line to possibilities if we
# don't require compilers to exist.
strict = spack.concretize.Concretizer().check_for_compiler_existence
for s in traverse.traverse_nodes(specs):
# we don't need to validate compilers for already-built specs
if s.concrete or not s.compiler:
continue
version = s.compiler.versions.concrete
if not version or any(c.satisfies(s.compiler) for c in cspecs):
continue
# Error when a compiler is not found and strict mode is enabled
if strict:
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
# Make up a compiler matching the input spec. This is for bootstrapping.
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
compilers.append(
compiler_cls(s.compiler, operating_system=None, target=None, paths=[None] * 4)
)
self.gen.fact(fn.allow_compiler(s.compiler.name, version))
return list(
sorted(
compilers,
key=lambda compiler: (compiler.spec.name, compiler.spec.version),
reverse=True,
)
)
def define_version_constraints(self):
"""Define what version_satisfies(...) means in ASP logic."""
for pkg_name, versions in sorted(self.version_constraints):
@@ -2215,7 +2158,7 @@ def versions_for(v):
if isinstance(v, vn.StandardVersion):
return [v]
elif isinstance(v, vn.ClosedOpenRange):
return [v.lo, vn._prev_version(v.hi)]
return [v.lo, vn.prev_version(v.hi)]
elif isinstance(v, vn.VersionList):
return sum((versions_for(e) for e in v), [])
else:
@@ -2349,7 +2292,8 @@ def setup(
node_counter = _create_counter(specs, tests=self.tests)
self.possible_virtuals = node_counter.possible_virtuals()
self.pkgs = node_counter.possible_dependencies()
self.libcs = sorted(all_libcs()) # type: ignore[type-var]
self.pkgs.update(spack.repo.PATH.packages_with_tags("runtime"))
# Fail if we already know an unreachable node is requested
for spec in specs:
@@ -2359,16 +2303,11 @@ def setup(
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
for node in traverse.traverse_nodes(specs):
for node in spack.traverse.traverse_nodes(specs):
if node.namespace is not None:
self.explicitly_required_namespaces[node.name] = node.namespace
self.gen = ProblemInstanceBuilder()
compiler_parser = CompilerParser(configuration=spack.config.CONFIG).with_input_specs(specs)
if using_libc_compatibility():
for libc in self.libcs:
self.gen.fact(fn.allowed_libc(libc.name, libc.version))
if not allow_deprecated:
self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2388,17 +2327,17 @@ def setup(
)
specs = tuple(specs) # ensure compatible types to add
# get possible compilers
self.possible_compilers = self.generate_possible_compilers(specs)
self.gen.h1("Reusable concrete specs")
self.define_concrete_input_specs(specs, self.pkgs)
if reuse:
self.gen.fact(fn.optimize_for_reuse())
for reusable_spec in reuse:
compiler_parser.add_compiler_from_concrete_spec(reusable_spec)
self.register_concrete_spec(reusable_spec, self.pkgs)
self.concrete_specs()
self.possible_compilers = compiler_parser.possible_compilers()
self.gen.h1("Generic statements on possible packages")
node_counter.possible_packages_facts(self.gen, fn)
@@ -2408,7 +2347,6 @@ def setup(
self.gen.newline()
self.gen.h1("General Constraints")
self.config_compatible_os()
self.compiler_facts()
# architecture defaults
@@ -2499,36 +2437,15 @@ def visit(node):
def define_runtime_constraints(self):
"""Define the constraints to be imposed on the runtimes"""
recorder = RuntimePropertyRecorder(self)
for compiler in self.possible_compilers:
compiler_with_different_cls_names = {
"oneapi": "intel-oneapi-compilers",
"clang": "llvm",
}
compiler_cls_name = compiler_with_different_cls_names.get(
compiler.spec.name, compiler.spec.name
)
try:
compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name)
if hasattr(compiler_cls, "runtime_constraints"):
compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)
except spack.repo.UnknownPackageError:
pass
# Inject libc from available compilers, on Linux
if not compiler.available:
if compiler.name != "gcc":
continue
if using_libc_compatibility() and compiler.compiler_obj.default_libc:
recorder("*").depends_on(
"libc", when=f"%{compiler.spec}", type="link", description="Add libc"
)
recorder("*").depends_on(
str(compiler.compiler_obj.default_libc),
when=f"%{compiler.spec}",
type="link",
description="Add libc",
)
try:
compiler_cls = spack.repo.PATH.get_pkg_class(compiler.name)
except spack.repo.UnknownPackageError:
continue
if hasattr(compiler_cls, "runtime_constraints"):
compiler_cls.runtime_constraints(compiler=compiler, pkg=recorder)
recorder.consume_facts()
@@ -2900,97 +2817,6 @@ def reject_requirement_constraint(
return False
class CompilerParser:
"""Parses configuration files, and builds a list of possible compilers for the solve."""
def __init__(self, configuration) -> None:
self.compilers: Set[KnownCompiler] = set()
for c in all_compilers_in_config(configuration):
if using_libc_compatibility() and not c.default_libc:
warnings.warn(
f"cannot detect libc from {c.spec}. The compiler will not be used "
f"during concretization."
)
continue
target = c.target if c.target != "any" else None
candidate = KnownCompiler(
spec=c.spec, os=c.operating_system, target=target, available=True, compiler_obj=c
)
if candidate in self.compilers:
warnings.warn(
f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
f"Edit your compilers.yaml configuration to remove it."
)
continue
self.compilers.add(candidate)
def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
"""Accounts for input specs when building the list of possible compilers.
Args:
input_specs: specs to be concretized
"""
strict = spack.concretize.Concretizer().check_for_compiler_existence
default_os = str(spack.platforms.host().default_os)
default_target = str(archspec.cpu.host().family)
for s in traverse.traverse_nodes(input_specs):
# we don't need to validate compilers for already-built specs
if s.concrete or not s.compiler:
continue
version = s.compiler.versions.concrete
if not version or any(item.spec.satisfies(s.compiler) for item in self.compilers):
continue
# Error when a compiler is not found and strict mode is enabled
if strict:
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
# Make up a compiler matching the input spec. This is for bootstrapping.
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
compiler_obj = compiler_cls(
s.compiler, operating_system=default_os, target=default_target, paths=[None] * 4
)
self.compilers.add(
KnownCompiler(
spec=s.compiler,
os=default_os,
target=default_target,
available=True,
compiler_obj=compiler_obj,
)
)
return self
def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
"""Account for compilers that are coming from concrete specs, through reuse.
Args:
spec: concrete spec to be reused
"""
assert spec.concrete, "the spec argument must be concrete"
candidate = KnownCompiler(
spec=spec.compiler,
os=str(spec.architecture.os),
target=str(spec.architecture.target.microarchitecture.family),
available=False,
compiler_obj=None,
)
self.compilers.add(candidate)
def possible_compilers(self) -> List[KnownCompiler]:
# Here we have to sort two times, first sort by name and ascending version
result = sorted(self.compilers, key=lambda x: (x.spec.name, x.spec.version), reverse=True)
# Then stable sort to prefer available compilers and account for preferences
ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
result.sort(key=lambda x: (not x.available, ppk(x.spec)))
return result
class RuntimePropertyRecorder:
"""An object of this class is injected in callbacks to compilers, to let them declare
properties of the runtimes they support and of the runtimes they provide, and to add
@@ -3031,24 +2857,13 @@ def reset(self):
"""Resets the current state."""
self.current_package = None
def depends_on(
self,
dependency_str: str,
*,
when: str,
type: str,
description: str,
languages: Optional[List[str]] = None,
) -> None:
def depends_on(self, dependency_str: str, *, when: str, type: str, description: str) -> None:
"""Injects conditional dependencies on packages.
Conditional dependencies can be either "real" packages or virtual dependencies.
Args:
dependency_str: the dependency spec to inject
when: anonymous condition to be met on a package to have the dependency
type: dependency type
languages: languages needed by the package for the dependency to be considered
description: human-readable description of the rule for adding the dependency
"""
# TODO: The API for this function is not final, and is still subject to change. At
@@ -3074,45 +2889,26 @@ def depends_on(
f" not external({node_variable}),\n"
f" not runtime(Package)"
).replace(f'"{placeholder}"', f"{node_variable}")
if languages:
body_str += ",\n"
for language in languages:
body_str += f' attr("language", {node_variable}, "{language}")'
head_clauses = self._setup.spec_clauses(dependency_spec, body=False)
runtime_pkg = dependency_spec.name
is_virtual = head_clauses[0].args[0] == "virtual_node"
main_rule = (
f"% {description}\n"
f'1 {{ attr("depends_on", {node_variable}, node(0..X-1, "{runtime_pkg}"), "{type}") :'
f' max_dupes("{runtime_pkg}", X)}} 1:-\n'
f' max_dupes("gcc-runtime", X)}} 1:-\n'
f"{body_str}.\n\n"
)
if is_virtual:
main_rule = (
f"% {description}\n"
f'attr("dependency_holds", {node_variable}, "{runtime_pkg}", "{type}") :-\n'
f"{body_str}.\n\n"
)
self.rules.append(main_rule)
for clause in head_clauses:
if clause.args[0] == "node":
continue
runtime_node = f'node(RuntimeID, "{runtime_pkg}")'
head_str = str(clause).replace(f'"{runtime_pkg}"', runtime_node)
depends_on_constraint = (
rule = (
f"{head_str} :-\n"
f' attr("depends_on", {node_variable}, {runtime_node}, "{type}"),\n'
f"{body_str}.\n\n"
)
if is_virtual:
depends_on_constraint = (
f' attr("depends_on", {node_variable}, ProviderNode, "{type}"),\n'
f" provider(ProviderNode, {runtime_node}),\n"
)
rule = f"{head_str} :-\n" f"{depends_on_constraint}" f"{body_str}.\n\n"
self.rules.append(rule)
self.reset()
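# Illustrative note (not part of this diff): for a hypothetical runtime callback such as
#   pkg("*").depends_on("gcc-runtime", when="%gcc", type="link", description="...")
# the main rule appended above takes roughly the shape
#   1 { attr("depends_on", node(X, Package), node(0..X-1, "gcc-runtime"), "link")
#       : max_dupes("gcc-runtime", X) } 1 :- <body derived from the when= condition>.
# while the head clauses of the dependency spec are attached to the chosen runtime node.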
@@ -3277,8 +3073,12 @@ def no_flags(self, node, flag_type):
self._specs[node].compiler_flags[flag_type] = []
def external_spec_selected(self, node, idx):
"""This means that the external spec and index idx has been selected for this package."""
packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
"""This means that the external spec and index idx
has been selected for this package.
"""
packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
spec_info = packages_yaml[node.pkg]["externals"][int(idx)]
self._specs[node].external_path = spec_info.get("prefix", None)
self._specs[node].external_modules = spack.spec.Spec._format_module_list(
@@ -3326,9 +3126,7 @@ def reorder_flags(self):
imposes order afterwards.
"""
# reverse compilers so we get highest priority compilers that share a spec
compilers = dict(
(c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
)
compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
for spec in self._specs.values():
@@ -3520,7 +3318,7 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
return False
if not spec.external:
return _has_runtime_dependencies(spec)
return True
# Cray external manifest externals are always reusable
if local:
@@ -3545,19 +3343,6 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
return False
def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
if not WITH_RUNTIME:
return True
if spec.compiler.name == "gcc" and not spec.dependencies("gcc-runtime"):
return False
if spec.compiler.name == "oneapi" and not spec.dependencies("intel-oneapi-runtime"):
return False
return True
class Solver:
"""This is the main external interface class for solving.
@@ -3576,7 +3361,7 @@ def __init__(self):
# These properties are settable via spack configuration, and overridable
# by setting them directly as properties.
self.reuse = spack.config.get("concretizer:reuse", True)
self.reuse = spack.config.get("concretizer:reuse", False)
@staticmethod
def _check_input_and_extract_concrete_specs(specs):
@@ -3593,7 +3378,7 @@ def _check_input_and_extract_concrete_specs(specs):
def _reusable_specs(self, specs):
reusable_specs = []
if self.reuse:
packages = _external_config_with_implicit_externals(spack.config.CONFIG)
packages = spack.config.get("packages")
# Specs from the local Database
with spack.store.STORE.db.read_transaction():
reusable_specs.extend(
@@ -3699,14 +3484,9 @@ def solve_in_rounds(
if not result.unsolved_specs:
break
if not result.specs:
# This is also a problem: no specs were solved for, which
# means we would be in a loop if we tried again
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: a subset of input specs could not"
f" be solved for.\n\t{unsolved_str}"
)
# This means we cannot progress with solving the input
if not result.satisfiable or not result.specs:
break
input_specs = list(x for (x, y) in result.unsolved_specs)
for spec in result.specs:

View File

@@ -80,7 +80,6 @@ unification_set(SetID, VirtualNode)
#defined multiple_unification_sets/1.
#defined runtime/1.
%----
% Rules to break symmetry and speed-up searches
@@ -127,12 +126,10 @@ trigger_node(TriggerID, Node, Node) :-
trigger_condition_holds(TriggerID, Node),
literal(TriggerID).
% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
% Since we trigger the existence of literal nodes from a condition, we need to construct
% the condition_set/2 manually below
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
mentioned_in_literal(Root, Mentioned), Mentioned != Root.
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned).
% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-
@@ -140,20 +137,6 @@ explicitly_requested_root(node(min_dupe_id, Package)) :-
trigger_and_effect(Package, TriggerID, EffectID),
imposed_constraint(EffectID, "root", Package).
% Keep track of which nodes are associated with which root DAG
associated_with_root(RootNode, RootNode) :- attr("root", RootNode).
associated_with_root(RootNode, ChildNode) :-
depends_on(ParentNode, ChildNode),
associated_with_root(RootNode, ParentNode).
% We cannot have a node in the root condition set, that is not associated with that root
:- attr("root", RootNode),
condition_set(RootNode, node(X, Package)),
not virtual(Package),
not associated_with_root(RootNode, node(X, Package)).
#defined concretize_everything/0.
#defined literal/1.
@@ -175,14 +158,6 @@ error(100, multiple_values_error, Attribute, Package)
attr_single_value(Attribute),
2 { attr(Attribute, node(ID, Package), Value) }.
%-----------------------------------------------------------------------------
% Languages used
%-----------------------------------------------------------------------------
attr("language", node(X, Package), Language) :-
condition_holds(ConditionID, node(X, Package)),
pkg_fact(Package,language(ConditionID, Language)).
%-----------------------------------------------------------------------------
% Version semantics
%-----------------------------------------------------------------------------
@@ -539,12 +514,6 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
provider(ProviderNode, node(_, Virtual)),
not external(PackageNode).
% If a virtual node is in the answer set, it must be either a virtual root,
% or used somewhere
:- attr("virtual_node", node(_, Virtual)),
not attr("virtual_on_incoming_edges", _, Virtual),
not attr("virtual_root", node(_, Virtual)).
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
@@ -907,8 +876,12 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
Set1 < Set2, % see[1]
build(node(ID, Package)).
:- attr("variant_set", node(ID, Package), Variant, Value),
not attr("variant_value", node(ID, Package), Variant, Value).
% variant_set is an explicitly set variant value. If it's not 'set',
% we revert to the default value. If it is set, we force the set value
attr("variant_value", PackageNode, Variant, Value)
:- attr("node", PackageNode),
node_has_variant(PackageNode, Variant),
attr("variant_set", PackageNode, Variant, Value).
% The rules below allow us to prefer default values for variants
% whenever possible. If a variant is set in a spec, or if it is
@@ -993,13 +966,14 @@ pkg_fact(Package, variant_single_value("dev_path"))
% Platform semantics
%-----------------------------------------------------------------------------
% NOTE: Currently we have a single allowed platform per DAG, therefore there is no
% need to have additional optimization criteria. If we ever add cross-platform dags,
% this needs to be changed.
:- 2 { allowed_platform(Platform) }, internal_error("More than one allowed platform detected").
% if no platform is set, fall back to the default
error(100, "platform '{0}' is not allowed on the current host", Platform)
:- attr("node_platform", _, Platform), not allowed_platform(Platform).
1 { attr("node_platform", PackageNode, Platform) : allowed_platform(Platform) } 1
:- attr("node", PackageNode).
attr("node_platform", PackageNode, Platform)
:- attr("node", PackageNode),
not attr("node_platform_set", PackageNode),
node_platform_default(Platform).
% setting platform on a node is a hard constraint
attr("node_platform", PackageNode, Platform)
@@ -1023,6 +997,14 @@ error(100, "Cannot select '{0} os={1}' (operating system '{1}' is not buildable)
attr("node_os", node(X, Package), OS),
not buildable_os(OS).
% can't have dependencies on incompatible OS's
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
:- depends_on(node(X, Package), node(Y, Dependency)),
attr("node_os", node(X, Package), PackageNodeOS),
attr("node_os", node(Y, Dependency), DependencyOS),
not os_compatible(PackageNodeOS, DependencyOS),
build(node(X, Package)).
% give OS choice weights according to os declarations
node_os_weight(PackageNode, Weight)
:- attr("node", PackageNode),
@@ -1035,6 +1017,13 @@ os_compatible(OS, OS) :- os(OS).
% Transitive compatibility among operating systems
os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).
% We can select only operating systems compatible with the ones
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
attr("node_os", Package, ReusedOS),
internal_error("Reused OS incompatible with build OS").
% If an OS is set explicitly respect the value
attr("node_os", PackageNode, OS) :- attr("node_os_set", PackageNode, OS), attr("node", PackageNode).
@@ -1082,9 +1071,6 @@ error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Tar
compiler_version(CompilerID, Version),
build(node(X, Package)).
#defined compiler_supports_target/2.
#defined compiler_available/1.
% if a target is set explicitly, respect it
attr("node_target", PackageNode, Target)
:- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).
@@ -1115,7 +1101,7 @@ error(100, "'{0} target={1}' is not compatible with this machine", Package, Targ
% Compiler semantics
%-----------------------------------------------------------------------------
% There must be only one compiler set per built node.
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID), compiler_available(CompilerID) } :-
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID) } :-
attr("node", PackageNode),
build(PackageNode).
@@ -1132,7 +1118,6 @@ attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
:- node_compiler(PackageNode, CompilerID),
compiler_name(CompilerID, CompilerName),
compiler_version(CompilerID, CompilerVersion),
compiler_available(CompilerID),
build(PackageNode).
attr("node_compiler", PackageNode, CompilerName)
@@ -1193,8 +1178,8 @@ error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compil
node_compiler(node(X, Package), CompilerID),
compiler_name(CompilerID, Compiler),
compiler_version(CompilerID, Version),
compiler_os(CompilerID, CompilerOS),
not os_compatible(CompilerOS, OS),
not compiler_os(CompilerID, OS),
not allow_compiler(Compiler, Version),
build(node(X, Package)).
% If a package and one of its dependencies don't have the
@@ -1215,6 +1200,7 @@ compiler_mismatch_required(PackageNode, DependencyNode)
not compiler_match(PackageNode, DependencyNode).
#defined compiler_os/3.
#defined allow_compiler/2.
% compilers weighted by preference according to packages.yaml
node_compiler_weight(node(ID, Package), Weight)
@@ -1499,20 +1485,18 @@ opt_criterion(40, "compiler mismatches that are not from CLI").
#minimize{ 0@240: #true }.
#minimize{ 0@40: #true }.
#minimize{
1@40+Priority,PackageNode,node(ID, Dependency)
: compiler_mismatch(PackageNode, node(ID, Dependency)),
build_priority(node(ID, Dependency), Priority),
not runtime(Dependency)
1@40+Priority,PackageNode,DependencyNode
: compiler_mismatch(PackageNode, DependencyNode),
build_priority(PackageNode, Priority)
}.
opt_criterion(39, "compiler mismatches that are not from CLI").
#minimize{ 0@239: #true }.
#minimize{ 0@39: #true }.
#minimize{
1@39+Priority,PackageNode,node(ID, Dependency)
: compiler_mismatch_required(PackageNode, node(ID, Dependency)),
build_priority(node(ID, Dependency), Priority),
not runtime(Dependency)
1@39+Priority,PackageNode,DependencyNode
: compiler_mismatch_required(PackageNode, DependencyNode),
build_priority(PackageNode, Priority)
}.
opt_criterion(30, "non-preferred OS's").
@@ -1529,10 +1513,9 @@ opt_criterion(25, "version badness").
#minimize{ 0@225: #true }.
#minimize{ 0@25: #true }.
#minimize{
Weight@25+Priority,node(X, Package)
: version_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not runtime(Package)
Weight@25+Priority,PackageNode
: version_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
% Try to use all the default values of variants
@@ -1551,10 +1534,9 @@ opt_criterion(15, "non-preferred compilers").
#minimize{ 0@215: #true }.
#minimize{ 0@15: #true }.
#minimize{
Weight@15+Priority,node(X, Package)
: node_compiler_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not runtime(Package)
Weight@15+Priority,PackageNode
: node_compiler_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
% Minimize the number of mismatches for targets in the DAG, try
@@ -1563,55 +1545,18 @@ opt_criterion(10, "target mismatches").
#minimize{ 0@210: #true }.
#minimize{ 0@10: #true }.
#minimize{
1@10+Priority,PackageNode,node(ID, Dependency)
: node_target_mismatch(PackageNode, node(ID, Dependency)),
build_priority(node(ID, Dependency), Priority),
not runtime(Dependency)
1@10+Priority,PackageNode,Dependency
: node_target_mismatch(PackageNode, Dependency),
build_priority(PackageNode, Priority)
}.
opt_criterion(5, "non-preferred targets").
#minimize{ 0@205: #true }.
#minimize{ 0@5: #true }.
#minimize{
Weight@5+Priority,node(X, Package)
: node_target_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not runtime(Package)
}.
% Minimize the number of compiler mismatches for runtimes
opt_criterion(4, "compiler mismatches (runtimes)").
#minimize{ 0@204: #true }.
#minimize{ 0@4: #true }.
#minimize{
1@4,PackageNode,node(ID, Dependency)
: compiler_mismatch(PackageNode, node(ID, Dependency)), runtime(Dependency)
}.
#minimize{
1@4,PackageNode,node(ID, Dependency)
: compiler_mismatch_required(PackageNode, node(ID, Dependency)), runtime(Dependency)
}.
% Choose more recent versions for runtimes
opt_criterion(3, "version badness (runtimes)").
#minimize{ 0@203: #true }.
#minimize{ 0@3: #true }.
#minimize{
Weight@3,node(X, Package)
: version_weight(node(X, Package), Weight),
runtime(Package)
}.
% Choose best target for runtimes
opt_criterion(2, "non-preferred targets (runtimes)").
#minimize{ 0@202: #true }.
#minimize{ 0@2: #true }.
#minimize{
Weight@2,node(X, Package)
: node_target_weight(node(X, Package), Weight),
runtime(Package)
Weight@5+Priority,PackageNode
: node_target_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
% Choose more recent versions for nodes

View File

@@ -10,7 +10,6 @@
import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec
PossibleDependencies = Set[str]
@@ -25,13 +24,7 @@ class Counter:
"""
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
runtime_virtuals = set()
for x in runtime_pkgs:
pkg_class = spack.repo.PATH.get_pkg_class(x)
runtime_virtuals.update(pkg_class.provided_virtual_names())
self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]
self.specs = specs
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
@@ -40,9 +33,7 @@ def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = (
set(x.name for x in specs if x.virtual) | runtime_virtuals
)
self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
def possible_dependencies(self) -> PossibleDependencies:
"""Returns the list of possible dependencies"""

View File

@@ -1,37 +0,0 @@
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Libc compatibility rules for reusing solves.
%
% These rules are used on Linux
%=============================================================================
% A package cannot be reused if the libc is not compatible with it
:- provider(node(X, LibcPackage), node(0, "libc")),
attr("version", node(X, LibcPackage), LibcVersion),
attr("hash", node(R, ReusedPackage), Hash),
% Libc packages can be reused without the "compatible_libc" attribute
ReusedPackage != LibcPackage,
not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).
% Check whether the DAG has any built package
has_built_packages() :- build(X), not external(X).
% A libc is needed in the DAG
:- has_built_packages(), not provider(_, node(0, "libc")).
% The libc must be chosen among available ones
:- has_built_packages(),
provider(node(X, LibcPackage), node(0, "libc")),
attr("node", node(X, LibcPackage)),
attr("version", node(X, LibcPackage), LibcVersion),
not allowed_libc(LibcPackage, LibcVersion).
% A built node must depend on libc
:- build(PackageNode),
provider(LibcNode, node(0, "libc")),
not external(PackageNode),
not depends_on(PackageNode, LibcNode).
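% Illustrative note (not part of this diff): under the reuse rule above, a reused spec
% carrying attr("hash", ...) is only admitted when it also exposes a matching
% attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion) fact, e.g.
%   attr("compatible_libc", node(0, "zlib"), "glibc", "2.31")
% (the package name and version in this example are hypothetical).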

View File

@@ -7,24 +7,21 @@
% OS compatibility rules for reusing solves.
% os_compatible(RecentOS, OlderOS)
% OlderOS binaries can be used on RecentOS
%
% These rules are used on every platform except Linux
%=============================================================================
% macOS
os_compatible("monterey", "bigsur").
os_compatible("bigsur", "catalina").
% can't have dependencies on incompatible OS's
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
:- depends_on(node(X, Package), node(Y, Dependency)),
attr("node_os", node(X, Package), PackageNodeOS),
attr("node_os", node(Y, Dependency), DependencyOS),
not os_compatible(PackageNodeOS, DependencyOS),
build(node(X, Package)).
% Ubuntu
os_compatible("ubuntu22.04", "ubuntu21.10").
os_compatible("ubuntu21.10", "ubuntu21.04").
os_compatible("ubuntu21.04", "ubuntu20.10").
os_compatible("ubuntu20.10", "ubuntu20.04").
os_compatible("ubuntu20.04", "ubuntu19.10").
os_compatible("ubuntu19.10", "ubuntu19.04").
os_compatible("ubuntu19.04", "ubuntu18.10").
os_compatible("ubuntu18.10", "ubuntu18.04").
% We can select only operating systems compatible with the ones
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
attr("node_os", Package, ReusedOS).
%EL8
os_compatible("rhel8", "rocky8").

View File

@@ -51,6 +51,7 @@
import collections
import collections.abc
import enum
import io
import itertools
import os
import pathlib
@@ -58,7 +59,7 @@
import re
import socket
import warnings
from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
import llnl.path
import llnl.string
@@ -120,44 +121,36 @@
"SpecDeprecatedError",
]
SPEC_FORMAT_RE = re.compile(
r"(?:" # this is one big or, with matches ordered by priority
# OPTION 1: escaped character (needs to be first to catch opening \{)
# Note that an unterminated \ at the end of a string is left untouched
r"(?:\\(.))"
r"|" # or
# OPTION 2: an actual format string
r"{" # non-escaped open brace {
r"([%@/]|arch=)?" # optional sigil (to print sigil in color)
r"(?:\^([^}\.]+)\.)?" # optional ^depname. (to get attr from dependency)
# after the sigil or depname, we can have a hash expression or another attribute
r"(?:" # one of
r"(hash\b)(?:\:(\d+))?" # hash followed by :<optional length>
r"|" # or
r"([^}]*)" # another attribute to format
r")" # end one of
r"(})?" # finish format string with non-escaped close brace }, or missing if not present
r"|"
# OPTION 3: mismatched close brace (option 2 would consume a matched open brace)
r"(})" # brace
r")",
re.IGNORECASE,
)
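# Illustrative note (not part of this diff): for a format string such as
#   "{name}-{version}-{hash:7}"
# the regex yields one match per brace group; the `attribute` group carries plain
# attributes ("name", "version"), while the `hash` and `hash_len` groups carry the hash
# keyword and its optional length ("hash", "7").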
#: Valid pattern for an identifier in Spack
IDENTIFIER_RE = r"\w[\w-]*"
# Coloring of specs when using color output. Fields are printed with
# different colors to enhance readability.
# See llnl.util.tty.color for descriptions of the color codes.
COMPILER_COLOR = "@g" #: color for highlighting compilers
VERSION_COLOR = "@c" #: color for highlighting versions
ARCHITECTURE_COLOR = "@m" #: color for highlighting architectures
VARIANT_COLOR = "@B" #: color for highlighting variants
ENABLED_VARIANT_COLOR = "@B" #: color for highlighting enabled variants
DISABLED_VARIANT_COLOR = "r" #: color for highlighting disabled variants
DEPENDENCY_COLOR = "@." #: color for highlighting dependencies
HASH_COLOR = "@K" #: color for highlighting package hashes
#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
COLOR_FORMATS = {
"%": COMPILER_COLOR,
"@": VERSION_COLOR,
"=": ARCHITECTURE_COLOR,
"+": ENABLED_VARIANT_COLOR,
"~": DISABLED_VARIANT_COLOR,
"^": DEPENDENCY_COLOR,
"#": HASH_COLOR,
}
#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``COLOR_FORMATS.keys()``.
_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())
#: Default format for Spec.format(). This format can be round-tripped, so that:
#:     Spec(Spec("string").format()) == Spec("string")
DEFAULT_FORMAT = (
@@ -200,7 +193,26 @@ class InstallStatus(enum.Enum):
missing = "@r{[-]} "
# regexes used in spec formatting
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
COLOR_FORMATS."""
class insert_color:
def __init__(self):
self.last = None
def __call__(self, match):
# ignore compiler versions (color same as compiler)
sep = match.group(0)
if self.last == "%" and sep == "@":
return clr.cescape(sep)
self.last = sep
return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))
return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")
OLD_STYLE_FMT_RE = re.compile(r"\${[A-Z]+}")
@@ -899,9 +911,6 @@ def flags():
yield flags
def __str__(self):
if not self:
return ""
sorted_items = sorted((k, v) for k, v in self.items() if v)
result = ""
@@ -1399,13 +1408,6 @@ def external_path(self, ext_path):
def external(self):
return bool(self.external_path) or bool(self.external_modules)
@property
def is_develop(self):
"""Return whether the Spec represents a user-developed package
in a Spack ``Environment`` (i.e. using `spack develop`).
"""
return bool(self.variants.get("dev_path", False))
def clear_dependencies(self):
"""Trim the dependencies of this spec."""
self._dependencies.clear()
@@ -2959,6 +2961,7 @@ def _new_concretize(self, tests=False):
allow_deprecated = spack.config.get("config:deprecated", False)
solver = spack.solver.asp.Solver()
result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
result.raise_if_unsat()
# take the best answer
opt, i, answer = min(result.answers)
@@ -4283,7 +4286,10 @@ def deps():
yield deps
def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = False) -> str:
def colorized(self):
return colorize_spec(self)
def format(self, format_string=DEFAULT_FORMAT, **kwargs):
r"""Prints out particular pieces of a spec, depending on what is
in the format string.
@@ -4346,65 +4352,79 @@ def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = Fa
literal ``\`` character.
Args:
format_string: string containing the format to be expanded
color: True for colorized result; False for no color; None for auto color.
format_string (str): string containing the format to be expanded
Keyword Args:
color (bool): True if returned string is colored
transform (dict): maps full-string formats to a callable \
that accepts a string and returns another one
"""
ensure_modern_format_string(format_string)
color = kwargs.get("color", False)
transform = kwargs.get("transform", {})
def safe_color(sigil: str, string: str, color_fmt: Optional[str]) -> str:
# avoid colorizing if there is no color or the string is empty
if (color is False) or not color_fmt or not string:
return sigil + string
# escape and add the sigil here to avoid multiple concatenations
if sigil == "@":
sigil = "@@"
return clr.colorize(f"{color_fmt}{sigil}{clr.cescape(string)}@.", color=color)
out = io.StringIO()
def format_attribute(match_object: Match) -> str:
(esc, sig, dep, hash, hash_len, attribute, close_brace, unmatched_close_brace) = (
match_object.groups()
)
if esc:
return esc
elif unmatched_close_brace:
raise SpecFormatStringError(f"Unmatched close brace: '{format_string}'")
elif not close_brace:
raise SpecFormatStringError(f"Missing close brace: '{format_string}'")
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
f = COLOR_FORMATS[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
current = self if dep is None else self[dep]
def write_attribute(spec, attribute, color):
attribute = attribute.lower()
# Hash attributes can return early.
# NOTE: we currently treat abstract_hash like an attribute and ignore
# any length associated with it. We may want to change that.
if hash:
if sig and sig != "/":
raise SpecFormatSigilError(sig, "DAG hashes", hash)
try:
length = int(hash_len) if hash_len else None
except ValueError:
raise SpecFormatStringError(f"Invalid hash length: '{hash_len}'")
return safe_color(sig or "", current.dag_hash(length), HASH_COLOR)
sig = ""
if attribute.startswith(("@", "%", "/")):
# color sigils that are inside braces
sig = attribute[0]
attribute = attribute[1:]
elif attribute.startswith("arch="):
sig = " arch=" # include space as separator
attribute = attribute[5:]
current = spec
if attribute.startswith("^"):
attribute = attribute[1:]
dep, attribute = attribute.split(".", 1)
current = self[dep]
if attribute == "":
raise SpecFormatStringError("Format string attributes must be non-empty")
attribute = attribute.lower()
parts = attribute.split(".")
assert parts
# check that the sigil is valid for the attribute.
if not sig:
sig = ""
elif sig == "@" and parts[-1] not in ("versions", "version"):
if sig == "@" and parts[-1] not in ("versions", "version"):
raise SpecFormatSigilError(sig, "versions", attribute)
elif sig == "%" and attribute not in ("compiler", "compiler.name"):
raise SpecFormatSigilError(sig, "compilers", attribute)
elif sig == "/" and attribute != "abstract_hash":
elif sig == "/" and not re.match(r"(abstract_)?hash(:\d+)?$", attribute):
raise SpecFormatSigilError(sig, "DAG hashes", attribute)
elif sig == "arch=":
if attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)
sig = " arch=" # include space as separator
elif sig == " arch=" and attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)
# find the morph function for our attribute
morph = transform.get(attribute, lambda s, x: x)
# Special cases for non-spec attributes and hashes.
# These must be the only non-dep component of the format attribute
if attribute == "spack_root":
write(morph(spec, spack.paths.spack_root))
return
elif attribute == "spack_install":
write(morph(spec, spack.store.STORE.layout.root))
return
elif re.match(r"hash(:\d)?", attribute):
col = "#"
if ":" in attribute:
_, length = attribute.split(":")
write(sig + morph(spec, current.dag_hash(int(length))), col)
else:
write(sig + morph(spec, current.dag_hash()), col)
return
# Iterate over components using getattr to get next element
for idx, part in enumerate(parts):
@@ -4413,7 +4433,7 @@ def format_attribute(match_object: Match) -> str:
if part.startswith("_"):
raise SpecFormatStringError("Attempted to format private attribute")
else:
if part == "variants" and isinstance(current, vt.VariantMap):
if isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
else:
@@ -4437,31 +4457,62 @@ def format_attribute(match_object: Match) -> str:
raise SpecFormatStringError(m)
if isinstance(current, vn.VersionList):
if current == vn.any_version:
# don't print empty version lists
return ""
# We don't print empty version lists
return
if callable(current):
raise SpecFormatStringError("Attempted to format callable object")
if current is None:
# not printing anything
return ""
# We're not printing anything
return
# Set color codes for various attributes
color = None
col = None
if "variants" in parts:
color = VARIANT_COLOR
col = "+"
elif "architecture" in parts:
color = ARCHITECTURE_COLOR
col = "="
elif "compiler" in parts or "compiler_flags" in parts:
color = COMPILER_COLOR
col = "%"
elif "version" in parts or "versions" in parts:
color = VERSION_COLOR
col = "@"
# return colored output
return safe_color(sig, str(current), color)
# Finally, write the output
write(sig + morph(spec, str(current)), col)
return SPEC_FORMAT_RE.sub(format_attribute, format_string).strip()
attribute = ""
in_attribute = False
escape = False
for c in format_string:
if escape:
out.write(c)
escape = False
elif c == "\\":
escape = True
elif in_attribute:
if c == "}":
write_attribute(self, attribute, color)
attribute = ""
in_attribute = False
else:
attribute += c
else:
if c == "}":
raise SpecFormatStringError(
"Encountered closing } before opening { in %s" % format_string
)
elif c == "{":
in_attribute = True
else:
out.write(c)
if in_attribute:
raise SpecFormatStringError(
"Format string terminated while reading attribute." "Missing terminating }."
)
formatted_spec = out.getvalue()
return formatted_spec.strip()
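# Hedged usage sketch (not part of this diff), assuming a concrete spec object named
# `spec`; the spec string below is hypothetical and the printed values are indicative only:
#
#   spec = spack.spec.Spec("zlib@1.2.13 %gcc@12.2.0").concretized()
#   spec.format("{name}-{version}")              # -> "zlib-1.2.13"
#   spec.format("{name}{@version}{%compiler}")   # -> "zlib@1.2.13%gcc@12.2.0"
#   spec.format("{name}/{hash:7}")               # -> name plus a 7-character DAG hash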
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
@@ -4469,16 +4520,6 @@ def cformat(self, *args, **kwargs):
kwargs.setdefault("color", None)
return self.format(*args, **kwargs)
@property
def spack_root(self):
"""Special field for using ``{spack_root}`` in Spec.format()."""
return spack.paths.spack_root
@property
def spack_install(self):
"""Special field for using ``{spack_install}`` in Spec.format()."""
return spack.store.STORE.layout.root
def format_path(
# self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None
self,
@@ -4504,27 +4545,18 @@ def format_path(
path_ctor = _path_ctor or pathlib.PurePath
format_string_as_path = path_ctor(format_string)
if format_string_as_path.is_absolute() or (
# Paths that begin with a single "\" on windows are relative, but we still
# want to preserve the initial "\\" to be consistent with PureWindowsPath.
# Ensure that this '\' is not passed to polite_filename() so it's not converted to '_'
(os.name == "nt" or path_ctor == pathlib.PureWindowsPath)
and format_string_as_path.parts[0] == "\\"
):
if format_string_as_path.is_absolute():
output_path_components = [format_string_as_path.parts[0]]
input_path_components = list(format_string_as_path.parts[1:])
else:
output_path_components = []
input_path_components = list(format_string_as_path.parts)
output_path_components += [
fs.polite_filename(self.format(part)) for part in input_path_components
fs.polite_filename(self.format(x)) for x in input_path_components
]
return str(path_ctor(*output_path_components))
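# Illustrative note (not part of this diff): format_path expands each path component of
# the format string separately and passes the result through fs.polite_filename, so
# attribute values cannot inject extra separators, e.g. (hypothetical layout string):
#   spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")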
def __str__(self):
if not self._dependencies:
return self.format()
root_str = [self.format()]
sorted_dependencies = sorted(
self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)

View File

@@ -927,10 +927,6 @@ def destroy(self):
shutil.rmtree(self.path)
except FileNotFoundError:
pass
try:
os.remove(self.reference_link)
except FileNotFoundError:
pass
self.created = False
def restage(self):

View File

@@ -142,7 +142,7 @@ def optimization_flags(self, compiler):
# custom spec.
compiler_version = compiler.version
version_number, suffix = archspec.cpu.version_components(compiler.version)
if not version_number or suffix:
if not version_number or suffix not in ("", "apple"):
# Try to deduce the underlying version of the compiler, regardless
# of its name in compilers.yaml. Depending on where this function
# is called we might get either a CompilerSpec or a fully fledged
@@ -155,6 +155,5 @@ def optimization_flags(self, compiler):
# log this and just return compiler.version instead
tty.debug(str(e))
return self.microarchitecture.optimization_flags(
compiler.name, compiler_version.dotted_numeric_string
)
compiler_version = compiler_version.dotted.force_numeric
return self.microarchitecture.optimization_flags(compiler.name, str(compiler_version))
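# Illustrative sketch (not part of this diff), mirroring the parametrized tests further down:
#   target = spack.target.Target("haswell")
#   target.optimization_flags(compiler)   # e.g. "-march=haswell -mtune=haswell" for gcc@9.2.0
# where `compiler` is a compiler object obtained from the configuration.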

View File

@@ -8,16 +8,13 @@
import pytest
import archspec.cpu
import llnl.util.filesystem as fs
import spack.compilers
import spack.concretize
import spack.operating_systems
import spack.platforms
import spack.target
from spack.spec import ArchSpec, Spec
from spack.spec import ArchSpec, CompilerSpec, Spec
@pytest.fixture(scope="module")
@@ -126,60 +123,52 @@ def test_arch_spec_container_semantic(item, architecture_str):
@pytest.mark.parametrize(
"compiler_spec,target_name,expected_flags",
[
# Homogeneous compilers
# Check compilers with version numbers from a single toolchain
("gcc@4.7.2", "ivybridge", "-march=core-avx-i -mtune=core-avx-i"),
("clang@3.5", "x86_64", "-march=x86-64 -mtune=generic"),
("apple-clang@9.1.0", "x86_64", "-march=x86-64"),
# Mixed toolchain
# Check mixed toolchains
("clang@8.0.0", "broadwell", ""),
("clang@3.5", "x86_64", "-march=x86-64 -mtune=generic"),
# Check Apple's Clang compilers
("apple-clang@9.1.0", "x86_64", "-march=x86-64"),
],
)
@pytest.mark.filterwarnings("ignore:microarchitecture specific")
def test_optimization_flags(compiler_spec, target_name, expected_flags, compiler_factory):
def test_optimization_flags(compiler_spec, target_name, expected_flags, config):
target = spack.target.Target(target_name)
compiler_dict = compiler_factory(spec=compiler_spec, operating_system="")["compiler"]
if compiler_spec == "clang@8.0.0":
compiler_dict["paths"] = {
"cc": "/path/to/clang-8",
"cxx": "/path/to/clang++-8",
"f77": "/path/to/gfortran-9",
"fc": "/path/to/gfortran-9",
}
compiler = spack.compilers.compiler_from_dict(compiler_dict)
compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
opt_flags = target.optimization_flags(compiler)
assert opt_flags == expected_flags
@pytest.mark.parametrize(
"compiler_str,real_version,target_str,expected_flags",
"compiler,real_version,target_str,expected_flags",
[
("gcc@=9.2.0", None, "haswell", "-march=haswell -mtune=haswell"),
(CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"),
# Check that custom string versions are accepted
("gcc@=10foo", "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client"),
(
CompilerSpec("gcc@=10foo"),
"9.2.0",
"icelake",
"-march=icelake-client -mtune=icelake-client",
),
# Check that we run version detection (4.4.0 doesn't support icelake)
("gcc@=4.4.0-special", "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client"),
(
CompilerSpec("gcc@=4.4.0-special"),
"9.2.0",
"icelake",
"-march=icelake-client -mtune=icelake-client",
),
# Check that the special case for Apple's clang is treated correctly
# i.e. it won't try to detect the version again
("apple-clang@=9.1.0", None, "x86_64", "-march=x86-64"),
(CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"),
],
)
def test_optimization_flags_with_custom_versions(
compiler_str,
real_version,
target_str,
expected_flags,
monkeypatch,
mutable_config,
compiler_factory,
compiler, real_version, target_str, expected_flags, monkeypatch, config
):
target = spack.target.Target(target_str)
compiler_dict = compiler_factory(spec=compiler_str, operating_system="redhat6")
mutable_config.set("compilers", [compiler_dict])
if real_version:
monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", lambda x: real_version)
compiler = spack.compilers.compiler_from_dict(compiler_dict["compiler"])
opt_flags = target.optimization_flags(compiler)
assert opt_flags == expected_flags
@@ -214,10 +203,9 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
)
@pytest.mark.usefixtures("mock_packages", "config")
@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
@pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
)
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
# Monkeypatch so that all concretization is done as if the machine is core2
monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")
with spack.concretize.disable_compiler_existence_check():
spec.concretize()

View File

@@ -19,8 +19,6 @@
import py
import pytest
import archspec.cpu
from llnl.util.filesystem import join_path, visit_directory_tree
import spack.binary_distribution as bindist
@@ -36,7 +34,7 @@
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.binary_distribution import CannotListKeys, GenerateIndexError, get_buildfile_manifest
from spack.binary_distribution import get_buildfile_manifest
from spack.directory_layout import DirectoryLayout
from spack.paths import test_path
from spack.spec import Spec
@@ -390,11 +388,11 @@ def test_built_spec_cache(mirror_dir):
assert any([r["spec"] == s for r in results])
def fake_dag_hash(spec, length=None):
def fake_dag_hash(spec):
# Generate an arbitrary hash that is intended to be different than
# whatever a Spec reported before (to test actions that trigger when
# the hash changes)
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"[:length]
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"
@pytest.mark.usefixtures(
@@ -465,57 +463,50 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
assert "libelf" not in cache_list
def test_generate_key_index_failure(monkeypatch):
def list_url(url, recursive=False):
if "fails-listing" in url:
raise Exception("Couldn't list the directory")
return ["first.pub", "second.pub"]
def push_to_url(*args, **kwargs):
raise Exception("Couldn't upload the file")
monkeypatch.setattr(web_util, "list_url", list_url)
monkeypatch.setattr(web_util, "push_to_url", push_to_url)
with pytest.raises(CannotListKeys, match="Encountered problem listing keys"):
bindist.generate_key_index("s3://non-existent/fails-listing")
with pytest.raises(GenerateIndexError, match="problem pushing .* Couldn't upload"):
bindist.generate_key_index("s3://non-existent/fails-uploading")
def test_generate_package_index_failure(monkeypatch, capfd):
def test_generate_indices_key_error(monkeypatch, capfd):
def mock_list_url(url, recursive=False):
raise Exception("Some HTTP error")
print("mocked list_url({0}, {1})".format(url, recursive))
raise KeyError("Test KeyError handling")
monkeypatch.setattr(web_util, "list_url", mock_list_url)
test_url = "file:///fake/keys/dir"
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
bindist.generate_package_index(test_url)
# Make sure generate_key_index handles the KeyError
bindist.generate_key_index(test_url)
assert (
f"Warning: Encountered problem listing packages at {test_url}: Some HTTP error"
in capfd.readouterr().err
)
err = capfd.readouterr()[1]
assert "Warning: No keys at {0}".format(test_url) in err
# Make sure generate_package_index handles the KeyError
bindist.generate_package_index(test_url)
err = capfd.readouterr()[1]
assert "Warning: No packages at {0}".format(test_url) in err
def test_generate_indices_exception(monkeypatch, capfd):
def mock_list_url(url, recursive=False):
print("mocked list_url({0}, {1})".format(url, recursive))
raise Exception("Test Exception handling")
monkeypatch.setattr(web_util, "list_url", mock_list_url)
url = "file:///fake/keys/dir"
test_url = "file:///fake/keys/dir"
with pytest.raises(GenerateIndexError, match=f"Encountered problem listing keys at {url}"):
bindist.generate_key_index(url)
# Make sure generate_key_index handles the Exception
bindist.generate_key_index(test_url)
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
bindist.generate_package_index(url)
err = capfd.readouterr()[1]
expect = "Encountered problem listing keys at {0}".format(test_url)
assert expect in err
assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err
# Make sure generate_package_index handles the Exception
bindist.generate_package_index(test_url)
err = capfd.readouterr()[1]
expect = "Encountered problem listing packages at {0}".format(test_url)
assert expect in err
@pytest.mark.usefixtures("mock_fetch", "install_mockery")
@@ -582,20 +573,11 @@ def test_update_sbang(tmpdir, test_mirror):
uninstall_cmd("-y", "/%s" % new_spec.dag_hash())
@pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64",
reason="test data uses gcc 4.5.0 which does not support aarch64",
)
def test_install_legacy_buildcache_layout(
mutable_config, compiler_factory, install_mockery_mutable_config
):
def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
"""Legacy buildcache layout involved a nested archive structure
where the .spack file contained a repeated spec.json and another
compressed archive file containing the install tree. This test
makes sure we can still read that layout."""
mutable_config.set(
"compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="debian6")]
)
legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout")
mirror_url = "file://{0}".format(legacy_layout_dir)
filename = (

View File

@@ -63,8 +63,7 @@ def build_environment(working_env):
os.environ["SPACK_LINKER_ARG"] = "-Wl,"
os.environ["SPACK_DTAGS_TO_ADD"] = "--disable-new-dtags"
os.environ["SPACK_DTAGS_TO_STRIP"] = "--enable-new-dtags"
os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include|/usr/lib"
os.environ["SPACK_MANAGED_DIRS"] = f"{prefix}/opt/spack"
os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include /usr/lib"
os.environ["SPACK_TARGET_ARGS"] = ""
if "SPACK_DEPENDENCIES" in os.environ:

View File

@@ -9,8 +9,6 @@
import py.path
import pytest
import archspec.cpu
import llnl.util.filesystem as fs
import spack.build_systems.autotools
@@ -211,9 +209,6 @@ def test_autotools_gnuconfig_replacement_disabled(
assert "gnuconfig version of config.guess" not in f.read()
@pytest.mark.disable_clean_stage_check
@pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
)
def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database, monkeypatch):
"""
Tests whether a useful error message is shown when patch_config_files is

View File

@@ -15,7 +15,7 @@
import spack.config
import spack.spec
from spack.paths import build_env_path
from spack.util.environment import SYSTEM_DIR_CASE_ENTRY, set_env
from spack.util.environment import SYSTEM_DIRS, set_env
from spack.util.executable import Executable, ProcessError
#
@@ -159,8 +159,7 @@ def wrapper_environment(working_env):
SPACK_DEBUG_LOG_ID="foo-hashabc",
SPACK_COMPILER_SPEC="gcc@4.4.7",
SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
SPACK_SYSTEM_DIRS=SYSTEM_DIR_CASE_ENTRY,
SPACK_MANAGED_DIRS="/path/to/spack-1/opt/spack/*|/path/to/spack-2/opt/spack/*",
SPACK_SYSTEM_DIRS=":".join(SYSTEM_DIRS),
SPACK_CC_RPATH_ARG="-Wl,-rpath,",
SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
SPACK_F77_RPATH_ARG="-Wl,-rpath,",
@@ -908,108 +907,3 @@ def test_linker_strips_loopopt(wrapper_environment, wrapper_flags):
result = cc(*(test_args + ["-loopopt=0", "-c", "x.c"]), output=str)
result = result.strip().split("\n")
assert "-loopopt=0" in result
def test_spack_managed_dirs_are_prioritized(wrapper_environment):
# We have two different stores with 5 packages divided over them
pkg1 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-1.0-abcdef"
pkg2 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-2.0-abcdef"
pkg3 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-3.0-abcdef"
pkg4 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-4.0-abcdef"
pkg5 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-5.0-abcdef"
variables = {
# cppflags, ldflags from the command line, config or package.py take highest priority
"SPACK_CPPFLAGS": f"-I/usr/local/include -I/external-1/include -I{pkg1}/include",
"SPACK_LDFLAGS": f"-L/usr/local/lib -L/external-1/lib -L{pkg1}/lib "
f"-Wl,-rpath,/usr/local/lib -Wl,-rpath,/external-1/lib -Wl,-rpath,{pkg1}/lib",
# automatic -L, -Wl,-rpath, -I flags from dependencies -- on the spack side they are
# already partitioned into "spack owned prefixes" and "non-spack owned prefixes"
"SPACK_STORE_LINK_DIRS": f"{pkg4}/lib:{pkg5}/lib",
"SPACK_STORE_RPATH_DIRS": f"{pkg4}/lib:{pkg5}/lib",
"SPACK_STORE_INCLUDE_DIRS": f"{pkg4}/include:{pkg5}/include",
"SPACK_LINK_DIRS": "/external-3/lib:/external-4/lib",
"SPACK_RPATH_DIRS": "/external-3/lib:/external-4/lib",
"SPACK_INCLUDE_DIRS": "/external-3/include:/external-4/include",
}
with set_env(SPACK_TEST_COMMAND="dump-args", **variables):
effective_call = (
cc(
# system paths
"-I/usr/include",
"-L/usr/lib",
"-Wl,-rpath,/usr/lib",
# some other externals
"-I/external-2/include",
"-L/external-2/lib",
"-Wl,-rpath,/external-2/lib",
# relative paths are considered "spack managed" since they are in the stage dir
"-I..",
"-L..",
"-Wl,-rpath,..", # pathological but simpler for the test.
# spack store paths
f"-I{pkg2}/include",
f"-I{pkg3}/include",
f"-L{pkg2}/lib",
f"-L{pkg3}/lib",
f"-Wl,-rpath,{pkg2}/lib",
f"-Wl,-rpath,{pkg3}/lib",
"hello.c",
"-o",
"hello",
output=str,
)
.strip()
.split("\n")
)
dash_I = [flag[2:] for flag in effective_call if flag.startswith("-I")]
dash_L = [flag[2:] for flag in effective_call if flag.startswith("-L")]
dash_Wl_rpath = [flag[11:] for flag in effective_call if flag.startswith("-Wl,-rpath")]
assert dash_I == [
# spack owned dirs from SPACK_*FLAGS
f"{pkg1}/include",
# spack owned dirs from command line & automatic flags for deps (in that order)]
"..",
f"{pkg2}/include", # from command line
f"{pkg3}/include", # from command line
f"{pkg4}/include", # from SPACK_STORE_INCLUDE_DIRS
f"{pkg5}/include", # from SPACK_STORE_INCLUDE_DIRS
# non-system dirs from SPACK_*FLAGS
"/external-1/include",
# non-system dirs from command line & automatic flags for deps (in that order)
"/external-2/include", # from command line
"/external-3/include", # from SPACK_INCLUDE_DIRS
"/external-4/include", # from SPACK_INCLUDE_DIRS
# system dirs from SPACK_*FLAGS
"/usr/local/include",
# system dirs from command line
"/usr/include",
]
assert (
dash_L
== dash_Wl_rpath
== [
# spack owned dirs from SPACK_*FLAGS
f"{pkg1}/lib",
# spack owned dirs from command line & automatic flags for deps (in that order)
"..",
f"{pkg2}/lib", # from command line
f"{pkg3}/lib", # from command line
f"{pkg4}/lib", # from SPACK_STORE_LINK_DIRS
f"{pkg5}/lib", # from SPACK_STORE_LINK_DIRS
# non-system dirs from SPACK_*FLAGS
"/external-1/lib",
# non-system dirs from command line & automatic flags for deps (in that order)
"/external-2/lib", # from command line
"/external-3/lib", # from SPACK_LINK_DIRS
"/external-4/lib", # from SPACK_LINK_DIRS
# system dirs from SPACK_*FLAGS
"/usr/local/lib",
# system dirs from command line
"/usr/lib",
]
)

View File

@@ -448,7 +448,7 @@ def _fail(self, args):
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
"""Test that create_buildcache returns a list of objects with the correct
keys and types."""
monkeypatch.setattr(spack.ci, "_push_to_build_cache", lambda a, b, c: True)
monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True)
results = ci.create_buildcache(
None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"]

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import json
import os
import shutil
@@ -169,25 +168,6 @@ def test_update_key_index(
assert "index.json" in key_dir_list
def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
"""Test buildcache with autopush"""
mirror_dir = tmp_path / "mirror"
mirror_autopush_dir = tmp_path / "mirror_autopush"
mirror("add", "--unsigned", "mirror", mirror_dir.as_uri())
mirror("add", "--autopush", "--unsigned", "mirror-autopush", mirror_autopush_dir.as_uri())
s = Spec("libdwarf").concretized()
# Install and generate build cache index
s.package.do_install()
metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
assert not (mirror_dir / "build_cache" / metadata_file).exists()
assert (mirror_autopush_dir / "build_cache" / metadata_file).exists()
def test_buildcache_sync(
mutable_mock_env_path,
install_mockery_mutable_config,
@@ -254,71 +234,10 @@ def verify_mirror_contents():
# Use mirror names to specify mirrors
mirror("add", "src", src_mirror_url)
mirror("add", "dest", dest_mirror_url)
mirror("add", "ignored", "file:///dummy/io")
buildcache("sync", "src", "dest")
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
def manifest_insert(manifest, spec, dest_url):
manifest[spec.dag_hash()] = [
{
"src": spack.util.url.join(
src_mirror_url,
spack.binary_distribution.build_cache_relative_path(),
spack.binary_distribution.tarball_name(spec, ".spec.json"),
),
"dest": spack.util.url.join(
dest_url,
spack.binary_distribution.build_cache_relative_path(),
spack.binary_distribution.tarball_name(spec, ".spec.json"),
),
},
{
"src": spack.util.url.join(
src_mirror_url,
spack.binary_distribution.build_cache_relative_path(),
spack.binary_distribution.tarball_path_name(spec, ".spack"),
),
"dest": spack.util.url.join(
dest_url,
spack.binary_distribution.build_cache_relative_path(),
spack.binary_distribution.tarball_path_name(spec, ".spack"),
),
},
]
manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json")
with open(manifest_file, "w") as fd:
test_env = ev.active_environment()
manifest = {}
for spec in test_env.specs_by_hash.values():
manifest_insert(manifest, spec, dest_mirror_url)
json.dump(manifest, fd)
buildcache("sync", "--manifest-glob", manifest_file)
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
manifest_file = os.path.join(tmpdir.strpath, "manifest_bad_dest.json")
with open(manifest_file, "w") as fd:
manifest = {}
for spec in test_env.specs_by_hash.values():
manifest_insert(
manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path")
)
json.dump(manifest, fd)
# Trigger the warning
output = buildcache("sync", "--manifest-glob", manifest_file, "dest", "ignored")
assert "Ignoring unused arguemnt: ignored" in output
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
def test_buildcache_create_install(
@@ -446,10 +365,3 @@ def test_push_and_install_with_mirror_marked_unsigned_does_not_require_extra_fla
spec.package.do_uninstall(force=True)
spec.package.do_install(**kwargs)
def test_skip_no_redistribute(mock_packages, config):
specs = list(Spec("no-redistribute-dependent").concretized().traverse())
filtered = spack.cmd.buildcache._skip_no_redistribute_for_public(specs)
assert not any(s.name == "no-redistribute" for s in filtered)
assert any(s.name == "no-redistribute-dependent" for s in filtered)

View File

@@ -26,7 +26,6 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.ci import schema as ci_schema
from spack.schema.database_index import schema as db_idx_schema
@@ -48,8 +47,6 @@
@pytest.fixture()
def ci_base_environment(working_env, tmpdir):
os.environ["CI_PROJECT_DIR"] = tmpdir.strpath
os.environ["CI_PIPELINE_ID"] = "7192"
os.environ["CI_JOB_NAME"] = "mock"
@pytest.fixture(scope="function")
@@ -117,13 +114,13 @@ def test_specs_staging(config, tmpdir):
with repo.use_repositories(builder.root):
spec_a = Spec("a").concretized()
spec_a_label = ci._spec_ci_label(spec_a)
spec_b_label = ci._spec_ci_label(spec_a["b"])
spec_c_label = ci._spec_ci_label(spec_a["c"])
spec_d_label = ci._spec_ci_label(spec_a["d"])
spec_e_label = ci._spec_ci_label(spec_a["e"])
spec_f_label = ci._spec_ci_label(spec_a["f"])
spec_g_label = ci._spec_ci_label(spec_a["g"])
spec_a_label = ci._spec_deps_key(spec_a)
spec_b_label = ci._spec_deps_key(spec_a["b"])
spec_c_label = ci._spec_deps_key(spec_a["c"])
spec_d_label = ci._spec_deps_key(spec_a["d"])
spec_e_label = ci._spec_deps_key(spec_a["e"])
spec_f_label = ci._spec_deps_key(spec_a["f"])
spec_g_label = ci._spec_deps_key(spec_a["g"])
spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
@@ -779,43 +776,6 @@ def test_ci_rebuild_mock_success(
assert "Cannot copy test logs" in out
def test_ci_rebuild_mock_failure_to_push(
tmpdir,
working_env,
mutable_mock_env_path,
install_mockery_mutable_config,
mock_gnupghome,
mock_stage,
mock_fetch,
mock_binary_index,
ci_base_environment,
monkeypatch,
):
pkg_name = "trivial-install-test-package"
rebuild_env = create_rebuild_env(tmpdir, pkg_name)
# Mock the install script success
def mock_success(*args, **kwargs):
return 0
monkeypatch.setattr(spack.ci, "process_command", mock_success)
# Mock failure to push to the build cache
def mock_push_or_raise(*args, **kwargs):
raise spack.binary_distribution.PushToBuildCacheError(
"Encountered problem pushing binary <url>: <expection>"
)
monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise)
with rebuild_env.env_dir.as_cwd():
activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
expect = f"Command exited with code {FAILED_CREATE_BUILDCACHE_CODE}"
with pytest.raises(spack.main.SpackCommandError, match=expect):
ci_cmd("rebuild", fail_on_error=True)
@pytest.mark.skip(reason="fails intermittently and covered by gitlab ci")
def test_ci_rebuild(
tmpdir,
@@ -1103,7 +1063,7 @@ def test_ci_generate_mirror_override(
@pytest.mark.disable_clean_stage_check
def test_push_to_build_cache(
def test_push_mirror_contents(
tmpdir,
mutable_mock_env_path,
install_mockery_mutable_config,
@@ -1164,7 +1124,7 @@ def test_push_to_build_cache(
install_cmd("--add", "--keep-stage", json_path)
for s in concrete_spec.traverse():
ci.push_to_build_cache(s, mirror_url, True)
ci.push_mirror_contents(s, mirror_url, True)
buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1257,16 +1217,21 @@ def test_push_to_build_cache(
assert len(dl_dir_list) == 2
def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
def _push_to_build_cache(spec, sign_binaries, mirror_url):
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
def failing_access(*args, **kwargs):
raise Exception("Error: Access Denied")
monkeypatch.setattr(spack.ci, "_push_to_build_cache", _push_to_build_cache)
monkeypatch.setattr(spack.ci, "_push_mirror_contents", failing_access)
# Input doesn't matter, as we are faking exceptional output
url = tmp_path.as_uri()
ci.push_to_build_cache(None, url, None)
assert f"Permission problem writing to {url}" in capsys.readouterr().err
# Input doesn't matter, as wwe are faking exceptional output
url = "fakejunk"
ci.push_mirror_contents(None, url, None)
captured = capsys.readouterr()
std_out = captured[0]
expect_msg = "Permission problem writing to {0}".format(url)
assert expect_msg in std_out
@pytest.mark.parametrize("match_behavior", ["first", "merge"])
@@ -1496,24 +1461,26 @@ def test_ci_rebuild_index(
working_dir = tmpdir.join("working_dir")
mirror_dir = working_dir.join("mirror")
mirror_url = url_util.path_to_file_url(str(mirror_dir))
mirror_url = "file://{0}".format(mirror_dir.strpath)
spack_yaml_contents = f"""
spack_yaml_contents = """
spack:
specs:
- callpath
mirrors:
test-mirror: {mirror_url}
ci:
pipeline-gen:
- submapping:
- match:
- patchelf
build-job:
tags:
- donotcare
image: donotcare
"""
specs:
- callpath
mirrors:
test-mirror: {0}
ci:
pipeline-gen:
- submapping:
- match:
- patchelf
build-job:
tags:
- donotcare
image: donotcare
""".format(
mirror_url
)
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:

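For reference, the exception-path tests above use a common pytest pattern: monkeypatch the helper that would contact the mirror so it raises, then assert on the message the caller reports via capsys. A standalone sketch of that pattern follows; the helper, wrapper, and message text are invented for illustration and are not Spack's code.

import sys


def _push(spec, mirror_url):
    # Stand-in for the low-level helper that would contact a real mirror.
    raise NotImplementedError


def push_with_error_handling(spec, mirror_url):
    # Wrapper under test: it reports push failures instead of propagating them.
    try:
        _push(spec, mirror_url)
    except Exception as e:
        print(f"Permission problem writing to {mirror_url}: {e}")


def test_error_message_is_reported(monkeypatch, capsys):
    def failing_push(*args, **kwargs):
        raise Exception("Error: Access Denied")

    # Swap in the failing helper for the duration of this test only.
    monkeypatch.setattr(sys.modules[__name__], "_push", failing_push)
    push_with_error_handling(None, "file:///tmp/mirror")
    assert "Permission problem writing to file:///tmp/mirror" in capsys.readouterr().out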
View File

@@ -123,18 +123,17 @@ def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
@pytest.mark.parametrize(
"arg,conf", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
"arg,config", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
)
def test_concretizer_arguments(mutable_config, mock_packages, arg, conf):
def test_concretizer_arguments(mutable_config, mock_packages, arg, config):
"""Ensure that ConfigSetAction is doing the right thing."""
spec = spack.main.SpackCommand("spec")
assert spack.config.get("concretizer:reuse", None, scope="command_line") is None
assert spack.config.get("concretizer:reuse", None) is None
spec(arg, "zlib")
assert spack.config.get("concretizer:reuse", None) == conf
assert spack.config.get("concretizer:reuse", None, scope="command_line") == conf
assert spack.config.get("concretizer:reuse", None) == config
def test_use_buildcache_type():

View File

@@ -112,10 +112,10 @@ def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executa
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.regression("37996")
@@ -124,10 +124,10 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
site_config = spack.config.get("compilers", scope="site")
spack.config.set("compilers", site_config, scope="user")
assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@@ -175,9 +175,7 @@ def test_compiler_find_mixed_suffixes(
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
@@ -212,9 +210,7 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
assert clang["paths"]["cc"] == str(compilers_dir / "clang")
@@ -233,9 +229,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)
compiler("find", "--scope=site")
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
config = spack.compilers.get_compiler_config("site", False)
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
assert gcc["paths"] == {
"cc": str(new_dir / "gcc-8"),

View File

@@ -20,10 +20,7 @@
install = SpackCommand("install")
env = SpackCommand("env")
pytestmark = [
pytest.mark.not_on_windows("does not run on windows"),
pytest.mark.disable_clean_stage_check,
]
pytestmark = pytest.mark.not_on_windows("does not run on windows")
def test_dev_build_basics(tmpdir, install_mockery):
@@ -44,6 +41,7 @@ def test_dev_build_basics(tmpdir, install_mockery):
assert os.path.exists(str(tmpdir))
@pytest.mark.disable_clean_stage_check
def test_dev_build_before(tmpdir, install_mockery):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
@@ -60,6 +58,7 @@ def test_dev_build_before(tmpdir, install_mockery):
assert not os.path.exists(spec.prefix)
@pytest.mark.disable_clean_stage_check
def test_dev_build_until(tmpdir, install_mockery):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
@@ -77,6 +76,7 @@ def test_dev_build_until(tmpdir, install_mockery):
assert not spack.store.STORE.db.query(spec, installed=True)
@pytest.mark.disable_clean_stage_check
def test_dev_build_until_last_phase(tmpdir, install_mockery):
# Test that we ignore the last_phase argument if it is already last
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
@@ -96,6 +96,7 @@ def test_dev_build_until_last_phase(tmpdir, install_mockery):
assert os.path.exists(str(tmpdir))
@pytest.mark.disable_clean_stage_check
def test_dev_build_before_until(tmpdir, install_mockery, capsys):
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
@@ -133,6 +134,7 @@ def mock_module_noop(*args):
pass
@pytest.mark.disable_clean_stage_check
def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
monkeypatch.setattr(os, "execvp", print_spack_cc)
monkeypatch.setattr(spack.build_environment, "module", mock_module_noop)

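For reference, the hunks above trade per-test @pytest.mark.disable_clean_stage_check decorators against a module-level pytestmark list. The two forms are equivalent: a module-level pytestmark applies every listed mark to all tests in the module. A minimal, self-contained sketch (mark names invented for illustration):

import pytest

# Applying marks once at module scope is the same as decorating every test
# in the module individually.
pytestmark = [pytest.mark.slow, pytest.mark.serial]


def test_one():
    assert True  # carries both the "slow" and "serial" marks


def test_two():
    assert True  # same marks, without repeating the decorators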
View File

@@ -188,127 +188,6 @@ def test_env_remove(capfd):
assert "bar" not in out
def test_env_rename_managed(capfd):
# Need real environment
with pytest.raises(spack.main.SpackCommandError):
env("rename", "foo", "bar")
assert (
"The specified name does not correspond to a managed spack environment"
in capfd.readouterr()[0]
)
env("create", "foo")
out = env("list")
assert "foo" in out
out = env("rename", "foo", "bar")
assert "Successfully renamed environment foo to bar" in out
out = env("list")
assert "foo" not in out
assert "bar" in out
bar = ev.read("bar")
with bar:
# Cannot rename active environment
with pytest.raises(spack.main.SpackCommandError):
env("rename", "bar", "baz")
assert "Cannot rename active environment" in capfd.readouterr()[0]
env("create", "qux")
# Cannot rename to an active environment (even with force flag)
with pytest.raises(spack.main.SpackCommandError):
env("rename", "-f", "qux", "bar")
assert "bar is an active environment" in capfd.readouterr()[0]
# Can rename inactive environment when another's active
out = env("rename", "qux", "quux")
assert "Successfully renamed environment qux to quux" in out
out = env("list")
assert "bar" in out
assert "baz" not in out
env("create", "baz")
# Cannot rename to existing environment without --force
with pytest.raises(spack.main.SpackCommandError):
env("rename", "bar", "baz")
errmsg = (
"The new name corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
assert errmsg in capfd.readouterr()[0]
env("rename", "-f", "bar", "baz")
out = env("list")
assert "bar" not in out
assert "baz" in out
def test_env_rename_anonymous(capfd, tmpdir):
# Need real environment
with pytest.raises(spack.main.SpackCommandError):
env("rename", "-d", "./non-existing", "./also-non-existing")
assert (
"The specified path does not correspond to a valid spack environment"
in capfd.readouterr()[0]
)
anon_foo = str(tmpdir / "foo")
env("create", "-d", anon_foo)
anon_bar = str(tmpdir / "bar")
out = env("rename", "-d", anon_foo, anon_bar)
assert f"Successfully renamed environment {anon_foo} to {anon_bar}" in out
assert not ev.is_env_dir(anon_foo)
assert ev.is_env_dir(anon_bar)
# Cannot rename active environment
anon_baz = str(tmpdir / "baz")
env("activate", "--sh", "-d", anon_bar)
with pytest.raises(spack.main.SpackCommandError):
env("rename", "-d", anon_bar, anon_baz)
assert "Cannot rename active environment" in capfd.readouterr()[0]
env("deactivate", "--sh")
assert ev.is_env_dir(anon_bar)
assert not ev.is_env_dir(anon_baz)
# Cannot rename to existing environment without --force
env("create", "-d", anon_baz)
with pytest.raises(spack.main.SpackCommandError):
env("rename", "-d", anon_bar, anon_baz)
errmsg = (
"The new path corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
assert errmsg in capfd.readouterr()[0]
assert ev.is_env_dir(anon_bar)
assert ev.is_env_dir(anon_baz)
env("rename", "-f", "-d", anon_bar, anon_baz)
assert not ev.is_env_dir(anon_bar)
assert ev.is_env_dir(anon_baz)
# Cannot rename to existing (non-environment) path without --force
qux = tmpdir / "qux"
qux.mkdir()
anon_qux = str(qux)
assert not ev.is_env_dir(anon_qux)
with pytest.raises(spack.main.SpackCommandError):
env("rename", "-d", anon_baz, anon_qux)
errmsg = "The new path already exists; specify the --force flag to overwrite it."
assert errmsg in capfd.readouterr()[0]
env("rename", "-f", "-d", anon_baz, anon_qux)
assert not ev.is_env_dir(anon_baz)
assert ev.is_env_dir(anon_qux)
def test_concretize():
e = ev.create("test")
e.add("mpileaks")
@@ -858,7 +737,8 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
"""
)
with ev.Environment(env_root) as e:
e = ev.Environment(env_root)
with e:
e.concretize()
# we've created an environment with some included config files (which do
@@ -868,7 +748,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
os.remove(abs_include_path)
os.remove(include1)
with pytest.raises(spack.config.ConfigFileError) as exc:
ev.activate(ev.Environment(env_root))
ev.activate(e)
err = exc.value.message
assert "missing include" in err
@@ -976,7 +856,6 @@ def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
"""Test ``config change`` with config in the ``spack.yaml`` as well as an
included file scope.
@@ -1052,7 +931,6 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
spack.spec.Spec("bowtie@1.2.2").concretized()
@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
@@ -1062,7 +940,8 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
"""
)
with ev.Environment(tmp_path):
e = ev.Environment(tmp_path)
with e:
config("change", "packages:mpich:require:~debug")
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
spack.spec.Spec("mpich+debug").concretized()
@@ -1079,7 +958,7 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
require: "@3.0.3"
"""
)
with ev.Environment(tmp_path):
with e:
assert spack.spec.Spec("mpich").concretized().satisfies("@3.0.3")
with pytest.raises(spack.config.ConfigError, match="not a list"):
config("change", "packages:mpich:require:~debug")
@@ -3159,41 +3038,6 @@ def test_modules_exist_after_env_install(
assert spec.prefix in contents
@pytest.mark.disable_clean_stage_check
def test_install_develop_keep_stage(
environment_from_manifest, install_mockery, mock_fetch, monkeypatch, tmpdir
):
"""Develop a dependency of a package and make sure that the associated
stage for the package is retained after a successful install.
"""
environment_from_manifest(
"""
spack:
specs:
- mpileaks
"""
)
monkeypatch.setattr(spack.stage.DevelopStage, "destroy", _always_fail)
with ev.read("test") as e:
libelf_dev_path = tmpdir.ensure("libelf-test-dev-path", dir=True)
develop(f"--path={libelf_dev_path}", "libelf@0.8.13")
concretize()
(libelf_spec,) = e.all_matching_specs("libelf")
(mpileaks_spec,) = e.all_matching_specs("mpileaks")
assert not os.path.exists(libelf_spec.package.stage.path)
assert not os.path.exists(mpileaks_spec.package.stage.path)
install()
assert os.path.exists(libelf_spec.package.stage.path)
assert not os.path.exists(mpileaks_spec.package.stage.path)
# Helper method for test_install_develop_keep_stage
def _always_fail(cls, *args, **kwargs):
raise Exception("Restage or destruction of dev stage detected during install")
@pytest.mark.regression("24148")
def test_virtual_spec_concretize_together(tmpdir):
# An environment should permit to concretize "mpi"
@@ -3287,7 +3131,7 @@ def test_create_and_activate_managed(tmp_path):
env("deactivate")
def test_create_and_activate_anonymous(tmp_path):
def test_create_and_activate_unmanaged(tmp_path):
with fs.working_dir(str(tmp_path)):
env_dir = os.path.join(str(tmp_path), "foo")
shell = env("activate", "--without-view", "--create", "--sh", "-d", env_dir)

View File

@@ -64,7 +64,6 @@ def test_query_arguments():
implicit=False,
start_date="2018-02-23",
end_date=None,
install_tree="all",
)
q_args = query_arguments(args)
@@ -76,7 +75,6 @@ def test_query_arguments():
assert q_args["explicit"] is any
assert "start_date" in q_args
assert "end_date" not in q_args
assert q_args["install_tree"] == "all"
# Check that explicit works correctly
args.explicit = True

View File

@@ -88,7 +88,6 @@ def __init__(
exclude_file=None,
exclude_specs=None,
directory=None,
private=False,
):
self.specs = specs or []
self.all = all
@@ -97,7 +96,6 @@ def __init__(
self.dependencies = dependencies
self.exclude_file = exclude_file
self.exclude_specs = exclude_specs
self.private = private
self.directory = directory
@@ -106,7 +104,7 @@ def test_exclude_specs(mock_packages, config):
specs=["mpich"], versions_per_spec="all", exclude_specs="mpich@3.0.1:3.0.2 mpich@1.0"
)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
mirror_specs = spack.cmd.mirror.concrete_specs_from_user(args)
expected_include = set(
spack.spec.Spec(x).concretized() for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"]
)
@@ -115,19 +113,6 @@ def test_exclude_specs(mock_packages, config):
assert not any(spec.satisfies(y) for spec in mirror_specs for y in expected_exclude)
def test_exclude_specs_public_mirror(mock_packages, config):
args = MockMirrorArgs(
specs=["no-redistribute-dependent"],
versions_per_spec="all",
dependencies=True,
private=False,
)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
assert not any(s.name == "no-redistribute" for s in mirror_specs)
assert any(s.name == "no-redistribute-dependent" for s in mirror_specs)
def test_exclude_file(mock_packages, tmpdir, config):
exclude_path = os.path.join(str(tmpdir), "test-exclude.txt")
with open(exclude_path, "w") as exclude_file:
@@ -140,7 +125,7 @@ def test_exclude_file(mock_packages, tmpdir, config):
args = MockMirrorArgs(specs=["mpich"], versions_per_spec="all", exclude_file=exclude_path)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
mirror_specs = spack.cmd.mirror.concrete_specs_from_user(args)
expected_include = set(
spack.spec.Spec(x).concretized() for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"]
)
@@ -277,9 +262,11 @@ def test_mirror_destroy(
class TestMirrorCreate:
@pytest.mark.regression("31736", "31985")
def test_all_specs_with_all_versions_dont_concretize(self):
args = MockMirrorArgs(all=True, exclude_file=None, exclude_specs=None)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
assert all(not s.concrete for s in mirror_specs)
args = MockMirrorArgs(exclude_file=None, exclude_specs=None)
specs = spack.cmd.mirror.all_specs_with_all_versions(
selection_fn=spack.cmd.mirror.not_excluded_fn(args)
)
assert all(not s.concrete for s in specs)
@pytest.mark.parametrize(
"cli_args,error_str",
@@ -337,8 +324,8 @@ def test_error_conditions(self, cli_args, error_str):
],
)
def test_exclude_specs_from_user(self, cli_args, not_expected, config):
mirror_specs, _ = spack.cmd.mirror._specs_and_action(MockMirrorArgs(**cli_args))
assert not any(s.satisfies(y) for s in mirror_specs for y in not_expected)
specs = spack.cmd.mirror.concrete_specs_from_user(MockMirrorArgs(**cli_args))
assert not any(s.satisfies(y) for s in specs for y in not_expected)
@pytest.mark.parametrize("abstract_specs", [("bowtie", "callpath")])
def test_specs_from_cli_are_the_same_as_from_file(self, abstract_specs, config, tmpdir):
@@ -420,27 +407,3 @@ def test_mirror_add_set_signed(mutable_config):
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": False}
mirror("set", "--signed", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": True}
def test_mirror_add_set_autopush(mutable_config):
# Add mirror without autopush
mirror("add", "example", "http://example.com")
assert spack.config.get("mirrors:example") == "http://example.com"
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("remove", "example")
# Add mirror with autopush
mirror("add", "--autopush", "example", "http://example.com")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("remove", "example")

View File

@@ -31,7 +31,7 @@ def test_spec():
@pytest.mark.only_clingo("Known failure of the original concretizer")
def test_spec_concretizer_args(mutable_config, mutable_database, do_not_check_runtimes_on_reuse):
def test_spec_concretizer_args(mutable_config, mutable_database):
"""End-to-end test of CLI concretizer prefs.
It's here to make sure that everything works from CLI

View File

@@ -14,7 +14,6 @@
import spack.compilers
import spack.spec
import spack.util.environment
import spack.util.module_cmd
from spack.compiler import Compiler
from spack.util.executable import Executable, ProcessError
@@ -63,16 +62,10 @@ def test_multiple_conflicting_compiler_definitions(mutable_config):
assert cmp.f77 == "f77"
def test_get_compiler_duplicates(mutable_config, compiler_factory):
def test_get_compiler_duplicates(config):
# In this case there is only one instance of the specified compiler in
# the test configuration (so it is not actually a duplicate), but the
# method behaves the same.
cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
# CNL compiler has no target attribute, and this is essential to make detection pass
del cnl_compiler["compiler"]["target"]
mutable_config.set(
"compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="SuSE11"), cnl_compiler]
)
cfg_file_to_duplicates = spack.compilers.get_compiler_duplicates(
"gcc@4.5.0", spack.spec.ArchSpec("cray-CNL-xeon")
)
@@ -82,6 +75,13 @@ def test_get_compiler_duplicates(mutable_config, compiler_factory):
assert len(duplicates) == 1
def test_all_compilers(config):
all_compilers = spack.compilers.all_compilers()
filtered = [x for x in all_compilers if str(x.spec) == "clang@=3.3"]
filtered = [x for x in filtered if x.operating_system == "SuSE11"]
assert len(filtered) == 1
@pytest.mark.parametrize(
"input_version,expected_version,expected_error",
[(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
@@ -138,6 +138,14 @@ def __init__(self):
environment={},
)
def _get_compiler_link_paths(self):
# Mock os.path.isdir so the link paths don't have to exist
old_isdir = os.path.isdir
os.path.isdir = lambda x: True
ret = super()._get_compiler_link_paths()
os.path.isdir = old_isdir
return ret
@property
def name(self):
return "mockcompiler"
@@ -155,25 +163,34 @@ def verbose_flag(self):
required_libs = ["libgfortran"]
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_implicit_rpaths(dirs_with_libfiles):
def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
lib_to_dirs, all_dirs = dirs_with_libfiles
def try_all_dirs(*args):
return all_dirs
monkeypatch.setattr(MockCompiler, "_get_compiler_link_paths", try_all_dirs)
expected_rpaths = set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
compiler = MockCompiler()
compiler._compile_c_source_output = "ld " + " ".join(f"-L{d}" for d in all_dirs)
retrieved_rpaths = compiler.implicit_rpaths()
assert set(retrieved_rpaths) == set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
assert set(retrieved_rpaths) == expected_rpaths
without_flag_output = "ld -L/path/to/first/lib -L/path/to/second/lib64"
with_flag_output = "ld -L/path/to/first/with/flag/lib -L/path/to/second/lib64"
no_flag_dirs = ["/path/to/first/lib", "/path/to/second/lib64"]
no_flag_output = "ld -L%s -L%s" % tuple(no_flag_dirs)
flag_dirs = ["/path/to/first/with/flag/lib", "/path/to/second/lib64"]
flag_output = "ld -L%s -L%s" % tuple(flag_dirs)
def call_compiler(exe, *args, **kwargs):
# This method can replace Executable.__call__ to emulate a compiler that
# changes libraries depending on a flag.
if "--correct-flag" in exe.exe:
return with_flag_output
return without_flag_output
return flag_output
return no_flag_output
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@@ -187,8 +204,8 @@ def call_compiler(exe, *args, **kwargs):
("cc", "cppflags"),
],
)
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_adds_flags(monkeypatch, exe, flagname):
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths(monkeypatch, exe, flagname):
# create fake compiler that emits mock verbose output
compiler = MockCompiler()
monkeypatch.setattr(Executable, "__call__", call_compiler)
@@ -205,38 +222,40 @@ def test_compile_dummy_c_source_adds_flags(monkeypatch, exe, flagname):
assert False
# Test without flags
assert compiler._compile_dummy_c_source() == without_flag_output
assert compiler._get_compiler_link_paths() == no_flag_dirs
if flagname:
# set flags and test
compiler.flags = {flagname: ["--correct-flag"]}
assert compiler._compile_dummy_c_source() == with_flag_output
assert compiler._get_compiler_link_paths() == flag_dirs
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_no_path():
def test_get_compiler_link_paths_no_path():
compiler = MockCompiler()
compiler.cc = None
compiler.cxx = None
assert compiler._compile_dummy_c_source() is None
compiler.f77 = None
compiler.fc = None
assert compiler._get_compiler_link_paths() == []
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_no_verbose_flag():
def test_get_compiler_link_paths_no_verbose_flag():
compiler = MockCompiler()
compiler._verbose_flag = None
assert compiler._compile_dummy_c_source() is None
assert compiler._get_compiler_link_paths() == []
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_load_env(working_env, monkeypatch, tmpdir):
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
gcc = str(tmpdir.join("gcc"))
with open(gcc, "w") as f:
f.write(
f"""#!/bin/sh
"""#!/bin/sh
if [ "$ENV_SET" = "1" ] && [ "$MODULE_LOADED" = "1" ]; then
printf '{without_flag_output}'
echo '"""
+ no_flag_output
+ """'
fi
"""
)
@@ -256,7 +275,7 @@ def module(*args):
compiler.environment = {"set": {"ENV_SET": "1"}}
compiler.modules = ["turn_on"]
assert compiler._compile_dummy_c_source() == without_flag_output
assert compiler._get_compiler_link_paths() == no_flag_dirs
# Get the desired flag from the specified compiler spec.
@@ -635,25 +654,7 @@ def test_xl_r_flags():
"compiler_spec,expected_result",
[("gcc@4.7.2", False), ("clang@3.3", False), ("clang@8.0.0", True)],
)
def test_detecting_mixed_toolchains(
compiler_spec, expected_result, mutable_config, compiler_factory
):
mixed_c = compiler_factory(spec="clang@8.0.0", operating_system="debian6")
mixed_c["compiler"]["paths"] = {
"cc": "/path/to/clang-8",
"cxx": "/path/to/clang++-8",
"f77": "/path/to/gfortran-9",
"fc": "/path/to/gfortran-9",
}
mutable_config.set(
"compilers",
[
compiler_factory(spec="gcc@4.7.2", operating_system="debian6"),
compiler_factory(spec="clang@3.3", operating_system="debian6"),
mixed_c,
],
)
def test_detecting_mixed_toolchains(compiler_spec, expected_result, config):
compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
assert spack.compilers.is_mixed_toolchain(compiler) is expected_result
@@ -682,7 +683,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
]
arch_spec = spack.spec.ArchSpec(("linux", "ubuntu18.04", "haswell"))
with spack.config.override("compilers", compilers):
cfg = spack.compilers.get_compiler_config(config)
cfg = spack.compilers.get_compiler_config()
with pytest.raises(ValueError):
spack.compilers.get_compilers(cfg, spack.spec.CompilerSpec("gcc@9.0.1"), arch_spec)
@@ -894,52 +895,3 @@ def prepare_executable(name):
# Test that null entries don't fail
compiler.cc = None
compiler.verify_executables()
@pytest.mark.parametrize(
"detected_versions,expected_length",
[
# If we detect a C compiler we expect the result to be valid
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cc",
path="/usr/bin/clang-12",
),
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
),
],
1,
),
# If we detect only a C++ compiler we expect the result to be discarded
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
)
],
0,
),
],
)
def test_detection_requires_c_compiler(detected_versions, expected_length):
"""Tests that compilers automatically added to the configuration have
at least a C compiler.
"""
result = spack.compilers.make_compiler_list(detected_versions)
assert len(result) == expected_length
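For reference, several compiler tests above feed mock verbose link output (ld -L... lines) into the code that derives implicit rpaths or link paths. The standalone sketch below shows the kind of -L parsing being exercised; it is illustrative only, not Spack's implementation.

import re


def parse_link_dirs(verbose_output: str) -> list:
    """Return the -L search directories from a verbose link line, in order,
    without duplicates."""
    seen, dirs = set(), []
    for match in re.finditer(r"-L\s*(\S+)", verbose_output):
        path = match.group(1)
        if path not in seen:
            seen.add(path)
            dirs.append(path)
    return dirs


assert parse_link_dirs("ld -L/path/to/first/lib -L/path/to/second/lib64") == [
    "/path/to/first/lib",
    "/path/to/second/lib64",
]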

View File

@@ -13,7 +13,6 @@
import llnl.util.lang
import spack.compiler
import spack.compilers
import spack.concretize
import spack.config
@@ -68,24 +67,6 @@ def check_concretize(abstract_spec):
return concrete
@pytest.fixture(scope="function", autouse=True)
def binary_compatibility(monkeypatch, request):
"""Selects whether we use OS compatibility for binaries, or libc compatibility."""
if spack.platforms.real_host().name != "linux":
return
if "mock_packages" not in request.fixturenames:
# Only builtin.mock has a mock glibc package
return
if "database" in request.fixturenames or "mutable_database" in request.fixturenames:
# Databases have been created without glibc support
return
monkeypatch.setattr(spack.solver.asp, "using_libc_compatibility", lambda: True)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", Spec("glibc@=2.28"))
@pytest.fixture(
params=[
# no_deps
@@ -139,16 +120,14 @@ def current_host(request, monkeypatch):
# is_preference is not empty if we want to supply the
# preferred target via packages.yaml
cpu, _, is_preference = request.param.partition("-")
target = archspec.cpu.TARGETS[cpu]
monkeypatch.setattr(spack.platforms.Test, "default", cpu)
monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
if not is_preference:
target = archspec.cpu.TARGETS[cpu]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
yield target
else:
target = archspec.cpu.TARGETS["sapphirerapids"]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
with spack.config.override("packages:all", {"target": [cpu]}):
yield target
@@ -258,24 +237,10 @@ def change(self, changes=None):
yield _changing_pkg
@pytest.fixture()
def clang12_with_flags(compiler_factory):
c = compiler_factory(spec="clang@12.2.0", operating_system="redhat6")
c["compiler"]["flags"] = {"cflags": "-O3", "cxxflags": "-O3"}
return c
@pytest.fixture()
def gcc11_with_flags(compiler_factory):
c = compiler_factory(spec="gcc@11.1.0", operating_system="redhat6")
c["compiler"]["flags"] = {"cflags": "-O0 -g", "cxxflags": "-O0 -g", "fflags": "-O0 -g"}
return c
# This must use the mutable_config fixture because the test
# adjusting_default_target_based_on_compiler uses the current_host fixture,
# which changes the config.
@pytest.mark.usefixtures("mutable_config", "mock_packages", "do_not_check_runtimes_on_reuse")
@pytest.mark.usefixtures("mutable_config", "mock_packages")
class TestConcretize:
def test_concretize(self, spec):
check_concretize(spec)
@@ -364,34 +329,18 @@ def test_provides_handles_multiple_providers_of_same_version(self):
assert Spec("builtin.mock.multi-provider-mpi@1.10.0") in providers
assert Spec("builtin.mock.multi-provider-mpi@1.8.8") in providers
def test_different_compilers_get_different_flags(
self, mutable_config, clang12_with_flags, gcc11_with_flags
):
"""Tests that nodes get the flags of the associated compiler."""
mutable_config.set("compilers", [clang12_with_flags, gcc11_with_flags])
def test_different_compilers_get_different_flags(self):
client = Spec(
"cmake-client %gcc@11.1.0 platform=test os=fe target=fe"
" ^cmake %clang@12.2.0 platform=test os=fe target=fe"
).concretized()
+ " ^cmake %clang@12.2.0 platform=test os=fe target=fe"
)
client.concretize()
cmake = client["cmake"]
assert set(client.compiler_flags["cflags"]) == {"-O0", "-g"}
assert set(cmake.compiler_flags["cflags"]) == {"-O3"}
assert set(client.compiler_flags["fflags"]) == {"-O0", "-g"}
assert set(client.compiler_flags["cflags"]) == set(["-O0", "-g"])
assert set(cmake.compiler_flags["cflags"]) == set(["-O3"])
assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"])
assert not set(cmake.compiler_flags["fflags"])
@pytest.mark.regression("9908")
def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
"""Tests that Spack assembles flags in a consistent way (i.e. with the same ordering),
for successive concretizations.
"""
mutable_config.set("compilers", [gcc11_with_flags])
spec_str = "libelf %gcc@11.1.0 os=redhat6"
for _ in range(3):
s = Spec(spec_str).concretized()
assert all(
s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
)
@pytest.mark.xfail(reason="Broken, needs to be fixed")
def test_compiler_flags_from_compiler_and_dependent(self):
client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
@@ -400,8 +349,7 @@ def test_compiler_flags_from_compiler_and_dependent(self):
for spec in [client, cmake]:
assert spec.compiler_flags["cflags"] == ["-O3", "-g"]
def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
mutable_config.set("compilers", [clang12_with_flags])
def test_compiler_flags_differ_identical_compilers(self):
# Correct arch to use test compiler that has flags
spec = Spec("a %clang@12.2.0 platform=test os=fe target=fe")
@@ -456,20 +404,25 @@ def test_compiler_inherited_upwards(self):
for dep in spec.traverse():
assert "%clang" in dep
def test_architecture_inheritance(self):
"""test_architecture_inheritance is likely to fail with an
UnavailableCompilerVersionError if the architecture is concretized
incorrectly.
"""
spec = Spec("cmake-client %gcc@11.1.0 os=fe ^ cmake")
spec.concretize()
assert spec["cmake"].architecture == spec.architecture
@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer")
def test_architecture_deep_inheritance(self, mock_targets, compiler_factory):
def test_architecture_deep_inheritance(self, mock_targets):
"""Make sure that indirect dependencies receive architecture
information from the root even when partial architecture information
is provided by an intermediate dependency.
"""
cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
# CNL compiler has no target attribute, and this is essential to make detection pass
del cnl_compiler["compiler"]["target"]
with spack.config.override("compilers", [cnl_compiler]):
spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona ^dyninst os=CNL ^callpath os=CNL"
spec = Spec(spec_str).concretized()
for s in spec.traverse(root=False):
assert s.architecture.target == spec.architecture.target
spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona" " ^dyninst os=CNL ^callpath os=CNL"
spec = Spec(spec_str).concretized()
for s in spec.traverse(root=False):
assert s.architecture.target == spec.architecture.target
def test_compiler_flags_from_user_are_grouped(self):
spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
@@ -635,7 +588,7 @@ def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
spec.normalize()
spec.concretize()
@pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:15.0.0"])
@pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:12.0.0"])
def test_compiler_inheritance(self, compiler_str):
spec_str = "mpileaks %{0}".format(compiler_str)
spec = Spec(spec_str).concretized()
@@ -895,21 +848,18 @@ def test_concretize_anonymous_dep(self, spec_str):
@pytest.mark.parametrize(
"spec_str,expected_str",
[
# Unconstrained versions select default compiler (gcc@10.2.1)
# Unconstrained versions select default compiler (gcc@4.5.0)
("bowtie@1.4.0", "%gcc@10.2.1"),
# Version with conflicts and no valid gcc select another compiler
("bowtie@1.3.0", "%clang@15.0.0"),
("bowtie@1.3.0", "%clang@12.0.0"),
# If a higher gcc is available still prefer that
("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"),
],
)
@pytest.mark.only_clingo("Original concretizer cannot work around conflicts")
def test_compiler_conflicts_in_package_py(
self, spec_str, expected_str, clang12_with_flags, gcc11_with_flags
):
with spack.config.override("compilers", [clang12_with_flags, gcc11_with_flags]):
s = Spec(spec_str).concretized()
assert s.satisfies(expected_str)
def test_compiler_conflicts_in_package_py(self, spec_str, expected_str):
s = Spec(spec_str).concretized()
assert s.satisfies(expected_str)
@pytest.mark.parametrize(
"spec_str,expected,unexpected",
@@ -1033,7 +983,7 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
[("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_external_package_and_compiler_preferences(self, spec_str, expected, mutable_config):
def test_external_package_and_compiler_preferences(self, spec_str, expected):
packages_yaml = {
"all": {"compiler": ["clang", "gcc"]},
"cmake": {
@@ -1041,7 +991,7 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected, mut
"buildable": False,
},
}
mutable_config.set("packages", packages_yaml)
spack.config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()
assert s.external
@@ -1207,18 +1157,16 @@ def test_activating_test_dependencies(self, spec_str, tests_arg, with_dep, witho
@pytest.mark.regression("20019")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_compiler_match_is_preferred_to_newer_version(self, compiler_factory):
def test_compiler_match_is_preferred_to_newer_version(self):
# This spec depends on openblas. Openblas has a conflict
# that doesn't allow newer versions with gcc@4.4.0. Check
# that an old version of openblas is selected, rather than
# a different compiler for just that node.
with spack.config.override(
"compilers", [compiler_factory(spec="gcc@10.1.0", operating_system="redhat6")]
):
spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6"
s = Spec(spec_str).concretized()
assert "openblas@0.2.15" in s
assert s["openblas"].satisfies("%gcc@10.1.0")
spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6"
s = Spec(spec_str).concretized()
assert "openblas@0.2.15" in s
assert s["openblas"].satisfies("%gcc@10.1.0")
@pytest.mark.regression("19981")
def test_target_ranges_in_conflicts(self):
@@ -1243,10 +1191,7 @@ def test_variant_not_default(self):
@pytest.mark.regression("20055")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_custom_compiler_version(self, mutable_config, compiler_factory):
mutable_config.set(
"compilers", [compiler_factory(spec="gcc@10foo", operating_system="redhat6")]
)
def test_custom_compiler_version(self):
s = Spec("a %gcc@10foo os=redhat6").concretized()
assert "%gcc@10foo" in s
@@ -1346,9 +1291,6 @@ def mock_fn(*args, **kwargs):
def test_reuse_installed_packages_when_package_def_changes(
self, context, mutable_database, repo_with_changing_recipe
):
# test applies only with reuse turned off in concretizer
spack.config.set("concretizer:reuse", False)
# Install a spec
root = Spec("root").concretized()
dependency = root["changing"].copy()
@@ -1372,22 +1314,6 @@ def test_reuse_installed_packages_when_package_def_changes(
# Structure and package hash will be different without reuse
assert root.dag_hash() != new_root_without_reuse.dag_hash()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@pytest.mark.regression("43663")
def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, mock_packages):
spack.config.set("concretizer:reuse", True)
# Install a spec for which the `version_based` variant condition does not hold
old = Spec("conditional-variant-pkg @1").concretized()
old.package.do_install(fake=True, explicit=True)
# Then explicitly require a spec with `+version_based`, which shouldn't reuse previous spec
new1 = Spec("conditional-variant-pkg +version_based").concretized()
assert new1.satisfies("@2 +version_based")
new2 = Spec("conditional-variant-pkg +two_whens").concretized()
assert new2.satisfies("@2 +two_whens +version_based")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_with_flags(self, mutable_database, mutable_config):
spack.config.set("concretizer:reuse", True)
@@ -1465,23 +1391,16 @@ def test_external_with_non_default_variant_as_dependency(self):
("mpileaks%gcc@10.2.1 platform=test os=redhat6", "os=redhat6"),
],
)
def test_os_selection_when_multiple_choices_are_possible(
self, spec_str, expected_os, compiler_factory
):
# GCC 10.2.1 is defined both for debian and for redhat
with spack.config.override(
"compilers", [compiler_factory(spec="gcc@10.2.1", operating_system="redhat6")]
):
s = Spec(spec_str).concretized()
for node in s.traverse():
if node.name == "glibc":
continue
assert node.satisfies(expected_os)
def test_os_selection_when_multiple_choices_are_possible(self, spec_str, expected_os):
s = Spec(spec_str).concretized()
for node in s.traverse():
assert node.satisfies(expected_os)
@pytest.mark.regression("22718")
@pytest.mark.parametrize(
"spec_str,expected_compiler",
[("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@15.0.0", "%clang@15.0.0")],
[("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@12.0.0", "%clang@12.0.0")],
)
def test_compiler_is_unique(self, spec_str, expected_compiler):
s = Spec(spec_str).concretized()
@@ -1769,7 +1688,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
[
(["libelf", "libelf@0.8.10"], 1),
(["libdwarf%gcc", "libelf%clang"], 2),
(["libdwarf%gcc", "libdwarf%clang"], 3),
(["libdwarf%gcc", "libdwarf%clang"], 4),
(["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4),
(["hdf5", "zmpi"], 3),
(["hdf5", "mpich"], 2),
@@ -1788,8 +1707,7 @@ def test_best_effort_coconcretize(self, specs, expected):
for s in result.specs:
concrete_specs.update(s.traverse())
libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
assert len(concrete_specs) == expected + libc_offset
assert len(concrete_specs) == expected
@pytest.mark.parametrize(
"specs,expected_spec,occurances",
@@ -1837,22 +1755,6 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura
counter += 1
assert counter == occurances, concrete_specs
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages, config):
specs = [Spec(x) for x in ["libdwarf%gcc", "libdwarf%clang"]]
solver = spack.solver.asp.Solver()
solver.reuse = False
simulate_unsolved_property = list((x, None) for x in specs)
monkeypatch.setattr(spack.solver.asp.Result, "unsolved_specs", simulate_unsolved_property)
monkeypatch.setattr(spack.solver.asp.Result, "specs", list())
with pytest.raises(
spack.solver.asp.InternalConcretizerError,
match="a subset of input specs could not be solved for",
):
list(solver.solve_in_rounds(specs))
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_coconcretize_reuse_and_virtuals(self):
reusable_specs = []
@@ -1909,34 +1811,31 @@ def test_version_weight_and_provenance(self):
result_spec = result.specs[0]
num_specs = len(list(result_spec.traverse()))
libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
criteria = [
(num_specs - 1 - libc_offset, None, "number of packages to build (vs. reuse)"),
(num_specs - 1, None, "number of packages to build (vs. reuse)"),
(2, 0, "version badness"),
]
for criterion in criteria:
assert criterion in result.criteria, result_spec
assert criterion in result.criteria
assert result_spec.satisfies("^b@1.0")
@pytest.mark.regression("31169")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_succeeds_with_config_compatible_os(self):
def test_not_reusing_incompatible_os_or_compiler(self):
root_spec = Spec("b")
s = root_spec.concretized()
other_os = s.copy()
mock_os = "ubuntu2204"
other_os.architecture = spack.spec.ArchSpec(
"test-{os}-{target}".format(os=mock_os, target=str(s.architecture.target))
)
reusable_specs = [other_os]
overrides = {"concretizer": {"reuse": True, "os_compatible": {s.os: [mock_os]}}}
custom_scope = spack.config.InternalConfigScope("concretize_override", overrides)
with spack.config.override(custom_scope):
wrong_compiler, wrong_os = s.copy(), s.copy()
wrong_compiler.compiler = spack.spec.CompilerSpec("gcc@12.1.0")
wrong_os.architecture = spack.spec.ArchSpec("test-ubuntu2204-x86_64")
reusable_specs = [wrong_compiler, wrong_os]
with spack.config.override("concretizer:reuse", True):
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
concrete_spec = result.specs[0]
assert concrete_spec.satisfies("os={}".format(other_os.architecture.os))
assert concrete_spec.satisfies("%{}".format(s.compiler))
assert concrete_spec.satisfies("os={}".format(s.architecture.os))
def test_git_hash_assigned_version_is_preferred(self):
hash = "a" * 40
@@ -2123,11 +2022,7 @@ def test_external_python_extension_find_dependency_from_installed(self, monkeypa
# install python external
python = Spec("python").concretized()
def query(*args, **kwargs):
return [python]
monkeypatch.setattr(spack.store.STORE.db, "query", query)
monkeypatch.setattr(spack.store.STORE.db, "query", lambda x: [python])
# ensure that we can't be faking this by getting it from config
external_conf.pop("python")
@@ -2391,42 +2286,6 @@ def test_select_lower_priority_package_from_repository_stack(
assert s[name].concrete
assert s[name].namespace == namespace
@pytest.mark.only_clingo("Old concretizer cannot reuse")
def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_database):
"""Tests that we can reuse specs with compilers that are not configured locally."""
# All the specs in the mutable DB have been compiled with %gcc@=10.2.1
specs = mutable_database.query_local()
assert all(s.satisfies("%gcc@=10.2.1") for s in specs)
spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")
mutable_config.set("concretizer:reuse", True)
# mpileaks is in the database, it will be reused with gcc@=10.2.1
root = Spec("mpileaks").concretized()
for s in root.traverse():
assert s.satisfies("%gcc@10.2.1")
# fftw is not in the database, therefore the root will be compiled with gcc@=9.4.0,
# while the mpi is reused from the database and is compiled with gcc@=10.2.1
root = Spec("fftw").concretized()
assert root.satisfies("%gcc@=9.4.0")
for s in root.traverse(root=False):
assert s.satisfies("%gcc@10.2.1")
@pytest.mark.regression("43406")
def test_externals_with_platform_explicitly_set(self, tmp_path):
"""Tests that users can specify platform=xxx in an external spec"""
external_conf = {
"mpich": {
"buildable": False,
"externals": [{"spec": "mpich@=2.0.0 platform=test", "prefix": str(tmp_path)}],
}
}
spack.config.set("packages", external_conf)
s = Spec("mpich").concretized()
assert s.external
@pytest.fixture()
def duplicates_test_repository():
@@ -2565,29 +2424,6 @@ def test_no_multiple_solutions_with_different_edges_same_nodes(self):
assert len(edges) == 1
assert edges[0].spec.satisfies("@=60")
@pytest.mark.regression("43647")
def test_specifying_different_versions_build_deps(self):
"""Tests that we can concretize a spec with nodes using the same build
dependency pinned at different versions, when the constraint is specified
in the root spec.
o hdf5@1.0
|\
o | pinned-gmake@1.0
o | gmake@3.0
/
o gmake@4.1
"""
hdf5_str = "hdf5@1.0 ^gmake@4.1"
pinned_str = "pinned-gmake@1.0 ^gmake@3.0"
input_specs = [Spec(hdf5_str), Spec(pinned_str)]
solver = spack.solver.asp.Solver()
result = solver.solve(input_specs)
assert any(x.satisfies(hdf5_str) for x in result.specs)
assert any(x.satisfies(pinned_str) for x in result.specs)
@pytest.mark.parametrize(
"v_str,v_opts,checksummed",
@@ -2777,28 +2613,3 @@ def test_reusable_externals_different_spec(mock_packages, tmpdir):
{"mpich": {"externals": [{"spec": "mpich@4.1 +debug", "prefix": tmpdir.strpath}]}},
local=False,
)
def test_concretization_version_order():
versions = [
(Version("develop"), {}),
(Version("1.0"), {}),
(Version("2.0"), {"deprecated": True}),
(Version("1.1"), {}),
(Version("1.1alpha1"), {}),
(Version("0.9"), {"preferred": True}),
]
result = [
v
for v, _ in sorted(
versions, key=spack.solver.asp._concretization_version_order, reverse=True
)
]
assert result == [
Version("0.9"), # preferred
Version("1.1"), # latest non-deprecated final version
Version("1.0"), # latest non-deprecated final version
Version("1.1alpha1"), # prereleases
Version("develop"), # likely development version
Version("2.0"), # deprecated
]
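For reference, test_concretization_version_order above asserts a specific priority: preferred versions first, then non-deprecated final releases newest-first, then prereleases, then development versions, with deprecated versions last. The standalone sketch below reproduces that ordering with a plain sort key; the data layout is invented for illustration and this is not Spack's implementation.

versions = [
    {"v": (1, 1), "kind": "final"},
    {"v": (2, 0), "kind": "final", "deprecated": True},
    {"v": (1, 0), "kind": "final"},
    {"v": (1, 1), "kind": "prerelease"},
    {"v": (0, 9), "kind": "final", "preferred": True},
    {"v": (), "kind": "develop"},
]


def order_key(entry):
    return (
        entry.get("preferred", False),       # preferred versions win outright
        not entry.get("deprecated", False),  # deprecated versions sink to the bottom
        entry["kind"] == "final",            # final releases beat prereleases and develop
        entry["kind"] != "develop",          # prereleases still rank above develop
        entry["v"],                          # otherwise, newest version first
    )


ordered = sorted(versions, key=order_key, reverse=True)
assert [e["v"] for e in ordered] == [(0, 9), (1, 1), (1, 0), (1, 1), (), (2, 0)]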

View File

@@ -7,8 +7,6 @@
import pytest
import archspec.cpu
import spack.paths
import spack.repo
import spack.solver.asp
@@ -26,7 +24,9 @@ def _concretize_with_reuse(*, root_str, reused_str):
reused_spec = spack.spec.Spec(reused_str).concretized()
setup = spack.solver.asp.SpackSolverSetup(tests=False)
driver = spack.solver.asp.PyclingoDriver()
result, _, _ = driver.solve(setup, [spack.spec.Spec(f"{root_str}")], reuse=[reused_spec])
result, _, _ = driver.solve(
setup, [spack.spec.Spec(f"{root_str} ^{reused_str}")], reuse=[reused_spec]
)
root = result.specs[0]
return root, reused_spec
@@ -47,7 +47,7 @@ def enable_runtimes():
def test_correct_gcc_runtime_is_injected_as_dependency(runtime_repo):
s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@9.4.0").concretized()
s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@4.5.0").concretized()
a, b = s["a"], s["b"]
# Both a and b should depend on the same gcc-runtime directly
@@ -78,28 +78,9 @@ def test_external_nodes_do_not_have_runtimes(runtime_repo, mutable_config, tmp_p
"root_str,reused_str,expected,nruntime",
[
# The reused runtime is older than we need, thus we'll add a more recent one for a
("a%gcc@10.2.1", "b%gcc@9.4.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@9.4.0"}, 2),
("a%gcc@10.2.1", "b%gcc@4.5.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@4.5.0"}, 2),
# The root is compiled with an older compiler, thus we'll reuse the runtime from b
("a%gcc@9.4.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
# Same as before, but tests that we can reuse from a more generic target
pytest.param(
"a%gcc@9.4.0",
"b%gcc@10.2.1 target=x86_64",
{"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@10.2.1 target=x86_64"},
1,
marks=pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
),
),
pytest.param(
"a%gcc@10.2.1",
"b%gcc@9.4.0 target=x86_64",
{"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@9.4.0 target=x86_64"},
2,
marks=pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
),
),
("a%gcc@4.5.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
],
)
def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime, runtime_repo):
@@ -123,8 +104,8 @@ def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime
[
# Ensure that, whether we have multiple runtimes in the DAG or not,
# we always link only the latest version
("a%gcc@10.2.1", "b%gcc@9.4.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
("a%gcc@9.4.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
("a%gcc@10.2.1", "b%gcc@4.5.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.5.0"]),
("a%gcc@4.5.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.5.0"]),
],
)
def test_views_can_handle_duplicate_runtime_nodes(

View File

@@ -105,7 +105,7 @@ def test_preferred_variants_from_wildcard(self):
@pytest.mark.parametrize(
"compiler_str,spec_str",
[("gcc@=9.4.0", "mpileaks"), ("clang@=15.0.0", "mpileaks"), ("gcc@=9.4.0", "openmpi")],
[("gcc@=4.5.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.5.0", "openmpi")],
)
def test_preferred_compilers(self, compiler_str, spec_str):
"""Test preferred compilers are applied correctly"""

Some files were not shown because too many files have changed in this diff.