Compare commits
2 Commits
docs/testi
...
traceback-

| Author | SHA1 | Date |
|---|---|---|
|  | 01ec40b8ad |  |
|  | 4d0a5ae724 |  |
4
.github/workflows/audit.yaml
vendored
@@ -28,8 +28,8 @@ jobs:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
2
.github/workflows/bin/bootstrap-test.sh
vendored
@@ -1,7 +1,7 @@
#!/bin/bash
set -e
source share/spack/setup-env.sh
$PYTHON bin/spack bootstrap disable github-actions-v0.5
$PYTHON bin/spack bootstrap disable github-actions-v0.4
$PYTHON bin/spack bootstrap disable spack-install
$PYTHON bin/spack $SPACK_FLAGS solve zlib
tree $BOOTSTRAP/store
51
.github/workflows/bootstrap.yml
vendored
@@ -37,14 +37,14 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
@@ -60,17 +60,17 @@ jobs:
|
||||
run: |
|
||||
brew install cmake bison tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
tree $HOME/.spack/bootstrap/store/
|
||||
@@ -83,22 +83,22 @@ jobs:
|
||||
steps:
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: brew install tree gawk
|
||||
- name: Remove system executables
|
||||
run: |
|
||||
while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
|
||||
sudo rm $(command -v gpg gpg2 patchelf)
|
||||
done
|
||||
brew install tree gawk
|
||||
sudo rm -rf $(command -v gpg gpg2)
|
||||
- name: Setup Ubuntu
|
||||
if: ${{ matrix.runner == 'ubuntu-latest' }}
|
||||
run: sudo rm -rf $(command -v gpg gpg2 patchelf)
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
@@ -110,17 +110,19 @@ jobs:
|
||||
steps:
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: brew install tree
|
||||
- name: Remove system executables
|
||||
run: |
|
||||
while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
|
||||
sudo rm $(command -v gpg gpg2 patchelf)
|
||||
done
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Setup Ubuntu
|
||||
if: ${{ matrix.runner == 'ubuntu-latest' }}
|
||||
run: |
|
||||
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: |
|
||||
3.8
|
||||
@@ -128,16 +130,15 @@ jobs:
|
||||
3.10
|
||||
3.11
|
||||
3.12
|
||||
3.13
|
||||
- name: Set bootstrap sources
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack bootstrap disable spack-install
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -e
|
||||
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
|
||||
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
if [[ -d "$ver_dir" ]] ; then
|
||||
@@ -171,10 +172,10 @@ jobs:
|
||||
runs-on: "windows-latest"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Setup Windows
|
||||
@@ -184,8 +185,8 @@ jobs:
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
./share/spack/setup-env.ps1
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
|
||||
2
.github/workflows/build-containers.yml
vendored
@@ -55,7 +55,7 @@ jobs:
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
|
||||
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
|
||||
id: docker_meta
|
||||
|
||||
2
.github/workflows/ci.yaml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
4
.github/workflows/coverage.yml
vendored
@@ -8,8 +8,8 @@ jobs:
|
||||
upload:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
|
||||
4
.github/workflows/nightly-win-builds.yml
vendored
@@ -14,10 +14,10 @@ jobs:
|
||||
build-paraview-deps:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
||||
@@ -3,5 +3,5 @@ clingo==5.7.1
|
||||
flake8==7.1.1
|
||||
isort==5.13.2
|
||||
mypy==1.8.0
|
||||
types-six==1.16.21.20241009
|
||||
types-six==1.16.21.20240513
|
||||
vermin==1.6.0
|
||||
|
||||
24
.github/workflows/unit_tests.yaml
vendored
@@ -40,10 +40,10 @@ jobs:
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -89,10 +89,10 @@ jobs:
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
@@ -130,7 +130,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -149,10 +149,10 @@ jobs:
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: '3.13'
|
||||
- name: Install System packages
|
||||
@@ -170,8 +170,8 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
spack bootstrap disable spack-install
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap status
|
||||
spack solve zlib
|
||||
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
|
||||
@@ -188,10 +188,10 @@ jobs:
|
||||
os: [macos-13, macos-14]
|
||||
python-version: ["3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
@@ -226,10 +226,10 @@ jobs:
|
||||
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
||||
20
.github/workflows/valid-style.yml
vendored
@@ -18,8 +18,8 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -35,10 +35,10 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
@@ -70,7 +70,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -98,14 +98,14 @@ jobs:
|
||||
# PR: use the base of the PR as the old commit
|
||||
- name: Checkout PR base commit
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.base.sha }}
|
||||
path: old
|
||||
# not a PR: use the previous commit as the old commit
|
||||
- name: Checkout previous commit
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
fetch-depth: 2
|
||||
path: old
|
||||
@@ -114,14 +114,14 @@ jobs:
|
||||
run: git -C old reset --hard HEAD^
|
||||
|
||||
- name: Checkout new commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
path: new
|
||||
- name: Install circular import checker
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
|
||||
with:
|
||||
repository: haampie/circular-import-fighter
|
||||
ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
|
||||
ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
|
||||
path: circular-import-fighter
|
||||
- name: Install dependencies
|
||||
working-directory: circular-import-fighter
|
||||
|
||||
@@ -1,11 +1,71 @@
|
||||
@ECHO OFF
|
||||
setlocal EnableDelayedExpansion
|
||||
:: (c) 2021 Lawrence Livermore National Laboratory
|
||||
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
|
||||
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
|
||||
::
|
||||
:: source_dir/spack/bin/spack_cmd.bat
|
||||
::
|
||||
pushd %~dp0..
|
||||
set SPACK_ROOT=%CD%
|
||||
pushd %CD%\..
|
||||
set spackinstdir=%CD%
|
||||
popd
|
||||
|
||||
call "%~dp0..\share\spack\setup-env.bat"
|
||||
pushd %SPACK_ROOT%
|
||||
%comspec% /K
|
||||
|
||||
:: Check if Python is on the PATH
|
||||
if not defined python_pf_ver (
|
||||
(for /f "delims=" %%F in ('where python.exe') do (
|
||||
set "python_pf_ver=%%F"
|
||||
goto :found_python
|
||||
) ) 2> NUL
|
||||
)
|
||||
:found_python
|
||||
if not defined python_pf_ver (
|
||||
:: If not, look for Python from the Spack installer
|
||||
:get_builtin
|
||||
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
|
||||
set "python_ver=%%g")) 2> NUL
|
||||
|
||||
if not defined python_ver (
|
||||
echo Python was not found on your system.
|
||||
echo Please install Python or add Python to your PATH.
|
||||
) else (
|
||||
set "py_path=!spackinstdir!\!python_ver!"
|
||||
set "py_exe=!py_path!\python.exe"
|
||||
)
|
||||
goto :exitpoint
|
||||
) else (
|
||||
:: Python is already on the path
|
||||
set "py_exe=!python_pf_ver!"
|
||||
(for /F "tokens=* USEBACKQ" %%F in (
|
||||
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
|
||||
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
|
||||
goto :exitpoint
|
||||
)
|
||||
:exitpoint
|
||||
|
||||
set "PATH=%SPACK_ROOT%\bin\;%PATH%"
|
||||
if defined py_path (
|
||||
set "PATH=%py_path%;%PATH%"
|
||||
)
|
||||
|
||||
if defined py_exe (
|
||||
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
|
||||
)
|
||||
|
||||
set "EDITOR=notepad"
|
||||
|
||||
DOSKEY spacktivate=spack env activate $*
|
||||
|
||||
@echo **********************************************************************
|
||||
@echo ** Spack Package Manager
|
||||
@echo **********************************************************************
|
||||
|
||||
IF "%1"=="" GOTO CONTINUE
|
||||
set
|
||||
GOTO:EOF
|
||||
|
||||
:continue
|
||||
set PROMPT=[spack] %PROMPT%
|
||||
%comspec% /k
|
||||
|
||||
@@ -9,15 +9,15 @@ bootstrap:
|
||||
# may not be able to bootstrap all the software that Spack needs,
|
||||
# depending on its type.
|
||||
sources:
|
||||
- name: github-actions-v0.6
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.6
|
||||
- name: github-actions-v0.5
|
||||
- name: 'github-actions-v0.5'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.5
|
||||
- name: spack-install
|
||||
- name: 'github-actions-v0.4'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.4
|
||||
- name: 'spack-install'
|
||||
metadata: $spack/share/spack/bootstrap/spack-install
|
||||
trusted:
|
||||
# By default we trust bootstrapping from sources and from binaries
|
||||
# produced on Github via the workflow
|
||||
github-actions-v0.6: true
|
||||
github-actions-v0.5: true
|
||||
github-actions-v0.4: true
|
||||
spack-install: true
|
||||
|
||||
@@ -42,8 +42,8 @@ concretizer:
|
||||
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
|
||||
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
|
||||
strategy: minimal
|
||||
# Option to specify compatibility between operating systems for reuse of compilers and packages
|
||||
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
|
||||
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
|
||||
# Option to specify compatiblity between operating systems for reuse of compilers and packages
|
||||
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
|
||||
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
|
||||
# requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
|
||||
os_compatible: {}
|
||||
|
||||
@@ -40,9 +40,9 @@ packages:
|
||||
jpeg: [libjpeg-turbo, libjpeg]
|
||||
lapack: [openblas, amdlibflame]
|
||||
libc: [glibc, musl]
|
||||
libgfortran: [gcc-runtime]
|
||||
libgfortran: [ gcc-runtime ]
|
||||
libglx: [mesa+glx]
|
||||
libifcore: [intel-oneapi-runtime]
|
||||
libifcore: [ intel-oneapi-runtime ]
|
||||
libllvm: [llvm]
|
||||
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
|
||||
luajit: [lua-luajit-openresty, lua-luajit]
|
||||
|
||||
@@ -1359,10 +1359,6 @@ For example, for the ``stackstart`` variant:

mpileaks stackstart==4 # variant will be propagated to dependencies
mpileaks stackstart=4 # only mpileaks will have this variant value

Spack also allows variants to be propagated from a package that does
not have that variant.

^^^^^^^^^^^^^^
Compiler Flags
^^^^^^^^^^^^^^
@@ -214,7 +214,6 @@ def setup(sphinx):
|
||||
# Spack classes that intersphinx is unable to resolve
|
||||
("py:class", "spack.version.StandardVersion"),
|
||||
("py:class", "spack.spec.DependencySpec"),
|
||||
("py:class", "spack.spec.ArchSpec"),
|
||||
("py:class", "spack.spec.InstallStatus"),
|
||||
("py:class", "spack.spec.SpecfileReaderBase"),
|
||||
("py:class", "spack.install_test.Pb"),
|
||||
|
||||
@@ -511,7 +511,6 @@ Spack understands over a dozen special variables. These are:
|
||||
* ``$target_family``. The target family for the current host, as
|
||||
detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
|
||||
* ``$date``: the current date in the format YYYY-MM-DD
|
||||
* ``$spack_short_version``: the Spack version truncated to the first components.
|
||||
|
||||
|
||||
Note that, as with shell variables, you can write these as ``$varname``
|
||||
|
||||
@@ -184,7 +184,7 @@ Style Tests

Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
a series of style guides for Python that provide suggestions for everything
from variable naming to indentation. In order to limit the number of PRs that
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
@@ -333,9 +333,13 @@ inserting them at different places in the spack code base. Whenever a hook
type triggers by way of a function call, we find all the hooks of that type,
and run them.

Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
This module has to be registered in ``__init__.py`` so that Spack is aware of it.
This section will cover the basic kind of hooks, and how to write them.
Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
types of hooks in the ``__init__.py``, and then python files in that folder
can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
you can then write hook functions in that file that will be automatically detected,
and run whenever your hook is called. This section will cover the basic kind
of hooks, and how to write them.
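To make the mechanism concrete, here is a minimal sketch of such a module (a hypothetical ``lib/spack/spack/hooks/myintegration.py``; the ``post_install`` hook name exists in Spack, but its exact signature varies between versions, so treat this as an illustration rather than the canonical form):

.. code-block:: python

   # lib/spack/spack/hooks/myintegration.py -- hypothetical example module
   import llnl.util.tty as tty


   def post_install(spec, explicit=None):
       """Called by Spack after a package has been installed."""
       tty.msg(f"myintegration: installed {spec.name}@{spec.version}")

Which registration behavior applies (automatic discovery of the file vs. explicit registration in ``__init__.py``) depends on which side of this hunk your Spack checkout matches.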

^^^^^^^^^^^^^^
Types of Hooks
@@ -1,23 +0,0 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _in-the-news:
|
||||
|
||||
==================
|
||||
Spack In The News
|
||||
==================
|
||||
|
||||
Since its inception, Spack has been featured in many blog posts and news stories. Here we collect a
|
||||
|
||||
.. warning::
|
||||
Disclaimer:
|
||||
Spack is not responsible for and does not control the content of these external pages.
|
||||
Spack makes no representatations about their accuracy or applicability.
|
||||
|
||||
----
|
||||
2024
|
||||
----
|
||||
|
||||
- May 22, 2024: `Spack on Windows: A New Era in Cross-Platform Dependency Management <https://www.kitware.com/spack-on-windows-a-new-era-in-cross-platform-dependency-management/>`_ (Kitware)
|
||||
@@ -12,6 +12,10 @@
|
||||
Spack
|
||||
===================
|
||||
|
||||
.. epigraph::
|
||||
|
||||
`These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
|
||||
|
||||
Spack is a package management tool designed to support multiple
|
||||
versions and configurations of software on a wide variety of platforms
|
||||
and environments. It was designed for large supercomputing centers,
|
||||
@@ -97,13 +101,6 @@ or refer to the full manual below.
|
||||
build_systems
|
||||
developer_guide
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Testimonials
|
||||
|
||||
in_the_news
|
||||
user_reports
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: API Docs
|
||||
|
||||
@@ -2503,14 +2503,15 @@ with. For example, suppose that in the ``libdwarf`` package you write:
|
||||
|
||||
depends_on("libelf@0.8")
|
||||
|
||||
Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
|
||||
includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
|
||||
restrictions, you can also specify variants if this package requires
|
||||
optional features of the dependency.
|
||||
Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
|
||||
You can also specify a requirement for a particular variant or for
|
||||
specific compiler flags:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("libelf@0.8 +parser +pic")
|
||||
depends_on("libelf@0.8+debug")
|
||||
depends_on("libelf debug=True")
|
||||
depends_on("libelf cppflags='-fPIC'")
|
||||
|
||||
Both users *and* package authors can use the same spec syntax to refer
|
||||
to different package configurations. Users use the spec syntax on the
|
||||
@@ -2518,82 +2519,46 @@ command line to find installed packages or to install packages with
|
||||
particular constraints, and package authors can use specs to describe
|
||||
relationships between packages.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Specifying backward and forward compatibility
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^
|
||||
Version ranges
|
||||
^^^^^^^^^^^^^^
|
||||
|
||||
Packages are often compatible with a range of versions of their
|
||||
dependencies. This is typically referred to as backward and forward
|
||||
compatibility. Spack allows you to specify this in the ``depends_on``
|
||||
directive using version ranges.
|
||||
|
||||
**Backwards compatibility** means that the package requires at least a
|
||||
certain version of its dependency:
|
||||
Although some packages require a specific version for their dependencies,
|
||||
most can be built with a range of versions. For example, if you are
|
||||
writing a package for a legacy Python module that only works with Python
|
||||
2.4 through 2.6, this would look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("python@3.10:")
|
||||
depends_on("python@2.4:2.6")
|
||||
|
||||
In this case, the package requires Python 3.10 or newer.
|
||||
|
||||
Commonly, packages drop support for older versions of a dependency as
|
||||
they release new versions. In Spack you can conveniently add every
|
||||
backward compatibility rule as a separate line:
|
||||
Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
|
||||
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
|
||||
you want to specify that a package works with any version of Python 3 (or
|
||||
higher), this would look like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# backward compatibility with Python
|
||||
depends_on("python@3.8:")
|
||||
depends_on("python@3.9:", when="@1.2:")
|
||||
depends_on("python@3.10:", when="@1.4:")
|
||||
depends_on("python@3:")
|
||||
|
||||
This means that in general we need Python 3.8 or newer; from version
|
||||
1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
|
||||
need Python 3.10 or newer. Notice that it's fine to have overlapping
|
||||
ranges in the ``when`` clauses.
|
||||
|
||||
**Forward compatibility** means that the package requires at most a
|
||||
certain version of its dependency. Forward compatibility rules are
|
||||
necessary when there are breaking changes in the dependency that the
|
||||
package cannot handle. In Spack we often add forward compatibility
|
||||
bounds only at the time a new, breaking version of a dependency is
|
||||
released. As with backward compatibility, it is typical to see a list
|
||||
of forward compatibility bounds in a package file as seperate lines:
|
||||
Here we leave out the upper bound. If you want to say that a package
|
||||
requires Python 2, you can similarly leave out the lower bound:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# forward compatibility with Python
|
||||
depends_on("python@:3.12", when="@:1.10")
|
||||
depends_on("python@:3.13", when="@:1.12")
|
||||
depends_on("python@:2")
|
||||
|
||||
Notice how the ``:`` now appears before the version number both in the
dependency and in the ``when`` clause. This tells Spack that in general
we need Python 3.13 or older up to version ``1.12.x``, and up to version
``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
with Python 3.13 was added in version 1.11, while version 1.13 added forward
compatibility with Python 3.14.
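Combining the two conventions, a hypothetical package might carry both kinds of bounds side by side (the package and Python version numbers below are purely illustrative):

.. code-block:: python

   # backward compatibility: the minimum supported Python rises with newer releases
   depends_on("python@3.8:")
   depends_on("python@3.10:", when="@1.4:")
   # forward compatibility: releases that predate support for a new Python get an upper bound
   depends_on("python@:3.12", when="@:1.10")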
Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
|
||||
``@:3`` means "up to and including any 3.x version".
|
||||
|
||||
Notice that a version range ``@:3.12`` includes *any* patch version
|
||||
number ``3.12.x``, which is often useful when specifying forward compatibility
|
||||
bounds.
|
||||
|
||||
So far we have seen open-ended version ranges, which is by far the most
|
||||
common use case. It is also possible to specify both a lower and an upper bound
|
||||
on the version of a dependency, like this:
|
||||
You can also simply write
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("python@3.10:3.12")
|
||||
depends_on("python@2.7")
|
||||
|
||||
There is short syntax to specify that a package is compatible with say any
|
||||
``3.x`` version:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("python@3")
|
||||
|
||||
The above is equivalent to ``depends_on("python@3:3")``, which means at least
|
||||
Python version 3 and at most any version ``3.x.y``.
|
||||
to tell Spack that the package needs Python 2.7.x. This is equivalent to
|
||||
``@2.7:2.7``.
|
||||
|
||||
In very rare cases, you may need to specify an exact version, for example
|
||||
if you need to distinguish between ``3.2`` and ``3.2.1``:
|
||||
|
||||
@@ -59,7 +59,7 @@ Functional Example
|
||||
------------------
|
||||
|
||||
The simplest fully functional standalone example of a working pipeline can be
|
||||
examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
|
||||
examined live at this example `project <https://gitlab.com/scott.wittenburg/spack-pipeline-demo>`_
|
||||
on gitlab.com.
|
||||
|
||||
Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the
|
||||
@@ -67,46 +67,39 @@ pipeline:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
stages: [ "generate", "build" ]
|
||||
stages: [generate, build]
|
||||
|
||||
variables:
|
||||
SPACK_REPOSITORY: "https://github.com/spack/spack.git"
|
||||
SPACK_REF: "develop-2024-10-06"
|
||||
SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
|
||||
SPACK_BACKTRACE: 1
|
||||
SPACK_REPO: https://github.com/scottwittenburg/spack.git
|
||||
SPACK_REF: pipelines-reproducible-builds
|
||||
|
||||
generate-pipeline:
|
||||
tags:
|
||||
- saas-linux-small-amd64
|
||||
stage: generate
|
||||
tags:
|
||||
- docker
|
||||
image:
|
||||
name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
|
||||
script:
|
||||
- git clone ${SPACK_REPOSITORY}
|
||||
- cd spack && git checkout ${SPACK_REF} && cd ../
|
||||
name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
|
||||
entrypoint: [""]
|
||||
before_script:
|
||||
- git clone ${SPACK_REPO}
|
||||
- pushd spack && git checkout ${SPACK_REF} && popd
|
||||
- . "./spack/share/spack/setup-env.sh"
|
||||
- spack --version
|
||||
script:
|
||||
- spack env activate --without-view .
|
||||
- spack -d -v --color=always
|
||||
ci generate
|
||||
--check-index-only
|
||||
- spack -d ci generate
|
||||
--artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
|
||||
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
|
||||
artifacts:
|
||||
paths:
|
||||
- "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||
|
||||
build-pipeline:
|
||||
build-jobs:
|
||||
stage: build
|
||||
trigger:
|
||||
include:
|
||||
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||
- artifact: "jobs_scratch_dir/pipeline.yml"
|
||||
job: generate-pipeline
|
||||
strategy: depend
|
||||
needs:
|
||||
- artifacts: True
|
||||
job: generate-pipeline
|
||||
|
||||
|
||||
The key thing to note above is that there are two jobs: The first job to run,
|
||||
``generate-pipeline``, runs the ``spack ci generate`` command to generate a
|
||||
@@ -121,93 +114,82 @@ And here's the spack environment built by the pipeline represented as a
|
||||
spack:
|
||||
view: false
|
||||
concretizer:
|
||||
unify: true
|
||||
reuse: false
|
||||
unify: false
|
||||
|
||||
definitions:
|
||||
- pkgs:
|
||||
- zlib
|
||||
- bzip2 ~debug
|
||||
- compiler:
|
||||
- '%gcc'
|
||||
- bzip2
|
||||
- arch:
|
||||
- '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'
|
||||
|
||||
specs:
|
||||
- matrix:
|
||||
- - $pkgs
|
||||
- - $compiler
|
||||
- - $arch
|
||||
|
||||
mirrors: { "mirror": "s3://spack-public/mirror" }
|
||||
|
||||
ci:
|
||||
target: gitlab
|
||||
|
||||
enable-artifacts-buildcache: True
|
||||
rebuild-index: False
|
||||
pipeline-gen:
|
||||
- any-job:
|
||||
tags:
|
||||
- saas-linux-small-amd64
|
||||
image:
|
||||
name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
|
||||
before_script:
|
||||
- git clone ${SPACK_REPOSITORY}
|
||||
- cd spack && git checkout ${SPACK_REF} && cd ../
|
||||
- . "./spack/share/spack/setup-env.sh"
|
||||
- spack --version
|
||||
- export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
|
||||
- spack config blame mirrors
|
||||
- git clone ${SPACK_REPO}
|
||||
- pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
|
||||
- . "./spack/share/spack/setup-env.sh"
|
||||
- build-job:
|
||||
tags: [docker]
|
||||
image:
|
||||
name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
|
||||
entrypoint: [""]
|
||||
|
||||
|
||||
The elements of this file important to spack ci pipelines are described in more
|
||||
detail below, but there are a couple of things to note about the above working
|
||||
example:
|
||||
|
||||
.. note::
|
||||
The use of ``reuse: false`` in spack environments used for pipelines is
|
||||
almost always what you want, as without it your pipelines will not rebuild
|
||||
packages even if package hashes have changed. This is due to the concretizer
|
||||
strongly preferring known hashes when ``reuse: true``.
|
||||
There is no ``script`` attribute specified for here. The reason for this is
|
||||
Spack CI will automatically generate reasonable default scripts. More
|
||||
detail on what is in these scripts can be found below.
|
||||
|
||||
The ``ci`` section in the above environment file contains the bare minimum
|
||||
configuration required for ``spack ci generate`` to create a working pipeline.
|
||||
The ``target: gitlab`` tells spack that the desired pipeline output is for
|
||||
gitlab. However, this isn't strictly required, as currently gitlab is the
|
||||
only possible output format for pipelines. The ``pipeline-gen`` section
|
||||
contains the key information needed to specify attributes for the generated
|
||||
jobs. Notice that it contains a list which has only a single element in
|
||||
this case. In real pipelines it will almost certainly have more elements,
|
||||
and in those cases, order is important: spack starts at the bottom of the
|
||||
list and works upwards when applying attributes.
|
||||
Also notice the ``before_script`` section. It is required when using any of the
|
||||
default scripts to source the ``setup-env.sh`` script in order to inform
|
||||
the default scripts where to find the ``spack`` executable.
|
||||
|
||||
But in this simple case, we use only the special key ``any-job`` to
|
||||
indicate that spack should apply the specified attributes (``tags``, ``image``,
|
||||
and ``before_script``) to any job it generates. This includes jobs for
|
||||
building/pushing all packages, a ``rebuild-index`` job at the end of the
|
||||
pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
|
||||
no rebuilds are required.
|
||||
Normally ``enable-artifacts-buildcache`` is not recommended in production as it
|
||||
results in large binary artifacts getting transferred back and forth between
|
||||
gitlab and the runners. But in this example on gitlab.com where there is no
|
||||
shared, persistent file system, and where no secrets are stored for giving
|
||||
permission to write to an S3 bucket, ``enabled-buildcache-artifacts`` is the only
|
||||
way to propagate binaries from jobs to their dependents.
|
||||
|
||||
Something to note is that in this simple case, we rely on spack to
|
||||
generate a reasonable script for the package build jobs (it just creates
|
||||
a script that invokes ``spack ci rebuild``).
|
||||
Also, it is usually a good idea to let the pipeline generate a final "rebuild the
|
||||
buildcache index" job, so that subsequent pipeline generation can quickly determine
|
||||
which specs are up to date and which need to be rebuilt (it's a good idea for other
|
||||
reasons as well, but those are out of scope for this discussion). In this case we
|
||||
have disabled it (using ``rebuild-index: False``) because the index would only be
|
||||
generated in the artifacts mirror anyway, and consequently would not be available
|
||||
during subsequent pipeline runs.
|
||||
|
||||
Another thing to note is the use of the ``SPACK_USER_CONFIG_DIR`` environment
|
||||
variable in any generated jobs. The purpose of this is to make spack
|
||||
aware of one final file in the example, the one that contains the mirror
|
||||
configuration. This file, ``mirrors.yaml`` looks like this:
|
||||
.. note::
|
||||
With the addition of reproducible builds (#22887) a previously working
|
||||
pipeline will require some changes:
|
||||
|
||||
.. code-block:: yaml
|
||||
* In the build-jobs, the environment location changed.
|
||||
This will typically show as a ``KeyError`` in the failing job. Be sure to
|
||||
point to ``${SPACK_CONCRETE_ENV_DIR}``.
|
||||
|
||||
mirrors:
|
||||
buildcache-destination:
|
||||
url: oci://registry.gitlab.com/spack/pipeline-quickstart
|
||||
binary: true
|
||||
access_pair:
|
||||
id_variable: CI_REGISTRY_USER
|
||||
secret_variable: CI_REGISTRY_PASSWORD
|
||||
* When using ``include`` in your environment, be sure to make the included
|
||||
files available in the build jobs. This means adding those files to the
|
||||
artifact directory. Those files will also be missing in the reproducibility
|
||||
artifact.
|
||||
|
||||
|
||||
Note the name of the mirror is ``buildcache-destination``, which is required
|
||||
as of Spack 0.23 (see below for more information). The mirror url simply
|
||||
points to the container registry associated with the project, while
|
||||
``id_variable`` and ``secret_variable`` refer to to environment variables
|
||||
containing the access credentials for the mirror.
|
||||
|
||||
When spack builds packages for this example project, they will be pushed to
|
||||
the project container registry, where they will be available for subsequent
|
||||
jobs to install as dependencies, or for other pipelines to use to build runnable
|
||||
container images.
|
||||
* Because the location of the environment changed, including files with
|
||||
relative path may have to be adapted to work both in the project context
|
||||
(generation job) and in the concrete env dir context (build job).
|
||||
|
||||
-----------------------------------
|
||||
Spack commands supporting pipelines
|
||||
@@ -435,6 +417,15 @@ configuration with a ``script`` attribute. Specifying a signing job without a sc
|
||||
does not create a signing job and the job configuration attributes will be ignored.
|
||||
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^
|
||||
Cleanup (cleanup)
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
|
||||
created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
|
||||
script, but do expect that the spack command is in the path and require a
|
||||
``before_script`` to be specified that sources the ``setup-env.sh`` script.
|
||||
|
||||
.. _noop_jobs:
|
||||
|
||||
^^^^^^^^^^^^
|
||||
@@ -750,6 +741,15 @@ environment/stack file, and in that case no bootstrapping will be done (only the
|
||||
specs will be staged for building) and the runners will be expected to already
|
||||
have all needed compilers installed and configured for spack to use.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Pipeline Buildcache
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``enable-artifacts-buildcache`` key
|
||||
takes a boolean and determines whether the pipeline uses artifacts to store and
|
||||
pass along the buildcaches from one stage to the next (the default if you don't
|
||||
provide this option is ``False``).
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
Broken Specs URL
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
sphinx==8.1.3
|
||||
sphinx==7.4.7
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx_design==0.6.1
|
||||
sphinx-rtd-theme==3.0.1
|
||||
python-levenshtein==0.26.1
|
||||
docutils==0.21.2
|
||||
python-levenshtein==0.26.0
|
||||
docutils==0.20.1
|
||||
pygments==2.18.0
|
||||
urllib3==2.2.3
|
||||
pytest==8.3.3
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _user-reports:
|
||||
|
||||
============
|
||||
User Reports
|
||||
============
|
||||
|
||||
Spack has an active user community which deploys Spack in a multitude of contexts.
|
||||
Here we collect a growing list of reports by users on how they have applied Spack
|
||||
in their context.
|
||||
|
||||
If you have a user report that you think merits inclusion, feel free to open a pull
|
||||
request and add it to this list.
|
||||
|
||||
.. warning::
|
||||
Disclaimer:
|
||||
Spack is not responsible for and does not control the content of these external pages.
|
||||
Spack makes no representatations about their accuracy or applicability.
|
||||
|
||||
----
|
||||
2024
|
||||
----
|
||||
|
||||
- September 3, 2024: `Aidan Heerdegen: RRR: Reliability, Replicability, Reproducibility for climate models <https://www.youtube.com/watch?v=BVoVliqgx1U>`_ (ACCESS-NRI)
|
||||
238
lib/spack/env/cc
vendored
@@ -101,9 +101,10 @@ setsep() {
|
||||
esac
|
||||
}
|
||||
|
||||
# prepend LISTNAME ELEMENT
|
||||
# prepend LISTNAME ELEMENT [SEP]
|
||||
#
|
||||
# Prepend ELEMENT to the list stored in the variable LISTNAME.
|
||||
# Prepend ELEMENT to the list stored in the variable LISTNAME,
|
||||
# assuming the list is separated by SEP.
|
||||
# Handles empty lists and single-element lists.
|
||||
prepend() {
|
||||
varname="$1"
|
||||
@@ -237,36 +238,6 @@ esac
|
||||
}
|
||||
"
|
||||
|
||||
# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
|
||||
# which are used to prioritize paths when assembling the final command line.
|
||||
|
||||
# init_path_lists LISTNAME
|
||||
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
|
||||
init_path_lists() {
|
||||
eval "spack_store_$1=\"\""
|
||||
eval "$1=\"\""
|
||||
eval "system_$1=\"\""
|
||||
}
|
||||
|
||||
# assign_path_lists LISTNAME1 LISTNAME2
|
||||
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
|
||||
assign_path_lists() {
|
||||
eval "spack_store_$1=\"\${spack_store_$2}\""
|
||||
eval "$1=\"\${$2}\""
|
||||
eval "system_$1=\"\${system_$2}\""
|
||||
}
|
||||
|
||||
# append_path_lists LISTNAME ELT
|
||||
# Append the provided ELT to the appropriate list, based on the result of path_order().
|
||||
append_path_lists() {
|
||||
path_order "$2"
|
||||
case $? in
|
||||
0) eval "append spack_store_$1 \"\$2\"" ;;
|
||||
1) eval "append $1 \"\$2\"" ;;
|
||||
2) eval "append system_$1 \"\$2\"" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Check if optional parameters are defined
|
||||
# If we aren't asking for debug flags, don't add them
|
||||
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
|
||||
@@ -499,7 +470,12 @@ input_command="$*"
|
||||
parse_Wl() {
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
@@ -508,14 +484,24 @@ parse_Wl() {
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
fi
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
fi
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
wl_expect_rpath=yes
|
||||
@@ -523,7 +509,8 @@ parse_Wl() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
-Wl)
|
||||
# Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
|
||||
# Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
|
||||
# it.
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
@@ -542,10 +529,21 @@ categorize_arguments() {
|
||||
return_other_args_list=""
|
||||
return_isystem_was_used=""
|
||||
|
||||
init_path_lists return_isystem_include_dirs_list
|
||||
init_path_lists return_include_dirs_list
|
||||
init_path_lists return_lib_dirs_list
|
||||
init_path_lists return_rpath_dirs_list
|
||||
return_isystem_spack_store_include_dirs_list=""
|
||||
return_isystem_system_include_dirs_list=""
|
||||
return_isystem_include_dirs_list=""
|
||||
|
||||
return_spack_store_include_dirs_list=""
|
||||
return_system_include_dirs_list=""
|
||||
return_include_dirs_list=""
|
||||
|
||||
return_spack_store_lib_dirs_list=""
|
||||
return_system_lib_dirs_list=""
|
||||
return_lib_dirs_list=""
|
||||
|
||||
return_spack_store_rpath_dirs_list=""
|
||||
return_system_rpath_dirs_list=""
|
||||
return_rpath_dirs_list=""
|
||||
|
||||
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||
wl_expect_rpath=no
|
||||
@@ -611,17 +609,32 @@ categorize_arguments() {
|
||||
arg="${1#-isystem}"
|
||||
return_isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_isystem_include_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_isystem_include_dirs_list "$arg" ;;
|
||||
2) append return_isystem_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_include_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_include_dirs_list "$arg" ;;
|
||||
2) append return_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_lib_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_lib_dirs_list "$arg" ;;
|
||||
1) append return_lib_dirs_list "$arg" ;;
|
||||
2) append return_system_lib_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
@@ -654,17 +667,32 @@ categorize_arguments() {
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
@@ -681,36 +709,7 @@ categorize_arguments() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
# if mode is not ld, we can just add to other args
|
||||
if [ "$mode" != "ld" ]; then
|
||||
append return_other_args_list "$1"
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
# if we're in linker mode, we need to parse raw RPATH args
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
if [ $# -eq 1 ]; then
|
||||
# -rpath without value: let the linker raise an error.
|
||||
append return_other_args_list "$1"
|
||||
break
|
||||
fi
|
||||
shift
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
@@ -732,10 +731,21 @@ categorize_arguments() {
|
||||
|
||||
categorize_arguments "$@"
|
||||
|
||||
assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists include_dirs_list return_include_dirs_list
|
||||
assign_path_lists lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists rpath_dirs_list return_rpath_dirs_list
|
||||
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
other_args_list="$return_other_args_list"
|
||||
@@ -811,10 +821,21 @@ IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
|
||||
assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
|
||||
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
|
||||
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
@@ -873,7 +894,7 @@ esac
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
|
||||
extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
|
||||
else
|
||||
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
@@ -889,63 +910,64 @@ args_list="$flags_list"
|
||||
|
||||
# Include search paths partitioned by (in store, non-sytem, system)
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_store_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_flags_spack_store_include_dirs_list -I
|
||||
extend args_list spack_store_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_include_dirs_list -I
|
||||
extend args_list include_dirs_list -I
|
||||
|
||||
extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list system_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
|
||||
extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths partitioned by (in store, non-sytem, system)
|
||||
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
|
||||
extend args_list spack_store_lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
|
||||
extend args_list system_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
# RPATHs arguments
|
||||
rpath_prefix=""
|
||||
case "$mode" in
|
||||
ccld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
rpath_prefix="$rpath"
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
;;
|
||||
ld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
rpath_prefix="-rpath${lsep}"
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
;;
|
||||
esac
|
||||
|
||||
# if mode is ccld or ld, extend RPATH lists with the prefix determined above
|
||||
if [ -n "$rpath_prefix" ]; then
|
||||
extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list system_rpath_dirs_list "$rpath_prefix"
|
||||
fi
|
||||
|
||||
# Other arguments from the input command
|
||||
extend args_list other_args_list
|
||||
extend args_list spack_flags_other_args_list
|
||||
|
||||
@@ -20,11 +20,11 @@
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from itertools import accumulate
|
||||
from typing import Callable, Deque, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
|
||||
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
|
||||
|
||||
import llnl.util.symlink
|
||||
from llnl.util import tty
|
||||
from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
|
||||
from llnl.util.lang import dedupe, memoized
|
||||
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
|
||||
|
||||
from ..path import path_to_os_path, system_path_filter
|
||||
@@ -1673,40 +1673,28 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
|
||||
return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()
|
||||
|
||||
|
||||
def find(
|
||||
root: Union[str, List[str]],
|
||||
files: Union[str, List[str]],
|
||||
recursive: bool = True,
|
||||
max_depth: Optional[int] = None,
|
||||
) -> List[str]:
|
||||
"""Finds all non-directory files matching the filename patterns from ``files`` starting from
|
||||
``root``. This function returns a deterministic result for the same input and directory
|
||||
structure when run multiple times. Symlinked directories are followed, and unique directories
|
||||
are searched only once. Each matching file is returned only once at lowest depth in case
|
||||
multiple paths exist due to symlinked directories. The function has similarities to the Unix
|
||||
``find`` utility.
|
||||
def find(root, files, recursive=True):
|
||||
"""Search for ``files`` starting from the ``root`` directory.
|
||||
|
||||
Like GNU/BSD find but written entirely in Python.
|
||||
|
||||
Examples:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ find -L /usr -name python3 -type f
|
||||
$ find /usr -name python
|
||||
|
||||
is roughly equivalent to
|
||||
is equivalent to:
|
||||
|
||||
>>> find("/usr", "python3")
|
||||
|
||||
with the notable difference that this function only lists a single path to each file in case of
|
||||
symlinked directories.
|
||||
>>> find('/usr', 'python')
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ find -L /usr/local/bin /usr/local/sbin -maxdepth 1 '(' -name python3 -o -name getcap \\
|
||||
')' -type f
|
||||
$ find /usr/local/bin -maxdepth 1 -name python
|
||||
|
||||
is roughly equivalent to:
|
||||
is equivalent to:
|
||||
|
||||
>>> find(["/usr/local/bin", "/usr/local/sbin"], ["python3", "getcap"], recursive=False)
|
||||
>>> find('/usr/local/bin', 'python', recursive=False)
|
||||
|
||||
Accepts any glob characters accepted by fnmatch:
|
||||
|
||||
@@ -1720,116 +1708,70 @@ def find(
|
||||
========== ====================================
|
||||
|
||||
Parameters:
|
||||
root: One or more root directories to start searching from
|
||||
files: One or more filename patterns to search for
|
||||
recursive: if False search only root, if True descends from roots. Defaults to True.
|
||||
max_depth: if set, don't search below this depth. Cannot be set if recursive is False
|
||||
root (str): The root directory to start searching from
|
||||
files (str or collections.abc.Sequence): Library name(s) to search for
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to True.
|
||||
|
||||
Returns a list of absolute, matching file paths.
|
||||
Returns:
|
||||
list: The files that have been found
|
||||
"""
|
||||
if not isinstance(root, list):
|
||||
root = [root]
|
||||
|
||||
if not isinstance(files, list):
|
||||
if isinstance(files, str):
|
||||
files = [files]
|
||||
|
||||
# If recursive is false, max_depth can only be None or 0
|
||||
if max_depth and not recursive:
|
||||
raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
|
||||
if recursive:
|
||||
tty.debug(f"Find (recursive): {root} {str(files)}")
|
||||
result = _find_recursive(root, files)
|
||||
else:
|
||||
tty.debug(f"Find (not recursive): {root} {str(files)}")
|
||||
result = _find_non_recursive(root, files)
|
||||
|
||||
if not recursive:
|
||||
max_depth = 0
|
||||
elif max_depth is None:
|
||||
max_depth = sys.maxsize
|
||||
|
||||
tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
|
||||
result = _find_max_depth(root, files, max_depth)
|
||||
tty.debug(f"Find complete: {root} {files}")
|
||||
tty.debug(f"Find complete: {root} {str(files)}")
|
||||
return result
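The reworked `find()` above accepts one or more roots, several filename patterns, and an optional depth limit. A minimal, hedged usage sketch follows; the directories and patterns are illustrative only, and `llnl.util.filesystem` is assumed to be importable from a Spack checkout:

```python
# Hedged usage sketch for the new find() signature shown above; paths and
# patterns are hypothetical examples, not values taken from this change.
from llnl.util.filesystem import find

# Search two roots, at most two directory levels deep, for several patterns.
libs = find(["/usr/lib", "/usr/lib64"], ["libssl.so*", "libcrypto.so*"], max_depth=2)

# Non-recursive search of a single root; max_depth must stay unset (or 0).
pythons = find("/usr/bin", "python3*", recursive=False)
```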
|
||||
|
||||
|
||||
def _log_file_access_issue(e: OSError, path: str) -> None:
|
||||
errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
|
||||
tty.debug(f"find must skip {path}: {errno_name} {e}")
|
||||
@system_path_filter
|
||||
def _find_recursive(root, search_files):
|
||||
# The variable here is **on purpose** a defaultdict. The idea is that
|
||||
# we want to poke the filesystem as little as possible, but still maintain
|
||||
# stability in the order of the answer. Thus we are recording each library
|
||||
# found in a key, and reconstructing the stable order later.
|
||||
found_files = collections.defaultdict(list)
|
||||
|
||||
# Make the path absolute to have os.walk also return an absolute path
|
||||
root = os.path.abspath(root)
|
||||
for path, _, list_files in os.walk(root):
|
||||
for search_file in search_files:
|
||||
matches = glob.glob(os.path.join(path, search_file))
|
||||
matches = [os.path.join(path, x) for x in matches]
|
||||
found_files[search_file].extend(matches)
|
||||
|
||||
answer = []
|
||||
for search_file in search_files:
|
||||
answer.extend(found_files[search_file])
|
||||
|
||||
return answer
|
||||
|
||||
|
||||
def _dir_id(s: os.stat_result) -> Tuple[int, int]:
|
||||
# Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
|
||||
# https://github.com/python/cpython/blob/3.9/Python/fileutils.c
|
||||
return (s.st_ino, s.st_dev)
|
||||
@system_path_filter
|
||||
def _find_non_recursive(root, search_files):
|
||||
# The variable here is **on purpose** a defaultdict as os.list_dir
|
||||
# can return files in any order (does not preserve stability)
|
||||
found_files = collections.defaultdict(list)
|
||||
|
||||
# Make the path absolute to have absolute path returned
|
||||
root = os.path.abspath(root)
|
||||
|
||||
def _find_max_depth(roots: List[str], globs: List[str], max_depth: int = sys.maxsize) -> List[str]:
|
||||
"""See ``find`` for the public API."""
|
||||
# Apply normcase to file patterns and filenames to respect case insensitive filesystems
|
||||
regex, groups = fnmatch_translate_multiple([os.path.normcase(x) for x in globs])
|
||||
# Ordered dictionary that keeps track of the files found for each pattern
|
||||
capture_group_to_paths: Dict[str, List[str]] = {group: [] for group in groups}
|
||||
# Ensure returned paths are always absolute
|
||||
roots = [os.path.abspath(r) for r in roots]
|
||||
# Breadth-first search queue. Each element is a tuple of (depth, directory)
|
||||
dir_queue: Deque[Tuple[int, str]] = collections.deque()
|
||||
# Set of visited directories. Each element is a tuple of (inode, device)
|
||||
visited_dirs: Set[Tuple[int, int]] = set()
|
||||
for search_file in search_files:
|
||||
matches = glob.glob(os.path.join(root, search_file))
|
||||
matches = [os.path.join(root, x) for x in matches]
|
||||
found_files[search_file].extend(matches)
|
||||
|
||||
for root in roots:
|
||||
try:
|
||||
stat_root = os.stat(root)
|
||||
except OSError as e:
|
||||
_log_file_access_issue(e, root)
|
||||
continue
|
||||
dir_id = _dir_id(stat_root)
|
||||
if dir_id not in visited_dirs:
|
||||
dir_queue.appendleft((0, root))
|
||||
visited_dirs.add(dir_id)
|
||||
answer = []
|
||||
for search_file in search_files:
|
||||
answer.extend(found_files[search_file])
|
||||
|
||||
while dir_queue:
|
||||
depth, next_dir = dir_queue.pop()
|
||||
try:
|
||||
dir_iter = os.scandir(next_dir)
|
||||
except OSError as e:
|
||||
_log_file_access_issue(e, next_dir)
|
||||
continue
|
||||
|
||||
with dir_iter:
|
||||
ordered_entries = sorted(dir_iter, key=lambda x: x.name)
|
||||
for dir_entry in ordered_entries:
|
||||
try:
|
||||
it_is_a_dir = dir_entry.is_dir(follow_symlinks=True)
|
||||
except OSError as e:
|
||||
# Possible permission issue, or a symlink that cannot be resolved (ELOOP).
|
||||
_log_file_access_issue(e, dir_entry.path)
|
||||
continue
|
||||
|
||||
if it_is_a_dir and depth < max_depth:
|
||||
try:
|
||||
# The stat should be performed in a try/except block. We repeat that here
|
||||
# vs. moving to the above block because we only want to call `stat` if we
|
||||
# haven't exceeded our max_depth
|
||||
if sys.platform == "win32":
|
||||
# Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we
|
||||
# have to call os.stat
|
||||
stat_info = os.stat(dir_entry.path, follow_symlinks=True)
|
||||
else:
|
||||
stat_info = dir_entry.stat(follow_symlinks=True)
|
||||
except OSError as e:
|
||||
_log_file_access_issue(e, dir_entry.path)
|
||||
continue
|
||||
|
||||
dir_id = _dir_id(stat_info)
|
||||
if dir_id not in visited_dirs:
|
||||
dir_queue.appendleft((depth + 1, dir_entry.path))
|
||||
visited_dirs.add(dir_id)
|
||||
else:
|
||||
m = regex.match(os.path.normcase(os.path.basename(dir_entry.path)))
|
||||
if not m:
|
||||
continue
|
||||
for group in capture_group_to_paths:
|
||||
if m.group(group):
|
||||
capture_group_to_paths[group].append(dir_entry.path)
|
||||
break
|
||||
|
||||
return [path for paths in capture_group_to_paths.values() for path in paths]
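`_find_max_depth` avoids revisiting directories (and looping through symlink cycles) by identifying every directory with its `(st_ino, st_dev)` pair. The following is a stripped-down, self-contained sketch of that idea only, with the OSError handling and Windows special-casing of the real code omitted:

```python
# Minimal sketch of breadth-first traversal de-duplicated by (inode, device);
# not the Spack implementation, just the core technique with error handling omitted.
import collections
import os
from typing import List, Set, Tuple


def walk_once(root: str, max_depth: int) -> List[str]:
    root = os.path.abspath(root)
    root_stat = os.stat(root)
    seen: Set[Tuple[int, int]] = {(root_stat.st_ino, root_stat.st_dev)}
    queue = collections.deque([(0, root)])
    found: List[str] = []
    while queue:
        depth, current = queue.popleft()
        for entry in sorted(os.scandir(current), key=lambda e: e.name):
            if entry.is_dir(follow_symlinks=True):
                st = os.stat(entry.path, follow_symlinks=True)
                key = (st.st_ino, st.st_dev)
                # descend only into directories we have not seen before
                if key not in seen and depth < max_depth:
                    seen.add(key)
                    queue.append((depth + 1, entry.path))
            else:
                found.append(entry.path)
    return found
```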
|
||||
return answer
|
||||
|
||||
|
||||
# Utilities for libraries and headers
|
||||
@@ -2268,9 +2210,7 @@ def find_system_libraries(libraries, shared=True):
|
||||
return libraries_found
|
||||
|
||||
|
||||
def find_libraries(
|
||||
libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
|
||||
):
|
||||
def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
|
||||
"""Returns an iterable of full paths to libraries found in a root dir.
|
||||
|
||||
Accepts any glob characters accepted by fnmatch:
|
||||
@@ -2291,8 +2231,6 @@ def find_libraries(
|
||||
otherwise for static. Defaults to True.
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to False.
|
||||
max_depth (int): if set, don't search below this depth. Cannot be set
|
||||
if recursive is False
|
||||
runtime (bool): Windows only option, no-op elsewhere. If true,
|
||||
search for runtime shared libs (.DLL), otherwise, search
|
||||
for .Lib files. If shared is false, this has no meaning.
|
||||
@@ -2301,7 +2239,6 @@ def find_libraries(
|
||||
Returns:
|
||||
LibraryList: The libraries that have been found
|
||||
"""
|
||||
|
||||
if isinstance(libraries, str):
|
||||
libraries = [libraries]
|
||||
elif not isinstance(libraries, collections.abc.Sequence):
|
||||
@@ -2334,10 +2271,8 @@ def find_libraries(
|
||||
libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]
|
||||
|
||||
if not recursive:
|
||||
if max_depth:
|
||||
raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
|
||||
# If not recursive, look for the libraries directly in root
|
||||
return LibraryList(find(root, libraries, recursive=False))
|
||||
return LibraryList(find(root, libraries, False))
|
||||
|
||||
# To speedup the search for external packages configured e.g. in /usr,
|
||||
# perform first non-recursive search in root/lib then in root/lib64 and
|
||||
@@ -2355,7 +2290,7 @@ def find_libraries(
|
||||
if found_libs:
|
||||
break
|
||||
else:
|
||||
found_libs = find(root, libraries, recursive=True, max_depth=max_depth)
|
||||
found_libs = find(root, libraries, True)
|
||||
|
||||
return LibraryList(found_libs)
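With the extra `max_depth` parameter threaded through `find_libraries`, callers can bound how deep the recursive search goes. A hedged example; the prefix and library names are hypothetical:

```python
# Illustrative only: bound a recursive library search to two levels below a prefix.
from llnl.util.filesystem import find_libraries

libs = find_libraries(["libz", "libbz2"], "/opt/example/prefix", shared=True,
                      recursive=True, max_depth=2)

# max_depth must be omitted when recursive=False, otherwise a ValueError is raised.
static = find_libraries("libz", "/opt/example/prefix/lib", shared=False, recursive=False)
```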
|
||||
|
||||
|
||||
@@ -5,15 +5,12 @@
|
||||
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import fnmatch
|
||||
import functools
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import typing
|
||||
import warnings
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Callable, Iterable, List, Tuple, TypeVar
|
||||
|
||||
@@ -861,32 +858,6 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
|
||||
return line_list
|
||||
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
PatternStr = re.Pattern[str]
|
||||
else:
|
||||
PatternStr = typing.Pattern[str]
|
||||
|
||||
|
||||
def fnmatch_translate_multiple(patterns: List[str]) -> Tuple[PatternStr, List[str]]:
|
||||
"""Same as fnmatch.translate, but creates a single regex of the form
|
||||
``(?P<pattern0>...)|(?P<pattern1>...)|...`` for each pattern in the iterable, where
|
||||
``patternN`` is a named capture group that matches the corresponding pattern translated by
|
||||
``fnmatch.translate``. This can be used to match multiple patterns in a single pass. No case
|
||||
normalization is performed on the patterns.
|
||||
|
||||
Args:
|
||||
patterns: list of fnmatch patterns
|
||||
|
||||
Returns:
|
||||
Tuple of the combined regex and the list of named capture groups corresponding to each
|
||||
pattern in the input list.
|
||||
"""
|
||||
groups = [f"pattern{i}" for i in range(len(patterns))]
|
||||
regexes = (fnmatch.translate(p) for p in patterns)
|
||||
combined = re.compile("|".join(f"(?P<{g}>{r})" for g, r in zip(groups, regexes)))
|
||||
return combined, groups
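Using the helper above, several fnmatch patterns can be tested in a single pass, and the named capture group tells the caller which pattern matched. A small sketch; the file names are made up:

```python
# Hedged usage sketch of fnmatch_translate_multiple (defined above); names are illustrative.
import os

regex, groups = fnmatch_translate_multiple([os.path.normcase(p) for p in ["libz.so*", "*.dylib"]])
for name in ["libz.so.1", "libfoo.dylib", "README"]:
    m = regex.match(os.path.normcase(name))
    if m:
        hit = next(g for g in groups if m.group(g))   # which pattern matched
        print(f"{name} matched via group {hit}")
```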
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def nullcontext(*args, **kwargs):
|
||||
"""Empty context manager.
|
||||
@@ -899,6 +870,15 @@ class UnhashableArguments(TypeError):
|
||||
"""Raise when an @memoized function receives unhashable arg or kwarg values."""
|
||||
|
||||
|
||||
def enum(**kwargs):
|
||||
"""Return an enum-like class.
|
||||
|
||||
Args:
|
||||
**kwargs: explicit dictionary of enums
|
||||
"""
|
||||
return type("Enum", (object,), kwargs)
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
@@ -934,21 +914,6 @@ def ensure_last(lst, *elements):
|
||||
lst.append(lst.pop(lst.index(elt)))
|
||||
|
||||
|
||||
class Const:
|
||||
"""Class level constant, raises when trying to set the attribute"""
|
||||
|
||||
__slots__ = ["value"]
|
||||
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
return self.value
|
||||
|
||||
def __set__(self, instance, value):
|
||||
raise TypeError(f"Const value does not support assignment [value={self.value}]")
|
||||
|
||||
|
||||
class TypedMutableSequence(collections.abc.MutableSequence):
|
||||
"""Base class that behaves like a list, just with a different type.
|
||||
|
||||
@@ -1053,42 +1018,3 @@ def __init__(self, callback):
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
return self.callback(owner)
|
||||
|
||||
|
||||
class DeprecatedProperty:
|
||||
"""Data descriptor to error or warn when a deprecated property is accessed.
|
||||
|
||||
Derived classes must define a factory method to return an adaptor for the deprecated
|
||||
property, if the descriptor is not set to error.
|
||||
"""
|
||||
|
||||
__slots__ = ["name"]
|
||||
|
||||
#: 0 - Nothing
|
||||
#: 1 - Warning
|
||||
#: 2 - Error
|
||||
error_lvl = 0
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
self.name = name
|
||||
|
||||
def __get__(self, instance, owner):
|
||||
if instance is None:
|
||||
return self
|
||||
|
||||
if self.error_lvl == 1:
|
||||
warnings.warn(
|
||||
f"accessing the '{self.name}' property of '{instance}', which is deprecated"
|
||||
)
|
||||
elif self.error_lvl == 2:
|
||||
raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")
|
||||
|
||||
return self.factory(instance, owner)
|
||||
|
||||
def __set__(self, instance, value):
|
||||
raise TypeError(
|
||||
f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
|
||||
)
|
||||
|
||||
def factory(self, instance, owner):
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
@@ -263,9 +263,7 @@ def match_to_ansi(match):
|
||||
f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
|
||||
)
|
||||
|
||||
color_number = colors.get(color_code, "")
|
||||
semi = ";" if color_number else ""
|
||||
ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
|
||||
ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
|
||||
if text:
|
||||
return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
|
||||
else:
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
import errno
|
||||
import io
|
||||
import multiprocessing
|
||||
import multiprocessing.connection
|
||||
import os
|
||||
import re
|
||||
import select
|
||||
@@ -18,10 +19,9 @@
|
||||
import threading
|
||||
import traceback
|
||||
from contextlib import contextmanager
|
||||
from multiprocessing.connection import Connection
|
||||
from threading import Thread
|
||||
from types import ModuleType
|
||||
from typing import Callable, Optional
|
||||
from typing import Optional
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
@@ -345,6 +345,48 @@ def close(self):
|
||||
self.file.close()
|
||||
|
||||
|
||||
class MultiProcessFd:
|
||||
"""Return an object which stores a file descriptor and can be passed as an
|
||||
argument to a function run with ``multiprocessing.Process``, such that
|
||||
the file descriptor is available in the subprocess. It provides access via
|
||||
the `fd` property.
|
||||
|
||||
This object takes control over the associated FD: files opened from this
|
||||
using `fdopen` need to use `closefd=False`.
|
||||
"""
|
||||
|
||||
# As for why you have to fdopen(..., closefd=False): when a
|
||||
# multiprocessing.connection.Connection object stores an fd, it assumes
|
||||
# control over it, and will attempt to close it when gc'ed during __del__;
|
||||
# if you fdopen(multiprocessfd.fd, closefd=True) then the resulting file
|
||||
# will also assume control, and you can see warnings when there is an
|
||||
# attempted double close.
|
||||
|
||||
def __init__(self, fd):
|
||||
self._connection = None
|
||||
self._fd = None
|
||||
if sys.version_info >= (3, 8):
|
||||
self._connection = multiprocessing.connection.Connection(fd)
|
||||
else:
|
||||
self._fd = fd
|
||||
|
||||
@property
|
||||
def fd(self):
|
||||
if self._connection:
|
||||
return self._connection.fileno()
|
||||
else:
|
||||
return self._fd
|
||||
|
||||
def close(self):
|
||||
"""Rather than `.close()`ing any file opened from the associated
|
||||
`.fd`, the `MultiProcessFd` should be closed with this.
|
||||
"""
|
||||
if self._connection:
|
||||
self._connection.close()
|
||||
else:
|
||||
os.close(self._fd)
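The comment above boils down to a single ownership rule: the `MultiProcessFd` (through its `Connection`) owns the descriptor, so any file object layered on top must borrow it with `closefd=False`, and only `MultiProcessFd.close()` releases it. A Unix-flavoured sketch of that rule, assuming the class as defined above:

```python
# Hedged sketch of the closefd=False ownership rule described above (POSIX only).
import os

read_end, write_end = os.pipe()
mp_fd = MultiProcessFd(read_end)                   # takes ownership of read_end

os.write(write_end, b"hello\n")
os.close(write_end)

reader = os.fdopen(mp_fd.fd, "r", closefd=False)   # borrow the fd, do not own it
print(reader.read())                               # -> "hello\n"
reader.close()                                     # closes only the Python wrapper

mp_fd.close()                                      # the owner finally closes the fd
```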
|
||||
|
||||
|
||||
@contextmanager
|
||||
def replace_environment(env):
|
||||
"""Replace the current environment (`os.environ`) with `env`.
|
||||
@@ -502,20 +544,22 @@ def __enter__(self):
|
||||
# forcing debug output.
|
||||
self._saved_debug = tty._debug
|
||||
|
||||
# Pipe for redirecting output to logger
|
||||
read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)
|
||||
# OS-level pipe for redirecting output to logger
|
||||
read_fd, write_fd = os.pipe()
|
||||
|
||||
# Pipe for communication back from the daemon
|
||||
read_multiprocess_fd = MultiProcessFd(read_fd)
|
||||
|
||||
# Multiprocessing pipe for communication back from the daemon
|
||||
# Currently only used to save echo value between uses
|
||||
self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)
|
||||
self.parent_pipe, child_pipe = multiprocessing.Pipe()
|
||||
|
||||
# Sets a daemon that writes to file what it reads from a pipe
|
||||
try:
|
||||
# need to pass this b/c multiprocessing closes stdin in child.
|
||||
input_fd = None
|
||||
input_multiprocess_fd = None
|
||||
try:
|
||||
if sys.stdin.isatty():
|
||||
input_fd = Connection(os.dup(sys.stdin.fileno()))
|
||||
input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
|
||||
except BaseException:
|
||||
# just don't forward input if this fails
|
||||
pass
|
||||
@@ -524,9 +568,9 @@ def __enter__(self):
|
||||
self.process = multiprocessing.Process(
|
||||
target=_writer_daemon,
|
||||
args=(
|
||||
input_fd,
|
||||
read_fd,
|
||||
self.write_fd,
|
||||
input_multiprocess_fd,
|
||||
read_multiprocess_fd,
|
||||
write_fd,
|
||||
self.echo,
|
||||
self.log_file,
|
||||
child_pipe,
|
||||
@@ -537,9 +581,9 @@ def __enter__(self):
|
||||
self.process.start()
|
||||
|
||||
finally:
|
||||
if input_fd:
|
||||
input_fd.close()
|
||||
read_fd.close()
|
||||
if input_multiprocess_fd:
|
||||
input_multiprocess_fd.close()
|
||||
read_multiprocess_fd.close()
|
||||
|
||||
# Flush immediately before redirecting so that anything buffered
|
||||
# goes to the original stream
|
||||
@@ -557,9 +601,9 @@ def __enter__(self):
|
||||
self._saved_stderr = os.dup(sys.stderr.fileno())
|
||||
|
||||
# redirect to the pipe we created above
|
||||
os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
|
||||
os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
|
||||
self.write_fd.close()
|
||||
os.dup2(write_fd, sys.stdout.fileno())
|
||||
os.dup2(write_fd, sys.stderr.fileno())
|
||||
os.close(write_fd)
|
||||
|
||||
else:
|
||||
# Handle I/O the Python way. This won't redirect lower-level
|
||||
@@ -572,7 +616,7 @@ def __enter__(self):
|
||||
self._saved_stderr = sys.stderr
|
||||
|
||||
# create a file object for the pipe; redirect to it.
|
||||
pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
|
||||
pipe_fd_out = os.fdopen(write_fd, "w")
|
||||
sys.stdout = pipe_fd_out
|
||||
sys.stderr = pipe_fd_out
|
||||
|
||||
@@ -608,7 +652,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
else:
|
||||
sys.stdout = self._saved_stdout
|
||||
sys.stderr = self._saved_stderr
|
||||
self.write_fd.close()
|
||||
|
||||
# print log contents in parent if needed.
|
||||
if self.log_file.write_in_parent:
|
||||
@@ -822,14 +865,14 @@ def force_echo(self):
|
||||
|
||||
|
||||
def _writer_daemon(
|
||||
stdin_fd: Optional[Connection],
|
||||
read_fd: Connection,
|
||||
write_fd: Connection,
|
||||
echo: bool,
|
||||
log_file_wrapper: FileWrapper,
|
||||
control_fd: Connection,
|
||||
filter_fn: Optional[Callable[[str], str]],
|
||||
) -> None:
|
||||
stdin_multiprocess_fd,
|
||||
read_multiprocess_fd,
|
||||
write_fd,
|
||||
echo,
|
||||
log_file_wrapper,
|
||||
control_pipe,
|
||||
filter_fn,
|
||||
):
|
||||
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.
|
||||
|
||||
The daemon receives output from the parent process and writes it both
|
||||
@@ -866,37 +909,43 @@ def _writer_daemon(
|
||||
``StringIO`` in the parent. This is mainly for testing.
|
||||
|
||||
Arguments:
|
||||
stdin_fd: optional input from the terminal
|
||||
read_fd: pipe for reading from parent's redirected stdout
|
||||
echo: initial echo setting -- controlled by user and preserved across multiple writer
|
||||
daemons
|
||||
log_file_wrapper: file to log all output
|
||||
control_pipe: multiprocessing pipe on which to send control information to the parent
|
||||
filter_fn: optional function to filter each line of output
|
||||
stdin_multiprocess_fd (int): input from the terminal
|
||||
read_multiprocess_fd (int): pipe for reading from parent's redirected
|
||||
stdout
|
||||
echo (bool): initial echo setting -- controlled by user and
|
||||
preserved across multiple writer daemons
|
||||
log_file_wrapper (FileWrapper): file to log all output
|
||||
control_pipe (Pipe): multiprocessing pipe on which to send control
|
||||
information to the parent
|
||||
filter_fn (callable, optional): function to filter each line of output
|
||||
|
||||
"""
|
||||
# This process depends on closing all instances of write_pipe to terminate the reading loop
|
||||
write_fd.close()
|
||||
# If this process was forked, then it will inherit file descriptors from
|
||||
# the parent process. This process depends on closing all instances of
|
||||
# write_fd to terminate the reading loop, so we close the file descriptor
|
||||
# here. Forking is the process spawning method everywhere except Mac OS
|
||||
# for Python >= 3.8 and on Windows
|
||||
if sys.version_info < (3, 8) or sys.platform != "darwin":
|
||||
os.close(write_fd)
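The comment above relies on standard pipe semantics: a reader only sees EOF once every copy of the write end is closed, so a forked child must drop the write descriptor it inherited. A standalone, POSIX-only illustration of that behaviour (not Spack code):

```python
# Hedged, fork-based illustration (POSIX only) of why inherited write ends must be closed.
import os

read_end, write_end = os.pipe()
pid = os.fork()

if pid == 0:                      # child: reader
    os.close(write_end)           # drop the inherited copy of the write end
    with os.fdopen(read_end) as pipe:
        data = pipe.read()        # returns once the parent closes write_end
    os._exit(0)
else:                             # parent: writer
    os.close(read_end)
    os.write(write_end, b"done\n")
    os.close(write_end)           # without this, the child's read() would block forever
    os.waitpid(pid, 0)
```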
|
||||
|
||||
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
|
||||
# that prevents unbuffered text I/O.
|
||||
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
|
||||
# 3. closefd=False because Connection has "ownership"
|
||||
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
|
||||
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8", closefd=False)
|
||||
|
||||
if stdin_fd:
|
||||
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
|
||||
if stdin_multiprocess_fd:
|
||||
stdin = os.fdopen(stdin_multiprocess_fd.fd, closefd=False)
|
||||
else:
|
||||
stdin_file = None
|
||||
stdin = None
|
||||
|
||||
# list of streams to select from
|
||||
istreams = [read_file, stdin_file] if stdin_file else [read_file]
|
||||
istreams = [in_pipe, stdin] if stdin else [in_pipe]
|
||||
force_echo = False # parent can force echo for certain output
|
||||
|
||||
log_file = log_file_wrapper.unwrap()
|
||||
|
||||
try:
|
||||
with keyboard_input(stdin_file) as kb:
|
||||
with keyboard_input(stdin) as kb:
|
||||
while True:
|
||||
# fix the terminal settings if we recently came to
|
||||
# the foreground
|
||||
@@ -909,12 +958,12 @@ def _writer_daemon(
|
||||
# Allow user to toggle echo with 'v' key.
|
||||
# Currently ignores other chars.
|
||||
# only read stdin if we're in the foreground
|
||||
if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
|
||||
if stdin in rlist and not _is_background_tty(stdin):
|
||||
# it's possible to be backgrounded between the above
|
||||
# check and the read, so we ignore SIGTTIN here.
|
||||
with ignore_signal(signal.SIGTTIN):
|
||||
try:
|
||||
if stdin_file.read(1) == "v":
|
||||
if stdin.read(1) == "v":
|
||||
echo = not echo
|
||||
except IOError as e:
|
||||
# If SIGTTIN is ignored, the system gives EIO
|
||||
@@ -923,13 +972,13 @@ def _writer_daemon(
|
||||
if e.errno != errno.EIO:
|
||||
raise
|
||||
|
||||
if read_file in rlist:
|
||||
if in_pipe in rlist:
|
||||
line_count = 0
|
||||
try:
|
||||
while line_count < 100:
|
||||
# Handle output from the calling process.
|
||||
try:
|
||||
line = _retry(read_file.readline)()
|
||||
line = _retry(in_pipe.readline)()
|
||||
except UnicodeDecodeError:
|
||||
# installs like --test=root gpgme produce non-UTF8 logs
|
||||
line = "<line lost: output was not encoded as UTF-8>\n"
|
||||
@@ -958,7 +1007,7 @@ def _writer_daemon(
|
||||
if xoff in controls:
|
||||
force_echo = False
|
||||
|
||||
if not _input_available(read_file):
|
||||
if not _input_available(in_pipe):
|
||||
break
|
||||
finally:
|
||||
if line_count > 0:
|
||||
@@ -973,14 +1022,14 @@ def _writer_daemon(
|
||||
finally:
|
||||
# send written data back to parent if we used a StringIO
|
||||
if isinstance(log_file, io.StringIO):
|
||||
control_fd.send(log_file.getvalue())
|
||||
control_pipe.send(log_file.getvalue())
|
||||
log_file_wrapper.close()
|
||||
read_fd.close()
|
||||
if stdin_fd:
|
||||
stdin_fd.close()
|
||||
read_multiprocess_fd.close()
|
||||
if stdin_multiprocess_fd:
|
||||
stdin_multiprocess_fd.close()
|
||||
|
||||
# send echo value back to the parent so it can be preserved.
|
||||
control_fd.send(echo)
|
||||
control_pipe.send(echo)
|
||||
|
||||
|
||||
def _retry(function):
|
||||
|
||||
@@ -69,15 +69,4 @@ def get_version() -> str:
|
||||
return spack_version
|
||||
|
||||
|
||||
def get_short_version() -> str:
|
||||
"""Short Spack version."""
|
||||
return f"{spack_version_info[0]}.{spack_version_info[1]}"
|
||||
|
||||
|
||||
__all__ = [
|
||||
"spack_version_info",
|
||||
"spack_version",
|
||||
"get_version",
|
||||
"get_spack_commit",
|
||||
"get_short_version",
|
||||
]
|
||||
__all__ = ["spack_version_info", "spack_version", "get_version", "get_spack_commit"]
|
||||
|
||||
@@ -722,8 +722,9 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
|
||||
)
|
||||
builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]
|
||||
|
||||
module = pkg_cls.module
|
||||
has_builders_in_package_py = any(
|
||||
spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
|
||||
getattr(module, name, False) for name in builder_cls_names
|
||||
)
|
||||
if not has_builders_in_package_py:
|
||||
continue
|
||||
@@ -805,7 +806,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
|
||||
|
||||
file = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
tree = ast.parse(open(file).read())
|
||||
visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
|
||||
visitor = DeprecatedMagicGlobals(("std_cmake_args",))
|
||||
visitor.visit(tree)
|
||||
if visitor.references_to_globals:
|
||||
errors.append(
|
||||
|
||||
@@ -252,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
|
||||
spec_list = [
|
||||
s
|
||||
for s in db.query_local(installed=any)
|
||||
for s in db.query_local(installed=any, in_buildcache=any)
|
||||
if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
|
||||
]
|
||||
|
||||
|
||||
@@ -37,7 +37,6 @@
|
||||
import spack.binary_distribution
|
||||
import spack.config
|
||||
import spack.detection
|
||||
import spack.mirror
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -45,6 +44,7 @@
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml
|
||||
import spack.util.url
|
||||
import spack.version
|
||||
from spack.installer import PackageInstaller
|
||||
|
||||
@@ -91,7 +91,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
|
||||
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
|
||||
|
||||
# Promote (relative) paths to file urls
|
||||
self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
|
||||
url = conf["info"]["url"]
|
||||
if spack.util.url.is_path_instead_of_url(url):
|
||||
if not os.path.isabs(url):
|
||||
url = os.path.join(self.metadata_dir, url)
|
||||
url = spack.util.url.path_to_file_url(url)
|
||||
self.url = url
|
||||
|
||||
@property
|
||||
def mirror_scope(self) -> spack.config.InternalConfigScope:
|
||||
@@ -602,10 +607,7 @@ def bootstrapping_sources(scope: Optional[str] = None):
|
||||
current = copy.copy(entry)
|
||||
metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
|
||||
metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
|
||||
try:
|
||||
with open(metadata_yaml, encoding="utf-8") as stream:
|
||||
current.update(spack.util.spack_yaml.load(stream))
|
||||
list_of_sources.append(current)
|
||||
except OSError:
|
||||
pass
|
||||
with open(metadata_yaml, encoding="utf-8") as stream:
|
||||
current.update(spack.util.spack_yaml.load(stream))
|
||||
list_of_sources.append(current)
|
||||
return list_of_sources
|
||||
|
||||
@@ -44,7 +44,6 @@
|
||||
from collections import defaultdict
|
||||
from enum import Flag, auto
|
||||
from itertools import chain
|
||||
from multiprocessing.connection import Connection
|
||||
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
@@ -55,6 +54,7 @@
|
||||
from llnl.util.lang import dedupe, stable_partition
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
from llnl.util.tty.log import MultiProcessFd
|
||||
|
||||
import spack.build_systems._checks
|
||||
import spack.build_systems.cmake
|
||||
@@ -1061,8 +1061,8 @@ def set_all_package_py_globals(self):
|
||||
pkg.setup_dependent_package(dependent_module, spec)
|
||||
dependent_module.propagate_changes_to_mro()
|
||||
|
||||
pkg = self.specs[0].package
|
||||
if self.context == Context.BUILD:
|
||||
pkg = self.specs[0].package
|
||||
module = ModuleChangePropagator(pkg)
|
||||
# std_cmake_args is not sufficiently static to be defined
|
||||
# in set_package_py_globals and is deprecated so its handled
|
||||
@@ -1143,10 +1143,10 @@ def _setup_pkg_and_run(
|
||||
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
||||
function: Callable,
|
||||
kwargs: Dict,
|
||||
write_pipe: Connection,
|
||||
input_pipe: Optional[Connection],
|
||||
jsfd1: Optional[Connection],
|
||||
jsfd2: Optional[Connection],
|
||||
write_pipe: multiprocessing.connection.Connection,
|
||||
input_multiprocess_fd: Optional[MultiProcessFd],
|
||||
jsfd1: Optional[MultiProcessFd],
|
||||
jsfd2: Optional[MultiProcessFd],
|
||||
):
|
||||
"""Main entry point in the child process for Spack builds.
|
||||
|
||||
@@ -1188,12 +1188,13 @@ def _setup_pkg_and_run(
|
||||
context: str = kwargs.get("context", "build")
|
||||
|
||||
try:
|
||||
# We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
|
||||
# process and its parent from simultaneously reading from the original stdin. But, we
|
||||
# assume that the parent process is not going to read from it till we are done with the
|
||||
# child, so we undo Python's precaution. closefd=False since Connection has ownership.
|
||||
if input_pipe is not None:
|
||||
sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)
|
||||
# We are in the child process. Python sets sys.stdin to
|
||||
# open(os.devnull) to prevent our process and its parent from
|
||||
# simultaneously reading from the original stdin. But, we assume
|
||||
# that the parent process is not going to read from it till we
|
||||
# are done with the child, so we undo Python's precaution.
|
||||
if input_multiprocess_fd is not None:
|
||||
sys.stdin = os.fdopen(input_multiprocess_fd.fd, closefd=False)
|
||||
|
||||
pkg = serialized_pkg.restore()
|
||||
|
||||
@@ -1262,8 +1263,8 @@ def _setup_pkg_and_run(
|
||||
|
||||
finally:
|
||||
write_pipe.close()
|
||||
if input_pipe is not None:
|
||||
input_pipe.close()
|
||||
if input_multiprocess_fd is not None:
|
||||
input_multiprocess_fd.close()
|
||||
|
||||
|
||||
def start_build_process(pkg, function, kwargs):
|
||||
@@ -1290,9 +1291,23 @@ def child_fun():
|
||||
If something goes wrong, the child process catches the error and
|
||||
passes it to the parent wrapped in a ChildError. The parent is
|
||||
expected to handle (or re-raise) the ChildError.
|
||||
|
||||
This uses `multiprocessing.Process` to create the child process. The
|
||||
mechanism used to create the process differs on different operating
|
||||
systems and for different versions of Python. In some cases "fork"
|
||||
is used (i.e. the "fork" system call) and some cases it starts an
|
||||
entirely new Python interpreter process (in the docs this is referred
|
||||
to as the "spawn" start method). Breaking it down by OS:
|
||||
|
||||
- Linux always uses fork.
|
||||
- Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
|
||||
- Windows always uses the "spawn" start method.
|
||||
|
||||
For more information on `multiprocessing` child process creation
|
||||
mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
|
||||
"""
|
||||
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
|
||||
input_fd = None
|
||||
input_multiprocess_fd = None
|
||||
jobserver_fd1 = None
|
||||
jobserver_fd2 = None
|
||||
|
||||
@@ -1301,13 +1316,14 @@ def child_fun():
|
||||
try:
|
||||
# Forward sys.stdin when appropriate, to allow toggling verbosity
|
||||
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
|
||||
input_fd = Connection(os.dup(sys.stdin.fileno()))
|
||||
input_fd = os.dup(sys.stdin.fileno())
|
||||
input_multiprocess_fd = MultiProcessFd(input_fd)
|
||||
mflags = os.environ.get("MAKEFLAGS", False)
|
||||
if mflags:
|
||||
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
|
||||
if m:
|
||||
jobserver_fd1 = Connection(int(m.group(1)))
|
||||
jobserver_fd2 = Connection(int(m.group(2)))
|
||||
jobserver_fd1 = MultiProcessFd(int(m.group(1)))
|
||||
jobserver_fd2 = MultiProcessFd(int(m.group(2)))
|
||||
|
||||
p = multiprocessing.Process(
|
||||
target=_setup_pkg_and_run,
|
||||
@@ -1316,7 +1332,7 @@ def child_fun():
|
||||
function,
|
||||
kwargs,
|
||||
write_pipe,
|
||||
input_fd,
|
||||
input_multiprocess_fd,
|
||||
jobserver_fd1,
|
||||
jobserver_fd2,
|
||||
),
|
||||
@@ -1336,8 +1352,8 @@ def child_fun():
|
||||
|
||||
finally:
|
||||
# Close the input stream in the parent process
|
||||
if input_fd is not None:
|
||||
input_fd.close()
|
||||
if input_multiprocess_fd is not None:
|
||||
input_multiprocess_fd.close()
|
||||
|
||||
def exitcode_msg(p):
|
||||
typ = "exit" if p.exitcode >= 0 else "signal"
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
|
||||
import spack.error
|
||||
import spack.multimethod
|
||||
import spack.repo
|
||||
|
||||
#: Builder classes, as registered by the "builder" decorator
|
||||
BUILDER_CLS = {}
|
||||
@@ -75,14 +74,6 @@ def __call__(self, spec, prefix):
|
||||
return self.phase_fn(self.builder.pkg, spec, prefix)
|
||||
|
||||
|
||||
def get_builder_class(pkg, name: str) -> Optional[type]:
|
||||
"""Return the builder class if a package module defines it."""
|
||||
cls = getattr(pkg.module, name, None)
|
||||
if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
|
||||
return cls
|
||||
return None
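`get_builder_class` is essentially "use the override a package module defines, but only if it really comes from the package repo; otherwise fall back to the default builder". A generic, hypothetical sketch of that lookup pattern, with invented module and class names and without the repo-namespace check:

```python
# Generic sketch of the override-or-default lookup; module and class names are hypothetical.
import types


class DefaultCMakeBuilder:
    def install(self):
        print("default cmake install")


def builder_for(module: types.ModuleType, default_cls: type) -> type:
    override = getattr(module, default_cls.__name__, None)
    return override if isinstance(override, type) else default_cls


package_py = types.ModuleType("example.package")      # a package.py with no override
builder_for(package_py, DefaultCMakeBuilder)().install()
```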
|
||||
|
||||
|
||||
def _create(pkg):
|
||||
"""Return a new builder object for the package object being passed as argument.
|
||||
|
||||
@@ -108,10 +99,9 @@ class hierarchy (look at AspellDictPackage for an example of that)
|
||||
package_buildsystem = buildsystem_name(pkg)
|
||||
default_builder_cls = BUILDER_CLS[package_buildsystem]
|
||||
builder_cls_name = default_builder_cls.__name__
|
||||
builder_class = get_builder_class(pkg, builder_cls_name)
|
||||
|
||||
if builder_class:
|
||||
return builder_class(pkg)
|
||||
builder_cls = getattr(pkg.module, builder_cls_name, None)
|
||||
if builder_cls:
|
||||
return builder_cls(pkg)
|
||||
|
||||
# Specialized version of a given buildsystem can subclass some
|
||||
# base classes and specialize certain phases or methods or attributes.
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
"""Caches used by Spack to store data"""
|
||||
import os
|
||||
from typing import Union
|
||||
|
||||
import llnl.util.lang
|
||||
from llnl.util.filesystem import mkdirp
|
||||
@@ -31,8 +32,12 @@ def _misc_cache():
|
||||
return spack.util.file_cache.FileCache(path)
|
||||
|
||||
|
||||
FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
|
||||
|
||||
#: Spack's cache for small data
|
||||
MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache) # type: ignore
|
||||
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
|
||||
llnl.util.lang.Singleton(_misc_cache)
|
||||
)
|
||||
|
||||
|
||||
def fetch_cache_location():
|
||||
@@ -69,4 +74,6 @@ def store(self, fetcher, relative_dest):
|
||||
|
||||
|
||||
#: Spack's local cache for downloaded source archives
|
||||
FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache) # type: ignore
|
||||
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
|
||||
llnl.util.lang.Singleton(_fetch_cache)
|
||||
)
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.concretize
|
||||
import spack.config as cfg
|
||||
import spack.error
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
import spack.mirror
|
||||
import spack.paths
|
||||
@@ -95,6 +95,8 @@ def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
|
||||
|
||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
# TODO: Remove this in Spack 0.23
|
||||
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
|
||||
JOB_NAME_FORMAT = (
|
||||
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
|
||||
)
|
||||
@@ -199,11 +201,11 @@ def _remove_satisfied_deps(deps, satisfied_list):
|
||||
return nodes, edges, stages
|
||||
|
||||
|
||||
def _print_staging_summary(spec_labels, stages, rebuild_decisions):
|
||||
def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
|
||||
if not stages:
|
||||
return
|
||||
|
||||
mirrors = spack.mirror.MirrorCollection(binary=True)
|
||||
mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
|
||||
tty.msg("Checked the following mirrors for binaries:")
|
||||
for m in mirrors.values():
|
||||
tty.msg(f" {m.fetch_url}")
|
||||
@@ -250,14 +252,21 @@ def _spec_matches(spec, match_string):
|
||||
return spec.intersects(match_string)
|
||||
|
||||
|
||||
def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions):
|
||||
def _format_job_needs(
|
||||
dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
|
||||
):
|
||||
needs_list = []
|
||||
for dep_job in dep_jobs:
|
||||
dep_spec_key = _spec_ci_label(dep_job)
|
||||
rebuild = rebuild_decisions[dep_spec_key].rebuild
|
||||
|
||||
if not prune_dag or rebuild:
|
||||
needs_list.append({"job": get_job_name(dep_job, build_group), "artifacts": False})
|
||||
needs_list.append(
|
||||
{
|
||||
"job": get_job_name(dep_job, build_group),
|
||||
"artifacts": enable_artifacts_buildcache,
|
||||
}
|
||||
)
|
||||
return needs_list
|
||||
|
||||
|
||||
@@ -401,6 +410,12 @@ def __init__(self, ci_config, spec_labels, stages):
|
||||
|
||||
self.ir = {
|
||||
"jobs": {},
|
||||
"temporary-storage-url-prefix": self.ci_config.get(
|
||||
"temporary-storage-url-prefix", None
|
||||
),
|
||||
"enable-artifacts-buildcache": self.ci_config.get(
|
||||
"enable-artifacts-buildcache", False
|
||||
),
|
||||
"rebuild-index": self.ci_config.get("rebuild-index", True),
|
||||
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
|
||||
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
|
||||
@@ -683,13 +698,14 @@ def generate_gitlab_ci_yaml(
|
||||
prune_dag=False,
|
||||
check_index_only=False,
|
||||
artifacts_root=None,
|
||||
remote_mirror_override=None,
|
||||
):
|
||||
"""Generate a gitlab yaml file to run a dynamic child pipeline from
|
||||
the spec matrix in the active environment.
|
||||
|
||||
Arguments:
|
||||
env (spack.environment.Environment): Activated environment object
|
||||
which must contain a ci section describing how to map
|
||||
which must contain a gitlab-ci section describing how to map
|
||||
specs to runners
|
||||
print_summary (bool): Should we print a summary of all the jobs in
|
||||
the stages in which they were placed.
|
||||
@@ -704,21 +720,39 @@ def generate_gitlab_ci_yaml(
|
||||
artifacts_root (str): Path where artifacts like logs, environment
|
||||
files (spack.yaml, spack.lock), etc should be written. GitLab
|
||||
requires this to be within the project directory.
|
||||
remote_mirror_override (str): Typically only needed when one spack.yaml
|
||||
is used to populate several mirrors with binaries, based on some
|
||||
criteria. Spack protected pipelines populate different mirrors based
|
||||
on branch name, facilitated by this option. DEPRECATED
|
||||
"""
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
with env.write_transaction():
|
||||
env.concretize()
|
||||
env.write()
|
||||
|
||||
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
|
||||
|
||||
# Get the joined "ci" config with all of the current scopes resolved
|
||||
ci_config = cfg.get("ci")
|
||||
|
||||
config_deprecated = False
|
||||
if not ci_config:
|
||||
raise SpackCIError("Environment does not have a `ci` configuration")
|
||||
tty.warn("Environment does not have `ci` a configuration")
|
||||
gitlabci_config = yaml_root.get("gitlab-ci")
|
||||
if not gitlabci_config:
|
||||
tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")
|
||||
|
||||
tty.warn(
|
||||
"The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
|
||||
"To update run \n\t$ spack env update /path/to/ci/spack.yaml",
|
||||
)
|
||||
translate_deprecated_config(gitlabci_config)
|
||||
ci_config = gitlabci_config
|
||||
config_deprecated = True
|
||||
|
||||
# Default target is gitlab...and only target is gitlab
|
||||
if not ci_config.get("target", "gitlab") == "gitlab":
|
||||
raise SpackCIError('Spack CI module only generates target "gitlab"')
|
||||
tty.die('Spack CI module only generates target "gitlab"')
|
||||
|
||||
cdash_config = cfg.get("cdash")
|
||||
cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
|
||||
@@ -779,6 +813,12 @@ def generate_gitlab_ci_yaml(
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
|
||||
|
||||
copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.warn(
|
||||
"SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
|
||||
"deprecated ci configuration, a no-op pipeline will be generated\n",
|
||||
"instead.",
|
||||
)
|
||||
|
||||
def ensure_expected_target_path(path):
|
||||
"""Returns passed paths with all Windows path separators exchanged
|
||||
@@ -797,16 +837,38 @@ def ensure_expected_target_path(path):
|
||||
return path
|
||||
|
||||
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
|
||||
deprecated_mirror_config = False
|
||||
buildcache_destination = None
|
||||
if "buildcache-destination" not in pipeline_mirrors:
|
||||
raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")
|
||||
if "buildcache-destination" in pipeline_mirrors:
|
||||
if remote_mirror_override:
|
||||
tty.die(
|
||||
"Using the deprecated --buildcache-destination cli option and "
|
||||
"having a mirror named 'buildcache-destination' at the same time "
|
||||
"is not allowed"
|
||||
)
|
||||
buildcache_destination = pipeline_mirrors["buildcache-destination"]
|
||||
else:
|
||||
deprecated_mirror_config = True
|
||||
# TODO: This will be an error in Spack 0.23
|
||||
|
||||
buildcache_destination = pipeline_mirrors["buildcache-destination"]
|
||||
# TODO: Remove this block in spack 0.23
|
||||
remote_mirror_url = None
|
||||
if deprecated_mirror_config:
|
||||
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
|
||||
tty.die("spack ci generate requires an env containing a mirror")
|
||||
|
||||
ci_mirrors = yaml_root["mirrors"]
|
||||
mirror_urls = [url for url in ci_mirrors.values()]
|
||||
remote_mirror_url = mirror_urls[0]
|
||||
|
||||
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
|
||||
if spack_buildcache_copy:
|
||||
buildcache_copies = {}
|
||||
buildcache_copy_src_prefix = buildcache_destination.fetch_url
|
||||
buildcache_copy_src_prefix = (
|
||||
buildcache_destination.fetch_url
|
||||
if buildcache_destination
|
||||
else remote_mirror_override or remote_mirror_url
|
||||
)
|
||||
buildcache_copy_dest_prefix = spack_buildcache_copy
|
||||
|
||||
# Check for a list of "known broken" specs that we should not bother
|
||||
@@ -816,10 +878,55 @@ def ensure_expected_target_path(path):
|
||||
if "broken-specs-url" in ci_config:
|
||||
broken_specs_url = ci_config["broken-specs-url"]
|
||||
|
||||
enable_artifacts_buildcache = False
|
||||
if "enable-artifacts-buildcache" in ci_config:
|
||||
tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
|
||||
enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]
|
||||
|
||||
rebuild_index_enabled = True
|
||||
if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
|
||||
rebuild_index_enabled = False
|
||||
|
||||
temp_storage_url_prefix = None
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
|
||||
# If a remote mirror override (alternate buildcache destination) was
|
||||
# specified, add it here in case it has already built hashes we might
|
||||
# generate.
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
mirrors_to_check = None
|
||||
if deprecated_mirror_config and remote_mirror_override:
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
# Overriding the main mirror in this case might result
|
||||
# in skipping jobs on a release pipeline because specs are
|
||||
# up to date in develop. Eventually we want to notice and take
|
||||
# advantage of this by scheduling a job to copy the spec from
|
||||
# develop to the release, but until we have that, this makes
|
||||
# sure we schedule a rebuild job if the spec isn't already in
|
||||
# override mirror.
|
||||
mirrors_to_check = {"override": remote_mirror_override}
|
||||
|
||||
# If we have a remote override and we want generate pipeline using
|
||||
# --check-index-only, then the override mirror needs to be added to
|
||||
# the configured mirrors when bindist.update() is run, or else we
|
||||
# won't fetch its index and include in our local cache.
|
||||
spack.mirror.add(
|
||||
spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
|
||||
cfg.default_modify_scope(),
|
||||
)
|
||||
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
shared_pr_mirror = None
|
||||
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
|
||||
stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
|
||||
shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
|
||||
spack.mirror.add(
|
||||
spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
|
||||
cfg.default_modify_scope(),
|
||||
)
|
||||
|
||||
pipeline_artifacts_dir = artifacts_root
|
||||
if not pipeline_artifacts_dir:
|
||||
proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
|
||||
@@ -828,8 +935,9 @@ def ensure_expected_target_path(path):
|
||||
pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
|
||||
concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")
|
||||
|
||||
# Copy the environment manifest file into the concrete environment directory,
|
||||
# along with the spack.lock file.
|
||||
# Now that we've added the mirrors we know about, they should be properly
|
||||
# reflected in the environment manifest file, so copy that into the
|
||||
# concrete environment directory, along with the spack.lock file.
|
||||
if not os.path.exists(concrete_env_dir):
|
||||
os.makedirs(concrete_env_dir)
|
||||
shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
|
||||
@@ -854,12 +962,18 @@ def ensure_expected_target_path(path):
|
||||
env_includes.extend(include_scopes)
|
||||
env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
|
||||
|
||||
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
|
||||
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
|
||||
translate_deprecated_config(env_yaml_root["spack"]["ci"])
|
||||
|
||||
with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
|
||||
fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))
|
||||
|
||||
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
|
||||
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
|
||||
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
|
||||
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
|
||||
|
||||
# We communicate relative paths to the downstream jobs to avoid issues in
|
||||
@@ -873,6 +987,8 @@ def ensure_expected_target_path(path):
|
||||
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
|
||||
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
|
||||
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
|
||||
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
|
||||
|
||||
# Speed up staging by first fetching binary indices from all mirrors
|
||||
@@ -934,7 +1050,7 @@ def ensure_expected_target_path(path):
|
||||
continue
|
||||
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=release_spec, index_only=check_index_only
|
||||
spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
|
||||
)
|
||||
|
||||
spec_record.rebuild = not up_to_date_mirrors
|
||||
@@ -978,14 +1094,25 @@ def main_script_replacements(cmd):
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_jobs.append(spec_labels[dep_label])
|
||||
if enable_artifacts_buildcache:
|
||||
# Get dependencies transitively, so they're all
|
||||
# available in the artifacts buildcache.
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
|
||||
else:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_jobs.append(spec_labels[dep_label])
|
||||
|
||||
job_object["needs"].extend(
|
||||
_format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions)
|
||||
_format_job_needs(
|
||||
dep_jobs,
|
||||
build_group,
|
||||
prune_dag,
|
||||
rebuild_decisions,
|
||||
enable_artifacts_buildcache,
|
||||
)
|
||||
)
|
||||
|
||||
rebuild_spec = spec_record.rebuild
|
||||
@@ -1067,6 +1194,19 @@ def main_script_replacements(cmd):
|
||||
},
|
||||
)
|
||||
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if enable_artifacts_buildcache:
|
||||
bc_root = os.path.join(local_mirror_dir, "build_cache")
|
||||
job_object["artifacts"]["paths"].extend(
|
||||
[
|
||||
os.path.join(bc_root, p)
|
||||
for p in [
|
||||
bindist.tarball_name(release_spec, ".spec.json"),
|
||||
bindist.tarball_directory_name(release_spec),
|
||||
]
|
||||
]
|
||||
)
|
||||
|
||||
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
@@ -1081,7 +1221,15 @@ def main_script_replacements(cmd):
|
||||
job_id += 1
|
||||
|
||||
if print_summary:
|
||||
_print_staging_summary(spec_labels, stages, rebuild_decisions)
|
||||
_print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)
|
||||
|
||||
# Clean up remote mirror override if enabled
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if deprecated_mirror_config:
|
||||
if remote_mirror_override:
|
||||
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
||||
if spack_pipeline_type == "spack_pull_request":
|
||||
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
|
||||
|
||||
tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
|
||||
|
||||
@@ -1103,7 +1251,7 @@ def main_script_replacements(cmd):
|
||||
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||
}
|
||||
|
||||
if copy_only_pipeline:
|
||||
if copy_only_pipeline and not config_deprecated:
|
||||
stage_names.append("copy")
|
||||
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
|
||||
sync_job["stage"] = "copy"
|
||||
@@ -1113,12 +1261,17 @@ def main_script_replacements(cmd):
|
||||
if "variables" not in sync_job:
|
||||
sync_job["variables"] = {}
|
||||
|
||||
sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = buildcache_destination.fetch_url
|
||||
sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
|
||||
buildcache_destination.fetch_url
|
||||
if buildcache_destination
|
||||
else remote_mirror_override or remote_mirror_url
|
||||
)
|
||||
|
||||
if "buildcache-source" not in pipeline_mirrors:
|
||||
raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")
|
||||
|
||||
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
|
||||
if "buildcache-source" in pipeline_mirrors:
|
||||
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
|
||||
else:
|
||||
# TODO: Remove this condition in Spack 0.23
|
||||
buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
|
||||
sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
|
||||
sync_job["dependencies"] = []
|
||||
|
||||
@@ -1126,6 +1279,27 @@ def main_script_replacements(cmd):
|
||||
job_id += 1
|
||||
|
||||
if job_id > 0:
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if temp_storage_url_prefix:
|
||||
# There were some rebuild jobs scheduled, so we will need to
|
||||
# schedule a job to clean up the temporary storage location
|
||||
# associated with this pipeline.
|
||||
stage_names.append("cleanup-temp-storage")
|
||||
cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])
|
||||
|
||||
cleanup_job["stage"] = "cleanup-temp-storage"
|
||||
cleanup_job["when"] = "always"
|
||||
cleanup_job["retry"] = service_job_retries
|
||||
cleanup_job["interruptible"] = True
|
||||
|
||||
cleanup_job["script"] = _unpack_script(
|
||||
cleanup_job["script"],
|
||||
op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
|
||||
)
|
||||
|
||||
cleanup_job["dependencies"] = []
|
||||
output_object["cleanup"] = cleanup_job
|
||||
|
||||
if (
|
||||
"script" in spack_ci_ir["jobs"]["signing"]["attributes"]
|
||||
and spack_pipeline_type == "spack_protected_branch"
|
||||
@@ -1142,9 +1316,11 @@ def main_script_replacements(cmd):
|
||||
signing_job["interruptible"] = True
|
||||
if "variables" not in signing_job:
|
||||
signing_job["variables"] = {}
|
||||
signing_job["variables"][
|
||||
"SPACK_BUILDCACHE_DESTINATION"
|
||||
] = buildcache_destination.push_url
|
||||
signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
|
||||
buildcache_destination.push_url # need the s3 url for aws s3 sync
|
||||
if buildcache_destination
|
||||
else remote_mirror_override or remote_mirror_url
|
||||
)
|
||||
signing_job["dependencies"] = []
|
||||
|
||||
output_object["sign-pkgs"] = signing_job
|
||||
@@ -1155,7 +1331,9 @@ def main_script_replacements(cmd):
|
||||
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
|
||||
|
||||
final_job["stage"] = "stage-rebuild-index"
|
||||
target_mirror = buildcache_destination.push_url
|
||||
target_mirror = remote_mirror_override or remote_mirror_url
|
||||
if buildcache_destination:
|
||||
target_mirror = buildcache_destination.push_url
|
||||
final_job["script"] = _unpack_script(
|
||||
final_job["script"],
|
||||
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
|
||||
@@ -1181,11 +1359,17 @@ def main_script_replacements(cmd):
|
||||
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
|
||||
"SPACK_VERSION": spack_version,
|
||||
"SPACK_CHECKOUT_VERSION": version_to_clone,
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
"SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
|
||||
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
|
||||
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
|
||||
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
|
||||
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
|
||||
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
||||
# TODO: Remove this line in Spack 0.23
|
||||
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
|
||||
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
|
||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
|
||||
@@ -1194,6 +1378,10 @@ def main_script_replacements(cmd):
|
||||
for item, val in output_vars.items():
|
||||
output_vars[item] = ensure_expected_target_path(val)
|
||||
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if deprecated_mirror_config and remote_mirror_override:
|
||||
(output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override
|
||||
|
||||
spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
|
||||
if spack_stack_name:
|
||||
output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
|
||||
@@ -1220,8 +1408,15 @@ def main_script_replacements(cmd):
|
||||
noop_job["retry"] = 0
|
||||
noop_job["allow_failure"] = True
|
||||
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
output_object = {"no-specs-to-rebuild": noop_job}
|
||||
if copy_only_pipeline and config_deprecated:
|
||||
tty.debug("Generating no-op job as copy-only is unsupported here.")
|
||||
noop_job["script"] = [
|
||||
'echo "copy-only pipelines are not supported with deprecated ci configs"'
|
||||
]
|
||||
output_object = {"unsupported-copy": noop_job}
|
||||
else:
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
output_object = {"no-specs-to-rebuild": noop_job}
|
||||
|
||||
# Ensure the child pipeline always runs
|
||||
output_object["workflow"] = {"rules": [{"when": "always"}]}
|
||||
@@ -2259,6 +2454,83 @@ def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optiona
|
||||
reporter.test_skipped_report(report_dir, spec, reason)
|
||||
|
||||
|
||||
class SpackCIError(spack.error.SpackError):
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
def translate_deprecated_config(config):
|
||||
# Remove all deprecated keys from config
|
||||
mappings = config.pop("mappings", [])
|
||||
match_behavior = config.pop("match_behavior", "first")
|
||||
|
||||
build_job = {}
|
||||
if "image" in config:
|
||||
build_job["image"] = config.pop("image")
|
||||
if "tags" in config:
|
||||
build_job["tags"] = config.pop("tags")
|
||||
if "variables" in config:
|
||||
build_job["variables"] = config.pop("variables")
|
||||
|
||||
# Scripts always override in old CI
|
||||
if "before_script" in config:
|
||||
build_job["before_script:"] = config.pop("before_script")
|
||||
if "script" in config:
|
||||
build_job["script:"] = config.pop("script")
|
||||
if "after_script" in config:
|
||||
build_job["after_script:"] = config.pop("after_script")
|
||||
|
||||
signing_job = None
|
||||
if "signing-job-attributes" in config:
|
||||
signing_job = {"signing-job": config.pop("signing-job-attributes")}
|
||||
|
||||
service_job_attributes = None
|
||||
if "service-job-attributes" in config:
|
||||
service_job_attributes = config.pop("service-job-attributes")
|
||||
|
||||
# If this config already has pipeline-gen, do nothing more
|
||||
if "pipeline-gen" in config:
|
||||
return True if mappings or build_job or signing_job or service_job_attributes else False
|
||||
|
||||
config["target"] = "gitlab"
|
||||
|
||||
config["pipeline-gen"] = []
|
||||
pipeline_gen = config["pipeline-gen"]
|
||||
|
||||
# Build Job
|
||||
submapping = []
|
||||
for section in mappings:
|
||||
submapping_section = {"match": section["match"]}
|
||||
if "runner-attributes" in section:
|
||||
remapped_attributes = {}
|
||||
if match_behavior == "first":
|
||||
for key, value in section["runner-attributes"].items():
|
||||
# Scripts always override in old CI
|
||||
if key == "script":
|
||||
remapped_attributes["script:"] = value
|
||||
elif key == "before_script":
|
||||
remapped_attributes["before_script:"] = value
|
||||
elif key == "after_script":
|
||||
remapped_attributes["after_script:"] = value
|
||||
else:
|
||||
remapped_attributes[key] = value
|
||||
else:
|
||||
# Handle "merge" behavior be allowing scripts to merge in submapping section
|
||||
remapped_attributes = section["runner-attributes"]
|
||||
submapping_section["build-job"] = remapped_attributes
|
||||
|
||||
if "remove-attributes" in section:
|
||||
# Old format only allowed tags in this section, so no extra checks are needed
|
||||
submapping_section["build-job-remove"] = section["remove-attributes"]
|
||||
submapping.append(submapping_section)
|
||||
pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})
|
||||
|
||||
if build_job:
|
||||
pipeline_gen.append({"build-job": build_job})
|
||||
|
||||
# Signing Job
|
||||
if signing_job:
|
||||
pipeline_gen.append(signing_job)
|
||||
|
||||
# Service Jobs
|
||||
if service_job_attributes:
|
||||
pipeline_gen.append({"reindex-job": service_job_attributes})
|
||||
pipeline_gen.append({"noop-job": service_job_attributes})
|
||||
pipeline_gen.append({"cleanup-job": service_job_attributes})
|
||||
|
||||
return True
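For reference, a minimal sketch (editor's illustration, not part of the diff) of what the translation above does to a hypothetical deprecated config:

# Hypothetical input: a minimal deprecated ci config (illustrative only).
deprecated = {
    "mappings": [{"match": ["zlib"], "runner-attributes": {"tags": ["small"]}}],
    "service-job-attributes": {"tags": ["service"]},
}
translate_deprecated_config(deprecated)  # returns True because keys were translated
# deprecated now has target: "gitlab" and a "pipeline-gen" list containing a
# submapping entry for zlib plus reindex/noop/cleanup job entries derived from
# the old service-job-attributes section.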
@@ -17,7 +17,6 @@
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.concretize
|
||||
import spack.config # breaks a cycle.
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
@@ -174,29 +173,10 @@ def parse_specs(
|
||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||
|
||||
specs = spack.parser.parse(arg_string)
|
||||
if not concretize:
|
||||
return specs
|
||||
|
||||
to_concretize = [(s, None) for s in specs]
|
||||
return _concretize_spec_pairs(to_concretize, tests=tests)
|
||||
|
||||
|
||||
def _concretize_spec_pairs(to_concretize, tests=False):
|
||||
"""Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
|
||||
|
||||
Any spec with a concrete spec associated with it will concretize to that spec. Any spec
|
||||
with ``None`` for its concrete spec will be newly concretized. This method respects unification
|
||||
rules from config."""
|
||||
unify = spack.config.get("concretizer:unify", False)
|
||||
|
||||
concretize_method = spack.concretize.concretize_separately # unify: false
|
||||
if unify is True:
|
||||
concretize_method = spack.concretize.concretize_together
|
||||
elif unify == "when_possible":
|
||||
concretize_method = spack.concretize.concretize_together_when_possible
|
||||
|
||||
concretized = concretize_method(to_concretize, tests=tests)
|
||||
return [concrete for _, concrete in concretized]
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests)
|
||||
return specs
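A hedged usage sketch of the new helper (the spec names below are hypothetical, chosen only for illustration):

import spack.cmd

# Parse abstract specs from the command line without concretizing them.
abstract = spack.cmd.parse_specs(["zlib", "hdf5"], concretize=False)
# Pair each abstract spec with an existing concrete spec, or None to concretize it
# fresh; the helper picks separately / together / when_possible from config.
pairs = [(s, None) for s in abstract]
concrete = spack.cmd._concretize_spec_pairs(pairs, tests=False)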
def matching_spec_from_env(spec):
|
||||
@@ -212,22 +192,6 @@ def matching_spec_from_env(spec):
|
||||
return spec.concretized()
|
||||
|
||||
|
||||
def matching_specs_from_env(specs):
|
||||
"""
|
||||
Same as ``matching_spec_from_env`` but respects spec unification rules.
|
||||
|
||||
For each spec, if there is a matching spec in the environment it is used. If no
|
||||
matching spec is found, this will return the given spec but concretized in the
|
||||
context of the active environment and other given specs, with unification rules applied.
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
|
||||
additional_concrete_specs = (
|
||||
[(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
|
||||
)
|
||||
return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
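Commands that previously resolved each CLI spec independently can now resolve them in one pass so unification rules apply; the pattern used in the clean/patch/stage hunks further below is:

specs = spack.cmd.parse_specs(args.specs, concretize=False)
specs = spack.cmd.matching_specs_from_env(specs)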
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
|
||||
"""Given a spec, figure out which installed package it refers to.
|
||||
|
||||
|
||||
@@ -62,6 +62,13 @@ def setup_parser(subparser):
|
||||
"path to the file where generated jobs file should be written. "
|
||||
"default is .gitlab-ci.yml in the root of the repository",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--copy-to",
|
||||
default=None,
|
||||
help="path to additional directory for job files\n\n"
|
||||
"this option provides an absolute path to a directory where the generated "
|
||||
"jobs yaml file should be copied. default is not to copy",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--optimize",
|
||||
action="store_true",
|
||||
@@ -76,6 +83,12 @@ def setup_parser(subparser):
|
||||
default=False,
|
||||
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--buildcache-destination",
|
||||
default=None,
|
||||
help="override the mirror configured in the environment\n\n"
|
||||
"allows for pushing binaries from the generated pipeline to a different location",
|
||||
)
|
||||
prune_group = generate.add_mutually_exclusive_group()
|
||||
prune_group.add_argument(
|
||||
"--prune-dag",
|
||||
@@ -201,10 +214,20 @@ def ci_generate(args):
|
||||
|
||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||
|
||||
if args.copy_to:
|
||||
tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")
|
||||
|
||||
if args.buildcache_destination:
|
||||
tty.warn(
|
||||
"The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
|
||||
)
|
||||
|
||||
output_file = args.output_file
|
||||
copy_yaml_to = args.copy_to
|
||||
prune_dag = args.prune_dag
|
||||
index_only = args.index_only
|
||||
artifacts_root = args.artifacts_root
|
||||
buildcache_destination = args.buildcache_destination
|
||||
|
||||
if not output_file:
|
||||
output_file = os.path.abspath(".gitlab-ci.yml")
|
||||
@@ -222,8 +245,15 @@ def ci_generate(args):
|
||||
prune_dag=prune_dag,
|
||||
check_index_only=index_only,
|
||||
artifacts_root=artifacts_root,
|
||||
remote_mirror_override=buildcache_destination,
|
||||
)
|
||||
|
||||
if copy_yaml_to:
|
||||
copy_to_dir = os.path.dirname(copy_yaml_to)
|
||||
if not os.path.exists(copy_to_dir):
|
||||
os.makedirs(copy_to_dir)
|
||||
shutil.copyfile(output_file, copy_yaml_to)
|
||||
|
||||
|
||||
def ci_reindex(args):
|
||||
"""rebuild the buildcache index for the remote mirror
|
||||
@@ -268,13 +298,22 @@ def ci_rebuild(args):
|
||||
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
|
||||
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
|
||||
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
|
||||
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
|
||||
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
|
||||
ci_job_name = os.environ.get("CI_JOB_NAME")
|
||||
signing_key = os.environ.get("SPACK_SIGNING_KEY")
|
||||
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
|
||||
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
|
||||
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
|
||||
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
|
||||
# TODO: Remove this in Spack 0.23
|
||||
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
|
||||
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
|
||||
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
|
||||
|
||||
@@ -294,10 +333,12 @@ def ci_rebuild(args):
|
||||
job_log_dir = os.path.join(ci_project_dir, job_log_dir)
|
||||
job_test_dir = os.path.join(ci_project_dir, job_test_dir)
|
||||
repro_dir = os.path.join(ci_project_dir, repro_dir)
|
||||
local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
|
||||
concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
|
||||
|
||||
# Debug print some of the key environment variables we should have received
|
||||
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
||||
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
||||
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
||||
|
||||
# Query the environment manifest to find out whether we're reporting to a
|
||||
@@ -329,11 +370,51 @@ def ci_rebuild(args):
|
||||
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
|
||||
|
||||
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
|
||||
deprecated_mirror_config = False
|
||||
buildcache_destination = None
|
||||
if "buildcache-destination" not in pipeline_mirrors:
|
||||
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")
|
||||
if "buildcache-destination" in pipeline_mirrors:
|
||||
buildcache_destination = pipeline_mirrors["buildcache-destination"]
|
||||
else:
|
||||
deprecated_mirror_config = True
|
||||
# TODO: This will be an error in Spack 0.23
|
||||
|
||||
buildcache_destination = pipeline_mirrors["buildcache-destination"]
|
||||
# If no override url exists, then just push binary package to the
|
||||
# normal remote mirror url.
|
||||
# TODO: Remove in Spack 0.23
|
||||
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
|
||||
if buildcache_destination:
|
||||
buildcache_mirror_url = buildcache_destination.push_url
|
||||
|
||||
# Figure out what our temporary storage mirror is: the artifacts
# buildcache, or temporary-storage-url-prefix? In some cases we need to
# force one of them, or pipelines might not have a way to propagate build
# artifacts from upstream to downstream jobs.
# TODO: Remove this in Spack 0.23
|
||||
pipeline_mirror_url = None
|
||||
|
||||
# TODO: Remove this in Spack 0.23
|
||||
temp_storage_url_prefix = None
|
||||
if "temporary-storage-url-prefix" in ci_config:
|
||||
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
|
||||
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
|
||||
|
||||
# TODO: Remove this in Spack 0.23
|
||||
enable_artifacts_mirror = False
|
||||
if "enable-artifacts-buildcache" in ci_config:
|
||||
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
|
||||
if enable_artifacts_mirror or (
|
||||
spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
|
||||
):
|
||||
# If you explicitly enabled the artifacts buildcache feature, or
|
||||
# if this is a PR pipeline but you did not enable either of the
|
||||
# per-pipeline temporary storage features, we force the use of
|
||||
# artifacts buildcache. Otherwise jobs will not have binary
|
||||
# dependencies from previous stages available since we do not
|
||||
# allow pushing binaries to the remote mirror during PR pipelines.
|
||||
enable_artifacts_mirror = True
|
||||
pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
|
||||
mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
|
||||
tty.debug(mirror_msg)
|
||||
|
||||
# Get the concrete spec to be built by this job.
|
||||
try:
|
||||
@@ -408,7 +489,48 @@ def ci_rebuild(args):
|
||||
fd.write(spack_info.encode("utf8"))
|
||||
fd.write(b"\n")
|
||||
|
||||
matches = None if full_rebuild else bindist.get_mirrors_for_spec(job_spec, index_only=False)
|
||||
pipeline_mirrors = []
|
||||
|
||||
# If we decided there should be a temporary storage mechanism, add that
|
||||
# mirror now so it's used when we check for a hash match already
|
||||
# built for this spec.
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if pipeline_mirror_url:
|
||||
mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
|
||||
spack.mirror.add(mirror, cfg.default_modify_scope())
|
||||
pipeline_mirrors.append(pipeline_mirror_url)
|
||||
|
||||
# Check configured mirrors for a built spec with a matching hash
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
mirrors_to_check = None
|
||||
if remote_mirror_override:
|
||||
if spack_pipeline_type == "spack_protected_branch":
|
||||
# Passing "mirrors_to_check" below means we *only* look in the override
|
||||
# mirror to see if we should skip building, which is what we want.
|
||||
mirrors_to_check = {"override": remote_mirror_override}
|
||||
|
||||
# Adding this mirror to the list of configured mirrors means dependencies
|
||||
# could be installed from either the override mirror or any other configured
|
||||
# mirror (e.g. remote_mirror_url which is defined in the environment or
|
||||
# pipeline_mirror_url), which is also what we want.
|
||||
spack.mirror.add(
|
||||
spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
|
||||
cfg.default_modify_scope(),
|
||||
)
|
||||
pipeline_mirrors.append(remote_mirror_override)
|
||||
|
||||
# TODO: Remove this in Spack 0.23
|
||||
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
|
||||
if shared_pr_mirror_url != "None":
|
||||
pipeline_mirrors.append(shared_pr_mirror_url)
|
||||
|
||||
matches = (
|
||||
None
|
||||
if full_rebuild
|
||||
else bindist.get_mirrors_for_spec(
|
||||
job_spec, mirrors_to_check=mirrors_to_check, index_only=False
|
||||
)
|
||||
)
|
||||
|
||||
if matches:
|
||||
# Got a hash match on at least one configured mirror. All
|
||||
@@ -420,10 +542,25 @@ def ci_rebuild(args):
|
||||
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
|
||||
for match in matches:
|
||||
tty.msg(" {0}".format(match["mirror_url"]))
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if enable_artifacts_mirror:
|
||||
matching_mirror = matches[0]["mirror_url"]
|
||||
build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
|
||||
tty.debug("Getting {0} buildcache from {1}".format(job_spec_pkg_name, matching_mirror))
|
||||
tty.debug("Downloading to {0}".format(build_cache_dir))
|
||||
bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
|
||||
|
||||
# Now we are done and successful
|
||||
return 0
|
||||
|
||||
# Before beginning the install, if this is a "rebuild everything" pipeline, we
# only want to keep the mirror being used by the current pipeline as its binary
# package destination. This ensures that when we rebuild everything, we only
# consume binary dependencies built in this pipeline.
|
||||
# TODO: Remove this in Spack 0.23
|
||||
if deprecated_mirror_config and full_rebuild:
|
||||
spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())
|
||||
|
||||
# No hash match anywhere means we need to rebuild spec
|
||||
|
||||
# Start with spack arguments
|
||||
@@ -544,11 +681,17 @@ def ci_rebuild(args):
|
||||
cdash_handler.copy_test_results(reports_dir, job_test_dir)
|
||||
|
||||
if install_exit_code == 0:
|
||||
# If the install succeeded, push it to the buildcache destination. Failure to push
|
||||
# If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
|
||||
# will result in a non-zero exit code. Pushing is best-effort.
|
||||
mirror_urls = [buildcache_mirror_url]
|
||||
|
||||
# TODO: Remove this block in Spack 0.23
|
||||
if pipeline_mirror_url:
|
||||
mirror_urls.append(pipeline_mirror_url)
|
||||
|
||||
for result in spack_ci.create_buildcache(
|
||||
input_spec=job_spec,
|
||||
destination_mirror_urls=[buildcache_destination.push_url],
|
||||
destination_mirror_urls=mirror_urls,
|
||||
sign_binaries=spack_ci.can_sign_binaries(),
|
||||
):
|
||||
if not result.success:
|
||||
|
||||
@@ -105,8 +105,7 @@ def clean(parser, args):
|
||||
# Then do the cleaning falling through the cases
|
||||
if args.specs:
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
|
||||
specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
|
||||
for spec in specs:
|
||||
msg = "Cleaning build stage [{0}]"
|
||||
tty.msg(msg.format(spec.short_spec))
|
||||
|
||||
@@ -99,5 +99,5 @@ def deconcretize(parser, args):
|
||||
" Use `spack deconcretize --all` to deconcretize ALL specs.",
|
||||
)
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
|
||||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
|
||||
deconcretize_specs(args, specs)
|
||||
|
||||
@@ -57,41 +57,35 @@
|
||||
# env create
|
||||
#
|
||||
def env_create_setup_parser(subparser):
|
||||
"""create a new environment
|
||||
|
||||
create a new environment or, optionally, copy an existing environment
|
||||
|
||||
a manifest file results in a new abstract environment while a lock file
|
||||
creates a new concrete environment
|
||||
"""
|
||||
subparser.add_argument(
|
||||
"env_name", metavar="env", help="name or directory of the new environment"
|
||||
)
|
||||
"""create a new environment"""
|
||||
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
|
||||
subparser.add_argument(
|
||||
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--keep-relative",
|
||||
action="store_true",
|
||||
help="copy envfile's relative develop paths verbatim",
|
||||
help="copy relative develop paths verbatim into the new environment"
|
||||
" when initializing from envfile",
|
||||
)
|
||||
view_opts = subparser.add_mutually_exclusive_group()
|
||||
view_opts.add_argument(
|
||||
"--without-view", action="store_true", help="do not maintain a view for this environment"
|
||||
)
|
||||
view_opts.add_argument(
|
||||
"--with-view", help="maintain view at WITH_VIEW (vs. environment's directory)"
|
||||
"--with-view",
|
||||
help="specify that this environment should maintain a view at the"
|
||||
" specified path (by default the view is maintained in the"
|
||||
" environment directory)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"envfile",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="manifest or lock file (ends with '.json' or '.lock')",
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--include-concrete",
|
||||
action="append",
|
||||
help="copy concrete specs from INCLUDE_CONCRETE's environment",
|
||||
"--include-concrete", action="append", help="name of old environment to copy specs from"
|
||||
)
|
||||
|
||||
|
||||
@@ -179,7 +173,7 @@ def _env_create(
|
||||
# env activate
|
||||
#
|
||||
def env_activate_setup_parser(subparser):
|
||||
"""set the active environment"""
|
||||
"""set the current environment"""
|
||||
shells = subparser.add_mutually_exclusive_group()
|
||||
shells.add_argument(
|
||||
"--sh",
|
||||
@@ -219,14 +213,14 @@ def env_activate_setup_parser(subparser):
|
||||
|
||||
view_options = subparser.add_mutually_exclusive_group()
|
||||
view_options.add_argument(
|
||||
"-v",
|
||||
"--with-view",
|
||||
"-v",
|
||||
metavar="name",
|
||||
help="set runtime environment variables for the named view",
|
||||
help="set runtime environment variables for specific view",
|
||||
)
|
||||
view_options.add_argument(
|
||||
"-V",
|
||||
"--without-view",
|
||||
"-V",
|
||||
action="store_true",
|
||||
help="do not set runtime environment variables for any view",
|
||||
)
|
||||
@@ -236,14 +230,14 @@ def env_activate_setup_parser(subparser):
|
||||
"--prompt",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="add the active environment to the command line prompt",
|
||||
help="decorate the command line prompt when activating",
|
||||
)
|
||||
|
||||
subparser.add_argument(
|
||||
"--temp",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="create and activate in a temporary directory",
|
||||
help="create and activate an environment in a temporary directory",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--create",
|
||||
@@ -255,12 +249,13 @@ def env_activate_setup_parser(subparser):
|
||||
"--envfile",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="manifest or lock file (ends with '.json' or '.lock')",
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--keep-relative",
|
||||
action="store_true",
|
||||
help="copy envfile's relative develop paths verbatim when create",
|
||||
help="copy relative develop paths verbatim into the new environment"
|
||||
" when initializing from envfile",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-d",
|
||||
@@ -274,7 +269,10 @@ def env_activate_setup_parser(subparser):
|
||||
dest="env_name",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help=("name or directory of the environment being activated"),
|
||||
help=(
|
||||
"name of managed environment or directory of the independent env"
|
||||
" (when using --dir/-d) to activate"
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@@ -387,7 +385,7 @@ def env_activate(args):
|
||||
# env deactivate
|
||||
#
|
||||
def env_deactivate_setup_parser(subparser):
|
||||
"""deactivate the active environment"""
|
||||
"""deactivate any active environment in the shell"""
|
||||
shells = subparser.add_mutually_exclusive_group()
|
||||
shells.add_argument(
|
||||
"--sh",
|
||||
@@ -450,27 +448,23 @@ def env_deactivate(args):
|
||||
# env remove
|
||||
#
|
||||
def env_remove_setup_parser(subparser):
|
||||
"""remove managed environment(s)
|
||||
|
||||
remove existing environment(s) managed by Spack
|
||||
|
||||
directory environments and manifests embedded in repositories must be
|
||||
removed manually
|
||||
"""
|
||||
subparser.add_argument(
|
||||
"rm_env", metavar="env", nargs="+", help="name(s) of the environment(s) being removed"
|
||||
)
|
||||
"""remove an existing environment"""
|
||||
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
|
||||
arguments.add_common_arguments(subparser, ["yes_to_all"])
|
||||
subparser.add_argument(
|
||||
"-f",
|
||||
"--force",
|
||||
action="store_true",
|
||||
help="force removal even when included in other environment(s)",
|
||||
help="remove the environment even if it is included in another environment",
|
||||
)
|
||||
|
||||
|
||||
def env_remove(args):
|
||||
"""remove existing environment(s)"""
|
||||
"""Remove a *named* environment.
|
||||
|
||||
This removes an environment managed by Spack. Directory environments
|
||||
and manifests embedded in repositories should be removed manually.
|
||||
"""
|
||||
remove_envs = []
|
||||
valid_envs = []
|
||||
bad_envs = []
|
||||
@@ -525,32 +519,29 @@ def env_remove(args):
|
||||
# env rename
|
||||
#
|
||||
def env_rename_setup_parser(subparser):
|
||||
"""rename an existing environment
|
||||
|
||||
rename a managed environment or move an independent/directory environment
|
||||
|
||||
operation cannot be performed to or from an active environment
|
||||
"""
|
||||
"""rename an existing environment"""
|
||||
subparser.add_argument(
|
||||
"mv_from", metavar="from", help="current name or directory of the environment"
|
||||
"mv_from", metavar="from", help="name (or path) of existing environment"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"mv_to", metavar="to", help="new name (or path) for existing environment"
|
||||
)
|
||||
subparser.add_argument("mv_to", metavar="to", help="new name or directory for the environment")
|
||||
subparser.add_argument(
|
||||
"-d",
|
||||
"--dir",
|
||||
action="store_true",
|
||||
help="positional arguments are environment directory paths",
|
||||
help="the specified arguments correspond to directory paths",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-f",
|
||||
"--force",
|
||||
action="store_true",
|
||||
help="force renaming even if overwriting an existing environment",
|
||||
"-f", "--force", action="store_true", help="allow overwriting of an existing environment"
|
||||
)
|
||||
|
||||
|
||||
def env_rename(args):
|
||||
"""rename or move an existing environment"""
|
||||
"""Rename an environment.
|
||||
|
||||
This renames a managed environment or moves an independent environment.
|
||||
"""
|
||||
|
||||
# Directory option has been specified
|
||||
if args.dir:
|
||||
@@ -599,7 +590,7 @@ def env_rename(args):
|
||||
# env list
|
||||
#
|
||||
def env_list_setup_parser(subparser):
|
||||
"""list all managed environments"""
|
||||
"""list managed environments"""
|
||||
|
||||
|
||||
def env_list(args):
|
||||
@@ -635,14 +626,13 @@ def actions():
|
||||
# env view
|
||||
#
|
||||
def env_view_setup_parser(subparser):
|
||||
"""manage the environment's view
|
||||
|
||||
provide the path when enabling a view with a non-default path
|
||||
"""
|
||||
"""manage a view associated with the environment"""
|
||||
subparser.add_argument(
|
||||
"action", choices=ViewAction.actions(), help="action to take for the environment's view"
|
||||
)
|
||||
subparser.add_argument("view_path", nargs="?", help="view's non-default path when enabling it")
|
||||
subparser.add_argument(
|
||||
"view_path", nargs="?", help="when enabling a view, optionally set the path manually"
|
||||
)
|
||||
|
||||
|
||||
def env_view(args):
|
||||
@@ -670,7 +660,7 @@ def env_view(args):
|
||||
# env status
|
||||
#
|
||||
def env_status_setup_parser(subparser):
|
||||
"""print active environment status"""
|
||||
"""print whether there is an active environment"""
|
||||
|
||||
|
||||
def env_status(args):
|
||||
@@ -730,22 +720,14 @@ def env_loads(args):
|
||||
|
||||
|
||||
def env_update_setup_parser(subparser):
|
||||
"""update the environment manifest to the latest schema format
|
||||
|
||||
update the environment to the latest schema format, which may not be
|
||||
readable by older versions of spack
|
||||
|
||||
a backup copy of the manifest is retained in case there is a need to revert
|
||||
this operation
|
||||
"""
|
||||
"""update environments to the latest format"""
|
||||
subparser.add_argument(
|
||||
metavar="env", dest="update_env", help="name or directory of the environment"
|
||||
metavar="env", dest="update_env", help="name or directory of the environment to activate"
|
||||
)
|
||||
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
|
||||
|
||||
|
||||
def env_update(args):
|
||||
"""update the manifest to the latest format"""
|
||||
manifest_file = ev.manifest_file(args.update_env)
|
||||
backup_file = manifest_file + ".bkp"
|
||||
|
||||
@@ -775,22 +757,14 @@ def env_update(args):
|
||||
|
||||
|
||||
def env_revert_setup_parser(subparser):
|
||||
"""restore the environment manifest to its previous format
|
||||
|
||||
revert the environment's manifest to the schema format from its last
|
||||
'spack env update'
|
||||
|
||||
the current manifest will be overwritten by the backup copy and the backup
|
||||
copy will be removed
|
||||
"""
|
||||
"""restore environments to their state before update"""
|
||||
subparser.add_argument(
|
||||
metavar="env", dest="revert_env", help="name or directory of the environment"
|
||||
metavar="env", dest="revert_env", help="name or directory of the environment to activate"
|
||||
)
|
||||
spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
|
||||
|
||||
|
||||
def env_revert(args):
|
||||
"""restore the environment manifest to its previous format"""
|
||||
manifest_file = ev.manifest_file(args.revert_env)
|
||||
backup_file = manifest_file + ".bkp"
|
||||
|
||||
@@ -822,19 +796,15 @@ def env_revert(args):
|
||||
|
||||
|
||||
def env_depfile_setup_parser(subparser):
|
||||
"""generate a depfile to exploit parallel builds across specs
|
||||
|
||||
requires the active environment to be concrete
|
||||
"""
|
||||
"""generate a depfile from the concrete environment specs"""
|
||||
subparser.add_argument(
|
||||
"--make-prefix",
|
||||
"--make-target-prefix",
|
||||
default=None,
|
||||
metavar="TARGET",
|
||||
help="prefix Makefile targets/variables with <TARGET>/<name>,\n"
|
||||
"which can be an empty string (--make-prefix '')\n"
|
||||
"defaults to the absolute path of the environment's makedeps\n"
|
||||
"environment metadata dir\n",
|
||||
help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
|
||||
"the absolute path to the directory makedeps under the environment metadata dir is "
|
||||
"used. can be set to an empty string --make-prefix ''",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--make-disable-jobserver",
|
||||
@@ -849,8 +819,8 @@ def env_depfile_setup_parser(subparser):
|
||||
type=arguments.use_buildcache,
|
||||
default="package:auto,dependencies:auto",
|
||||
metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
|
||||
help="use `only` to prune redundant build dependencies\n"
|
||||
"option is also passed to generated spack install commands",
|
||||
help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
|
||||
"this flag is passed on to the generated spack install commands",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-o",
|
||||
@@ -864,14 +834,14 @@ def env_depfile_setup_parser(subparser):
|
||||
"--generator",
|
||||
default="make",
|
||||
choices=("make",),
|
||||
help="specify the depfile type (only supports `make`)",
|
||||
help="specify the depfile type\n\ncurrently only make is supported",
|
||||
)
|
||||
subparser.add_argument(
|
||||
metavar="specs",
|
||||
dest="specs",
|
||||
nargs=argparse.REMAINDER,
|
||||
default=None,
|
||||
help="limit the generated file to matching specs",
|
||||
help="generate a depfile only for matching specs in the environment",
|
||||
)
|
||||
|
||||
|
||||
@@ -940,12 +910,7 @@ def setup_parser(subparser):
|
||||
setup_parser_cmd_name = "env_%s_setup_parser" % name
|
||||
setup_parser_cmd = globals()[setup_parser_cmd_name]
|
||||
|
||||
subsubparser = sp.add_parser(
|
||||
name,
|
||||
aliases=aliases,
|
||||
description=setup_parser_cmd.__doc__,
|
||||
help=spack.cmd.first_line(setup_parser_cmd.__doc__),
|
||||
)
|
||||
subsubparser = sp.add_parser(name, aliases=aliases, help=setup_parser_cmd.__doc__)
|
||||
setup_parser_cmd(subsubparser)
|
||||
|
||||
|
||||
|
||||
@@ -178,7 +178,7 @@ def query_arguments(args):
|
||||
if args.unknown:
|
||||
predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)
|
||||
|
||||
explicit = None
|
||||
explicit = any
|
||||
if args.explicit:
|
||||
explicit = True
|
||||
if args.implicit:
|
||||
|
||||
@@ -80,8 +80,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
|
||||
has_errors = True
|
||||
|
||||
# No installed package matches the query
|
||||
if len(matching) == 0 and spec is not None:
|
||||
tty.die(f"{spec} does not match any installed packages.")
|
||||
if len(matching) == 0 and spec is not any:
|
||||
tty.die("{0} does not match any installed packages.".format(spec))
|
||||
|
||||
specs_from_cli.extend(matching)
|
||||
|
||||
@@ -98,9 +98,8 @@ def do_mark(specs, explicit):
|
||||
specs (list): list of specs to be marked
|
||||
explicit (bool): whether to mark specs as explicitly installed
|
||||
"""
|
||||
with spack.store.STORE.db.write_transaction():
|
||||
for spec in specs:
|
||||
spack.store.STORE.db.mark(spec, "explicit", explicit)
|
||||
for spec in specs:
|
||||
spack.store.STORE.db.update_explicit(spec, explicit)
|
||||
|
||||
|
||||
def mark_specs(args, specs):
|
||||
@@ -117,6 +116,6 @@ def mark(parser, args):
|
||||
" Use `spack mark --all` to mark ALL packages.",
|
||||
)
|
||||
|
||||
# [None] here handles the --all case by forcing all specs to be returned
|
||||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
|
||||
# [any] here handles the --all case by forcing all specs to be returned
|
||||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
|
||||
mark_specs(args, specs)
|
||||
|
||||
@@ -33,9 +33,8 @@ def patch(parser, args):
|
||||
spack.config.set("config:checksum", False, scope="command_line")
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
_patch(spec.package)
|
||||
_patch(spack.cmd.matching_spec_from_env(spec).package)
|
||||
|
||||
|
||||
def _patch_env(env: ev.Environment):
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import sys
|
||||
|
||||
@@ -11,12 +12,13 @@
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.spec
|
||||
import spack.cmd.common.arguments
|
||||
import spack.config
|
||||
import spack.environment
|
||||
import spack.hash_types as ht
|
||||
import spack.solver.asp as asp
|
||||
import spack.spec
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "concretize a specs using an ASP solver"
|
||||
section = "developer"
|
||||
@@ -39,6 +41,42 @@ def setup_parser(subparser):
|
||||
" solutions models found by asp program\n"
|
||||
" all all of the above",
|
||||
)
|
||||
|
||||
# Below are arguments w.r.t. spec display (like spack spec)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
|
||||
subparser.add_argument(
|
||||
"-y",
|
||||
"--yaml",
|
||||
action="store_const",
|
||||
dest="format",
|
||||
default=None,
|
||||
const="yaml",
|
||||
help="print concrete spec as yaml",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-j",
|
||||
"--json",
|
||||
action="store_const",
|
||||
dest="format",
|
||||
default=None,
|
||||
const="json",
|
||||
help="print concrete spec as json",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-c",
|
||||
"--cover",
|
||||
action="store",
|
||||
default="nodes",
|
||||
choices=["nodes", "edges", "paths"],
|
||||
help="how extensively to traverse the DAG (default: nodes)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-t", "--types", action="store_true", default=False, help="show dependency types"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--timers",
|
||||
action="store_true",
|
||||
@@ -48,8 +86,9 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"--stats", action="store_true", default=False, help="print out statistics from clingo"
|
||||
)
|
||||
subparser.add_argument("specs", nargs=argparse.REMAINDER, help="specs of packages")
|
||||
|
||||
spack.cmd.spec.setup_parser(subparser)
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
def _process_result(result, show, required_format, kwargs):
|
||||
@@ -125,12 +164,11 @@ def solve(parser, args):
|
||||
|
||||
# If we have an active environment, pick the specs from there
|
||||
env = spack.environment.active_environment()
|
||||
if args.specs:
|
||||
specs = spack.cmd.parse_specs(args.specs)
|
||||
elif env:
|
||||
specs = list(env.user_specs)
|
||||
else:
|
||||
tty.die("spack solve requires at least one spec or an active environment")
|
||||
if env and args.specs:
|
||||
msg = "cannot give explicit specs when an environment is active"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)
|
||||
|
||||
solver = asp.Solver()
|
||||
output = sys.stdout if "asp" in show else None
|
||||
|
||||
@@ -96,25 +96,26 @@ def spec(parser, args):
|
||||
if args.install_status:
|
||||
tree_context = spack.store.STORE.db.read_transaction
|
||||
|
||||
env = ev.active_environment()
|
||||
|
||||
# Use command line specified specs, otherwise try to use environment specs.
|
||||
if args.specs:
|
||||
input_specs = spack.cmd.parse_specs(args.specs)
|
||||
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
specs = list(zip(input_specs, concretized_specs))
|
||||
elif env:
|
||||
env.concretize()
|
||||
specs = env.concretized_specs()
|
||||
else:
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
env.concretize()
|
||||
specs = env.concretized_specs()
|
||||
|
||||
if not args.format:
|
||||
# environments are printed together in a combined tree() invocation,
|
||||
# except when using --yaml or --json, which we print spec by spec below.
|
||||
tree_kwargs["key"] = spack.traverse.by_dag_hash
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
|
||||
return
|
||||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
if not args.format:
|
||||
tree_kwargs["key"] = spack.traverse.by_dag_hash
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
|
||||
return
|
||||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
for input, output in specs:
|
||||
# With --yaml or --json, just print the raw specs to output
|
||||
|
||||
@@ -47,8 +47,8 @@ def stage(parser, args):
|
||||
if len(specs) > 1 and custom_path:
|
||||
tty.die("`--path` requires a single spec, but multiple were provided")
|
||||
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
spec = spack.cmd.matching_spec_from_env(spec)
|
||||
pkg = spec.package
|
||||
|
||||
if custom_path:
|
||||
|
||||
@@ -165,7 +165,7 @@ def test_run(args):
|
||||
if args.fail_fast:
|
||||
spack.config.set("config:fail_fast", True, scope="command_line")
|
||||
|
||||
explicit = args.explicit or None
|
||||
explicit = args.explicit or any
|
||||
explicit_str = "explicitly " if args.explicit else ""
|
||||
|
||||
# Get specs to test
|
||||
|
||||
@@ -90,7 +90,6 @@ def find_matching_specs(
|
||||
env: optional active environment
|
||||
specs: list of specs to be matched against installed packages
|
||||
allow_multiple_matches: if True multiple matches are admitted
|
||||
origin: origin of the spec
|
||||
|
||||
Return:
|
||||
list: list of specs
|
||||
@@ -99,7 +98,7 @@ def find_matching_specs(
|
||||
hashes = env.all_hashes() if env else None
|
||||
|
||||
# List of specs that match expressions given via command line
|
||||
specs_from_cli: List["spack.spec.Spec"] = []
|
||||
specs_from_cli = []
|
||||
has_errors = False
|
||||
for spec in specs:
|
||||
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
|
||||
@@ -117,7 +116,7 @@ def find_matching_specs(
|
||||
has_errors = True
|
||||
|
||||
# No installed package matches the query
|
||||
if len(matching) == 0 and spec is not None:
|
||||
if len(matching) == 0 and spec is not any:
|
||||
if env:
|
||||
pkg_type = "packages in environment '%s'" % env.name
|
||||
else:
|
||||
@@ -214,7 +213,7 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
|
||||
|
||||
# Gets the list of installed specs that match the ones given via cli
|
||||
# args.all takes care of the case where '-a' is given in the cli
|
||||
matching_specs = find_matching_specs(env, specs, args.all, origin=args.origin)
|
||||
matching_specs = find_matching_specs(env, specs, args.all)
|
||||
dependent_specs = installed_dependents(matching_specs)
|
||||
all_uninstall_specs = matching_specs + dependent_specs if args.dependents else matching_specs
|
||||
other_dependent_envs = dependent_environments(all_uninstall_specs, current_env=env)
|
||||
@@ -302,6 +301,6 @@ def uninstall(parser, args):
|
||||
" Use `spack uninstall --all` to uninstall ALL packages.",
|
||||
)
|
||||
|
||||
# [None] here handles the --all case by forcing all specs to be returned
|
||||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
|
||||
# [any] here handles the --all case by forcing all specs to be returned
|
||||
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
|
||||
uninstall_specs(args, specs)
|
||||
|
||||
@@ -275,7 +275,7 @@ def __init__(
|
||||
operating_system,
|
||||
target,
|
||||
paths,
|
||||
modules: Optional[List[str]] = None,
|
||||
modules=None,
|
||||
alias=None,
|
||||
environment=None,
|
||||
extra_rpaths=None,
|
||||
|
||||
@@ -124,8 +124,9 @@ def setup_custom_environment(self, pkg, env):
|
||||
# Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
|
||||
# Always pass flags to disable deprecation warnings, since these warnings can
|
||||
# confuse tools that parse the output of compiler commands (e.g. version checks).
|
||||
if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
|
||||
if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
|
||||
if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
|
||||
if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
|
||||
if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")
|
||||
|
||||
@@ -151,14 +151,11 @@ def setup_custom_environment(self, pkg, env):
|
||||
# Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
|
||||
# Always pass flags to disable deprecation warnings, since these warnings can
|
||||
# confuse tools that parse the output of compiler commands (e.g. version checks).
|
||||
# This is really only needed for Fortran, since oneapi@ should be using either
|
||||
# icx+icpx+ifx or icx+icpx+ifort. But to be on the safe side (some users may
|
||||
# want to try to swap icpx against icpc, for example), and since the Intel LLVM
|
||||
# compilers accept these diag-disable flags, we apply them for all compilers.
|
||||
if self.real_version >= Version("2021") and self.real_version <= Version("2023"):
|
||||
if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
|
||||
if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
|
||||
if self.real_version >= Version("2021") and self.real_version <= Version("2024"):
|
||||
if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
|
||||
env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")
|
||||
|
||||
# 2024 release bumped the libsycl version because of an ABI
|
||||
|
||||
@@ -2,20 +2,14 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""High-level functions to concretize list of specs"""
|
||||
import sys
|
||||
import time
|
||||
"""
|
||||
(DEPRECATED) Used to contain the code for the original concretizer
|
||||
"""
|
||||
from contextlib import contextmanager
|
||||
from typing import Iterable, Optional, Sequence, Tuple, Union
|
||||
from itertools import chain
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.util.parallel
|
||||
from spack.spec import ArchSpec, CompilerSpec, Spec
|
||||
|
||||
CHECK_COMPILER_EXISTENCE = True
|
||||
|
||||
@@ -36,167 +30,67 @@ def enable_compiler_existence_check():
|
||||
CHECK_COMPILER_EXISTENCE = saved
|
||||
|
||||
|
||||
SpecPair = Tuple[Spec, Spec]
|
||||
SpecLike = Union[Spec, str]
|
||||
TestsType = Union[bool, Iterable[str]]
|
||||
def find_spec(spec, condition, default=None):
|
||||
"""Searches the dag from spec in an intelligent order and looks
|
||||
for a spec that matches a condition"""
|
||||
# First search parents, then search children
|
||||
deptype = ("build", "link")
|
||||
dagiter = chain(
|
||||
spec.traverse(direction="parents", deptype=deptype, root=False),
|
||||
spec.traverse(direction="children", deptype=deptype, root=False),
|
||||
)
|
||||
visited = set()
|
||||
for relative in dagiter:
|
||||
if condition(relative):
|
||||
return relative
|
||||
visited.add(id(relative))
|
||||
|
||||
# Then search all other relatives in the DAG *except* spec
|
||||
for relative in spec.root.traverse(deptype="all"):
|
||||
if relative is spec:
|
||||
continue
|
||||
if id(relative) in visited:
|
||||
continue
|
||||
if condition(relative):
|
||||
return relative
|
||||
|
||||
# Finally search spec itself.
|
||||
if condition(spec):
|
||||
return spec
|
||||
|
||||
return default # Nothing matched the condition; return default.
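A small usage sketch (editor's illustration; `spec` and the condition are hypothetical) of the search order implemented above:

# Look for a relative of `spec` named "mpich": parents are searched first, then
# children, then the rest of the DAG, and finally `spec` itself.
match = find_spec(spec, lambda s: s.name == "mpich", default=None)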
def concretize_specs_together(
|
||||
abstract_specs: Sequence[SpecLike], tests: TestsType = False
|
||||
) -> Sequence[Spec]:
|
||||
def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
Args:
|
||||
abstract_specs: abstract specs to be concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
tests (bool or list or set): False to run no tests, True to test
|
||||
all packages, or a list of package names to run tests for some
|
||||
*abstract_specs: abstract specs to be concretized, given either
|
||||
as Specs or strings
|
||||
|
||||
Returns:
|
||||
List of concretized specs
|
||||
"""
|
||||
import spack.solver.asp
|
||||
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
solver = spack.solver.asp.Solver()
|
||||
result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
|
||||
result = solver.solve(
|
||||
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
|
||||
)
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
|
||||
def concretize_together(
|
||||
spec_list: Sequence[SpecPair], tests: TestsType = False
|
||||
) -> Sequence[SpecPair]:
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
|
||||
abstract_specs = [abstract for abstract, _ in spec_list]
|
||||
concrete_specs = concretize_specs_together(to_concretize, tests=tests)
|
||||
return list(zip(abstract_specs, concrete_specs))
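A hedged sketch of the new pair-based API (spec names hypothetical):

from spack.spec import Spec

# Each entry is (abstract, concrete-or-None); already-concrete entries are reused.
pairs = [(Spec("zlib"), None), (Spec("hdf5"), None)]
result = concretize_together(pairs, tests=False)  # -> [(abstract, concrete), ...]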
def concretize_together_when_possible(
|
||||
spec_list: Sequence[SpecPair], tests: TestsType = False
|
||||
) -> Sequence[SpecPair]:
|
||||
"""Given a number of specs as input, tries to concretize them together to the extent possible.
|
||||
|
||||
See documentation for ``unify: when_possible`` concretization for the precise definition of
|
||||
"to the extent possible".
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.solver.asp
|
||||
|
||||
to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
|
||||
old_concrete_to_abstract = {
|
||||
concrete: abstract for (abstract, concrete) in spec_list if concrete
|
||||
}
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
# If the "abstract" spec is a concrete spec from the previous concretization
|
||||
# translate it back to an abstract spec. Otherwise, keep the abstract spec
|
||||
return [
|
||||
(old_concrete_to_abstract.get(abstract, abstract), concrete)
|
||||
for abstract, concrete in sorted(result_by_user_spec.items())
|
||||
]
|
||||
|
||||
|
||||
def concretize_separately(
|
||||
spec_list: Sequence[SpecPair], tests: TestsType = False
|
||||
) -> Sequence[SpecPair]:
|
||||
"""Concretizes the input specs separately from each other.
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.bootstrap
|
||||
|
||||
to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
|
||||
args = [
|
||||
(i, str(abstract), tests)
|
||||
for i, abstract in enumerate(to_concretize)
|
||||
if not abstract.concrete
|
||||
]
|
||||
ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
# for a write lock. We do this indirectly by retrieving the
|
||||
# provider index, which should in turn trigger the update of
|
||||
# all the indexes if there's any need for that.
|
||||
_ = spack.repo.PATH.provider_index
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(args) == 0:
|
||||
# Still have to combine the things that were passed in as abstract with the things
|
||||
# that were passed in as pairs
|
||||
return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [
|
||||
(abstract, concrete) for abstract, concrete in spec_list if concrete
|
||||
]
|
||||
|
||||
# Solve the environment in parallel on Linux
|
||||
# TODO: support parallel concretization on macOS and Windows
|
||||
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
|
||||
|
||||
for j, (i, concrete, duration) in enumerate(
|
||||
spack.util.parallel.imap_unordered(
|
||||
_concretize_task, args, processes=num_procs, debug=tty.is_debug(), maxtaskperchild=1
|
||||
)
|
||||
):
|
||||
ret.append((i, concrete))
|
||||
percentage = (j + 1) / len(args) * 100
|
||||
tty.verbose(
|
||||
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
|
||||
f"{to_concretize[i].colored_str}"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
|
||||
# Add specs in original order
|
||||
ret.sort(key=lambda x: x[0])
|
||||
|
||||
return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [
|
||||
(abstract, concrete) for abstract, concrete in spec_list if concrete
|
||||
]
|
||||
|
||||
|
||||
def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int, Spec, float]:
    index, spec_str, tests = packed_arguments
    with tty.SuppressOutput(msg_enabled=False):
        start = time.time()
        spec = Spec(spec_str).concretized(tests=tests)
        return index, spec, time.time() - start


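The (index, spec, duration) tuple exists so that results arriving out of order from the worker pool can be restored to submission order. A standalone sketch of that pattern, using multiprocessing directly instead of spack.util.parallel, for illustration only:

# Standalone illustration of the index-tagging pattern used by _concretize_task.
import multiprocessing
import time


def work(packed):
    index, payload = packed
    start = time.time()
    result = payload.upper()          # stand-in for the expensive concretization step
    return index, result, time.time() - start


if __name__ == "__main__":
    args = list(enumerate(["zlib", "hdf5", "libpng"]))
    with multiprocessing.Pool(processes=2) as pool:
        out = list(pool.imap_unordered(work, args))   # results may arrive in any order
    out.sort(key=lambda x: x[0])                      # same re-sorting step as in the diff above
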
class UnavailableCompilerVersionError(spack.error.SpackError):
|
||||
"""Raised when there is no available compiler that satisfies a
|
||||
compiler spec."""
|
||||
|
||||
def __init__(self, compiler_spec: CompilerSpec, arch: Optional[ArchSpec] = None) -> None:
|
||||
err_msg = f"No compilers with spec {compiler_spec} found"
|
||||
def __init__(self, compiler_spec, arch=None):
|
||||
err_msg = "No compilers with spec {0} found".format(compiler_spec)
|
||||
if arch:
|
||||
err_msg += f" for operating system {arch.os} and target {arch.target}."
|
||||
err_msg += " for operating system {0} and target {1}.".format(arch.os, arch.target)
|
||||
|
||||
super().__init__(
|
||||
err_msg,
|
||||
|
||||
@@ -427,10 +427,6 @@ def __init__(self, *scopes: ConfigScope) -> None:
|
||||
self.push_scope(scope)
|
||||
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
|
||||
|
||||
def ensure_unwrapped(self) -> "Configuration":
|
||||
"""Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
|
||||
return self
|
||||
|
||||
@_config_mutator
|
||||
def push_scope(self, scope: ConfigScope) -> None:
|
||||
"""Add a higher precedence scope to the Configuration."""
|
||||
@@ -718,7 +714,7 @@ def print_section(self, section: str, blame: bool = False, *, scope=None) -> None:
@contextlib.contextmanager
def override(
    path_or_scope: Union[ConfigScope, str], value: Optional[Any] = None
) -> Generator[Configuration, None, None]:
) -> Generator[Union[lang.Singleton, Configuration], None, None]:
    """Simple way to override config settings within a context.

    Arguments:
@@ -756,7 +752,13 @@ def override(
    assert scope is overrides


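Typical use of the override() context manager shown in this hunk; the configuration path and value are examples only:

# Illustrative use of spack.config.override(): the setting is changed only
# inside the with-block and restored on exit.
import spack.config

with spack.config.override("config:build_jobs", 4):
    assert spack.config.get("config:build_jobs") == 4
# outside the block the previous value is back in effect
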
def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
|
||||
#: configuration scopes added on the command line set by ``spack.main.main()``
|
||||
COMMAND_LINE_SCOPES: List[str] = []
|
||||
|
||||
|
||||
def _add_platform_scope(
|
||||
cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
|
||||
) -> None:
|
||||
"""Add a platform-specific subdirectory for the current platform."""
|
||||
platform = spack.platforms.host().name
|
||||
scope = DirectoryConfigScope(
|
||||
@@ -790,7 +792,9 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
||||
return config_paths
|
||||
|
||||
|
||||
def _add_command_line_scopes(cfg: Configuration, command_line_scopes: List[str]) -> None:
|
||||
def _add_command_line_scopes(
|
||||
cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
|
||||
) -> None:
|
||||
"""Add additional scopes from the --config-scope argument, either envs or dirs."""
|
||||
import spack.environment.environment as env # circular import
|
||||
|
||||
@@ -860,11 +864,18 @@ def create() -> Configuration:
|
||||
# Each scope can have per-platform overrides in subdirectories
|
||||
_add_platform_scope(cfg, name, path)
|
||||
|
||||
# add command-line scopes
|
||||
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
|
||||
|
||||
# we make a special scope for spack commands so that they can
|
||||
# override configuration options.
|
||||
cfg.push_scope(InternalConfigScope("command_line"))
|
||||
|
||||
    return cfg


#: This is the singleton configuration instance for Spack.
CONFIG: Configuration = lang.Singleton(create)  # type: ignore
CONFIG: Union[Configuration, lang.Singleton] = lang.Singleton(create)


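Callers rarely touch the CONFIG singleton directly; a hedged sketch of the usual module-level helpers that delegate to it (the paths and scope name are examples):

# Illustrative only: reading and writing configuration through the helpers
# that wrap the CONFIG singleton defined above.
import spack.config

jobs = spack.config.get("config:build_jobs", default=8)
spack.config.set("config:debug", True, scope="command_line")   # example scope name
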
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
|
||||
|
||||
@@ -32,7 +32,6 @@
|
||||
Container,
|
||||
Dict,
|
||||
Generator,
|
||||
Iterable,
|
||||
List,
|
||||
NamedTuple,
|
||||
Optional,
|
||||
@@ -291,6 +290,52 @@ def __reduce__(self):
|
||||
return ForbiddenLock, tuple()
|
||||
|
||||
|
||||
_QUERY_DOCSTRING = """
|
||||
|
||||
Args:
|
||||
query_spec: queries iterate through specs in the database and
|
||||
return those that satisfy the supplied ``query_spec``. If
|
||||
query_spec is `any`, this will match all specs in the
|
||||
database. If it is a spec, we'll evaluate
|
||||
``spec.satisfies(query_spec)``
|
||||
|
||||
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||
whether that record is selected for the query. It can be used to craft criteria
|
||||
that need some data for selection not provided by the Database itself.
|
||||
|
||||
installed (bool or InstallStatus or typing.Iterable or None):
|
||||
if ``True``, includes only installed
|
||||
specs in the search; if ``False`` only missing specs, and if
|
||||
``any``, all specs in database. If an InstallStatus or iterable
|
||||
of InstallStatus, returns specs whose install status
|
||||
(installed, deprecated, or missing) matches (one of) the
|
||||
InstallStatus. (default: True)
|
||||
|
||||
explicit (bool or None): A spec that was installed
|
||||
following a specific user request is marked as explicit. If
|
||||
instead it was pulled-in as a dependency of a user requested
|
||||
spec it's considered implicit.
|
||||
|
||||
start_date (datetime.datetime or None): filters the query
|
||||
discarding specs that have been installed before ``start_date``.
|
||||
|
||||
end_date (datetime.datetime or None): filters the query discarding
|
||||
specs that have been installed after ``end_date``.
|
||||
|
||||
hashes (Container): list or set of hashes that we can use to
|
||||
restrict the search
|
||||
|
||||
in_buildcache (bool or None): Specs that are marked in
|
||||
this database as part of an associated binary cache are
|
||||
``in_buildcache``. All other specs are not. This field is used
|
||||
for querying mirror indices. Default is ``any``.
|
||||
|
||||
Returns:
|
||||
list of specs that match the query
|
||||
|
||||
"""
|
||||
|
||||
|
||||
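Later in this diff the block above is appended to the docstrings of _query, query_local and query; a hedged usage sketch of those query methods with a couple of the documented arguments (the spec strings are examples):

# Illustrative only: querying the store database with arguments documented above.
import spack.store

db = spack.store.STORE.db
installed_zlib = db.query("zlib", installed=True)     # searches upstream databases too
local_hdf5 = db.query_local("hdf5")                   # local store only, unsorted
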
class LockConfiguration(NamedTuple):
|
||||
"""Data class to configure locks in Database objects
|
||||
|
||||
@@ -1336,7 +1381,7 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") ->
|
||||
self._data[spec_key] = spec_rec
|
||||
|
||||
@_autospec
|
||||
def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
|
||||
def mark(self, spec: "spack.spec.Spec", key, value) -> None:
|
||||
"""Mark an arbitrary record on a spec."""
|
||||
with self.write_transaction():
|
||||
return self._mark(spec, key, value)
|
||||
@@ -1480,48 +1525,59 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
|
||||
|
||||
def _query(
|
||||
self,
|
||||
query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
|
||||
*,
|
||||
query_spec=any,
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
|
||||
explicit: Optional[bool] = None,
|
||||
start_date: Optional[datetime.datetime] = None,
|
||||
end_date: Optional[datetime.datetime] = None,
|
||||
hashes: Optional[Iterable[str]] = None,
|
||||
in_buildcache: Optional[bool] = None,
|
||||
origin: Optional[str] = None,
|
||||
installed=True,
|
||||
explicit=any,
|
||||
start_date=None,
|
||||
end_date=None,
|
||||
hashes=None,
|
||||
in_buildcache=any,
|
||||
origin=None,
|
||||
) -> List["spack.spec.Spec"]:
|
||||
"""Run a query on the database."""
|
||||
|
||||
# Restrict the set of records over which we iterate first
|
||||
matching_hashes = self._data
|
||||
if hashes is not None:
|
||||
matching_hashes = {h: self._data[h] for h in hashes if h in self._data}
|
||||
# TODO: Specs are a lot like queries. Should there be a
|
||||
# TODO: wildcard spec object, and should specs have attributes
|
||||
# TODO: like installed and known that can be queried? Or are
|
||||
# TODO: these really special cases that only belong here?
|
||||
|
||||
if isinstance(query_spec, str):
|
||||
query_spec = spack.spec.Spec(query_spec)
|
||||
if query_spec is not any:
|
||||
if not isinstance(query_spec, spack.spec.Spec):
|
||||
query_spec = spack.spec.Spec(query_spec)
|
||||
|
||||
if query_spec is not None and query_spec.concrete:
|
||||
hash_key = query_spec.dag_hash()
|
||||
if hash_key not in matching_hashes:
|
||||
return []
|
||||
matching_hashes = {hash_key: matching_hashes[hash_key]}
|
||||
# Just look up concrete specs with hashes; no fancy search.
|
||||
if query_spec.concrete:
|
||||
# TODO: handling of hashes restriction is not particularly elegant.
|
||||
hash_key = query_spec.dag_hash()
|
||||
if hash_key in self._data and (not hashes or hash_key in hashes):
|
||||
return [self._data[hash_key].spec]
|
||||
else:
|
||||
return []
|
||||
|
||||
# Abstract specs require more work -- currently we test
|
||||
# against everything.
|
||||
results = []
|
||||
start_date = start_date or datetime.datetime.min
|
||||
end_date = end_date or datetime.datetime.max
|
||||
|
||||
# save specs whose name doesn't match for last, to avoid a virtual check
|
||||
deferred = []
|
||||
for rec in matching_hashes.values():
|
||||
|
||||
for key, rec in self._data.items():
|
||||
if hashes is not None and rec.spec.dag_hash() not in hashes:
|
||||
continue
|
||||
|
||||
if origin and not (origin == rec.origin):
|
||||
continue
|
||||
|
||||
if not rec.install_type_matches(installed):
|
||||
continue
|
||||
|
||||
if in_buildcache is not None and rec.in_buildcache != in_buildcache:
|
||||
if in_buildcache is not any and rec.in_buildcache != in_buildcache:
|
||||
continue
|
||||
|
||||
if explicit is not None and rec.explicit != explicit:
|
||||
if explicit is not any and rec.explicit != explicit:
|
||||
continue
|
||||
|
||||
if predicate_fn is not None and not predicate_fn(rec):
|
||||
@@ -1532,7 +1588,7 @@ def _query(
|
||||
if not (start_date < inst_date < end_date):
|
||||
continue
|
||||
|
||||
if query_spec is None or query_spec.concrete:
|
||||
if query_spec is any:
|
||||
results.append(rec.spec)
|
||||
continue
|
||||
|
||||
@@ -1550,118 +1606,36 @@ def _query(
|
||||
# If we did find something, the query spec can't be virtual b/c we matched an actual
|
||||
# package installation, so skip the virtual check entirely. If we *didn't* find anything,
|
||||
# check all the deferred specs *if* the query is virtual.
|
||||
if not results and query_spec is not None and deferred and query_spec.virtual:
|
||||
if not results and query_spec is not any and deferred and query_spec.virtual:
|
||||
results = [spec for spec in deferred if spec.satisfies(query_spec)]
|
||||
|
||||
return results
|
||||
|
||||
def query_local(
|
||||
self,
|
||||
query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
|
||||
*,
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
|
||||
explicit: Optional[bool] = None,
|
||||
start_date: Optional[datetime.datetime] = None,
|
||||
end_date: Optional[datetime.datetime] = None,
|
||||
hashes: Optional[List[str]] = None,
|
||||
in_buildcache: Optional[bool] = None,
|
||||
origin: Optional[str] = None,
|
||||
) -> List["spack.spec.Spec"]:
|
||||
"""Queries the local Spack database.
|
||||
if _query.__doc__ is None:
|
||||
_query.__doc__ = ""
|
||||
_query.__doc__ += _QUERY_DOCSTRING
|
||||
|
||||
This function doesn't guarantee any sorting of the returned data for performance reasons,
|
||||
since comparing specs for __lt__ may be an expensive operation.
|
||||
def query_local(self, *args, **kwargs):
|
||||
"""Query only the local Spack database.
|
||||
|
||||
Args:
|
||||
query_spec: if query_spec is ``None``, match all specs in the database.
|
||||
If it is a spec, return all specs matching ``spec.satisfies(query_spec)``.
|
||||
|
||||
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||
whether that record is selected for the query. It can be used to craft criteria
|
||||
that need some data for selection not provided by the Database itself.
|
||||
|
||||
installed: if ``True``, includes only installed specs in the search. If ``False`` only
|
||||
missing specs, and if ``any``, all specs in database. If an InstallStatus or
|
||||
iterable of InstallStatus, returns specs whose install status matches at least
|
||||
one of the InstallStatus.
|
||||
|
||||
explicit: a spec that was installed following a specific user request is marked as
|
||||
explicit. If instead it was pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
start_date: if set considers only specs installed from the starting date.
|
||||
|
||||
end_date: if set considers only specs installed until the ending date.
|
||||
|
||||
in_buildcache: specs that are marked in this database as part of an associated binary
|
||||
cache are ``in_buildcache``. All other specs are not. This field is used for
|
||||
querying mirror indices. By default, it does not check this status.
|
||||
|
||||
hashes: list of hashes used to restrict the search
|
||||
|
||||
origin: origin of the spec
|
||||
This function doesn't guarantee any sorting of the returned
|
||||
data for performance reasons, since comparing specs for __lt__
|
||||
may be an expensive operation.
|
||||
"""
|
||||
with self.read_transaction():
|
||||
return self._query(
|
||||
query_spec,
|
||||
predicate_fn=predicate_fn,
|
||||
installed=installed,
|
||||
explicit=explicit,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
hashes=hashes,
|
||||
in_buildcache=in_buildcache,
|
||||
origin=origin,
|
||||
)
|
||||
return self._query(*args, **kwargs)
|
||||
|
||||
def query(
|
||||
self,
|
||||
query_spec: Optional[Union[str, "spack.spec.Spec"]] = None,
|
||||
*,
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
|
||||
explicit: Optional[bool] = None,
|
||||
start_date: Optional[datetime.datetime] = None,
|
||||
end_date: Optional[datetime.datetime] = None,
|
||||
in_buildcache: Optional[bool] = None,
|
||||
hashes: Optional[List[str]] = None,
|
||||
origin: Optional[str] = None,
|
||||
install_tree: str = "all",
|
||||
):
|
||||
"""Queries the Spack database including all upstream databases.
|
||||
if query_local.__doc__ is None:
|
||||
query_local.__doc__ = ""
|
||||
query_local.__doc__ += _QUERY_DOCSTRING
|
||||
|
||||
Args:
|
||||
query_spec: if query_spec is ``None``, match all specs in the database.
|
||||
If it is a spec, return all specs matching ``spec.satisfies(query_spec)``.
|
||||
def query(self, *args, **kwargs):
|
||||
"""Query the Spack database including all upstream databases.
|
||||
|
||||
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||
whether that record is selected for the query. It can be used to craft criteria
|
||||
that need some data for selection not provided by the Database itself.
|
||||
|
||||
installed: if ``True``, includes only installed specs in the search. If ``False`` only
|
||||
missing specs, and if ``any``, all specs in database. If an InstallStatus or
|
||||
iterable of InstallStatus, returns specs whose install status matches at least
|
||||
one of the InstallStatus.
|
||||
|
||||
explicit: a spec that was installed following a specific user request is marked as
|
||||
explicit. If instead it was pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
|
||||
start_date: if set considers only specs installed from the starting date.
|
||||
|
||||
end_date: if set considers only specs installed until the ending date.
|
||||
|
||||
in_buildcache: specs that are marked in this database as part of an associated binary
|
||||
cache are ``in_buildcache``. All other specs are not. This field is used for
|
||||
querying mirror indices. By default, it does not check this status.
|
||||
|
||||
hashes: list of hashes used to restrict the search
|
||||
|
||||
install_tree: query 'all' (default), 'local', 'upstream', or upstream path
|
||||
|
||||
origin: origin of the spec
|
||||
Additional Arguments:
|
||||
install_tree (str): query 'all' (default), 'local', 'upstream', or upstream path
|
||||
"""
|
||||
install_tree = kwargs.pop("install_tree", "all")
|
||||
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
|
||||
if install_tree not in valid_trees:
|
||||
msg = "Invalid install_tree argument to Database.query()\n"
|
||||
@@ -1677,52 +1651,26 @@ def query(
|
||||
# queries for upstream DBs need to *not* lock - we may not
|
||||
# have permissions to do this and the upstream DBs won't know about
|
||||
# us anyway (so e.g. they should never uninstall specs)
|
||||
upstream_results.extend(
|
||||
upstream_db._query(
|
||||
query_spec,
|
||||
predicate_fn=predicate_fn,
|
||||
installed=installed,
|
||||
explicit=explicit,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
hashes=hashes,
|
||||
in_buildcache=in_buildcache,
|
||||
origin=origin,
|
||||
)
|
||||
or []
|
||||
)
|
||||
upstream_results.extend(upstream_db._query(*args, **kwargs) or [])
|
||||
|
||||
local_results: Set["spack.spec.Spec"] = set()
|
||||
local_results = []
|
||||
if install_tree in ("all", "local") or self.root == install_tree:
|
||||
local_results = set(
|
||||
self.query_local(
|
||||
query_spec,
|
||||
predicate_fn=predicate_fn,
|
||||
installed=installed,
|
||||
explicit=explicit,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
hashes=hashes,
|
||||
in_buildcache=in_buildcache,
|
||||
origin=origin,
|
||||
)
|
||||
)
|
||||
local_results = set(self.query_local(*args, **kwargs))
|
||||
|
||||
results = list(local_results) + list(x for x in upstream_results if x not in local_results)
|
||||
|
||||
return sorted(results)
|
||||
|
||||
def query_one(
|
||||
self,
|
||||
query_spec: Optional[Union[str, "spack.spec.Spec"]],
|
||||
predicate_fn: Optional[SelectType] = None,
|
||||
installed: Union[bool, InstallStatus, List[InstallStatus]] = True,
|
||||
) -> Optional["spack.spec.Spec"]:
|
||||
if query.__doc__ is None:
|
||||
query.__doc__ = ""
|
||||
query.__doc__ += _QUERY_DOCSTRING
|
||||
|
||||
def query_one(self, query_spec, predicate_fn=None, installed=True):
|
||||
"""Query for exactly one spec that matches the query spec.
|
||||
|
||||
Returns None if no installed package matches.
|
||||
Raises an assertion error if more than one spec matches the
|
||||
query. Returns None if no installed package matches.
|
||||
|
||||
Raises:
|
||||
AssertionError: if more than one spec matches the query.
|
||||
"""
|
||||
concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
|
||||
assert len(concrete_specs) <= 1
|
||||
@@ -1771,6 +1719,24 @@ def root(key, record):
|
||||
if id(rec.spec) not in needed and rec.installed
|
||||
]
|
||||
|
||||
    def update_explicit(self, spec, explicit):
        """
        Update the spec's explicit state in the database.

        Args:
            spec (spack.spec.Spec): the spec whose install record is being updated
            explicit (bool): ``True`` if the package was requested explicitly
                by the user, ``False`` if it was pulled in as a dependency of
                an explicit package.
        """
        rec = self.get_record(spec)
        if explicit != rec.explicit:
            with self.write_transaction():
                message = "{s.name}@{s.version} : marking the package {0}"
                status = "explicit" if explicit else "implicit"
                tty.debug(message.format(status, s=spec))
                rec.explicit = explicit


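This hunk swaps direct db.mark(spec, "explicit", True) calls for the update_explicit helper above; a hedged sketch of the two call styles (it assumes zlib is already installed in the local store):

# Illustrative only: equivalent ways to flip a record's explicit flag.
import spack.store
from spack.spec import Spec

spec = Spec("zlib").concretized()                      # assumes zlib is installed locally
record = spack.store.STORE.db.get_record(spec)
if not record.explicit:
    spack.store.STORE.db.mark(spec, "explicit", True)  # newer call style
    # spack.store.STORE.db.update_explicit(spec, True) # helper shown in this hunk
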
class NoUpstreamVisitor:
|
||||
"""Gives edges to upstream specs, but does follow edges from upstream specs."""
|
||||
|
||||
@@ -10,7 +10,6 @@
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
#: Names of possible directives. This list is mostly populated using the @directive decorator.
|
||||
@@ -64,7 +63,7 @@ def __init__(cls, name, bases, attr_dict):
|
||||
# The instance is being initialized: if it is a package we must ensure
|
||||
# that the directives are called to set it up.
|
||||
|
||||
if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
|
||||
if "spack.pkg" in cls.__module__:
|
||||
# Ensure the presence of the dictionaries associated with the directives.
|
||||
# All dictionaries are defaultdicts that create lists for missing keys.
|
||||
for d in DirectiveMeta._directive_dict_names:
|
||||
|
||||
@@ -473,7 +473,6 @@
|
||||
active_environment,
|
||||
all_environment_names,
|
||||
all_environments,
|
||||
as_env_dir,
|
||||
create,
|
||||
create_in_dir,
|
||||
deactivate,
|
||||
@@ -481,7 +480,6 @@
|
||||
default_view_name,
|
||||
display_specs,
|
||||
environment_dir_from_name,
|
||||
environment_from_name_or_dir,
|
||||
exists,
|
||||
initialize_environment_dir,
|
||||
installed_specs,
|
||||
@@ -509,7 +507,6 @@
|
||||
"active_environment",
|
||||
"all_environment_names",
|
||||
"all_environments",
|
||||
"as_env_dir",
|
||||
"create",
|
||||
"create_in_dir",
|
||||
"deactivate",
|
||||
@@ -517,7 +514,6 @@
|
||||
"default_view_name",
|
||||
"display_specs",
|
||||
"environment_dir_from_name",
|
||||
"environment_from_name_or_dir",
|
||||
"exists",
|
||||
"initialize_environment_dir",
|
||||
"installed_specs",
|
||||
|
||||
@@ -11,10 +11,12 @@
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import time
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
@@ -24,6 +26,7 @@
|
||||
|
||||
import spack
|
||||
import spack.caches
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
@@ -42,6 +45,7 @@
|
||||
import spack.util.environment
|
||||
import spack.util.hash
|
||||
import spack.util.lock as lk
|
||||
import spack.util.parallel
|
||||
import spack.util.path
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -53,8 +57,6 @@
|
||||
from spack.spec_list import SpecList
|
||||
from spack.util.path import substitute_path_variables
|
||||
|
||||
SpecPair = spack.concretize.SpecPair
|
||||
|
||||
#: environment variable used to indicate the active environment
|
||||
spack_env_var = "SPACK_ENV"
|
||||
|
||||
@@ -275,22 +277,6 @@ def is_env_dir(path):
|
||||
return os.path.isdir(path) and os.path.exists(os.path.join(path, manifest_name))
|
||||
|
||||
|
||||
def as_env_dir(name_or_dir):
    """Translate an environment name or directory to the environment directory"""
    if is_env_dir(name_or_dir):
        return name_or_dir
    else:
        validate_env_name(name_or_dir)
        if not exists(name_or_dir):
            raise SpackEnvironmentError("no such environment '%s'" % name_or_dir)
        return root(name_or_dir)


def environment_from_name_or_dir(name_or_dir):
    """Get an environment with the supplied name."""
    return Environment(as_env_dir(name_or_dir))


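A small sketch of how the two helpers above are used together; "myenv" is a hypothetical environment name:

# Illustrative only: a named environment and its directory resolve to the same thing.
import spack.environment as ev

env_dir = ev.as_env_dir("myenv")                       # raises if no such environment exists
env = ev.environment_from_name_or_dir(env_dir)         # equivalent to passing "myenv" directly
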
def read(name):
|
||||
"""Get an environment with the supplied name."""
|
||||
validate_env_name(name)
|
||||
@@ -1508,7 +1494,7 @@ def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True):
|
||||
|
||||
def _get_specs_to_concretize(
|
||||
self,
|
||||
) -> Tuple[List[spack.spec.Spec], List[spack.spec.Spec], List[SpecPair]]:
|
||||
) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
|
||||
"""Compute specs to concretize for unify:true and unify:when_possible.
|
||||
|
||||
This includes new user specs and any already concretized specs.
|
||||
@@ -1518,20 +1504,23 @@ def _get_specs_to_concretize(
|
||||
|
||||
"""
|
||||
# Exit early if the set of concretized specs is the set of user specs
|
||||
new_user_specs = list(set(self.user_specs) - set(self.concretized_user_specs))
|
||||
kept_user_specs = list(set(self.user_specs) & set(self.concretized_user_specs))
|
||||
kept_user_specs += self.included_user_specs
|
||||
new_user_specs = set(self.user_specs) - set(self.concretized_user_specs)
|
||||
kept_user_specs = set(self.user_specs) & set(self.concretized_user_specs)
|
||||
if not new_user_specs:
|
||||
return new_user_specs, kept_user_specs, []
|
||||
|
||||
specs_to_concretize = [(s, None) for s in new_user_specs] + [
|
||||
(abstract, concrete)
|
||||
concrete_specs_to_keep = [
|
||||
concrete
|
||||
for abstract, concrete in self.concretized_specs()
|
||||
if abstract in kept_user_specs
|
||||
]
|
||||
|
||||
specs_to_concretize = list(new_user_specs) + concrete_specs_to_keep
|
||||
return new_user_specs, kept_user_specs, specs_to_concretize
|
||||
|
||||
def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
def _concretize_together_where_possible(
|
||||
self, tests: bool = False
|
||||
) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
|
||||
# Avoid cyclic dependency
|
||||
import spack.solver.asp
|
||||
|
||||
@@ -1540,26 +1529,36 @@ def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[S
|
||||
if not new_user_specs:
|
||||
return []
|
||||
|
||||
old_concrete_to_abstract = {
|
||||
concrete: abstract for (abstract, concrete) in self.concretized_specs()
|
||||
}
|
||||
|
||||
self.concretized_user_specs = []
|
||||
self.concretized_order = []
|
||||
self.specs_by_hash = {}
|
||||
|
||||
ret = []
|
||||
result = spack.concretize.concretize_together_when_possible(
|
||||
specs_to_concretize, tests=tests
|
||||
)
|
||||
for abstract, concrete in result:
|
||||
# Only add to the environment if it's from this environment (not included in)
|
||||
if abstract in self.user_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
# Return only the new specs
|
||||
result = []
|
||||
for abstract, concrete in sorted(result_by_user_spec.items()):
|
||||
# If the "abstract" spec is a concrete spec from the previous concretization
|
||||
# translate it back to an abstract spec. Otherwise, keep the abstract spec
|
||||
abstract = old_concrete_to_abstract.get(abstract, abstract)
|
||||
if abstract in new_user_specs:
|
||||
ret.append((abstract, concrete))
|
||||
result.append((abstract, concrete))
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
|
||||
return ret
|
||||
return result
|
||||
|
||||
def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
def _concretize_together(
|
||||
self, tests: bool = False
|
||||
) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
|
||||
"""Concretization strategy that concretizes all the specs
|
||||
in the same DAG.
|
||||
"""
|
||||
@@ -1573,8 +1572,8 @@ def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
self.specs_by_hash = {}
|
||||
|
||||
try:
|
||||
concretized_specs = spack.concretize.concretize_together(
|
||||
specs_to_concretize, tests=tests
|
||||
concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together(
|
||||
*specs_to_concretize, tests=tests
|
||||
)
|
||||
except spack.error.UnsatisfiableSpecError as e:
|
||||
# "Enhance" the error message for multiple root specs, suggest a less strict
|
||||
@@ -1592,13 +1591,14 @@ def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
)
|
||||
raise
|
||||
|
||||
# set() | set() does not preserve ordering, even though sets are ordered
|
||||
ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
|
||||
concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)]
|
||||
for abstract, concrete in concretized_specs:
|
||||
# Don't add if it's just included
|
||||
if abstract in self.user_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
|
||||
# Return the portion of the return value that is new
|
||||
return concretized_specs[: len(new_user_specs)]
|
||||
# zip truncates the longer list, which is exactly what we want here
|
||||
return list(zip(new_user_specs, concrete_specs))
|
||||
|
||||
def _concretize_separately(self, tests=False):
|
||||
"""Concretization strategy that concretizes separately one
|
||||
@@ -1620,16 +1620,71 @@ def _concretize_separately(self, tests=False):
|
||||
concrete = old_specs_by_hash[h]
|
||||
self._add_concrete_spec(s, concrete, new=False)
|
||||
|
||||
to_concretize = [
|
||||
(root, None) for root in self.user_specs if root not in old_concretized_user_specs
|
||||
]
|
||||
concretized_specs = spack.concretize.concretize_separately(to_concretize, tests=tests)
|
||||
# Concretize any new user specs that we haven't concretized yet
|
||||
args, root_specs, i = [], [], 0
|
||||
for uspec in self.user_specs:
|
||||
if uspec not in old_concretized_user_specs:
|
||||
root_specs.append(uspec)
|
||||
args.append((i, str(uspec), tests))
|
||||
i += 1
|
||||
|
||||
by_hash = {}
|
||||
for abstract, concrete in concretized_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
# for a write lock. We do this indirectly by retrieving the
|
||||
# provider index, which should in turn trigger the update of
|
||||
# all the indexes if there's any need for that.
|
||||
_ = spack.repo.PATH.provider_index
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(args) == 0:
|
||||
return []
|
||||
|
||||
# Solve the environment in parallel on Linux
|
||||
start = time.time()
|
||||
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
|
||||
|
||||
# TODO: support parallel concretization on macOS and Windows
|
||||
msg = "Starting concretization"
|
||||
if sys.platform not in ("darwin", "win32") and num_procs > 1:
|
||||
msg += f" pool with {num_procs} processes"
|
||||
tty.msg(msg)
|
||||
|
||||
batch = []
|
||||
for j, (i, concrete, duration) in enumerate(
|
||||
spack.util.parallel.imap_unordered(
|
||||
_concretize_task,
|
||||
args,
|
||||
processes=num_procs,
|
||||
debug=tty.is_debug(),
|
||||
maxtaskperchild=1,
|
||||
)
|
||||
):
|
||||
batch.append((i, concrete))
|
||||
percentage = (j + 1) / len(args) * 100
|
||||
tty.verbose(
|
||||
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
|
||||
f"{root_specs[i].colored_str}"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
|
||||
# Add specs in original order
|
||||
batch.sort(key=lambda x: x[0])
|
||||
by_hash = {} # for attaching information on test dependencies
|
||||
for root, (_, concrete) in zip(root_specs, batch):
|
||||
self._add_concrete_spec(root, concrete)
|
||||
by_hash[concrete.dag_hash()] = concrete
|
||||
|
||||
finish = time.time()
|
||||
tty.msg(f"Environment concretized in {finish - start:.2f} seconds")
|
||||
|
||||
# Unify the specs objects, so we get correct references to all parents
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
|
||||
@@ -1649,7 +1704,11 @@ def _concretize_separately(self, tests=False):
|
||||
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
|
||||
)
|
||||
|
||||
return concretized_specs
|
||||
results = [
|
||||
(abstract, self.specs_by_hash[h])
|
||||
for abstract, h in zip(self.concretized_user_specs, self.concretized_order)
|
||||
]
|
||||
return results
|
||||
|
||||
@property
|
||||
def default_view(self):
|
||||
@@ -1897,16 +1956,17 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
|
||||
specs = specs if specs is not None else roots
|
||||
|
||||
# Extend the set of specs to overwrite with modified dev specs and their parents
|
||||
install_args["overwrite"] = {
|
||||
*install_args.get("overwrite", ()),
|
||||
*self._dev_specs_that_need_overwrite(),
|
||||
}
|
||||
overwrite: Set[str] = set()
|
||||
overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
|
||||
install_args["overwrite"] = overwrite
|
||||
|
||||
# Only environment roots are marked explicit
|
||||
install_args["explicit"] = {
|
||||
*install_args.get("explicit", ()),
|
||||
*(s.dag_hash() for s in roots),
|
||||
}
|
||||
explicit: Set[str] = set()
|
||||
explicit.update(
|
||||
install_args.get("explicit", []),
|
||||
(s.dag_hash() for s in specs),
|
||||
(s.dag_hash() for s in roots),
|
||||
)
|
||||
install_args["explicit"] = explicit
|
||||
|
||||
PackageInstaller([spec.package for spec in specs], **install_args).install()
|
||||
|
||||
@@ -2456,6 +2516,14 @@ def display_specs(specs):
|
||||
print(tree_string)
|
||||
|
||||
|
||||
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
|
||||
index, spec_str, tests = packed_arguments
|
||||
with tty.SuppressOutput(msg_enabled=False):
|
||||
start = time.time()
|
||||
spec = Spec(spec_str).concretized(tests=tests)
|
||||
return index, spec, time.time() - start
|
||||
|
||||
|
||||
def make_repo_path(root):
|
||||
"""Make a RepoPath from the repo subdirectories in an environment."""
|
||||
path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
|
||||
|
||||
@@ -48,6 +48,8 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
|
||||
cmds += 'set "SPACK_ENV=%s"\n' % env.path
|
||||
if view:
|
||||
cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
|
||||
# TODO: despacktivate
|
||||
# TODO: prompt
|
||||
elif shell == "pwsh":
|
||||
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
|
||||
if view:
|
||||
|
||||
@@ -33,7 +33,6 @@
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.config
|
||||
import spack.directory_layout
|
||||
import spack.paths
|
||||
import spack.projections
|
||||
import spack.relocate
|
||||
@@ -51,7 +50,7 @@
|
||||
_projections_path = ".spack/projections.yaml"
|
||||
|
||||
|
||||
LinkCallbackType = Callable[[str, str, "FilesystemView", Optional[spack.spec.Spec]], None]
|
||||
LinkCallbackType = Callable[[str, str, "FilesystemView", Optional["spack.spec.Spec"]], None]
|
||||
|
||||
|
||||
def view_symlink(src: str, dst: str, *args, **kwargs) -> None:
|
||||
@@ -63,7 +62,7 @@ def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:
|
||||
|
||||
|
||||
def view_copy(
|
||||
src: str, dst: str, view: "FilesystemView", spec: Optional[spack.spec.Spec] = None
|
||||
src: str, dst: str, view: "FilesystemView", spec: Optional["spack.spec.Spec"] = None
|
||||
) -> None:
|
||||
"""
|
||||
Copy a file from src to dst.
|
||||
@@ -161,7 +160,7 @@ class FilesystemView:
|
||||
def __init__(
|
||||
self,
|
||||
root: str,
|
||||
layout: spack.directory_layout.DirectoryLayout,
|
||||
layout: "spack.directory_layout.DirectoryLayout",
|
||||
*,
|
||||
projections: Optional[Dict] = None,
|
||||
ignore_conflicts: bool = False,
|
||||
@@ -183,10 +182,7 @@ def __init__(
|
||||
|
||||
# Setup link function to include view
|
||||
self.link_type = link_type
|
||||
self._link = function_for_link_type(link_type)
|
||||
|
||||
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
|
||||
self._link(src, dst, self, spec)
|
||||
self.link = ft.partial(function_for_link_type(link_type), view=self)
|
||||
|
||||
def add_specs(self, *specs, **kwargs):
|
||||
"""
|
||||
@@ -287,7 +283,7 @@ class YamlFilesystemView(FilesystemView):
|
||||
def __init__(
|
||||
self,
|
||||
root: str,
|
||||
layout: spack.directory_layout.DirectoryLayout,
|
||||
layout: "spack.directory_layout.DirectoryLayout",
|
||||
*,
|
||||
projections: Optional[Dict] = None,
|
||||
ignore_conflicts: bool = False,
|
||||
|
||||
@@ -21,40 +21,43 @@
|
||||
features.
|
||||
"""
|
||||
import importlib
|
||||
import types
|
||||
from typing import List, Optional
|
||||
|
||||
from llnl.util.lang import ensure_last, list_modules
|
||||
|
||||
import spack.paths
|
||||
|
||||
|
||||
class _HookRunner:
|
||||
#: Order in which hooks are executed
|
||||
HOOK_ORDER = [
|
||||
"spack.hooks.module_file_generation",
|
||||
"spack.hooks.licensing",
|
||||
"spack.hooks.sbang",
|
||||
"spack.hooks.windows_runtime_linkage",
|
||||
"spack.hooks.drop_redundant_rpaths",
|
||||
"spack.hooks.absolutify_elf_sonames",
|
||||
"spack.hooks.permissions_setters",
|
||||
# after all mutations to the install prefix, write metadata
|
||||
"spack.hooks.write_install_manifest",
|
||||
# after all metadata is written
|
||||
"spack.hooks.autopush",
|
||||
]
|
||||
|
||||
#: Contains all hook modules after first call, shared among all HookRunner objects
|
||||
_hooks: Optional[List[types.ModuleType]] = None
|
||||
#: Stores all hooks on first call, shared among
|
||||
#: all HookRunner objects
|
||||
_hooks = None
|
||||
|
||||
def __init__(self, hook_name):
|
||||
self.hook_name = hook_name
|
||||
|
||||
@classmethod
|
||||
def _populate_hooks(cls):
|
||||
# Lazily populate the list of hooks
|
||||
cls._hooks = []
|
||||
|
||||
relative_names = list(list_modules(spack.paths.hooks_path))
|
||||
|
||||
# Ensure that write_install_manifest comes last
|
||||
ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest")
|
||||
|
||||
for name in relative_names:
|
||||
module_name = __name__ + "." + name
|
||||
module_obj = importlib.import_module(module_name)
|
||||
cls._hooks.append((module_name, module_obj))
|
||||
|
||||
    @property
    def hooks(self) -> List[types.ModuleType]:
    def hooks(self):
        if not self._hooks:
            self._hooks = [importlib.import_module(module_name) for module_name in self.HOOK_ORDER]
            self._populate_hooks()
        return self._hooks

    def __call__(self, *args, **kwargs):
        for module in self.hooks:
        for _, module in self.hooks:
            if hasattr(module, self.hook_name):
                hook = getattr(module, self.hook_name)
                if hasattr(hook, "__call__"):


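Either version of the hooks property ends with __call__ iterating modules and invoking a same-named function in each; a hedged sketch of what a hook module and its invocation look like (module path and signature are assumptions, not taken from this diff):

# Hypothetical hook module, e.g. lib/spack/spack/hooks/example_hook.py:
def post_install(spec, explicit):
    print(f"installed {spec.name} (explicit={explicit})")

# The module-level runner created by _HookRunner is then called as, roughly:
#     spack.hooks.post_install(spec, explicit)
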
@@ -36,6 +36,7 @@
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
from collections import defaultdict
|
||||
from gzip import GzipFile
|
||||
from typing import Dict, Iterator, List, Optional, Set, Tuple, Union
|
||||
@@ -412,7 +413,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
|
||||
tty.debug(f"{pre} already registered in DB")
|
||||
record = spack.store.STORE.db.get_record(spec)
|
||||
if explicit and not record.explicit:
|
||||
spack.store.STORE.db.mark(spec, "explicit", True)
|
||||
spack.store.STORE.db.update_explicit(spec, explicit)
|
||||
|
||||
except KeyError:
|
||||
# If not, register it and generate the module file.
|
||||
@@ -1507,8 +1508,8 @@ def _prepare_for_install(self, task: Task) -> None:
|
||||
self._update_installed(task)
|
||||
|
||||
# Only update the explicit entry once for the explicit package
|
||||
if task.explicit and not rec.explicit:
|
||||
spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)
|
||||
if task.explicit:
|
||||
spack.store.STORE.db.update_explicit(task.pkg.spec, True)
|
||||
|
||||
def _cleanup_all_tasks(self) -> None:
|
||||
"""Cleanup all tasks to include releasing their locks."""
|
||||
@@ -2214,7 +2215,7 @@ def install(self) -> None:
|
||||
if task.is_build_request:
|
||||
if single_requested_spec:
|
||||
raise
|
||||
failed_build_requests.append((pkg, pkg_id, str(exc)))
|
||||
failed_build_requests.append((pkg, pkg_id, exc))
|
||||
|
||||
finally:
|
||||
# Remove the install prefix if anything went wrong during
|
||||
@@ -2241,6 +2242,9 @@ def install(self) -> None:
        if failed_build_requests or missing:
            for _, pkg_id, err in failed_build_requests:
                tty.error(f"{pkg_id}: {err}")
                if spack.error.SHOW_BACKTRACE:
                    # note: in python 3.10+ this can just be print_exception(err)
                    traceback.print_exception(type(err), err, err.__traceback__)

            for _, pkg_id in missing:
                tty.error(f"{pkg_id}: Package was not installed")


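The inline note refers to the Python 3.10 signature change; a minimal sketch of the two equivalent spellings:

# Illustrative only: printing a stored exception before and after Python 3.10.
import sys
import traceback

try:
    raise RuntimeError("build failed")                 # stand-in for a failed build request
except RuntimeError as err:
    if sys.version_info >= (3, 10):
        traceback.print_exception(err)
    else:
        traceback.print_exception(type(err), err, err.__traceback__)
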
@@ -489,7 +489,6 @@ def make_argument_parser(**kwargs):
|
||||
help="add stacktraces to all printed statements",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-t",
|
||||
"--backtrace",
|
||||
action="store_true",
|
||||
default="SPACK_BACKTRACE" in os.environ,
|
||||
@@ -911,6 +910,13 @@ def _main(argv=None):
|
||||
# Make spack load / env activate work on macOS
|
||||
restore_macos_dyld_vars()
|
||||
|
||||
# make spack.config aware of any command line configuration scopes
|
||||
if args.config_scopes:
|
||||
spack.config.COMMAND_LINE_SCOPES = args.config_scopes
|
||||
|
||||
# ensure options on spack command come before everything
|
||||
setup_main_options(args)
|
||||
|
||||
# activate an environment if one was specified on the command line
|
||||
env_format_error = None
|
||||
if not args.no_env:
|
||||
@@ -924,12 +930,6 @@ def _main(argv=None):
|
||||
e.print_context()
|
||||
env_format_error = e
|
||||
|
||||
# Push scopes from the command line last
|
||||
if args.config_scopes:
|
||||
spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
|
||||
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
|
||||
setup_main_options(args)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Things that require configuration should go below here
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
@@ -29,6 +29,7 @@
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.mirror
|
||||
import spack.oci.image
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
@@ -755,7 +756,7 @@ def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
|
||||
|
||||
def require_mirror_name(mirror_name):
|
||||
"""Find a mirror by name and raise if it does not exist"""
|
||||
mirror = MirrorCollection().get(mirror_name)
|
||||
mirror = spack.mirror.MirrorCollection().get(mirror_name)
|
||||
if not mirror:
|
||||
raise ValueError(f'no mirror named "{mirror_name}"')
|
||||
return mirror
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
from llnl.util.lang import classproperty, memoized
|
||||
from llnl.util.link_tree import LinkTree
|
||||
|
||||
import spack.build_environment
|
||||
import spack.builder
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
@@ -49,6 +50,7 @@
|
||||
import spack.store
|
||||
import spack.url
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
import spack.util.web
|
||||
from spack.error import InstallError, NoURLError, PackageError
|
||||
|
||||
@@ -39,9 +39,9 @@
|
||||
import spack.error
|
||||
import spack.patch
|
||||
import spack.provider_index
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.tag
|
||||
import spack.util.file_cache
|
||||
import spack.util.git
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
@@ -216,9 +216,9 @@ def compute_loader(self, fullname):
|
||||
def packages_path():
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return PATH.get_repo("builtin.mock").packages_path
|
||||
except UnknownNamespaceError:
|
||||
return PATH.get_repo("builtin").packages_path
|
||||
return spack.repo.PATH.get_repo("builtin.mock").packages_path
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
return spack.repo.PATH.get_repo("builtin").packages_path
|
||||
|
||||
|
||||
class GitExe:
|
||||
@@ -314,7 +314,7 @@ def add_package_to_git_stage(packages):
|
||||
git = GitExe()
|
||||
|
||||
for pkg_name in packages:
|
||||
filename = PATH.filename_for_package_name(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
if not os.path.isfile(filename):
|
||||
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
|
||||
|
||||
@@ -590,7 +590,7 @@ def __init__(
|
||||
self,
|
||||
package_checker: FastPackageChecker,
|
||||
namespace: str,
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
@@ -683,7 +683,7 @@ class RepoPath:
|
||||
def __init__(
|
||||
self,
|
||||
*repos: Union[str, "Repo"],
|
||||
cache: Optional[spack.util.file_cache.FileCache],
|
||||
cache: Optional["spack.caches.FileCacheType"],
|
||||
overrides: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
self.repos: List[Repo] = []
|
||||
@@ -965,7 +965,7 @@ def __init__(
|
||||
self,
|
||||
root: str,
|
||||
*,
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
overrides: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
"""Instantiate a package repository from a filesystem path.
|
||||
@@ -1440,7 +1440,9 @@ def _path(configuration=None):
|
||||
return create(configuration=configuration)
|
||||
|
||||
|
||||
def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
def create(
|
||||
configuration: Union["spack.config.Configuration", llnl.util.lang.Singleton]
|
||||
) -> RepoPath:
|
||||
"""Create a RepoPath from a configuration object.
|
||||
|
||||
Args:
|
||||
@@ -1463,7 +1465,7 @@ def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
|
||||
|
||||
#: Singleton repo path instance
|
||||
PATH: RepoPath = llnl.util.lang.Singleton(_path) # type: ignore
|
||||
PATH: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
REPOS_FINDER = ReposFinder()
|
||||
@@ -1583,7 +1585,7 @@ def __init__(self, name, repo=None):
|
||||
long_msg = "Use 'spack create' to create a new package."
|
||||
|
||||
if not repo:
|
||||
repo = PATH
|
||||
repo = spack.repo.PATH
|
||||
|
||||
# We need to compare the base package name
|
||||
pkg_name = name.rsplit(".", 1)[-1]
|
||||
|
||||
@@ -11,6 +11,8 @@
|
||||
|
||||
from llnl.util.lang import union_dicts
|
||||
|
||||
import spack.schema.gitlab_ci
|
||||
|
||||
# Schema for script fields
|
||||
# List of lists and/or strings
|
||||
# This is similar to what is allowed in
|
||||
@@ -135,8 +137,39 @@ def job_schema(name: str):
|
||||
}
|
||||
)
|
||||
|
||||
# TODO: Remove in Spack 0.23
|
||||
ci_properties = {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
# "required": ["mappings"],
|
||||
"properties": union_dicts(
|
||||
core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
|
||||
),
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
# "required": ["mappings"],
|
||||
"properties": union_dicts(
|
||||
core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
|
||||
),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#: Properties for inclusion in other schemas
|
||||
properties: Dict[str, Any] = {"ci": core_shared_properties}
|
||||
properties: Dict[str, Any] = {
|
||||
"ci": {
|
||||
"oneOf": [
|
||||
# TODO: Replace with core-shared-properties in Spack 0.23
|
||||
ci_properties,
|
||||
# Allow legacy format under `ci` for `config update ci`
|
||||
spack.schema.gitlab_ci.gitlab_ci_properties,
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#: Full schema with metadata
|
||||
schema = {
|
||||
@@ -146,3 +179,21 @@ def job_schema(name: str):
|
||||
"additionalProperties": False,
|
||||
"properties": properties,
|
||||
}
|
||||
|
||||
|
||||
def update(data):
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.ci
|
||||
import spack.environment as ev
|
||||
|
||||
# Warn if deprecated section is still in the environment
|
||||
ci_env = ev.active_environment()
|
||||
if ci_env:
|
||||
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
|
||||
if "gitlab-ci" in env_config:
|
||||
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")
|
||||
|
||||
# Detect if the ci section is using the new pipeline-gen
|
||||
# If it is, assume it has already been converted
|
||||
return spack.ci.translate_deprecated_config(data)
|
||||
|
||||
@@ -61,10 +61,7 @@
|
||||
"target": {"type": "string"},
|
||||
"alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
|
||||
"modules": {
|
||||
"anyOf": [
|
||||
{"type": "null"},
|
||||
{"type": "array", "items": {"type": "string"}},
|
||||
]
|
||||
"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
|
||||
},
|
||||
"implicit_rpaths": implicit_rpaths,
|
||||
"environment": spack.schema.environment.definition,
|
||||
|
||||
@@ -33,14 +33,8 @@
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"local",
|
||||
"buildcache",
|
||||
"external",
|
||||
"environment",
|
||||
],
|
||||
"enum": ["local", "buildcache", "external"],
|
||||
},
|
||||
"path": {"type": "string"},
|
||||
"include": LIST_OF_SPECS,
|
||||
"exclude": LIST_OF_SPECS,
|
||||
},
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
|
||||
from llnl.util.lang import union_dicts
|
||||
|
||||
import spack.schema.gitlab_ci # DEPRECATED
|
||||
import spack.schema.merged
|
||||
|
||||
from .spec_list import spec_list_schema
|
||||
@@ -19,21 +20,21 @@
|
||||
#: Top level key in a manifest file
|
||||
TOP_LEVEL_KEY = "spack"
|
||||
|
||||
include_concrete = {"type": "array", "default": [], "items": {"type": "string"}}
|
||||
|
||||
properties: Dict[str, Any] = {
|
||||
"spack": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"additionalProperties": False,
|
||||
"properties": union_dicts(
|
||||
# Include deprecated "gitlab-ci" section
|
||||
spack.schema.gitlab_ci.properties,
|
||||
# merged configuration scope schemas
|
||||
spack.schema.merged.properties,
|
||||
# extra environment schema properties
|
||||
{
|
||||
"include": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||
"specs": spec_list_schema,
|
||||
"include_concrete": include_concrete,
|
||||
"include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||
},
|
||||
),
|
||||
}
|
||||
@@ -57,6 +58,15 @@ def update(data):
    Returns:
        True if data was changed, False otherwise
    """

    import spack.ci

    if "gitlab-ci" in data:
        data["ci"] = data.pop("gitlab-ci")

    if "ci" in data:
        return spack.ci.translate_deprecated_config(data["ci"])

    # There are not currently any deprecated attributes in this section
    # that have not been removed
    return False


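A hedged before/after sketch of the manifest fragment that update() migrates; the mappings payload is a placeholder:

# Illustrative only: the deprecated key is renamed, then handed to
# spack.ci.translate_deprecated_config() as in the hunk above.
data = {"gitlab-ci": {"mappings": []}}                 # legacy fragment (placeholder payload)
if "gitlab-ci" in data:
    data["ci"] = data.pop("gitlab-ci")                 # now {"ci": {"mappings": []}}
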
125
lib/spack/spack/schema/gitlab_ci.py
Normal file
@@ -0,0 +1,125 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for gitlab-ci.yaml configuration file.

.. literalinclude:: ../spack/schema/gitlab_ci.py
   :lines: 15-
"""
from typing import Any, Dict

from llnl.util.lang import union_dicts

image_schema = {
    "oneOf": [
        {"type": "string"},
        {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "entrypoint": {"type": "array", "items": {"type": "string"}},
            },
        },
    ]
}

runner_attributes_schema_items = {
    "image": image_schema,
    "tags": {"type": "array", "items": {"type": "string"}},
    "variables": {"type": "object", "patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}}},
    "before_script": {"type": "array", "items": {"type": "string"}},
    "script": {"type": "array", "items": {"type": "string"}},
    "after_script": {"type": "array", "items": {"type": "string"}},
}

runner_selector_schema = {
    "type": "object",
    "additionalProperties": True,
    "required": ["tags"],
    "properties": runner_attributes_schema_items,
}

remove_attributes_schema = {
    "type": "object",
    "additionalProperties": False,
    "required": ["tags"],
    "properties": {"tags": {"type": "array", "items": {"type": "string"}}},
}


core_shared_properties = union_dicts(
    runner_attributes_schema_items,
    {
        "bootstrap": {
            "type": "array",
            "items": {
                "anyOf": [
                    {"type": "string"},
                    {
                        "type": "object",
                        "additionalProperties": False,
                        "required": ["name"],
                        "properties": {
                            "name": {"type": "string"},
                            "compiler-agnostic": {"type": "boolean", "default": False},
                        },
                    },
                ]
            },
        },
        "match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
        "mappings": {
            "type": "array",
            "items": {
                "type": "object",
                "additionalProperties": False,
                "required": ["match"],
                "properties": {
                    "match": {"type": "array", "items": {"type": "string"}},
                    "remove-attributes": remove_attributes_schema,
                    "runner-attributes": runner_selector_schema,
                },
            },
        },
        "service-job-attributes": runner_selector_schema,
        "signing-job-attributes": runner_selector_schema,
        "rebuild-index": {"type": "boolean"},
        "broken-specs-url": {"type": "string"},
        "broken-tests-packages": {"type": "array", "items": {"type": "string"}},
    },
)

gitlab_ci_properties = {
    "anyOf": [
        {
            "type": "object",
            "additionalProperties": False,
            "required": ["mappings"],
            "properties": union_dicts(
                core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
            ),
        },
        {
            "type": "object",
            "additionalProperties": False,
            "required": ["mappings"],
            "properties": union_dicts(
                core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
            ),
        },
    ]
}

#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"gitlab-ci": gitlab_ci_properties}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack gitlab-ci configuration file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
}
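A minimal sketch, assuming the restored module above imports as spack.schema.gitlab_ci, of validating a deprecated gitlab-ci block with jsonschema -- the same pattern the ci tests near the end of this diff use for the ci schema; the package and tag names are placeholders.

import jsonschema

import spack.schema.gitlab_ci as gitlab_ci_schema

config = {
    "gitlab-ci": {
        "mappings": [{"match": ["zlib"], "runner-attributes": {"tags": ["docker"]}}]
    }
}
# Raises jsonschema.ValidationError if the block does not match the schema above.
jsonschema.validate(config, gitlab_ci_schema.schema)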
@@ -27,6 +27,7 @@

import spack
import spack.binary_distribution
import spack.bootstrap.core
import spack.compilers
import spack.concretize
import spack.config
@@ -514,8 +515,6 @@ def _compute_specs_from_answer_set(self):
|
||||
best = min(self.answers)
|
||||
opt, _, answer = best
|
||||
for input_spec in self.abstract_specs:
|
||||
# The specs must be unified to get here, so it is safe to associate any satisfying spec
|
||||
# with the input. Multiple inputs may be matched to the same concrete spec
|
||||
node = SpecBuilder.make_node(pkg=input_spec.name)
|
||||
if input_spec.virtual:
|
||||
providers = [
|
||||
@@ -815,7 +814,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
||||
solve, and the internal statistics from clingo.
|
||||
"""
|
||||
# avoid circular import
|
||||
import spack.bootstrap.core
|
||||
import spack.bootstrap
|
||||
|
||||
output = output or DEFAULT_OUTPUT_CONFIGURATION
|
||||
timer = spack.util.timer.Timer()
|
||||
@@ -888,7 +887,6 @@ def on_model(model):
|
||||
result.satisfiable = solve_result.satisfiable
|
||||
|
||||
if result.satisfiable:
|
||||
timer.start("construct_specs")
|
||||
# get the best model
|
||||
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
|
||||
min_cost, best_model = min(models)
|
||||
@@ -913,8 +911,7 @@ def on_model(model):
|
||||
|
||||
# record the possible dependencies in the solve
|
||||
result.possible_dependencies = setup.pkgs
|
||||
timer.stop("construct_specs")
|
||||
timer.stop()
|
||||
|
||||
elif cores:
|
||||
result.control = self.control
|
||||
result.cores.extend(cores)
|
||||
@@ -2031,12 +2028,9 @@ def _spec_clauses(
|
||||
for variant_def in variant_defs:
|
||||
self.variant_values_from_specs.add((spec.name, id(variant_def), value))
|
||||
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
if variant.propagate:
|
||||
clauses.append(f.propagate(spec.name, fn.variant_value(vname, value)))
|
||||
if self.pkg_class(spec.name).has_variant(vname):
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
else:
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
|
||||
# compiler and compiler version
|
||||
if spec.compiler:
|
||||
@@ -2622,7 +2616,6 @@ def setup(
|
||||
)
|
||||
for name, info in env.dev_specs.items()
|
||||
)
|
||||
|
||||
specs = tuple(specs) # ensure compatible types to add
|
||||
|
||||
self.gen.h1("Reusable concrete specs")
|
||||
@@ -3836,16 +3829,8 @@ def execute_explicit_splices(self):
|
||||
for splice_set in splice_config:
|
||||
target = splice_set["target"]
|
||||
replacement = spack.spec.Spec(splice_set["replacement"])
|
||||
|
||||
if not replacement.abstract_hash:
|
||||
location = getattr(
|
||||
splice_set["replacement"], "_start_mark", " at unknown line number"
|
||||
)
|
||||
msg = f"Explicit splice replacement '{replacement}' does not include a hash.\n"
|
||||
msg += f"{location}\n\n"
|
||||
msg += " Splice replacements must be specified by hash"
|
||||
raise InvalidSpliceError(msg)
|
||||
|
||||
assert replacement.abstract_hash
|
||||
replacement.replace_hash()
|
||||
transitive = splice_set.get("transitive", False)
|
||||
splice_triples.append((target, replacement, transitive))
|
||||
|
||||
@@ -3856,10 +3841,6 @@ def execute_explicit_splices(self):
|
||||
if target in current_spec:
|
||||
# matches root or non-root
|
||||
# e.g. mvapich2%gcc
|
||||
|
||||
# The first iteration, we need to replace the abstract hash
|
||||
if not replacement.concrete:
|
||||
replacement.replace_hash()
|
||||
current_spec = current_spec.splice(replacement, transitive)
|
||||
new_key = NodeArgument(id=key.id, pkg=current_spec.name)
|
||||
specs[new_key] = current_spec
|
||||
@@ -3985,7 +3966,7 @@ def selected_specs(self) -> List[spack.spec.Spec]:
|
||||
return [s for s in self.factory() if self.is_selected(s)]
|
||||
|
||||
@staticmethod
|
||||
def from_store(configuration, *, include, exclude) -> "SpecFilter":
|
||||
def from_store(configuration, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the current store."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
@@ -3993,7 +3974,7 @@ def from_store(configuration, *, include, exclude) -> "SpecFilter":
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_buildcache(configuration, *, include, exclude) -> "SpecFilter":
|
||||
def from_buildcache(configuration, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the configured buildcaches."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=False)
|
||||
@@ -4001,29 +3982,6 @@ def from_buildcache(configuration, *, include, exclude) -> "SpecFilter":
|
||||
factory=_specs_from_mirror, is_usable=is_reusable, include=include, exclude=exclude
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_environment(configuration, *, include, exclude, env) -> "SpecFilter":
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(_specs_from_environment, env=env)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_environment_included_concrete(
|
||||
configuration,
|
||||
*,
|
||||
include: List[str],
|
||||
exclude: List[str],
|
||||
env: ev.Environment,
|
||||
included_concrete: str,
|
||||
) -> "SpecFilter":
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(
|
||||
_specs_from_environment_included_concrete, env=env, included_concrete=included_concrete
|
||||
)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
|
||||
def _specs_from_store(configuration):
|
||||
store = spack.store.create(configuration)
|
||||
@@ -4041,23 +3999,6 @@ def _specs_from_mirror():
|
||||
return []
|
||||
|
||||
|
||||
def _specs_from_environment(env):
|
||||
"""Return all concrete specs from the environment. This includes all included concrete"""
|
||||
if env:
|
||||
return [concrete for _, concrete in env.concretized_specs()]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
def _specs_from_environment_included_concrete(env, included_concrete):
|
||||
"""Return only concrete specs from the environment included from the included_concrete"""
|
||||
if env:
|
||||
assert included_concrete in env.included_concrete_envs
|
||||
return [concrete for concrete in env.included_specs_by_hash[included_concrete].values()]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
class ReuseStrategy(enum.Enum):
|
||||
ROOTS = enum.auto()
|
||||
DEPENDENCIES = enum.auto()
|
||||
@@ -4087,12 +4028,6 @@ def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
SpecFilter.from_buildcache(
|
||||
configuration=self.configuration, include=[], exclude=[]
|
||||
),
|
||||
SpecFilter.from_environment(
|
||||
configuration=self.configuration,
|
||||
include=[],
|
||||
exclude=[],
|
||||
env=ev.active_environment(), # includes all concrete includes
|
||||
),
|
||||
]
|
||||
)
|
||||
else:
|
||||
@@ -4107,46 +4042,7 @@ def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
for source in reuse_yaml.get("from", default_sources):
|
||||
include = source.get("include", default_include)
|
||||
exclude = source.get("exclude", default_exclude)
|
||||
if source["type"] == "environment" and "path" in source:
|
||||
env_dir = ev.as_env_dir(source["path"])
|
||||
active_env = ev.active_environment()
|
||||
if active_env and env_dir in active_env.included_concrete_envs:
|
||||
# If environment is included as a concrete environment, use the local copy
|
||||
# of specs in the active environment.
|
||||
# note: included concrete environments are only updated at concretization
|
||||
# time, and reuse needs to match the included specs.
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment_included_concrete(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=active_env,
|
||||
included_concrete=env_dir,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# If the environment is not included as a concrete environment, use the
|
||||
# current specs from its lockfile.
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=ev.environment_from_name_or_dir(env_dir),
|
||||
)
|
||||
)
|
||||
elif source["type"] == "environment":
|
||||
# reusing from the current environment implicitly reuses from all of the
|
||||
# included concrete environments
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=ev.active_environment(),
|
||||
)
|
||||
)
|
||||
elif source["type"] == "local":
|
||||
if source["type"] == "local":
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_store(self.configuration, include=include, exclude=exclude)
|
||||
)
|
||||
@@ -4195,7 +4091,7 @@ def _check_input_and_extract_concrete_specs(specs):
|
||||
spack.spec.Spec.ensure_valid_variants(s)
|
||||
return reusable
|
||||
|
||||
def solve_with_stats(
|
||||
def solve(
|
||||
self,
|
||||
specs,
|
||||
out=None,
|
||||
@@ -4206,8 +4102,6 @@ def solve_with_stats(
|
||||
allow_deprecated=False,
|
||||
):
|
||||
"""
|
||||
Concretize a set of specs and track the timing and statistics for the solve
|
||||
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
out: Optionally write the generated ASP program to a file-like object.
|
||||
@@ -4219,22 +4113,15 @@ def solve_with_stats(
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
allow_deprecated (bool): allow deprecated version in the solve
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self.selector.reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
|
||||
return self.driver.solve(
|
||||
result, _, _ = self.driver.solve(
|
||||
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
|
||||
)
|
||||
|
||||
def solve(self, specs, **kwargs):
|
||||
"""
|
||||
Convenience function for concretizing a set of specs and ignoring timing
|
||||
and statistics. Uses the same kwargs as solve_with_stats.
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
result, _, _ = self.solve_with_stats(specs, **kwargs)
|
||||
return result
|
||||
|
||||
def solve_in_rounds(
|
||||
@@ -4339,7 +4226,3 @@ def __init__(self, provided, conflicts):
|
||||
# Add attribute expected of the superclass interface
|
||||
self.required = None
|
||||
self.constraint_type = None
|
||||
|
||||
|
||||
class InvalidSpliceError(spack.error.SpackError):
|
||||
"""For cases in which the splice configuration is invalid."""
|
||||
|
||||
@@ -57,12 +57,6 @@
|
||||
internal_error("provider with no virtual node").
|
||||
:- provider(PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("provider with no real node").
|
||||
:- node_has_variant(PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("node has variant for a non-node").
|
||||
:- attr("variant_set", PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("variant_set for a non-node").
|
||||
:- variant_is_propagated(PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("variant_is_propagated for a non-node").
|
||||
|
||||
:- attr("root", node(ID, PackageNode)), ID > min_dupe_id,
|
||||
internal_error("root with a non-minimal duplicate ID").
|
||||
@@ -581,8 +575,7 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
|
||||
% or used somewhere
|
||||
:- attr("virtual_node", node(_, Virtual)),
|
||||
not attr("virtual_on_incoming_edges", _, Virtual),
|
||||
not attr("virtual_root", node(_, Virtual)),
|
||||
internal_error("virtual node does not match incoming edge").
|
||||
not attr("virtual_root", node(_, Virtual)).
|
||||
|
||||
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
|
||||
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
|
||||
@@ -636,8 +629,7 @@ do_not_impose(EffectID, node(X, Package))
|
||||
virtual_condition_holds(_, PossibleProvider, Virtual),
|
||||
PossibleProvider != ProviderNode,
|
||||
explicitly_requested_root(PossibleProvider),
|
||||
not explicitly_requested_root(ProviderNode),
|
||||
internal_error("If a root can provide a virtual, it must be the provider").
|
||||
not explicitly_requested_root(ProviderNode).
|
||||
|
||||
% A package cannot be the actual provider for a virtual if it does not
|
||||
% fulfill the conditions to provide that virtual
|
||||
@@ -780,8 +772,7 @@ required_provider(Provider, Virtual)
|
||||
pkg_fact(Virtual, condition_effect(ConditionID, EffectID)),
|
||||
imposed_constraint(EffectID, "node", Provider).
|
||||
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider,
|
||||
internal_error("If a provider is required the concretizer must use it").
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.
|
||||
|
||||
% TODO: the following choice rule allows the solver to add compiler
|
||||
% flags if their only source is from a requirement. This is overly-specific
|
||||
@@ -861,8 +852,7 @@ variant_defined(PackageNode, Name) :- variant_definition(PackageNode, Name, _).
|
||||
% for two or more variant definitions, this prefers the last one defined.
|
||||
:- node_has_variant(node(NodeID, Package), Name, SelectedVariantID),
|
||||
variant_definition(node(NodeID, Package), Name, VariantID),
|
||||
VariantID > SelectedVariantID,
|
||||
internal_error("If the solver picks a variant descriptor it must use that variant descriptor").
|
||||
VariantID > SelectedVariantID.
|
||||
|
||||
% B: Associating applicable package rules with nodes
|
||||
|
||||
@@ -979,7 +969,6 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
|
||||
|
||||
:- attr("variant_set", node(ID, Package), Variant, Value),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value).
|
||||
internal_error("If a variant is set to a value it must have that value").
|
||||
|
||||
% The rules below allow us to prefer default values for variants
|
||||
% whenever possible. If a variant is set in a spec, or if it is
|
||||
@@ -990,7 +979,7 @@ variant_not_default(node(ID, Package), Variant, Value)
|
||||
% variants set explicitly on the CLI don't count as non-default
|
||||
not attr("variant_set", node(ID, Package), Variant, Value),
|
||||
% variant values forced by propagation don't count as non-default
|
||||
not propagate(node(ID, Package), variant_value(Variant, Value, _)),
|
||||
not propagate(node(ID, Package), variant_value(Variant, Value)),
|
||||
% variants set on externals that we could use don't count as non-default
|
||||
% this makes spack prefer to use an external over rebuilding with the
|
||||
% default configuration
|
||||
@@ -1002,7 +991,7 @@ variant_default_not_used(node(ID, Package), Variant, Value)
|
||||
:- variant_default_value(node(ID, Package), Variant, Value),
|
||||
node_has_variant(node(ID, Package), Variant, _),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value),
|
||||
not propagate(node(ID, Package), variant_value(Variant, _, _)),
|
||||
not propagate(node(ID, Package), variant_value(Variant, _)),
|
||||
attr("node", node(ID, Package)).
|
||||
|
||||
% The variant is set in an external spec
|
||||
@@ -1047,14 +1036,10 @@ variant_single_value(PackageNode, Variant)
|
||||
% Propagation semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
non_default_propagation(variant_value(Name, Value)) :- attr("propagate", RootNode, variant_value(Name, Value)).
|
||||
|
||||
% Propagation roots have a corresponding attr("propagate", ...)
|
||||
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute), not non_default_propagation(PropagatedAttribute).
|
||||
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
|
||||
propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).
|
||||
|
||||
% Special case variants, to inject the source node in the propagated attribute
|
||||
propagate(RootNode, variant_value(Name, Value, RootNode)) :- attr("propagate", RootNode, variant_value(Name, Value)).
|
||||
|
||||
% Propagate an attribute along edges to child nodes
|
||||
propagate(ChildNode, PropagatedAttribute) :-
|
||||
@@ -1076,53 +1061,21 @@ propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
|
||||
|
||||
% If a variant is propagated, and can be accepted, set its value
|
||||
attr("variant_selected", PackageNode, Variant, Value, VariantType, VariantID) :-
|
||||
propagate(PackageNode, variant_value(Variant, Value, _)),
|
||||
propagate(PackageNode, variant_value(Variant, Value)),
|
||||
node_has_variant(PackageNode, Variant, VariantID),
|
||||
variant_type(VariantID, VariantType),
|
||||
variant_possible_value(PackageNode, Variant, Value).
|
||||
variant_possible_value(PackageNode, Variant, Value),
|
||||
not attr("variant_set", PackageNode, Variant).
|
||||
|
||||
% If a variant is propagated, we cannot have extraneous values
|
||||
variant_is_propagated(PackageNode, Variant) :-
|
||||
attr("variant_value", PackageNode, Variant, Value),
|
||||
propagate(PackageNode, variant_value(Variant, Value, _)),
|
||||
propagate(PackageNode, variant_value(Variant, Value)),
|
||||
not attr("variant_set", PackageNode, Variant).
|
||||
|
||||
:- variant_is_propagated(PackageNode, Variant),
|
||||
attr("variant_selected", PackageNode, Variant, Value, _, _),
|
||||
not propagate(PackageNode, variant_value(Variant, Value, _)).
|
||||
|
||||
error(100, "{0} and {1} cannot both propagate variant '{2}' to the shared dependency: {3}",
|
||||
Package1, Package2, Variant, Dependency) :-
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% Dependency is trying to propagate Variant with different values and is not the source package
|
||||
propagate(node(Z, Dependency), variant_value(Variant, Value1, node(X, Package1))),
|
||||
propagate(node(Z, Dependency), variant_value(Variant, Value2, node(Y, Package2))),
|
||||
% Package1 and Package2 and their values are different
|
||||
Package1 > Package2, Value1 != Value2,
|
||||
not propagate(node(Z, Dependency), variant_value(Variant, _, node(Z, Dependency))).
|
||||
|
||||
% Cannot propagate the same variant from two different packages if one is a dependency of the other
|
||||
error(100, "{0} and {1} cannot both propagate variant '{2}'", Package1, Package2, Variant) :-
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% Package1 and Package2 and their values are different
|
||||
Package1 != Package2, Value1 != Value2,
|
||||
% Package2 is set to propagate the value from Package1
|
||||
propagate(node(Y, Package2), variant_value(Variant, Value2, node(X, Package2))),
|
||||
propagate(node(Y, Package2), variant_value(Variant, Value1, node(X, Package1))),
|
||||
variant_is_propagated(node(Y, Package2), Variant).
|
||||
|
||||
% Cannot propagate a variant if a different value was set for it in a dependency
|
||||
error(100, "Cannot propagate the variant '{0}' from the package: {1} because package: {2} is set to exclude it", Variant, Source, Package) :-
|
||||
% Package has a Variant and Source is propagating Variant
|
||||
attr("variant_set", node(X, Package), Variant, Value1),
|
||||
% The packages and values are different
|
||||
Source != Package, Value1 != Value2,
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% A different value is being propagated from somewhere else
|
||||
propagate(node(X, Package), variant_value(Variant, Value2, node(Y, Source))).
|
||||
not propagate(PackageNode, variant_value(Variant, Value)).
|
||||
|
||||
%----
|
||||
% Flags
|
||||
|
||||
@@ -877,9 +877,8 @@ def constrain(self, other):
|
||||
# Next, if any flags in other propagate, we force them to propagate in our case
|
||||
shared = list(sorted(set(other[flag_type]) - extra_other))
|
||||
for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])):
|
||||
if y.propagate is True and x.propagate is False:
|
||||
changed = True
|
||||
y.propagate = False
|
||||
if x.propagate:
|
||||
y.propagate = True
|
||||
|
||||
# TODO: what happens if flag groups with a partial (but not complete)
|
||||
# intersection specify different behaviors for flag propagation?
|
||||
@@ -934,7 +933,6 @@ def _cmp_iter(self):
|
||||
def flags():
|
||||
for flag in v:
|
||||
yield flag
|
||||
yield flag.propagate
|
||||
|
||||
yield flags
|
||||
|
||||
@@ -965,6 +963,10 @@ def _sort_by_dep_types(dspec: DependencySpec):
|
||||
return dspec.depflag
|
||||
|
||||
|
||||
#: Enum for edge directions
|
||||
EdgeDirection = lang.enum(parent=0, child=1)
|
||||
|
||||
|
||||
@lang.lazy_lexicographic_ordering
|
||||
class _EdgeMap(collections.abc.Mapping):
|
||||
"""Represent a collection of edges (DependencySpec objects) in the DAG.
|
||||
@@ -978,20 +980,26 @@ class _EdgeMap(collections.abc.Mapping):
|
||||
|
||||
__slots__ = "edges", "store_by_child"
|
||||
|
||||
def __init__(self, store_by_child: bool = True) -> None:
|
||||
self.edges: Dict[str, List[DependencySpec]] = {}
|
||||
self.store_by_child = store_by_child
|
||||
def __init__(self, store_by=EdgeDirection.child):
|
||||
# Sanitize input arguments
|
||||
msg = 'unexpected value for "store_by" argument'
|
||||
assert store_by in (EdgeDirection.child, EdgeDirection.parent), msg
|
||||
|
||||
def __getitem__(self, key: str) -> List[DependencySpec]:
|
||||
#: This dictionary maps a package name to a list of edges
|
||||
#: i.e. to a list of DependencySpec objects
|
||||
self.edges = {}
|
||||
self.store_by_child = store_by == EdgeDirection.child
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.edges[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.edges)
|
||||
|
||||
def __len__(self) -> int:
|
||||
def __len__(self):
|
||||
return len(self.edges)
|
||||
|
||||
def add(self, edge: DependencySpec) -> None:
|
||||
def add(self, edge: DependencySpec):
|
||||
key = edge.spec.name if self.store_by_child else edge.parent.name
|
||||
if key in self.edges:
|
||||
lst = self.edges[key]
|
||||
@@ -1000,8 +1008,8 @@ def add(self, edge: DependencySpec) -> None:
|
||||
else:
|
||||
self.edges[key] = [edge]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{{deps: {', '.join(str(d) for d in sorted(self.values()))}}}"
|
||||
def __str__(self):
|
||||
return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
|
||||
|
||||
def _cmp_iter(self):
|
||||
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
|
||||
@@ -1018,32 +1026,24 @@ def copy(self):
|
||||
|
||||
return clone
|
||||
|
||||
def select(
|
||||
self,
|
||||
*,
|
||||
parent: Optional[str] = None,
|
||||
child: Optional[str] = None,
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
virtuals: Optional[List[str]] = None,
|
||||
) -> List[DependencySpec]:
|
||||
"""Selects a list of edges and returns them.
|
||||
def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Select a list of edges and return them.
|
||||
|
||||
If an edge:
|
||||
|
||||
- Has *any* of the dependency types passed as argument,
|
||||
- Matches the parent and/or child name
|
||||
- Provides *any* of the virtuals passed as argument
|
||||
|
||||
- Matches the parent and/or child name, if passed
|
||||
then it is selected.
|
||||
|
||||
The deptypes argument needs to be a flag, since the method won't
|
||||
convert it for performance reasons.
|
||||
|
||||
Args:
|
||||
parent: name of the parent package
|
||||
child: name of the child package
|
||||
parent (str): name of the parent package
|
||||
child (str): name of the child package
|
||||
depflag: allowed dependency types in flag form
|
||||
virtuals: list of virtuals on the edge
|
||||
|
||||
Returns:
|
||||
List of DependencySpec objects
|
||||
"""
|
||||
if not depflag:
|
||||
return []
|
||||
@@ -1062,10 +1062,6 @@ def select(
|
||||
# Filter by allowed dependency types
|
||||
selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
|
||||
|
||||
# Filter by virtuals
|
||||
if virtuals is not None:
|
||||
selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))
|
||||
|
||||
return list(selected)
|
||||
|
||||
def clear(self):
|
||||
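For reference, a hedged sketch of how the keyword-only select()/edges_to_dependencies() signatures on one side of this hunk are meant to be called; the helper and the "mpi" virtual are placeholders, and spack.deptypes is assumed to expose LINK/RUN flags as implied by the dt.ALL references above.

import spack.deptypes as dt


def mpi_providers(spec):
    """Placeholder helper: link/run dependency edges of spec that provide the "mpi" virtual."""
    edges = spec.edges_to_dependencies(depflag=dt.LINK | dt.RUN, virtuals=["mpi"])
    return [edge.spec for edge in edges]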
@@ -1474,8 +1470,8 @@ def __init__(
|
||||
self.architecture = None
|
||||
self.compiler = None
|
||||
self.compiler_flags = FlagMap(self)
|
||||
self._dependents = _EdgeMap(store_by_child=False)
|
||||
self._dependencies = _EdgeMap(store_by_child=True)
|
||||
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self.namespace = None
|
||||
|
||||
# initial values for all spec hash types
|
||||
@@ -1595,7 +1591,7 @@ def _get_dependency(self, name):
|
||||
return deps[0]
|
||||
|
||||
def edges_from_dependents(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
) -> List[DependencySpec]:
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to parents.
|
||||
@@ -1603,25 +1599,20 @@ def edges_from_dependents(
|
||||
Args:
|
||||
name (str): filter dependents by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
return [
|
||||
d for d in self._dependents.select(parent=name, depflag=depflag, virtuals=virtuals)
|
||||
]
|
||||
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
|
||||
|
||||
def edges_to_dependencies(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
) -> List[DependencySpec]:
|
||||
"""Returns a list of edges connecting this node in the DAG to children.
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to children.
|
||||
|
||||
Args:
|
||||
name (str): filter dependencies by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
return [
|
||||
d for d in self._dependencies.select(child=name, depflag=depflag, virtuals=virtuals)
|
||||
]
|
||||
return [d for d in self._dependencies.select(child=name, depflag=depflag)]
|
||||
|
||||
@property
|
||||
def edge_attributes(self) -> str:
|
||||
@@ -1644,24 +1635,17 @@ def edge_attributes(self) -> str:
|
||||
return f"[{result}]"
|
||||
|
||||
def dependencies(
|
||||
self,
|
||||
name=None,
|
||||
deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
|
||||
*,
|
||||
virtuals: Optional[List[str]] = None,
|
||||
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
|
||||
) -> List["Spec"]:
|
||||
"""Returns a list of direct dependencies (nodes in the DAG)
|
||||
"""Return a list of direct dependencies (nodes in the DAG).
|
||||
|
||||
Args:
|
||||
name: filter dependencies by package name
|
||||
name (str): filter dependencies by package name
|
||||
deptype: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
return [
|
||||
d.spec for d in self.edges_to_dependencies(name, depflag=deptype, virtuals=virtuals)
|
||||
]
|
||||
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
|
||||
|
||||
def dependents(
|
||||
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
|
||||
@@ -2201,18 +2185,6 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if params:
|
||||
d["parameters"] = params
|
||||
|
||||
if params and not self.concrete:
|
||||
flag_names = [
|
||||
name
|
||||
for name, flags in self.compiler_flags.items()
|
||||
if any(x.propagate for x in flags)
|
||||
]
|
||||
d["propagate"] = sorted(
|
||||
itertools.chain(
|
||||
[v.name for v in self.variants.values() if v.propagate], flag_names
|
||||
)
|
||||
)
|
||||
|
||||
if self.external:
|
||||
d["external"] = syaml.syaml_dict(
|
||||
[
|
||||
@@ -2385,10 +2357,16 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
spec is concrete, the full hash is added as well. If 'build' is in
|
||||
the hash_type, the build hash is also added."""
|
||||
node = self.to_node_dict(hash)
|
||||
# All specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
if not self.concrete:
|
||||
# dag_hash is lazily computed -- but if we write a spec out, we want it
|
||||
# to be included. This is effectively the last chance we get to compute
|
||||
# it accurately.
|
||||
if self.concrete:
|
||||
# all specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
else:
|
||||
node["concrete"] = False
|
||||
|
||||
# we can also give them other hash types if we want
|
||||
@@ -3020,12 +2998,7 @@ def ensure_valid_variants(spec):
|
||||
pkg_variants = pkg_cls.variant_names()
|
||||
# reserved names are variants that may be set on any package
|
||||
# but are not necessarily recorded by the package's class
|
||||
propagate_variants = [name for name, variant in spec.variants.items() if variant.propagate]
|
||||
|
||||
not_existing = set(spec.variants) - (
|
||||
set(pkg_variants) | set(vt.reserved_names) | set(propagate_variants)
|
||||
)
|
||||
|
||||
not_existing = set(spec.variants) - (set(pkg_variants) | set(vt.reserved_names))
|
||||
if not_existing:
|
||||
raise vt.UnknownVariantError(
|
||||
f"No such variant {not_existing} for spec: '{spec}'", list(not_existing)
|
||||
@@ -3052,10 +3025,6 @@ def constrain(self, other, deps=True):
|
||||
raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")
|
||||
|
||||
other = self._autospec(other)
|
||||
if other.concrete and other.satisfies(self):
|
||||
self._dup(other)
|
||||
return True
|
||||
|
||||
if other.abstract_hash:
|
||||
if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash):
|
||||
self.abstract_hash = other.abstract_hash
|
||||
@@ -3550,8 +3519,8 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
|
||||
self.architecture = other.architecture.copy() if other.architecture else None
|
||||
self.compiler = other.compiler.copy() if other.compiler else None
|
||||
if cleardeps:
|
||||
self._dependents = _EdgeMap(store_by_child=False)
|
||||
self._dependencies = _EdgeMap(store_by_child=True)
|
||||
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self.compiler_flags = other.compiler_flags.copy()
|
||||
self.compiler_flags.spec = self
|
||||
self.variants = other.variants.copy()
|
||||
@@ -4059,7 +4028,7 @@ def format_path(
|
||||
|
||||
def __str__(self):
|
||||
if self._concrete:
|
||||
return self.format("{name}{@version}{/hash}")
|
||||
return self.format("{name}{@version}{/hash:7}")
|
||||
|
||||
if not self._dependencies:
|
||||
return self.format()
|
||||
@@ -4534,69 +4503,8 @@ def substitute(self, vspec):
|
||||
# Set the item
|
||||
super().__setitem__(vspec.name, vspec)
|
||||
|
||||
def partition_variants(self):
|
||||
non_prop, prop = lang.stable_partition(self.values(), lambda x: not x.propagate)
|
||||
# Just return the names
|
||||
non_prop = [x.name for x in non_prop]
|
||||
prop = [x.name for x in prop]
|
||||
return non_prop, prop
|
||||
|
||||
def satisfies(self, other: "VariantMap") -> bool:
|
||||
if self.spec.concrete:
|
||||
return self._satisfies_when_self_concrete(other)
|
||||
return self._satisfies_when_self_abstract(other)
|
||||
|
||||
def _satisfies_when_self_concrete(self, other: "VariantMap") -> bool:
|
||||
non_propagating, propagating = other.partition_variants()
|
||||
result = all(
|
||||
name in self and self[name].satisfies(other[name]) for name in non_propagating
|
||||
)
|
||||
if not propagating:
|
||||
return result
|
||||
|
||||
for node in self.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(other[name])
|
||||
for name in propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
return result
|
||||
|
||||
def _satisfies_when_self_abstract(self, other: "VariantMap") -> bool:
|
||||
other_non_propagating, other_propagating = other.partition_variants()
|
||||
self_non_propagating, self_propagating = self.partition_variants()
|
||||
|
||||
# First check variants without propagation set
|
||||
result = all(
|
||||
name in self_non_propagating
|
||||
and (self[name].propagate or self[name].satisfies(other[name]))
|
||||
for name in other_non_propagating
|
||||
)
|
||||
if result is False or (not other_propagating and not self_propagating):
|
||||
return result
|
||||
|
||||
# Check that self doesn't contradict variants propagated by other
|
||||
if other_propagating:
|
||||
for node in self.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(other[name])
|
||||
for name in other_propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
|
||||
# Check that other doesn't contradict variants propagated by self
|
||||
if self_propagating:
|
||||
for node in other.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(self[name])
|
||||
for name in self_propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
|
||||
return result
|
||||
def satisfies(self, other):
|
||||
return all(k in self and self[k].satisfies(other[k]) for k in other)
|
||||
|
||||
def intersects(self, other):
|
||||
return all(self[k].intersects(other[k]) for k in other if k in self)
|
||||
@@ -4809,17 +4717,13 @@ def from_node_dict(cls, node):
|
||||
else:
|
||||
spec.compiler = None
|
||||
|
||||
propagated_names = node.get("propagate", [])
|
||||
for name, values in node.get("parameters", {}).items():
|
||||
propagate = name in propagated_names
|
||||
if name in _valid_compiler_flags:
|
||||
spec.compiler_flags[name] = []
|
||||
for val in values:
|
||||
spec.compiler_flags.add_flag(name, val, propagate)
|
||||
spec.compiler_flags.add_flag(name, val, False)
|
||||
else:
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
|
||||
name, values, propagate=propagate
|
||||
)
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
|
||||
|
||||
spec.external_path = None
|
||||
spec.external_modules = None
|
||||
|
||||
@@ -33,12 +33,16 @@
|
||||
import spack.error
|
||||
import spack.paths
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.path
|
||||
|
||||
#: default installation root, relative to the Spack install path
|
||||
DEFAULT_INSTALL_TREE_ROOT = os.path.join(spack.paths.opt_path, "spack")
|
||||
|
||||
|
||||
ConfigurationType = Union["spack.config.Configuration", "llnl.util.lang.Singleton"]
|
||||
|
||||
|
||||
def parse_install_tree(config_dict):
|
||||
"""Parse config settings and return values relevant to the store object.
|
||||
|
||||
@@ -204,7 +208,7 @@ def __reduce__(self):
|
||||
)
|
||||
|
||||
|
||||
def create(configuration: spack.config.Configuration) -> Store:
|
||||
def create(configuration: ConfigurationType) -> Store:
|
||||
"""Create a store from the configuration passed as input.
|
||||
|
||||
Args:
|
||||
@@ -237,7 +241,7 @@ def _create_global() -> Store:
|
||||
|
||||
|
||||
#: Singleton store instance
|
||||
STORE: Store = llnl.util.lang.Singleton(_create_global) # type: ignore
|
||||
STORE: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
|
||||
|
||||
|
||||
def reinitialize():
|
||||
@@ -304,7 +308,7 @@ def find(
|
||||
|
||||
matching_specs: List[spack.spec.Spec] = []
|
||||
errors = []
|
||||
query_fn = query_fn or STORE.db.query
|
||||
query_fn = query_fn or spack.store.STORE.db.query
|
||||
for spec in constraints:
|
||||
current_matches = query_fn(spec, **kwargs)
|
||||
|
||||
@@ -337,7 +341,7 @@ def specfile_matches(filename: str, **kwargs) -> List["spack.spec.Spec"]:
|
||||
**kwargs: keyword arguments forwarded to "find"
|
||||
"""
|
||||
query = [spack.spec.Spec.from_specfile(filename)]
|
||||
return find(query, **kwargs)
|
||||
return spack.store.find(query, **kwargs)
|
||||
|
||||
|
||||
def ensure_singleton_created() -> None:
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
import multiprocessing
|
||||
import pickle
|
||||
import pydoc
|
||||
import sys
|
||||
from types import ModuleType
|
||||
|
||||
import spack.config
|
||||
@@ -26,6 +27,9 @@
|
||||
import spack.repo
|
||||
import spack.store
|
||||
|
||||
_SERIALIZE = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
|
||||
|
||||
|
||||
patches = None
|
||||
|
||||
|
||||
@@ -52,7 +56,7 @@ def _restore_and_run(self, fn, test_state):
|
||||
fn()
|
||||
|
||||
def create(self):
|
||||
test_state = GlobalStateMarshaler()
|
||||
test_state = TestState()
|
||||
return multiprocessing.Process(target=self._restore_and_run, args=(self.fn, test_state))
|
||||
|
||||
|
||||
@@ -61,56 +65,49 @@ class PackageInstallContext:
|
||||
needs to be transmitted to a child process.
|
||||
"""
|
||||
|
||||
def __init__(self, pkg, *, ctx=None):
|
||||
ctx = ctx or multiprocessing.get_context()
|
||||
self.serialize = ctx.get_start_method() != "fork"
|
||||
if self.serialize:
|
||||
def __init__(self, pkg):
|
||||
if _SERIALIZE:
|
||||
self.serialized_pkg = serialize(pkg)
|
||||
self.global_state = GlobalStateMarshaler()
|
||||
self.serialized_env = serialize(spack.environment.active_environment())
|
||||
else:
|
||||
self.pkg = pkg
|
||||
self.global_state = None
|
||||
self.env = spack.environment.active_environment()
|
||||
self.spack_working_dir = spack.paths.spack_working_dir
|
||||
self.test_state = TestState()
|
||||
|
||||
def restore(self):
|
||||
self.test_state.restore()
|
||||
spack.paths.spack_working_dir = self.spack_working_dir
|
||||
env = pickle.load(self.serialized_env) if self.serialize else self.env
|
||||
# Activating the environment modifies the global configuration, so globals have to
|
||||
# be restored afterward, in case other modifications were applied on top (e.g. from
|
||||
# command line)
|
||||
env = pickle.load(self.serialized_env) if _SERIALIZE else self.env
|
||||
if env:
|
||||
spack.environment.activate(env)
|
||||
|
||||
if self.serialize:
|
||||
self.global_state.restore()
|
||||
|
||||
# Order of operation is important, since the package might be retrieved
|
||||
# from a repo defined within the environment configuration
|
||||
pkg = pickle.load(self.serialized_pkg) if self.serialize else self.pkg
|
||||
pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
|
||||
return pkg
|
||||
|
||||
|
||||
class GlobalStateMarshaler:
|
||||
"""Class to serialize and restore global state for child processes.
|
||||
|
||||
Spack may modify state that is normally read from disk or command line in memory;
|
||||
this object is responsible for properly serializing that state to be applied to a subprocess.
|
||||
class TestState:
|
||||
"""Spack tests may modify state that is normally read from disk in memory;
|
||||
this object is responsible for properly serializing that state to be
|
||||
applied to a subprocess. This isn't needed outside of a testing environment
|
||||
but this logic is designed to behave the same inside or outside of tests.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.config = spack.config.CONFIG.ensure_unwrapped()
|
||||
self.platform = spack.platforms.host
|
||||
self.test_patches = store_patches()
|
||||
self.store = spack.store.STORE
|
||||
if _SERIALIZE:
|
||||
self.config = spack.config.CONFIG
|
||||
self.platform = spack.platforms.host
|
||||
self.test_patches = store_patches()
|
||||
self.store = spack.store.STORE
|
||||
|
||||
def restore(self):
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
|
||||
if _SERIALIZE:
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
|
||||
|
||||
|
||||
class TestPatches:
|
||||
|
||||
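As a side note, a minimal standard-library sketch of the start-method check that one side of this hunk uses in place of the _SERIALIZE module constant; nothing here is specific to Spack.

import multiprocessing

# "fork" children inherit the parent's memory, so no pickling is needed;
# "spawn" and "forkserver" children start fresh and need explicit state transfer.
ctx = multiprocessing.get_context()
needs_serialization = ctx.get_start_method() != "fork"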
@@ -199,7 +199,7 @@ def check_args(cc, args, expected):
|
||||
"""
|
||||
with set_env(SPACK_TEST_COMMAND="dump-args"):
|
||||
cc_modified_args = cc(*args, output=str).strip().split("\n")
|
||||
assert cc_modified_args == expected
|
||||
assert expected == cc_modified_args
|
||||
|
||||
|
||||
def check_args_contents(cc, args, must_contain, must_not_contain):
|
||||
@@ -272,43 +272,6 @@ def test_ld_mode(wrapper_environment):
|
||||
assert dump_mode(ld, ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath,foo"]) == "ld"
|
||||
|
||||
|
||||
def test_ld_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
ld,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"],
|
||||
["ld", "--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"],
|
||||
)
|
||||
|
||||
|
||||
def test_xlinker_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
cc,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-Xlinker", "-rpath"],
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ [
|
||||
"-Wl,--disable-new-dtags",
|
||||
"foo.o",
|
||||
"bar.o",
|
||||
"baz.o",
|
||||
"-o",
|
||||
"foo",
|
||||
"-Xlinker",
|
||||
"-rpath",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def test_wl_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
cc,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"],
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ ["-Wl,--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"],
|
||||
)
|
||||
|
||||
|
||||
def test_ld_flags(wrapper_environment, wrapper_flags):
|
||||
check_args(
|
||||
ld,
|
||||
|
||||
@@ -170,7 +170,7 @@ def test_remove_and_add_a_source(mutable_config):
|
||||
assert not sources
|
||||
|
||||
# Add it back and check we restored the initial state
|
||||
_bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.6")
|
||||
_bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.5")
|
||||
sources = spack.bootstrap.core.bootstrapping_sources()
|
||||
assert len(sources) == 1
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import filecmp
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
@@ -26,6 +27,7 @@
|
||||
import spack.util.spack_yaml as syaml
|
||||
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
|
||||
from spack.schema.buildcache_spec import schema as specfile_schema
|
||||
from spack.schema.ci import schema as ci_schema
|
||||
from spack.schema.database_index import schema as db_idx_schema
|
||||
from spack.spec import Spec
|
||||
|
||||
@@ -195,7 +197,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
|
||||
- matrix:
|
||||
- [$old-gcc-pkgs]
|
||||
mirrors:
|
||||
buildcache-destination: {mirror_url}
|
||||
some-mirror: {mirror_url}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
@@ -237,9 +239,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
|
||||
|
||||
assert "rebuild-index" in yaml_contents
|
||||
rebuild_job = yaml_contents["rebuild-index"]
|
||||
assert (
|
||||
rebuild_job["script"][0] == f"spack buildcache update-index --keys {mirror_url.as_uri()}"
|
||||
)
|
||||
assert rebuild_job["script"][0] == f"spack buildcache update-index --keys {mirror_url}"
|
||||
assert rebuild_job["custom_attribute"] == "custom!"
|
||||
|
||||
assert "variables" in yaml_contents
|
||||
@@ -249,28 +249,31 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
|
||||
|
||||
def test_ci_generate_with_env_missing_section(ci_generate_test, tmp_path, mock_binary_index):
|
||||
"""Make sure we get a reasonable message if we omit gitlab-ci section"""
|
||||
env_yaml = f"""\
|
||||
_, _, output = ci_generate_test(
|
||||
f"""\
|
||||
spack:
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / 'ci-mirror'}
|
||||
"""
|
||||
expect = "Environment does not have a `ci` configuration"
|
||||
with pytest.raises(ci.SpackCIError, match=expect):
|
||||
ci_generate_test(env_yaml)
|
||||
some-mirror: {tmp_path / 'ci-mirror'}
|
||||
""",
|
||||
fail_on_error=False,
|
||||
)
|
||||
assert "Environment does not have `ci` a configuration" in output
|
||||
|
||||
|
||||
def test_ci_generate_with_cdash_token(ci_generate_test, tmp_path, mock_binary_index, monkeypatch):
|
||||
"""Make sure we it doesn't break if we configure cdash"""
|
||||
monkeypatch.setenv("SPACK_CDASH_AUTH_TOKEN", "notreallyatokenbutshouldnotmatter")
|
||||
backup_file = tmp_path / "backup-ci.yml"
|
||||
spack_yaml_content = f"""\
|
||||
spack:
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
some-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
enable-artifacts-buildcache: True
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -285,15 +288,16 @@ def test_ci_generate_with_cdash_token(ci_generate_test, tmp_path, mock_binary_in
|
||||
project: Not used
|
||||
site: Nothing
|
||||
"""
|
||||
spack_yaml, original_file, output = ci_generate_test(spack_yaml_content)
|
||||
yaml_contents = syaml.load(original_file.read_text())
|
||||
spack_yaml, original_file, output = ci_generate_test(
|
||||
spack_yaml_content, "--copy-to", str(backup_file)
|
||||
)
|
||||
|
||||
# That fake token should have resulted in being unable to
|
||||
# That fake token should still have resulted in being unable to
|
||||
# register build group with cdash, but the workload should
|
||||
# still have been generated.
|
||||
assert "Problem populating buildgroup" in output
|
||||
expected_keys = ["rebuild-index", "stages", "variables", "workflow"]
|
||||
assert all([key in yaml_contents.keys() for key in expected_keys])
|
||||
assert backup_file.exists()
|
||||
assert filecmp.cmp(str(original_file), str(backup_file))
|
||||
|
||||
|
||||
def test_ci_generate_with_custom_settings(
|
||||
@@ -308,7 +312,7 @@ def test_ci_generate_with_custom_settings(
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
some-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
@@ -383,8 +387,9 @@ def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environme
|
||||
specs:
|
||||
- flatten-deps
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / 'ci-mirror'}
|
||||
some-mirror: {tmp_path / 'ci-mirror'}
|
||||
ci:
|
||||
enable-artifacts-buildcache: True
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -417,8 +422,13 @@ def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environme
|
||||
|
||||
|
||||
def test_ci_generate_for_pr_pipeline(ci_generate_test, tmp_path, monkeypatch):
|
||||
"""Test generation of a PR pipeline with disabled rebuild-index"""
|
||||
"""Test that PR pipelines do not include a final stage job for
|
||||
rebuilding the mirror index, even if that job is specifically
|
||||
configured.
|
||||
"""
|
||||
monkeypatch.setenv("SPACK_PIPELINE_TYPE", "spack_pull_request")
|
||||
monkeypatch.setenv("SPACK_PR_BRANCH", "fake-test-branch")
|
||||
monkeypatch.setattr(spack.ci, "SHARED_PR_MIRROR_URL", f"{tmp_path / 'shared-pr-mirror'}")
|
||||
|
||||
spack_yaml, outputfile, _ = ci_generate_test(
|
||||
f"""\
|
||||
@@ -426,8 +436,9 @@ def test_ci_generate_for_pr_pipeline(ci_generate_test, tmp_path, monkeypatch):
|
||||
specs:
|
||||
- flatten-deps
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / 'ci-mirror'}
|
||||
some-mirror: {tmp_path / 'ci-mirror'}
|
||||
ci:
|
||||
enable-artifacts-buildcache: True
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -463,7 +474,7 @@ def test_ci_generate_with_external_pkg(ci_generate_test, tmp_path, monkeypatch):
|
||||
- archive-files
|
||||
- externaltest
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
some-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
@@ -529,6 +540,7 @@ def create_rebuild_env(
|
||||
broken_specs_path = scratch / "naughty-list"
|
||||
|
||||
mirror_url = mirror_dir.as_uri()
|
||||
temp_storage_url = (tmp_path / "temp-storage").as_uri()
|
||||
|
||||
ci_job_url = "https://some.domain/group/project/-/jobs/42"
|
||||
ci_pipeline_url = "https://some.domain/group/project/-/pipelines/7"
|
||||
@@ -543,10 +555,11 @@ def create_rebuild_env(
|
||||
specs:
|
||||
- $packages
|
||||
mirrors:
|
||||
buildcache-destination: {mirror_dir}
|
||||
test-mirror: {mirror_dir}
|
||||
ci:
|
||||
broken-specs-url: {broken_specs_path.as_uri()}
|
||||
broken-tests-packages: {json.dumps([pkg_name] if broken_tests else [])}
|
||||
temporary-storage-url-prefix: {temp_storage_url}
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -698,7 +711,7 @@ def test_ci_require_signing(
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
test-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
@@ -746,8 +759,9 @@ def test_ci_nothing_to_rebuild(
|
||||
specs:
|
||||
- $packages
|
||||
mirrors:
|
||||
buildcache-destination: {mirror_url}
|
||||
test-mirror: {mirror_url}
|
||||
ci:
|
||||
enable-artifacts-buildcache: true
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -774,20 +788,103 @@ def test_ci_nothing_to_rebuild(
|
||||
"SPACK_JOB_LOG_DIR": "log_dir",
|
||||
"SPACK_JOB_REPRO_DIR": "repro_dir",
|
||||
"SPACK_JOB_TEST_DIR": "test_dir",
|
||||
"SPACK_LOCAL_MIRROR_DIR": str(mirror_dir),
|
||||
"SPACK_CONCRETE_ENV_DIR": str(tmp_path),
|
||||
"SPACK_JOB_SPEC_DAG_HASH": env.concrete_roots()[0].dag_hash(),
|
||||
"SPACK_JOB_SPEC_PKG_NAME": "archive-files",
|
||||
"SPACK_COMPILER_ACTION": "NONE",
|
||||
"SPACK_REMOTE_MIRROR_URL": mirror_url,
|
||||
}
|
||||
)
|
||||
|
||||
def fake_dl_method(spec, *args, **kwargs):
|
||||
print("fake download buildcache {0}".format(spec.name))
|
||||
|
||||
monkeypatch.setattr(spack.binary_distribution, "download_single_spec", fake_dl_method)
|
||||
|
||||
ci_out = ci_cmd("rebuild", output=str)
|
||||
|
||||
assert "No need to rebuild archive-files" in ci_out
|
||||
assert "fake download buildcache archive-files" in ci_out
|
||||
|
||||
env_cmd("deactivate")
|
||||
|
||||
|
||||
def test_ci_generate_mirror_override(
|
||||
tmp_path: pathlib.Path,
|
||||
mutable_mock_env_path,
|
||||
install_mockery,
|
||||
mock_fetch,
|
||||
mock_binary_index,
|
||||
ci_base_environment,
|
||||
):
|
||||
"""Ensure that protected pipelines using --buildcache-destination do not
|
||||
skip building specs that are not in the override mirror when they are
|
||||
found in the main mirror."""
|
||||
os.environ.update({"SPACK_PIPELINE_TYPE": "spack_protected_branch"})
|
||||
mirror_url = (tmp_path / "mirror").as_uri()
|
||||
|
||||
with open(tmp_path / "spack.yaml", "w") as f:
|
||||
f.write(
|
||||
f"""
|
||||
spack:
|
||||
definitions:
|
||||
- packages: [patchelf]
|
||||
specs:
|
||||
- $packages
|
||||
mirrors:
|
||||
test-mirror: {mirror_url}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
- patchelf
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
image: donotcare
|
||||
- cleanup-job:
|
||||
tags:
|
||||
- nonbuildtag
|
||||
image: basicimage
|
||||
"""
|
||||
)
|
||||
|
||||
with working_dir(tmp_path):
|
||||
env_cmd("create", "test", "./spack.yaml")
|
||||
first_ci_yaml = str(tmp_path / ".gitlab-ci-1.yml")
|
||||
second_ci_yaml = str(tmp_path / ".gitlab-ci-2.yml")
|
||||
with ev.read("test"):
|
||||
install_cmd()
|
||||
buildcache_cmd("push", "-u", mirror_url, "patchelf")
|
||||
buildcache_cmd("update-index", mirror_url, output=str)
|
||||
|
||||
# This generate should not trigger a rebuild of patchelf, since it's in
|
||||
# the main mirror referenced in the environment.
|
||||
ci_cmd("generate", "--check-index-only", "--output-file", first_ci_yaml)
|
||||
|
||||
# Because we used a mirror override (--buildcache-destination) on a
|
||||
# spack protected pipeline, we expect to only look in the override
|
||||
# mirror for the spec, and thus the patchelf job should be generated in
|
||||
# this pipeline
|
||||
ci_cmd(
|
||||
"generate",
|
||||
"--check-index-only",
|
||||
"--output-file",
|
||||
second_ci_yaml,
|
||||
"--buildcache-destination",
|
||||
(tmp_path / "does-not-exist").as_uri(),
|
||||
)
|
||||
|
||||
with open(first_ci_yaml) as fd1:
|
||||
first_yaml = fd1.read()
|
||||
assert "no-specs-to-rebuild" in first_yaml
|
||||
|
||||
with open(second_ci_yaml) as fd2:
|
||||
second_yaml = fd2.read()
|
||||
assert "no-specs-to-rebuild" not in second_yaml
|
||||
|
||||
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
def test_push_to_build_cache(
|
||||
tmp_path: pathlib.Path,
|
||||
@@ -814,8 +911,9 @@ def test_push_to_build_cache(
|
||||
specs:
|
||||
- $packages
|
||||
mirrors:
|
||||
buildcache-destination: {mirror_url}
|
||||
test-mirror: {mirror_url}
|
||||
ci:
|
||||
enable-artifacts-buildcache: True
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -951,7 +1049,7 @@ def test_ci_generate_override_runner_attrs(
|
||||
- flatten-deps
|
||||
- pkg-a
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
some-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- match_behavior: {match_behavior}
|
||||
@@ -1091,7 +1189,7 @@ def test_ci_rebuild_index(
|
||||
specs:
|
||||
- callpath
|
||||
mirrors:
|
||||
buildcache-destination: {mirror_url}
|
||||
test-mirror: {mirror_url}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
@@ -1147,7 +1245,7 @@ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
|
||||
- archive-files
|
||||
- callpath
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / 'ci-mirror'}
|
||||
some-mirror: {tmp_path / 'ci-mirror'}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
@@ -1210,15 +1308,101 @@ def test_ci_subcommands_without_mirror(
|
||||
|
||||
with ev.read("test"):
|
||||
# Check the 'generate' subcommand
|
||||
expect = "spack ci generate requires a mirror named 'buildcache-destination'"
|
||||
with pytest.raises(ci.SpackCIError, match=expect):
|
||||
ci_cmd("generate", "--output-file", str(tmp_path / ".gitlab-ci.yml"))
|
||||
output = ci_cmd(
|
||||
"generate",
|
||||
"--output-file",
|
||||
str(tmp_path / ".gitlab-ci.yml"),
|
||||
output=str,
|
||||
fail_on_error=False,
|
||||
)
|
||||
assert "spack ci generate requires an env containing a mirror" in output
|
||||
|
||||
# Also check the 'rebuild-index' subcommand
|
||||
output = ci_cmd("rebuild-index", output=str, fail_on_error=False)
|
||||
assert "spack ci rebuild-index requires an env containing a mirror" in output
|
||||
|
||||
|
||||
def test_ensure_only_one_temporary_storage():
    """Make sure 'gitlab-ci' section of env does not allow specification of
    both 'enable-artifacts-buildcache' and 'temporary-storage-url-prefix'."""
    gitlab_ci_template = """
  ci:
    {0}
    pipeline-gen:
    - submapping:
      - match:
        - notcheckedhere
        build-job:
          tags:
          - donotcare
    """

    enable_artifacts = "enable-artifacts-buildcache: True"
    temp_storage = "temporary-storage-url-prefix: file:///temp/mirror"
    specify_both = f"{enable_artifacts}\n  {temp_storage}"

    specify_neither = ""

    # User can specify "enable-artifacts-buildcache" (boolean)
    yaml_obj = syaml.load(gitlab_ci_template.format(enable_artifacts))
    jsonschema.validate(yaml_obj, ci_schema)

    # User can also specify "temporary-storage-url-prefix" (string)
    yaml_obj = syaml.load(gitlab_ci_template.format(temp_storage))
    jsonschema.validate(yaml_obj, ci_schema)

    # However, specifying both should fail to validate
    yaml_obj = syaml.load(gitlab_ci_template.format(specify_both))
    with pytest.raises(jsonschema.ValidationError):
        jsonschema.validate(yaml_obj, ci_schema)

    # Specifying neither should be fine too, as neither of these properties
    # should be required
    yaml_obj = syaml.load(gitlab_ci_template.format(specify_neither))
    jsonschema.validate(yaml_obj, ci_schema)


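For context, the mutual exclusion this test checks is the kind of constraint JSON Schema expresses with a not/required combination. The snippet below is a minimal, self-contained sketch of that pattern; toy_ci_schema is an illustrative stand-in, not Spack's actual ci schema.

import jsonschema

toy_ci_schema = {
    "type": "object",
    "properties": {
        "ci": {
            "type": "object",
            # Reject any document that defines both storage-related keys at once.
            "not": {"required": ["enable-artifacts-buildcache", "temporary-storage-url-prefix"]},
            "properties": {
                "enable-artifacts-buildcache": {"type": "boolean"},
                "temporary-storage-url-prefix": {"type": "string"},
            },
        }
    },
}

# Either key alone validates, and so does a document with neither key.
jsonschema.validate({"ci": {"enable-artifacts-buildcache": True}}, toy_ci_schema)
jsonschema.validate({"ci": {"temporary-storage-url-prefix": "file:///tmp/mirror"}}, toy_ci_schema)

# Both together are rejected.
both = {"ci": {"enable-artifacts-buildcache": True, "temporary-storage-url-prefix": "file:///tmp/mirror"}}
try:
    jsonschema.validate(both, toy_ci_schema)
except jsonschema.ValidationError as err:
    print("rejected as expected:", err.message)
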
def test_ci_generate_temp_storage_url(ci_generate_test, tmp_path, mock_binary_index):
|
||||
"""Verify correct behavior when using temporary-storage-url-prefix"""
|
||||
_, outputfile, _ = ci_generate_test(
|
||||
f"""\
|
||||
spack:
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
some-mirror: {(tmp_path / "ci-mirror").as_uri()}
|
||||
ci:
|
||||
temporary-storage-url-prefix: {(tmp_path / "temp-mirror").as_uri()}
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
- archive-files
|
||||
build-job:
|
||||
tags:
|
||||
- donotcare
|
||||
image: donotcare
|
||||
- cleanup-job:
|
||||
custom_attribute: custom!
|
||||
"""
|
||||
)
|
||||
yaml_contents = syaml.load(outputfile.read_text())
|
||||
|
||||
assert "cleanup" in yaml_contents
|
||||
|
||||
cleanup_job = yaml_contents["cleanup"]
|
||||
assert cleanup_job["custom_attribute"] == "custom!"
|
||||
assert "script" in cleanup_job
|
||||
|
||||
cleanup_task = cleanup_job["script"][0]
|
||||
assert cleanup_task.startswith("spack -d mirror destroy")
|
||||
|
||||
assert "stages" in yaml_contents
|
||||
stages = yaml_contents["stages"]
|
||||
# Cleanup job should be 2nd to last, just before rebuild-index
|
||||
assert "stage" in cleanup_job
|
||||
assert cleanup_job["stage"] == stages[-2]
|
||||
|
||||
|
||||
def test_ci_generate_read_broken_specs_url(
|
||||
tmp_path: pathlib.Path,
|
||||
mutable_mock_env_path,
|
||||
@@ -1255,7 +1439,7 @@ def test_ci_generate_read_broken_specs_url(
|
||||
- flatten-deps
|
||||
- pkg-a
|
||||
mirrors:
|
||||
buildcache-destination: {(tmp_path / "ci-mirror").as_uri()}
|
||||
some-mirror: {(tmp_path / "ci-mirror").as_uri()}
|
||||
ci:
|
||||
broken-specs-url: "{broken_specs_url}"
|
||||
pipeline-gen:
|
||||
@@ -1300,8 +1484,9 @@ def test_ci_generate_external_signing_job(ci_generate_test, tmp_path, monkeypatc
|
||||
specs:
|
||||
- archive-files
|
||||
mirrors:
|
||||
buildcache-destination: {(tmp_path / "ci-mirror").as_uri()}
|
||||
some-mirror: {(tmp_path / "ci-mirror").as_uri()}
|
||||
ci:
|
||||
temporary-storage-url-prefix: {(tmp_path / "temp-mirror").as_uri()}
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
- match:
|
||||
@@ -1356,7 +1541,7 @@ def test_ci_reproduce(
|
||||
specs:
|
||||
- $packages
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
test-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- submapping:
|
||||
@@ -1487,6 +1672,106 @@ def test_cmd_first_line():
|
||||
assert spack.cmd.first_line(doc) == first
|
||||
|
||||
|
||||
legacy_spack_yaml_contents = """
|
||||
spack:
|
||||
definitions:
|
||||
- old-gcc-pkgs:
|
||||
- archive-files
|
||||
- callpath
|
||||
# specify ^openblas-with-lapack to ensure that builtin.mock repo flake8
|
||||
# package (which can also provide lapack) is not chosen, as it violates
|
||||
# a package-level check which requires exactly one fetch strategy (this
|
||||
# is apparently not an issue for other tests that use it).
|
||||
- hypre@0.2.15 ^openblas-with-lapack
|
||||
specs:
|
||||
- matrix:
|
||||
- [$old-gcc-pkgs]
|
||||
mirrors:
|
||||
test-mirror: {mirror_url}
|
||||
{key}:
|
||||
match_behavior: first
|
||||
mappings:
|
||||
- match:
|
||||
- arch=test-debian6-core2
|
||||
runner-attributes:
|
||||
tags:
|
||||
- donotcare
|
||||
image: donotcare
|
||||
- match:
|
||||
- arch=test-debian6-m1
|
||||
runner-attributes:
|
||||
tags:
|
||||
- donotcare
|
||||
image: donotcare
|
||||
service-job-attributes:
|
||||
image: donotcare
|
||||
tags: [donotcare]
|
||||
cdash:
|
||||
build-group: Not important
|
||||
url: https://my.fake.cdash
|
||||
project: Not used
|
||||
site: Nothing
|
||||
"""
|
||||
|
||||
|
||||
@pytest.mark.regression("36409")
|
||||
def test_gitlab_ci_deprecated(
|
||||
tmp_path: pathlib.Path,
|
||||
mutable_mock_env_path,
|
||||
install_mockery,
|
||||
monkeypatch,
|
||||
ci_base_environment,
|
||||
mock_binary_index,
|
||||
):
|
||||
mirror_url = (tmp_path / "ci-mirror").as_uri()
|
||||
with open(tmp_path / "spack.yaml", "w") as f:
|
||||
f.write(legacy_spack_yaml_contents.format(mirror_url=mirror_url, key="gitlab-ci"))
|
||||
|
||||
with working_dir(tmp_path):
|
||||
with ev.Environment("."):
|
||||
ci_cmd("generate", "--output-file", "generated-pipeline.yaml")
|
||||
|
||||
with open("generated-pipeline.yaml") as f:
|
||||
yaml_contents = syaml.load(f)
|
||||
|
||||
assert "stages" in yaml_contents
|
||||
assert len(yaml_contents["stages"]) == 5
|
||||
assert yaml_contents["stages"][0] == "stage-0"
|
||||
assert yaml_contents["stages"][4] == "stage-rebuild-index"
|
||||
|
||||
assert "rebuild-index" in yaml_contents
|
||||
rebuild_job = yaml_contents["rebuild-index"]
|
||||
expected = f"spack buildcache update-index --keys {mirror_url}"
|
||||
assert rebuild_job["script"][0] == expected
|
||||
|
||||
assert "variables" in yaml_contents
|
||||
assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
|
||||
artifacts_root = yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"]
|
||||
assert artifacts_root == "jobs_scratch_dir"
|
||||
|
||||
|
||||
@pytest.mark.regression("36045")
|
||||
def test_gitlab_ci_update(
|
||||
tmp_path: pathlib.Path,
|
||||
mutable_mock_env_path,
|
||||
install_mockery,
|
||||
monkeypatch,
|
||||
ci_base_environment,
|
||||
mock_binary_index,
|
||||
):
|
||||
with open(tmp_path / "spack.yaml", "w") as f:
|
||||
f.write(
|
||||
legacy_spack_yaml_contents.format(mirror_url=(tmp_path / "mirror").as_uri(), key="ci")
|
||||
)
|
||||
|
||||
env_cmd("update", "-y", str(tmp_path))
|
||||
|
||||
with open(tmp_path / "spack.yaml") as f:
|
||||
yaml_contents = syaml.load(f)
|
||||
ci_root = yaml_contents["spack"]["ci"]
|
||||
assert "pipeline-gen" in ci_root
|
||||
|
||||
|
||||
def test_gitlab_config_scopes(ci_generate_test, tmp_path):
|
||||
"""Test pipeline generation with real configs included"""
|
||||
configs_path = os.path.join(spack_paths.share_path, "gitlab", "cloud_pipelines", "configs")
|
||||
@@ -1500,7 +1785,7 @@ def test_gitlab_config_scopes(ci_generate_test, tmp_path):
|
||||
specs:
|
||||
- flatten-deps
|
||||
mirrors:
|
||||
buildcache-destination: {tmp_path / "ci-mirror"}
|
||||
some-mirror: {tmp_path / "ci-mirror"}
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
@@ -1573,7 +1858,7 @@ def dynamic_mapping_setup(tmpdir):
|
||||
specs:
|
||||
- pkg-a
|
||||
mirrors:
|
||||
buildcache-destination: https://my.fake.mirror
|
||||
some-mirror: https://my.fake.mirror
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- dynamic-mapping:
|
||||
|
||||
@@ -9,7 +9,6 @@
|
||||
import pathlib
|
||||
import shutil
|
||||
from argparse import Namespace
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -75,7 +74,7 @@ def setup_combined_multiple_env():
|
||||
env("create", "test1")
|
||||
test1 = ev.read("test1")
|
||||
with test1:
|
||||
add("mpich@1.0")
|
||||
add("zlib")
|
||||
test1.concretize()
|
||||
test1.write()
|
||||
|
||||
@@ -402,17 +401,14 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch, mutable_config):
|
||||
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
|
||||
test1, test2, combined = setup_combined_multiple_env()
|
||||
|
||||
combined.unify = unify
|
||||
if not unify:
|
||||
combined.manifest.set_default_view(False)
|
||||
|
||||
combined.add("mpileaks")
|
||||
combined.concretize()
|
||||
combined.write()
|
||||
|
||||
combined.unify = unify
|
||||
|
||||
with combined:
|
||||
install()
|
||||
|
||||
@@ -426,14 +422,6 @@ def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch, mu
|
||||
assert test1_roots == combined_included_roots[test1.path]
|
||||
assert test2_roots == combined_included_roots[test2.path]
|
||||
|
||||
mpileaks = combined.specs_by_hash[combined.concretized_order[0]]
|
||||
if unify:
|
||||
assert mpileaks["mpi"].dag_hash() in test1_roots
|
||||
assert mpileaks["libelf"].dag_hash() in test2_roots
|
||||
else:
|
||||
# check that unification is not by accident
|
||||
assert mpileaks["mpi"].dag_hash() not in test1_roots
|
||||
|
||||
|
||||
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
|
||||
install = SpackCommand("install")
|
||||
@@ -1881,7 +1869,7 @@ def test_env_include_concrete_envs_lockfile():
|
||||
def test_env_include_concrete_add_env():
|
||||
test1, test2, combined = setup_combined_multiple_env()
|
||||
|
||||
# create new env & concretize
|
||||
# crete new env & crecretize
|
||||
env("create", "new")
|
||||
new_env = ev.read("new")
|
||||
with new_env:
|
||||
@@ -1933,116 +1921,6 @@ def test_env_include_concrete_remove_env():
|
||||
assert test2.path not in lockfile_as_dict["include_concrete"].keys()
|
||||
|
||||
|
||||
def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]:
|
||||
override_env = None
|
||||
_config: Dict[Any, Any] = {}
|
||||
if reuse_mode == "true":
|
||||
_config = {"concretizer": {"reuse": True}}
|
||||
elif reuse_mode == "from_environment":
|
||||
_config = {"concretizer": {"reuse": {"from": [{"type": "environment"}]}}}
|
||||
elif reuse_mode == "from_environment_test1":
|
||||
_config = {"concretizer": {"reuse": {"from": [{"type": "environment", "path": "test1"}]}}}
|
||||
elif reuse_mode == "from_environment_external_test":
|
||||
# Create a new environment called external_test that enables the "debug"
|
||||
# The default is "~debug"
|
||||
env("create", "external_test")
|
||||
override_env = ev.read("external_test")
|
||||
with override_env:
|
||||
add("mpich@1.0 +debug")
|
||||
override_env.concretize()
|
||||
override_env.write()
|
||||
|
||||
# Reuse from the environment that is not included.
|
||||
# Specify the requirement for the debug variant. By default this would concretize to use
|
||||
# mpich@3.0 but with include concrete the mpich@1.0 +debug version from the
|
||||
# "external_test" environment will be used.
|
||||
_config = {
|
||||
"concretizer": {"reuse": {"from": [{"type": "environment", "path": "external_test"}]}},
|
||||
"packages": {"mpich": {"require": ["+debug"]}},
|
||||
}
|
||||
elif reuse_mode == "from_environment_raise":
|
||||
_config = {
|
||||
"concretizer": {"reuse": {"from": [{"type": "environment", "path": "not-a-real-env"}]}}
|
||||
}
|
||||
# Disable unification in these tests to avoid confusing reuse due to unification using an
|
||||
# include concrete spec vs reuse due to the reuse configuration
|
||||
_config["concretizer"].update({"unify": False})
|
||||
|
||||
combined_env.manifest.configuration.update(_config)
|
||||
combined_env.manifest.changed = True
|
||||
combined_env.write()
|
||||
|
||||
return override_env
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"reuse_mode",
|
||||
[
|
||||
"true",
|
||||
"from_environment",
|
||||
"from_environment_test1",
|
||||
"from_environment_external_test",
|
||||
"from_environment_raise",
|
||||
],
|
||||
)
|
||||
def test_env_include_concrete_reuse(monkeypatch, reuse_mode):
|
||||
|
||||
# The mock packages do not use the gcc-runtime
|
||||
def mock_has_runtime_dependencies(*args, **kwargs):
|
||||
return True
|
||||
|
||||
monkeypatch.setattr(
|
||||
spack.solver.asp, "_has_runtime_dependencies", mock_has_runtime_dependencies
|
||||
)
|
||||
# The default mpi version is 3.x provided by mpich in the mock repo.
|
||||
# This test verifies that concretizing with an included concrete
|
||||
# environment with "concretizer:reuse:true" the included
|
||||
# concrete spec overrides the default with mpi@1.0.
|
||||
test1, _, combined = setup_combined_multiple_env()
|
||||
|
||||
# Set the reuse mode for the environment
|
||||
override_env = configure_reuse(reuse_mode, combined)
|
||||
if override_env:
|
||||
# If there is an override environment (i.e. testing reuse with
|
||||
# an external environment) update it here.
|
||||
test1 = override_env
|
||||
|
||||
# Capture the test1 specs included by combined
|
||||
test1_specs_by_hash = test1.specs_by_hash
|
||||
|
||||
try:
|
||||
# Add mpileaks to the combined environment
|
||||
with combined:
|
||||
add("mpileaks")
|
||||
combined.concretize()
|
||||
comb_specs_by_hash = combined.specs_by_hash
|
||||
|
||||
# create reference env with mpileaks that does not use reuse
|
||||
# This should concretize to the default version of mpich (3.0)
|
||||
env("create", "new")
|
||||
ref_env = ev.read("new")
|
||||
with ref_env:
|
||||
add("mpileaks")
|
||||
ref_env.concretize()
|
||||
ref_specs_by_hash = ref_env.specs_by_hash
|
||||
|
||||
# Ensure that the mpich used by the mpileaks is the mpich from the reused test environment
|
||||
comb_mpileaks_spec = [s for s in comb_specs_by_hash.values() if s.name == "mpileaks"]
|
||||
test1_mpich_spec = [s for s in test1_specs_by_hash.values() if s.name == "mpich"]
|
||||
assert len(comb_mpileaks_spec) == 1
|
||||
assert len(test1_mpich_spec) == 1
|
||||
assert comb_mpileaks_spec[0]["mpich"].dag_hash() == test1_mpich_spec[0].dag_hash()
|
||||
|
||||
# None of the reference specs (using mpich@3) reuse specs from test1.
# This tests that the reuse is not happening coincidentally
|
||||
assert not any([s in test1_specs_by_hash for s in ref_specs_by_hash])
|
||||
|
||||
# Make sure the raise tests raises
|
||||
assert "raise" not in reuse_mode
|
||||
except ev.SpackEnvironmentError:
|
||||
assert "raise" in reuse_mode
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||
def test_env_include_concrete_env_reconcretized(unify):
|
||||
"""Double check to make sure that concrete_specs for the local specs is empty
|
||||
|
||||
@@ -74,7 +74,7 @@ def test_query_arguments():
|
||||
assert "explicit" in q_args
|
||||
assert q_args["installed"] == ["installed"]
|
||||
assert q_args["predicate_fn"] is None
|
||||
assert q_args["explicit"] is None
|
||||
assert q_args["explicit"] is any
|
||||
assert "start_date" in q_args
|
||||
assert "end_date" not in q_args
|
||||
assert q_args["install_tree"] == "all"
|
||||
|
||||
@@ -906,7 +906,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
|
||||
specfile = "./spec.json"
|
||||
with open(specfile, "w") as f:
|
||||
f.write(spec.to_json())
|
||||
print(spec.to_json())
|
||||
|
||||
install("--log-file=cdash_reports", "--log-format=cdash", specfile)
|
||||
# Verify Configure.xml exists with expected contents.
|
||||
report_dir = tmpdir.join("cdash_reports")
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.cmd
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -33,6 +32,7 @@
|
||||
import spack.store
|
||||
import spack.util.file_cache
|
||||
import spack.variant as vt
|
||||
from spack.concretize import find_spec
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
from spack.version import Version, VersionList, ver
|
||||
@@ -540,17 +540,21 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str,expected_propagation",
|
||||
[
|
||||
("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]),
|
||||
# Propagates past a node that doesn't have the variant
|
||||
("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]),
|
||||
# Propagates from root node to all nodes
|
||||
(
|
||||
"ascent~~shared +adios2",
|
||||
[("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
# Propagate from a node that is not the root node
|
||||
# Propagates below a node that uses the other value explicitly
|
||||
(
|
||||
"ascent +adios2 ^adios2~~shared",
|
||||
[("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
|
||||
"ascent~~shared +adios2 ^adios2+shared",
|
||||
[("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
(
|
||||
"ascent++shared +adios2 ^adios2~shared",
|
||||
[("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")],
|
||||
),
|
||||
],
|
||||
)
|
||||
@@ -560,109 +564,21 @@ def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagat
|
||||
for key, expected_satisfies in expected_propagation:
|
||||
spec[key].satisfies(expected_satisfies)
|
||||
|
||||
def test_concretize_propagate_variant_not_dependencies(self):
|
||||
"""Test that when propagating a variant it is not propagated to dependencies that
|
||||
do not have that variant"""
|
||||
spec = Spec("quantum-espresso~~invino")
|
||||
def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
|
||||
"""Test a package variant value was passed from its parent."""
|
||||
spec = Spec("ascent~~shared +adios2 ^adios2+shared")
|
||||
spec.concretize()
|
||||
|
||||
for dep in spec.traverse(root=False):
|
||||
assert "invino" not in dep.variants.keys()
|
||||
|
||||
def test_concretize_propagate_variant_exclude_dependency_fail(self):
|
||||
"""Tests that a propagating variant cannot be allowed to be excluded by any of
|
||||
the source package's dependencies"""
|
||||
spec = Spec("hypre ~~shared ^openblas +shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_from_direct_dep_fail(self):
|
||||
"""Test that when propagating a variant from the source package and a direct
|
||||
dependency also propagates the same variant with a different value. Raises error"""
|
||||
spec = Spec("ascent +adios2 ++shared ^adios2 ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_in_dependency_fail(self):
|
||||
"""Test that when propagating a variant from the source package, none of it's
|
||||
dependencies can propagate that variant with a different value. Raises error."""
|
||||
spec = Spec("ascent +adios2 ++shared ^bzip2 ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_virtual_dependency_fail(self):
|
||||
"""Test that when propagating a variant from the source package and a direct
|
||||
dependency (that is a virtual pkg) also propagates the same variant with a
|
||||
different value. Raises error"""
|
||||
spec = Spec("hypre ++shared ^openblas ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_multiple_sources_diamond_dep_fail(self):
|
||||
"""Test that fails when propagating the same variant with different values from multiple
|
||||
sources that share a dependency"""
|
||||
spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~bar")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
assert spec.satisfies("^adios2+shared")
|
||||
assert spec.satisfies("^bzip2~shared")
|
||||
|
||||
def test_concretize_propagate_specified_variant(self):
|
||||
"""Test that only the specified variant is propagated to the dependencies"""
|
||||
spec = Spec("parent-foo-bar ~~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
|
||||
assert spec.satisfies("^direct-dep-foo-bar~foo")
|
||||
|
||||
assert not spec.satisfies("^dependency-foo-bar+bar")
|
||||
assert not spec.satisfies("^second-dependency-foo-bar-fee+bar")
|
||||
assert not spec.satisfies("^direct-dep-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_one_variant(self):
|
||||
"""Test that you can specify to propagate one variant and not all"""
|
||||
spec = Spec("parent-foo-bar ++bar ~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and not spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("+bar") and spec.satisfies("^dependency-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_through_first_level_deps(self):
|
||||
"""Test that boolean valued variants can be propagated past first level
|
||||
dependencies even if the first level dependency does not have the variant"""
|
||||
spec = Spec("parent-foo-bar-fee ++fee")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("+fee") and not spec.satisfies("dependency-foo-bar+fee")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee+fee")
|
||||
|
||||
def test_concretize_propagate_multiple_variants(self):
|
||||
"""Test that multiple boolean valued variants can be propagated from
|
||||
the same source package"""
|
||||
spec = Spec("parent-foo-bar-fee ~~foo ++bar")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and spec.satisfies("+bar")
|
||||
assert spec.satisfies("^dependency-foo-bar ~foo +bar")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee ~foo +bar")
|
||||
|
||||
def test_concretize_propagate_multiple_variants_mulitple_sources(self):
|
||||
"""Test the propagates multiple different variants for multiple sources
|
||||
in a diamond dependency"""
|
||||
spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee+bar")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
|
||||
assert not spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert not spec.satisfies("^direct-dep-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_single_valued_variant(self):
|
||||
"""Test propagation for single valued variants"""
|
||||
spec = Spec("multivalue-variant libs==static")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("libs=static")
|
||||
assert spec.satisfies("^pkg-a libs=static")
|
||||
assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_multivalue_variant(self):
|
||||
"""Test that multivalue variants are propagating the specified value(s)
|
||||
@@ -675,46 +591,6 @@ def test_concretize_propagate_multivalue_variant(self):
|
||||
assert not spec.satisfies("^pkg-a foo=bar")
|
||||
assert not spec.satisfies("^pkg-b foo=bar")
|
||||
|
||||
def test_concretize_propagate_multiple_multivalue_variant(self):
|
||||
"""Tests propagating the same mulitvalued variant from different sources allows
|
||||
the dependents to accept all propagated values"""
|
||||
spec = Spec("multivalue-variant foo==bar ^pkg-a foo==baz")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("multivalue-variant foo=bar")
|
||||
assert spec.satisfies("^pkg-a foo=bar,baz")
|
||||
assert spec.satisfies("^pkg-b foo=bar,baz")
|
||||
|
||||
def test_concretize_propagate_variant_not_in_source(self):
|
||||
"""Test that variant is still propagated even if the source pkg
|
||||
doesn't have the variant"""
|
||||
spec = Spec("callpath++debug")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^mpich+debug")
|
||||
assert not spec.satisfies("callpath+debug")
|
||||
assert not spec.satisfies("^dyninst+debug")
|
||||
|
||||
def test_concretize_propagate_variant_multiple_deps_not_in_source(self):
|
||||
"""Test that a variant can be propagated to multiple dependencies
|
||||
when the variant is not in the source package"""
|
||||
spec = Spec("netlib-lapack++shared")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^openblas+shared")
|
||||
assert spec.satisfies("^perl+shared")
|
||||
assert not spec.satisfies("netlib-lapack+shared")
|
||||
|
||||
def test_concretize_propagate_variant_second_level_dep_not_in_source(self):
|
||||
"""Test that a variant can be propagated past first level dependencies
|
||||
when the variant is not in the source package or any of the first level
|
||||
dependencies"""
|
||||
spec = Spec("parent-foo-bar ++fee")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee +fee")
|
||||
assert not spec.satisfies("parent-foo-bar +fee")
|
||||
|
||||
def test_no_matching_compiler_specs(self, mock_low_high_config):
|
||||
# only relevant when not building compilers as needed
|
||||
with spack.concretize.enable_compiler_existence_check():
|
||||
@@ -797,6 +673,39 @@ def test_external_and_virtual(self, mutable_config):
|
||||
assert spec["externaltool"].compiler.satisfies("gcc")
|
||||
assert spec["stuff"].compiler.satisfies("gcc")
|
||||
|
||||
def test_find_spec_parents(self):
|
||||
"""Tests the spec finding logic used by concretization."""
|
||||
s = Spec.from_literal({"a +foo": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
|
||||
|
||||
assert "a" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_children(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
|
||||
|
||||
assert "d" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c+foo": None, "d": None}, "e +foo": None}})
|
||||
|
||||
assert "c" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_sibling(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e +foo": None}})
|
||||
|
||||
assert "e" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
assert "b" == find_spec(s["e"], lambda s: "+foo" in s).name
|
||||
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": {"f +foo": None}}})
|
||||
|
||||
assert "f" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_self(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": None}})
|
||||
assert "b" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_none(self):
|
||||
s = Spec.from_literal({"a": {"b": {"c": None, "d": None}, "e": None}})
|
||||
assert find_spec(s["b"], lambda s: "+foo" in s) is None
|
||||
|
||||
def test_compiler_child(self):
|
||||
s = Spec("mpileaks%clang target=x86_64 ^dyninst%gcc")
|
||||
s.concretize()
|
||||
@@ -905,7 +814,7 @@ def test_regression_issue_7941(self):
|
||||
)
|
||||
def test_simultaneous_concretization_of_specs(self, abstract_specs):
|
||||
abstract_specs = [Spec(x) for x in abstract_specs]
|
||||
concrete_specs = spack.concretize.concretize_specs_together(abstract_specs)
|
||||
concrete_specs = spack.concretize.concretize_specs_together(*abstract_specs)
|
||||
|
||||
# Check there's only one configuration of each package in the DAG
|
||||
names = set(dep.name for spec in concrete_specs for dep in spec.traverse())
|
||||
@@ -2227,7 +2136,7 @@ def test_external_python_extension_find_unified_python(self):
|
||||
spack.config.set("packages", external_conf)
|
||||
|
||||
abstract_specs = [Spec(s) for s in ["py-extension1", "python"]]
|
||||
specs = spack.concretize.concretize_specs_together(abstract_specs)
|
||||
specs = spack.concretize.concretize_specs_together(*abstract_specs)
|
||||
assert specs[0]["python"] == specs[1]["python"]
|
||||
|
||||
@pytest.mark.regression("36190")
|
||||
@@ -2397,30 +2306,6 @@ def test_explicit_splices(
|
||||
assert "hdf5 ^zmpi" in captured.err
|
||||
assert str(spec) in captured.err
|
||||
|
||||
def test_explicit_splice_fails_nonexistent(mutable_config, mock_packages, mock_store):
|
||||
splice_info = {"target": "mpi", "replacement": "mpich/doesnotexist"}
|
||||
spack.config.CONFIG.set("concretizer", {"splice": {"explicit": [splice_info]}})
|
||||
|
||||
with pytest.raises(spack.spec.InvalidHashError):
|
||||
_ = spack.spec.Spec("hdf5^zmpi").concretized()
|
||||
|
||||
def test_explicit_splice_fails_no_hash(mutable_config, mock_packages, mock_store):
|
||||
splice_info = {"target": "mpi", "replacement": "mpich"}
|
||||
spack.config.CONFIG.set("concretizer", {"splice": {"explicit": [splice_info]}})
|
||||
|
||||
with pytest.raises(spack.solver.asp.InvalidSpliceError, match="must be specified by hash"):
|
||||
_ = spack.spec.Spec("hdf5^zmpi").concretized()
|
||||
|
||||
def test_explicit_splice_non_match_nonexistent_succeeds(
|
||||
mutable_config, mock_packages, mock_store
|
||||
):
|
||||
"""When we have a nonexistent splice configured but are not using it, don't fail."""
|
||||
splice_info = {"target": "will_not_match", "replacement": "nonexistent/doesnotexist"}
|
||||
spack.config.CONFIG.set("concretizer", {"splice": {"explicit": [splice_info]}})
|
||||
spec = spack.spec.Spec("zlib").concretized()
|
||||
# the main test is that it does not raise
|
||||
assert not spec.spliced
|
||||
|
||||
@pytest.mark.db
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str,mpi_name",
|
||||
@@ -3197,20 +3082,3 @@ def test_reuse_prefers_standard_over_git_versions(
|
||||
test_spec = spack.spec.Spec("git-ref-package@2").concretized()
|
||||
assert git_spec.dag_hash() != test_spec.dag_hash()
|
||||
assert standard_spec.dag_hash() == test_spec.dag_hash()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, "when_possible", False])
|
||||
def test_spec_unification(unify, mutable_config, mock_packages):
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
a = "pkg-a"
|
||||
a_restricted = "pkg-a^pkg-b foo=baz"
|
||||
b = "pkg-b foo=none"
|
||||
|
||||
unrestricted = spack.cmd.parse_specs([a, b], concretize=True)
|
||||
a_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-a"][0]
|
||||
b_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-b"][0]
|
||||
assert (a_concrete_unrestricted["pkg-b"] == b_concrete_unrestricted) == (unify is not False)
|
||||
|
||||
maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext
|
||||
with maybe_fails(spack.solver.asp.UnsatisfiableSpecError):
|
||||
_ = spack.cmd.parse_specs([a_restricted, b], concretize=True)
|
||||
|
||||
@@ -472,13 +472,6 @@ def test_substitute_date(mock_low_high_config):
|
||||
assert date.today().strftime("%Y-%m-%d") in new_path
|
||||
|
||||
|
||||
def test_substitute_spack_version():
|
||||
version = spack.spack_version_info
|
||||
assert spack_path.canonicalize_path(
|
||||
"spack$spack_short_version/test"
|
||||
) == spack_path.canonicalize_path(f"spack{version[0]}.{version[1]}/test")
|
||||
|
||||
|
||||
PAD_STRING = spack_path.SPACK_PATH_PADDING_CHARS
|
||||
MAX_PATH_LEN = spack_path.get_system_path_max()
|
||||
MAX_PADDED_LEN = MAX_PATH_LEN - spack_path.SPACK_MAX_INSTALL_PATH_LENGTH
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
bootstrap:
|
||||
sources:
|
||||
- name: 'github-actions'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.6
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.5
|
||||
trusted: {}
|
||||
|
||||
@@ -892,32 +892,3 @@ def test_stack_enforcement_is_strict(tmp_path, matrix_line, config, mock_package
|
||||
with pytest.raises(Exception):
|
||||
with ev.Environment(tmp_path) as e:
|
||||
e.concretize()
|
||||
|
||||
|
||||
def test_only_roots_are_explicitly_installed(tmp_path, mock_packages, config, temporary_store):
|
||||
"""When installing specific non-root specs from an environment, we continue to mark them
|
||||
as implicitly installed. What makes installs explicit is that they are root of the env."""
|
||||
env = ev.create_in_dir(tmp_path)
|
||||
env.add("mpileaks")
|
||||
env.concretize()
|
||||
mpileaks = env.concrete_roots()[0]
|
||||
callpath = mpileaks["callpath"]
|
||||
env.install_specs([callpath], fake=True)
|
||||
assert callpath in temporary_store.db.query(explicit=False)
|
||||
env.install_specs([mpileaks], fake=True)
|
||||
assert temporary_store.db.query(explicit=True) == [mpileaks]
|
||||
|
||||
|
||||
def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_path):
|
||||
test_env = ev.create("test")
|
||||
|
||||
name_env = ev.environment_from_name_or_dir(test_env.name)
|
||||
assert name_env.name == test_env.name
|
||||
assert name_env.path == test_env.path
|
||||
|
||||
dir_env = ev.environment_from_name_or_dir(test_env.path)
|
||||
assert dir_env.name == test_env.name
|
||||
assert dir_env.path == test_env.path
|
||||
|
||||
with pytest.raises(ev.SpackEnvironmentError, match="no such environment"):
|
||||
_ = ev.environment_from_name_or_dir("fake-env")
|
||||
|
||||
@@ -644,12 +644,13 @@ def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
|
||||
def test_installer_init_requests(install_mockery):
|
||||
"""Test of installer initial requests."""
|
||||
spec_name = "dependent-install"
|
||||
installer = create_installer([spec_name], {})
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer([spec_name], {})
|
||||
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize("transitive", [True, False])
|
||||
@@ -742,20 +743,21 @@ def _missing(*args, **kwargs):
|
||||
|
||||
# Set the configuration to ensure _requeue_with_build_spec_tasks actually
|
||||
# does something.
|
||||
installer = create_installer(["depb"], {})
|
||||
installer._init_queue()
|
||||
request = installer.build_requests[0]
|
||||
task = create_build_task(request.pkg)
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer(["depb"], {})
|
||||
installer._init_queue()
|
||||
request = installer.build_requests[0]
|
||||
task = create_build_task(request.pkg)
|
||||
|
||||
# Drop one of the specs so its task is missing before _install_task
|
||||
popped_task = installer._pop_task()
|
||||
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
|
||||
# Drop one of the specs so its task is missing before _install_task
|
||||
popped_task = installer._pop_task()
|
||||
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
|
||||
|
||||
monkeypatch.setattr(task, "execute", _missing)
|
||||
installer._install_task(task, None)
|
||||
monkeypatch.setattr(task, "execute", _missing)
|
||||
installer._install_task(task, None)
|
||||
|
||||
# Ensure the dropped task/spec was added back by _install_task
|
||||
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
|
||||
# Ensure the dropped task/spec was added back by _install_task
|
||||
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
|
||||
|
||||
|
||||
def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find_headers, find_libraries
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find, find_headers, find_libraries
|
||||
|
||||
import spack.paths
|
||||
|
||||
@@ -324,3 +324,33 @@ def test_searching_order(search_fn, search_list, root, kwargs):
|
||||
|
||||
# List should be empty here
|
||||
assert len(rlist) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"root,search_list,kwargs,expected",
|
||||
[
|
||||
(
|
||||
search_dir,
|
||||
"*/*bar.tx?",
|
||||
{"recursive": False},
|
||||
[
|
||||
os.path.join(search_dir, os.path.join("a", "foobar.txt")),
|
||||
os.path.join(search_dir, os.path.join("b", "bar.txp")),
|
||||
os.path.join(search_dir, os.path.join("c", "bar.txt")),
|
||||
],
|
||||
),
|
||||
(
|
||||
search_dir,
|
||||
"*/*bar.tx?",
|
||||
{"recursive": True},
|
||||
[
|
||||
os.path.join(search_dir, os.path.join("a", "foobar.txt")),
|
||||
os.path.join(search_dir, os.path.join("b", "bar.txp")),
|
||||
os.path.join(search_dir, os.path.join("c", "bar.txt")),
|
||||
],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_find_with_globbing(root, search_list, kwargs, expected):
|
||||
matches = find(root, search_list, **kwargs)
|
||||
assert sorted(matches) == sorted(expected)
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
"""Tests for ``llnl/util/filesystem.py``"""
|
||||
import filecmp
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
@@ -15,8 +14,7 @@
|
||||
import pytest
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.symlink
|
||||
from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink
|
||||
from llnl.util.symlink import islink, readlink, symlink
|
||||
|
||||
import spack.paths
|
||||
|
||||
@@ -1037,177 +1035,3 @@ def test_windows_sfn(tmpdir):
|
||||
assert "d\\LONGER~1" in fs.windows_sfn(d)
|
||||
assert "d\\LONGER~2" in fs.windows_sfn(e)
|
||||
shutil.rmtree(tmpdir.join("d"))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def dir_structure_with_things_to_find(tmpdir):
|
||||
"""
|
||||
<root>/
|
||||
dir_one/
|
||||
file_one
|
||||
dir_two/
|
||||
dir_three/
|
||||
dir_four/
|
||||
file_two
|
||||
file_three
|
||||
file_four
|
||||
"""
|
||||
dir_one = tmpdir.join("dir_one").ensure(dir=True)
|
||||
tmpdir.join("dir_two").ensure(dir=True)
|
||||
dir_three = tmpdir.join("dir_three").ensure(dir=True)
|
||||
dir_four = dir_three.join("dir_four").ensure(dir=True)
|
||||
|
||||
locations = {}
|
||||
locations["file_one"] = str(dir_one.join("file_one").ensure())
|
||||
locations["file_two"] = str(dir_four.join("file_two").ensure())
|
||||
locations["file_three"] = str(dir_three.join("file_three").ensure())
|
||||
locations["file_four"] = str(tmpdir.join("file_four").ensure())
|
||||
|
||||
return str(tmpdir), locations
|
||||
|
||||
|
||||
def test_find_max_depth(dir_structure_with_things_to_find):
|
||||
root, locations = dir_structure_with_things_to_find
|
||||
|
||||
# Make sure the paths we use to verify are absolute
|
||||
assert os.path.isabs(locations["file_one"])
|
||||
|
||||
assert set(fs.find(root, "file_*", max_depth=0)) == {locations["file_four"]}
|
||||
assert set(fs.find(root, "file_*", max_depth=1)) == {
|
||||
locations["file_one"],
|
||||
locations["file_three"],
|
||||
locations["file_four"],
|
||||
}
|
||||
assert set(fs.find(root, "file_two", max_depth=2)) == {locations["file_two"]}
|
||||
assert not set(fs.find(root, "file_two", max_depth=1))
|
||||
assert set(fs.find(root, "file_two")) == {locations["file_two"]}
|
||||
assert set(fs.find(root, "file_*")) == set(locations.values())
|
||||
|
||||
|
||||
def test_find_max_depth_relative(dir_structure_with_things_to_find):
|
||||
"""find_max_depth should return absolute paths even if
|
||||
the provided path is relative.
|
||||
"""
|
||||
root, locations = dir_structure_with_things_to_find
|
||||
with fs.working_dir(root):
|
||||
assert set(fs.find(".", "file_*", max_depth=0)) == {locations["file_four"]}
|
||||
assert set(fs.find(".", "file_two", max_depth=2)) == {locations["file_two"]}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)])
|
||||
def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth):
|
||||
root = str(tmpdir)
|
||||
error_str = "cannot be set if recursive is False"
|
||||
with pytest.raises(ValueError, match=error_str):
|
||||
fs.find(root, ["some_file"], recursive=recursive, max_depth=max_depth)
|
||||
|
||||
with pytest.raises(ValueError, match=error_str):
|
||||
fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth)
|
||||
|
||||
|
||||
@pytest.fixture(params=[True, False])
|
||||
def complex_dir_structure(request, tmpdir):
|
||||
"""
|
||||
"lx-dy" means "level x, directory y"
|
||||
"lx-fy" means "level x, file y"
|
||||
"lx-sy" means "level x, symlink y"
|
||||
|
||||
<root>/
|
||||
l1-d1/
|
||||
l2-d1/
|
||||
l3-s1 -> l1-d2 # points to directory above l2-d1
|
||||
l3-d2/
|
||||
l4-f1
|
||||
l3-s3 -> l1-d1 # cyclic link
|
||||
l3-d4/
|
||||
l4-f2
|
||||
l1-d2/
|
||||
l2-f1
|
||||
l2-d2/
|
||||
l3-f3
|
||||
l2-s3 -> l2-d2
|
||||
l1-s3 -> l3-d4 # a link that "skips" a directory level
|
||||
l1-s4 -> l2-s3 # a link to a link to a dir
|
||||
"""
|
||||
use_junctions = request.param
|
||||
if sys.platform == "win32" and not use_junctions and not _windows_can_symlink():
|
||||
pytest.skip("This Windows instance is not configured with symlink support")
|
||||
elif sys.platform != "win32" and use_junctions:
|
||||
pytest.skip("Junctions are a Windows-only feature")
|
||||
|
||||
l1_d1 = tmpdir.join("l1-d1").ensure(dir=True)
|
||||
l2_d1 = l1_d1.join("l2-d1").ensure(dir=True)
|
||||
l3_d2 = l2_d1.join("l3-d2").ensure(dir=True)
|
||||
l3_d4 = l2_d1.join("l3-d4").ensure(dir=True)
|
||||
l1_d2 = tmpdir.join("l1-d2").ensure(dir=True)
|
||||
l2_d2 = l1_d2.join("l1-d2").ensure(dir=True)
|
||||
|
||||
if use_junctions:
|
||||
link_fn = llnl.util.symlink._windows_create_junction
|
||||
else:
|
||||
link_fn = os.symlink
|
||||
|
||||
link_fn(l1_d2, pathlib.Path(l2_d1) / "l3-s1")
|
||||
link_fn(l1_d1, pathlib.Path(l2_d1) / "l3-s3")
|
||||
link_fn(l3_d4, pathlib.Path(tmpdir) / "l1-s3")
|
||||
l2_s3 = pathlib.Path(l1_d2) / "l2-s3"
|
||||
link_fn(l2_d2, l2_s3)
|
||||
link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4")
|
||||
|
||||
locations = {
|
||||
"l4-f1": str(l3_d2.join("l4-f1").ensure()),
|
||||
"l4-f2-full": str(l3_d4.join("l4-f2").ensure()),
|
||||
"l4-f2-link": str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2"),
|
||||
"l2-f1": str(l1_d2.join("l2-f1").ensure()),
|
||||
"l2-f1-link": str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1"),
|
||||
"l3-f3-full": str(l2_d2.join("l3-f3").ensure()),
|
||||
"l3-f3-link-l1": str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3"),
|
||||
}
|
||||
|
||||
return str(tmpdir), locations
|
||||
|
||||
|
||||
def test_find_max_depth_symlinks(complex_dir_structure):
|
||||
root, locations = complex_dir_structure
|
||||
root = pathlib.Path(root)
|
||||
assert set(fs.find(root, "l4-f1")) == {locations["l4-f1"]}
|
||||
assert set(fs.find(root / "l1-s3", "l4-f2", max_depth=0)) == {locations["l4-f2-link"]}
|
||||
assert set(fs.find(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]}
|
||||
# File is accessible via symlink and subdir, the link path will be
|
||||
# searched first, and the directory will not be searched again when
|
||||
# it is encountered the second time (via the non-link path) in the traversal
|
||||
assert set(fs.find(root, "l4-f2")) == {locations["l4-f2-link"]}
|
||||
# File is accessible only via the dir, so the full file path should
|
||||
# be reported
|
||||
assert set(fs.find(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]}
|
||||
# Check following links to links
|
||||
assert set(fs.find(root, "l3-f3")) == {locations["l3-f3-link-l1"]}
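A rough sketch of the traversal contract these assertions rely on: depth is counted from the search root, directory symlinks are followed, and a (device, inode) visited set keeps cycles and doubly reachable directories from being walked twice. This is only an illustration written under those assumptions, not the llnl.util.filesystem implementation.

import fnmatch
import os
from typing import List, Optional


def find_limited(root: str, pattern: str, max_depth: Optional[int] = None) -> List[str]:
    matches: List[str] = []
    visited = set()
    queue = [(os.path.abspath(root), 0)]
    while queue:
        current, depth = queue.pop(0)
        try:
            stat_info = os.stat(current)  # follows the link, identifies the real directory
        except OSError:
            continue
        key = (stat_info.st_dev, stat_info.st_ino)
        if key in visited:  # already walked via another path (symlink or real parent)
            continue
        visited.add(key)
        try:
            entries = list(os.scandir(current))
        except OSError:
            continue
        for entry in entries:
            if fnmatch.fnmatch(entry.name, pattern):
                matches.append(entry.path)
            if entry.is_dir(follow_symlinks=True) and (max_depth is None or depth < max_depth):
                queue.append((entry.path, depth + 1))
    return matches
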
def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure):
|
||||
root, locations = complex_dir_structure
|
||||
|
||||
fst = str(pathlib.Path(root) / "l1-d1" / "l2-d1")
|
||||
snd = str(pathlib.Path(root) / "l1-d2")
|
||||
nonexistent = str(pathlib.Path(root) / "nonexistent")
|
||||
|
||||
assert set(fs.find([fst, snd, fst, snd, nonexistent], ["l*-f*"], max_depth=1)) == {
|
||||
locations["l2-f1"],
|
||||
locations["l4-f1"],
|
||||
locations["l4-f2-full"],
|
||||
locations["l3-f3-full"],
|
||||
}
|
||||
|
||||
|
||||
def test_multiple_patterns(complex_dir_structure):
|
||||
root, _ = complex_dir_structure
|
||||
paths = fs.find(root, ["l2-f1", "l3-f3", "*"])
|
||||
# There shouldn't be duplicate results with multiple, overlapping patterns
|
||||
assert len(set(paths)) == len(paths)
|
||||
# All files should be found
|
||||
filenames = [os.path.basename(p) for p in paths]
|
||||
assert set(filenames) == {"l2-f1", "l3-f3", "l4-f1", "l4-f2"}
|
||||
# They are ordered by first matching pattern (this is a bit of an implementation detail,
|
||||
# and we could decide to change the exact order in the future)
|
||||
assert filenames[0] == "l2-f1"
|
||||
assert filenames[1] == "l3-f3"
|
||||
|
||||
@@ -336,55 +336,3 @@ def test_grouped_exception_base_type():
|
||||
message = h.grouped_message(with_tracebacks=False)
|
||||
assert "catch-runtime-error" in message
|
||||
assert "catch-value-error" not in message
|
||||
|
||||
|
||||
def test_class_level_constant_value():
|
||||
"""Tests that the Const descriptor does not allow overwriting the value from an instance"""
|
||||
|
||||
class _SomeClass:
|
||||
CONST_VALUE = llnl.util.lang.Const(10)
|
||||
|
||||
with pytest.raises(TypeError, match="not support assignment"):
|
||||
_SomeClass().CONST_VALUE = 11
|
||||
|
||||
|
||||
def test_deprecated_property():
    """Tests the behavior of the DeprecatedProperty descriptor, which can be used when
    deprecating an attribute.
    """

    class _Deprecated(llnl.util.lang.DeprecatedProperty):
        def factory(self, instance, owner):
            return 46

    class _SomeClass:
        deprecated = _Deprecated("deprecated")

    # Default behavior is to just return the deprecated value
    s = _SomeClass()
    assert s.deprecated == 46

    # When setting error_level to 1 the attribute warns
    _SomeClass.deprecated.error_lvl = 1
    with pytest.warns(UserWarning):
        assert s.deprecated == 46

    # When setting error_level to 2 an exception is raised
    _SomeClass.deprecated.error_lvl = 2
    with pytest.raises(AttributeError):
        _ = s.deprecated


def test_fnmatch_multiple():
    regex, groups = llnl.util.lang.fnmatch_translate_multiple(["libf*o.so", "libb*r.so"])

    a = regex.match("libfoo.so")
    assert a and a.group(groups[0]) == "libfoo.so"

    b = regex.match("libbar.so")
    assert b and b.group(groups[1]) == "libbar.so"

    assert not regex.match("libfoo.so.1")
    assert not regex.match("libbar.so.1")
    assert not regex.match("libfoo.solibbar.so")
    assert not regex.match("libbaz.so")

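For readers who have not seen the helper before, the behavior exercised here can be approximated by translating each shell pattern with fnmatch.translate and wrapping it in a named group, so a successful match also reports which pattern fired. The sketch below uses made-up group names and is not the actual llnl.util.lang code.

import fnmatch
import re
from typing import List, Tuple


def fnmatch_translate_multiple_sketch(patterns: List[str]) -> Tuple[re.Pattern, List[str]]:
    # One named group per pattern; alternation tries them left to right, and the
    # \Z anchor emitted by fnmatch.translate keeps partial names from matching.
    names = [f"pattern{i}" for i in range(len(patterns))]
    alternatives = [f"(?P<{n}>{fnmatch.translate(p)})" for n, p in zip(names, patterns)]
    return re.compile("|".join(alternatives)), names


regex, groups = fnmatch_translate_multiple_sketch(["libf*o.so", "libb*r.so"])
match = regex.match("libfoo.so")
assert match and match.group(groups[0]) == "libfoo.so"
assert regex.match("libbar.so").group(groups[1]) == "libbar.so"
assert not regex.match("libbaz.so")
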
@@ -18,6 +18,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.install_test
|
||||
|
||||
@@ -756,48 +756,6 @@ def test_spec_tree_respect_deptypes(self):
|
||||
out = s.tree(deptypes=("link", "run"))
|
||||
assert "version-test-pkg" not in out
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"query,expected_length,expected_satisfies",
|
||||
[
|
||||
({"virtuals": ["mpi"]}, 1, ["mpich", "mpi"]),
|
||||
({"depflag": dt.BUILD}, 2, ["mpich", "mpi", "callpath"]),
|
||||
({"depflag": dt.BUILD, "virtuals": ["mpi"]}, 1, ["mpich", "mpi"]),
|
||||
({"depflag": dt.LINK}, 2, ["mpich", "mpi", "callpath"]),
|
||||
({"depflag": dt.BUILD | dt.LINK}, 2, ["mpich", "mpi", "callpath"]),
|
||||
({"virtuals": ["lapack"]}, 0, []),
|
||||
],
|
||||
)
|
||||
def test_query_dependency_edges(
|
||||
self, default_mock_concretization, query, expected_length, expected_satisfies
|
||||
):
|
||||
"""Tests querying edges to dependencies on the following DAG:
|
||||
|
||||
[ ] mpileaks@=2.3
|
||||
[bl ] ^callpath@=1.0
|
||||
[bl ] ^dyninst@=8.2
|
||||
[bl ] ^libdwarf@=20130729
|
||||
[bl ] ^libelf@=0.8.13
|
||||
[bl ] ^mpich@=3.0.4
|
||||
"""
|
||||
mpileaks = default_mock_concretization("mpileaks")
|
||||
edges = mpileaks.edges_to_dependencies(**query)
|
||||
assert len(edges) == expected_length
|
||||
for constraint in expected_satisfies:
|
||||
assert any(x.spec.satisfies(constraint) for x in edges)
|
||||
|
||||
def test_query_dependents_edges(self, default_mock_concretization):
|
||||
"""Tests querying edges from dependents"""
|
||||
mpileaks = default_mock_concretization("mpileaks")
|
||||
mpich = mpileaks["mpich"]
|
||||
|
||||
# Recover the root with 2 different queries
|
||||
edges_of_link_type = mpich.edges_from_dependents(depflag=dt.LINK)
|
||||
edges_with_mpi = mpich.edges_from_dependents(virtuals=["mpi"])
|
||||
assert edges_with_mpi == edges_of_link_type
|
||||
|
||||
# Check a node depended upon by 2 parents
|
||||
assert len(mpileaks["libelf"].edges_from_dependents(depflag=dt.LINK)) == 2
|
||||
|
||||
|
||||
def test_tree_cover_nodes_reduce_deptype():
|
||||
"""Test that tree output with deptypes sticks to the sub-dag of interest, instead of looking
|
||||
|
||||
@@ -231,7 +231,7 @@ class TestSpecSemantics:
|
||||
("mpich+foo", "mpich foo=True", "mpich+foo"),
|
||||
("mpich++foo", "mpich foo=True", "mpich+foo"),
|
||||
("mpich foo=true", "mpich+foo", "mpich+foo"),
|
||||
("mpich foo==true", "mpich++foo", "mpich++foo"),
|
||||
("mpich foo==true", "mpich++foo", "mpich+foo"),
|
||||
("mpich~foo", "mpich foo=FALSE", "mpich~foo"),
|
||||
("mpich~~foo", "mpich foo=FALSE", "mpich~foo"),
|
||||
("mpich foo=False", "mpich~foo", "mpich~foo"),
|
||||
@@ -271,17 +271,17 @@ class TestSpecSemantics:
|
||||
("mpich+foo", "mpich", "mpich+foo"),
|
||||
("mpich~foo", "mpich", "mpich~foo"),
|
||||
("mpich foo=1", "mpich", "mpich foo=1"),
|
||||
("mpich", "mpich++foo", "mpich++foo"),
|
||||
("mpich", "mpich++foo", "mpich+foo"),
|
||||
("libelf+debug", "libelf+foo", "libelf+debug+foo"),
|
||||
("libelf+debug", "libelf+debug+foo", "libelf+debug+foo"),
|
||||
("libelf debug=2", "libelf foo=1", "libelf debug=2 foo=1"),
|
||||
("libelf debug=2", "libelf debug=2 foo=1", "libelf debug=2 foo=1"),
|
||||
("libelf+debug", "libelf~foo", "libelf+debug~foo"),
|
||||
("libelf+debug", "libelf+debug~foo", "libelf+debug~foo"),
|
||||
("libelf++debug", "libelf+debug+foo", "libelf+debug+foo"),
|
||||
("libelf debug==2", "libelf foo=1", "libelf debug==2 foo=1"),
|
||||
("libelf debug==2", "libelf debug=2 foo=1", "libelf debug=2 foo=1"),
|
||||
("libelf++debug", "libelf++debug~foo", "libelf++debug~foo"),
|
||||
("libelf++debug", "libelf+debug+foo", "libelf++debug++foo"),
|
||||
("libelf debug==2", "libelf foo=1", "libelf debug==2 foo==1"),
|
||||
("libelf debug==2", "libelf debug=2 foo=1", "libelf debug==2 foo==1"),
|
||||
("libelf++debug", "libelf++debug~foo", "libelf++debug~~foo"),
|
||||
("libelf foo=bar,baz", "libelf foo=*", "libelf foo=bar,baz"),
|
||||
("libelf foo=*", "libelf foo=bar,baz", "libelf foo=bar,baz"),
|
||||
(
|
||||
@@ -367,24 +367,19 @@ def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected):
|
||||
'mpich cflags="-O3 -g"',
|
||||
'mpich cflags=="-O3"',
|
||||
'mpich cflags="-O3 -g"',
|
||||
'mpich cflags="-O3 -g"',
|
||||
[],
|
||||
[],
|
||||
),
|
||||
(
|
||||
'mpich cflags=="-O3 -g"',
|
||||
'mpich cflags=="-O3"',
|
||||
'mpich cflags=="-O3 -g"',
|
||||
'mpich cflags=="-O3 -g"',
|
||||
[("cflags", "-O3"), ("cflags", "-g")],
|
||||
[("cflags", "-O3"), ("cflags", "-g")],
|
||||
[("cflags", "-O3")],
|
||||
[("cflags", "-O3")],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_constrain_compiler_flags(
|
||||
self, lhs, rhs, expected_lhs, expected_rhs, propagated_lhs, propagated_rhs
|
||||
):
|
||||
"""Constraining is asymmetric for compiler flags."""
|
||||
"""Constraining is asymmetric for compiler flags. Also note that
|
||||
Spec equality does not account for flag propagation, so the checks
|
||||
here are manual.
|
||||
"""
|
||||
lhs, rhs, expected_lhs, expected_rhs = (
|
||||
Spec(lhs),
|
||||
Spec(rhs),
|
||||
@@ -512,6 +507,9 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs):
|
||||
("mpich", "mpich +foo"),
|
||||
("mpich", "mpich~foo"),
|
||||
("mpich", "mpich foo=1"),
|
||||
("mpich", "mpich++foo"),
|
||||
("mpich", "mpich~~foo"),
|
||||
("mpich", "mpich foo==1"),
|
||||
("multivalue-variant foo=bar", "multivalue-variant +foo"),
|
||||
("multivalue-variant foo=bar", "multivalue-variant ~foo"),
|
||||
("multivalue-variant fee=bar", "multivalue-variant fee=baz"),
|
||||
@@ -533,58 +531,6 @@ def test_concrete_specs_which_do_not_satisfy_abstract(
with pytest.raises(UnsatisfiableSpecError):
assert rhs.constrain(lhs)

@pytest.mark.parametrize(
"lhs,rhs", [("mpich", "mpich++foo"), ("mpich", "mpich~~foo"), ("mpich", "mpich foo==1")]
)
def test_concrete_specs_which_satisfy_abstract(self, lhs, rhs, default_mock_concretization):
lhs, rhs = default_mock_concretization(lhs), Spec(rhs)

assert lhs.intersects(rhs)
assert rhs.intersects(lhs)
assert lhs.satisfies(rhs)

s1 = lhs.copy()
s1.constrain(rhs)
assert s1 == lhs and s1.satisfies(lhs)

s2 = rhs.copy()
s2.constrain(lhs)
assert s2 == lhs and s2.satisfies(lhs)

@pytest.mark.parametrize(
"lhs,rhs,expected,constrained",
[
# hdf5++mpi satisfies hdf5, and vice versa, because of the non-contradiction semantic
("hdf5++mpi", "hdf5", True, "hdf5++mpi"),
("hdf5", "hdf5++mpi", True, "hdf5++mpi"),
# Same holds true for arbitrary propagated variants
("hdf5++mpi", "hdf5++shared", True, "hdf5++mpi++shared"),
# Here hdf5+mpi satisfies hdf5++mpi but not vice versa
("hdf5++mpi", "hdf5+mpi", False, "hdf5+mpi"),
("hdf5+mpi", "hdf5++mpi", True, "hdf5+mpi"),
# Non contradiction is violated
("hdf5 ^foo~mpi", "hdf5++mpi", False, "hdf5++mpi ^foo~mpi"),
("hdf5++mpi", "hdf5 ^foo~mpi", False, "hdf5++mpi ^foo~mpi"),
],
)
def test_abstract_specs_with_propagation(self, lhs, rhs, expected, constrained):
"""Tests (and documents) behavior of variant propagation on abstract specs.

Propagated variants do not comply with subset semantic, making it difficult to give
precise definitions. Here we document the behavior that has been decided for the
practical cases we face.
"""
lhs, rhs, constrained = Spec(lhs), Spec(rhs), Spec(constrained)
assert lhs.satisfies(rhs) is expected

c = lhs.copy()
c.constrain(rhs)
assert c == constrained

c = rhs.copy()
c.constrain(lhs)
assert c == constrained

def test_satisfies_single_valued_variant(self):
"""Tests that the case reported in
https://github.com/spack/spack/pull/2386#issuecomment-282147639
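The table in the hunk above can be read directly as API calls. Below is a minimal sketch of the propagation semantics it encodes, assuming a Spack checkout is importable; the spec strings and expected results are copied from the table, nothing beyond that is implied.

from spack.spec import Spec

# ++mpi only requests that +mpi be propagated to dependencies, so it does not
# contradict a bare hdf5: satisfaction holds in both directions.
assert Spec("hdf5++mpi").satisfies(Spec("hdf5"))
assert Spec("hdf5").satisfies(Spec("hdf5++mpi"))

# +mpi set on the node satisfies the propagated request ++mpi, but not vice versa.
assert Spec("hdf5+mpi").satisfies(Spec("hdf5++mpi"))
assert not Spec("hdf5++mpi").satisfies(Spec("hdf5+mpi"))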
@@ -1958,20 +1904,3 @@ def test_old_format_strings_trigger_error(default_mock_concretization):
s = Spec("pkg-a").concretized()
with pytest.raises(SpecFormatStringError):
s.format("${PACKAGE}-${VERSION}-${HASH}")


@pytest.mark.regression("47362")
@pytest.mark.parametrize(
"lhs,rhs",
[
("hdf5 +mpi", "hdf5++mpi"),
("hdf5 cflags==-g", "hdf5 cflags=-g"),
("hdf5 +mpi ++shared", "hdf5+mpi +shared"),
("hdf5 +mpi cflags==-g", "hdf5++mpi cflag=-g"),
],
)
def test_equality_discriminate_on_propagation(lhs, rhs):
"""Tests that == can discriminate abstract specs based on their 'propagation' status"""
s, t = Spec(lhs), Spec(rhs)
assert s != t
assert len({s, t}) == 2

@@ -16,7 +16,6 @@
import io
import json
import os
import pickle

import pytest
import ruamel.yaml
@@ -552,26 +551,3 @@ def test_anchorify_2():
e: *id002
"""
)


@pytest.mark.parametrize(
"spec_str",
[
"hdf5 ++mpi",
"hdf5 cflags==-g",
"hdf5 foo==bar",
"hdf5~~mpi++shared",
"hdf5 cflags==-g foo==bar cxxflags==-O3",
"hdf5 cflags=-g foo==bar cxxflags==-O3",
],
)
def test_pickle_roundtrip_for_abstract_specs(spec_str):
"""Tests that abstract specs correctly round trip when pickled.

This test compares both spec objects and their string representation, due to some
inconsistencies in how `Spec.__eq__` is implemented.
"""
s = spack.spec.Spec(spec_str)
t = pickle.loads(pickle.dumps(s))
assert s == t
assert str(s) == str(t)

@@ -31,11 +31,6 @@ def test_write_and_read_cache_file(file_cache):
assert text == "foobar\n"


def test_read_before_init(file_cache):
with file_cache.read_transaction("test.yaml") as stream:
assert stream is None


@pytest.mark.not_on_windows("Locks not supported on Windows")
def test_failed_write_and_read_cache_file(file_cache):
"""Test failing to write then attempting to read a cached file."""
@@ -51,6 +46,11 @@ def test_failed_write_and_read_cache_file(file_cache):
# File does not exist
assert not file_cache.init_entry("test.yaml")

# Attempting to read will cause a FileNotFoundError
with pytest.raises(FileNotFoundError, match=r"test\.yaml"):
with file_cache.read_transaction("test.yaml"):
pass


def test_write_and_remove_cache_file(file_cache):
"""Test two write transactions on a cached file. Then try to remove an

@@ -7,7 +7,6 @@
import math
import os
import shutil
from typing import IO, Optional, Tuple

from llnl.util.filesystem import mkdirp, rename

@@ -15,51 +14,6 @@
from spack.util.lock import Lock, ReadTransaction, WriteTransaction


def _maybe_open(path: str) -> Optional[IO[str]]:
try:
return open(path, "r")
except OSError as e:
if e.errno != errno.ENOENT:
raise
return None


class ReadContextManager:
def __init__(self, path: str) -> None:
self.path = path

def __enter__(self) -> Optional[IO[str]]:
"""Return a file object for the cache if it exists."""
self.cache_file = _maybe_open(self.path)
return self.cache_file

def __exit__(self, type, value, traceback):
if self.cache_file:
self.cache_file.close()


class WriteContextManager:
def __init__(self, path: str) -> None:
self.path = path
self.tmp_path = f"{self.path}.tmp"

def __enter__(self) -> Tuple[Optional[IO[str]], IO[str]]:
"""Return (old_file, new_file) file objects, where old_file is optional."""
self.old_file = _maybe_open(self.path)
self.new_file = open(self.tmp_path, "w")
return self.old_file, self.new_file

def __exit__(self, type, value, traceback):
if self.old_file:
self.old_file.close()
self.new_file.close()

if value:
os.remove(self.tmp_path)
else:
rename(self.tmp_path, self.path)


class FileCache:
"""This class manages cached data in the filesystem.

@@ -153,8 +107,7 @@ def read_transaction(self, key):
cache_file.read()

"""
path = self.cache_path(key)
return ReadTransaction(self._get_lock(key), acquire=lambda: ReadContextManager(path))
return ReadTransaction(self._get_lock(key), acquire=lambda: open(self.cache_path(key)))

def write_transaction(self, key):
"""Get a write transaction on a file cache item.
@@ -164,11 +117,40 @@ def write_transaction(self, key):
moves the file into place on top of the old file atomically.

"""
path = self.cache_path(key)
if os.path.exists(path) and not os.access(path, os.W_OK):
raise CacheError(f"Insufficient permissions to write to file cache at {path}")
filename = self.cache_path(key)
if os.path.exists(filename) and not os.access(filename, os.W_OK):
raise CacheError(
"Insufficient permissions to write to file cache at {0}".format(filename)
)

return WriteTransaction(self._get_lock(key), acquire=lambda: WriteContextManager(path))
# TODO: this nested context manager adds a lot of complexity and
# TODO: is pretty hard to reason about in llnl.util.lock. At some
# TODO: point we should just replace it with functions and simplify
# TODO: the locking code.
class WriteContextManager:
def __enter__(cm):
cm.orig_filename = self.cache_path(key)
cm.orig_file = None
if os.path.exists(cm.orig_filename):
cm.orig_file = open(cm.orig_filename, "r")

cm.tmp_filename = self.cache_path(key) + ".tmp"
cm.tmp_file = open(cm.tmp_filename, "w")

return cm.orig_file, cm.tmp_file

def __exit__(cm, type, value, traceback):
if cm.orig_file:
cm.orig_file.close()
cm.tmp_file.close()

if value:
os.remove(cm.tmp_filename)

else:
rename(cm.tmp_filename, cm.orig_filename)

return WriteTransaction(self._get_lock(key), acquire=WriteContextManager)

def mtime(self, key) -> float:
"""Return modification time of cache file, or -inf if it does not exist.

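Both versions of write_transaction in the hunks above follow the same protocol: new content is written to "<path>.tmp" and only renamed over the original file if the block exits cleanly, otherwise the temporary file is discarded. A dependency-free sketch of that pattern, using illustrative names that are not part of the FileCache API:

import contextlib
import os

@contextlib.contextmanager
def atomic_write(path: str):
    tmp_path = path + ".tmp"
    new_file = open(tmp_path, "w")
    try:
        yield new_file
    except BaseException:
        new_file.close()
        os.remove(tmp_path)  # failure: discard the partial file
        raise
    else:
        new_file.close()
        os.replace(tmp_path, path)  # success: move the new file into place atomically

with atomic_write("demo.txt") as f:
    f.write("hello\n")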
@@ -9,30 +9,20 @@
import shlex
import sys
from subprocess import PIPE, run
from typing import Dict, List, Optional
from typing import List, Optional

import spack.spec
import spack.util.elf

#: Pattern to distinguish glibc from other libc implementations
GLIBC_PATTERN = r"\b(?:Free Software Foundation|Roland McGrath|Ulrich Depper)\b"


def _env() -> Dict[str, str]:
"""Currently only set LC_ALL=C without clearing further environment variables"""
return {**os.environ, "LC_ALL": "C"}


def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]:
try:
result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False, env=_env())
result = run([ldd, "--version"], stdout=PIPE, stderr=PIPE, check=False)
stdout = result.stdout.decode("utf-8")
except Exception:
return None

# The string "Free Software Foundation" is sometimes translated and not detected, but the names
# of the authors are typically present.
if not re.search(GLIBC_PATTERN, stdout):
if not re.search(r"\bFree Software Foundation\b", stdout):
return None

version_str = re.match(r".+\(.+\) (.+)", stdout)
@@ -48,7 +38,7 @@ def default_search_paths_from_dynamic_linker(dynamic_linker: str) -> List[str]:
"""If the dynamic linker is glibc at a certain version, we can query the hard-coded library
search paths"""
try:
result = run([dynamic_linker, "--help"], stdout=PIPE, stderr=PIPE, check=False, env=_env())
result = run([dynamic_linker, "--help"], stdout=PIPE, stderr=PIPE, check=False)
assert result.returncode == 0
out = result.stdout.decode("utf-8")
except Exception:
@@ -84,9 +74,7 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
# Now try to figure out if glibc or musl, which is the only ones we support.
# In recent glibc we can simply execute the dynamic loader. In musl that's always the case.
try:
result = run(
[dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False, env=_env()
)
result = run([dynamic_linker, "--version"], stdout=PIPE, stderr=PIPE, check=False)
stdout = result.stdout.decode("utf-8")
stderr = result.stderr.decode("utf-8")
except Exception:
@@ -103,7 +91,7 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
return spec
except Exception:
return None
elif re.search(GLIBC_PATTERN, stdout):
elif re.search(r"\bFree Software Foundation\b", stdout):
# output is like "ld.so (...) stable release version 2.33."
match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
if not match:

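One side of this hunk introduces _env() to force LC_ALL=C, so the ldd banner is matched before any translation, and GLIBC_PATTERN to also match the author names that survive translation. A standalone sketch of the same probe, copied from the diff rather than from the Spack entry points, and assuming an ldd binary on PATH:

import os
import re
from subprocess import PIPE, run

# Pattern and banner parsing as they appear in the hunk above
GLIBC_PATTERN = r"\b(?:Free Software Foundation|Roland McGrath|Ulrich Depper)\b"

result = run(
    ["ldd", "--version"], stdout=PIPE, stderr=PIPE, check=False,
    env={**os.environ, "LC_ALL": "C"},  # keep the banner untranslated
)
stdout = result.stdout.decode("utf-8")
if re.search(GLIBC_PATTERN, stdout):
    match = re.match(r".+\(.+\) (.+)", stdout)  # e.g. "ldd (GNU libc) 2.35"
    print("glibc", match.group(1) if match else "unknown")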
@@ -74,7 +74,6 @@ def replacements():
"target_family": lambda: arch.target.family,
"date": lambda: date.today().strftime("%Y-%m-%d"),
"env": lambda: ev.active_environment().path if ev.active_environment() else NOMATCH,
"spack_short_version": lambda: spack.get_short_version(),
}


@@ -155,20 +154,19 @@ def substitute_config_variables(path):

Spack allows paths in configs to have some placeholders, as follows:

- $env The active Spack environment.
- $spack The Spack instance's prefix
- $tempdir Default temporary directory returned by tempfile.gettempdir()
- $user The current user's username
- $user_cache_path The user cache directory (~/.spack, unless overridden)
- $architecture The spack architecture triple for the current system
- $arch The spack architecture triple for the current system
- $platform The spack platform for the current system
- $os The OS of the current system
- $operating_system The OS of the current system
- $target The ISA target detected for the system
- $target_family The family of the target detected for the system
- $date The current date (YYYY-MM-DD)
- $spack_short_version The spack short version
- $env The active Spack environment.
- $spack The Spack instance's prefix
- $tempdir Default temporary directory returned by tempfile.gettempdir()
- $user The current user's username
- $user_cache_path The user cache directory (~/.spack, unless overridden)
- $architecture The spack architecture triple for the current system
- $arch The spack architecture triple for the current system
- $platform The spack platform for the current system
- $os The OS of the current system
- $operating_system The OS of the current system
- $target The ISA target detected for the system
- $target_family The family of the target detected for the system
- $date The current date (YYYY-MM-DD)

These are substituted case-insensitively into the path, and users can
use either ``$var`` or ``${var}`` syntax for the variables. $env is only

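For reference, the placeholders documented in this truncated docstring are expanded by spack.util.path.substitute_config_variables; a short usage sketch, assuming a Spack checkout is importable (the concrete output depends on the machine and user):

import spack.util.path

# Expands to something like "/tmp/<username>/spack-stage" on a typical Linux host
print(spack.util.path.substitute_config_variables("$tempdir/$user/spack-stage"))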
Some files were not shown because too many files have changed in this diff.