Compare commits
1 Commits
features/p...revert-341

| Author | SHA1 | Date |
|---|---|---|
|  | 4583161224 |  |
4 .github/workflows/audit.yaml vendored

@@ -19,8 +19,8 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
22 .github/workflows/bootstrap.yml vendored

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
2 .github/workflows/build-containers.yml vendored

@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
2 .github/workflows/ci.yaml vendored

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
         if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0
4 .github/workflows/setup_git.ps1 vendored

@@ -1,4 +1,6 @@
-# (c) 2022 Lawrence Livermore National Laboratory
+# (c) 2021 Lawrence Livermore National Laboratory
+
+Set-Location spack

 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
20 .github/workflows/unit_tests.yaml vendored

@@ -47,10 +47,10 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -94,10 +94,10 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -133,7 +133,7 @@ jobs:
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -145,16 +145,16 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack -d bootstrap now --dev
          spack -d solve zlib
          spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -185,10 +185,10 @@ jobs:
        matrix:
          python-version: ["3.10"]
    steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
8 .github/workflows/valid-style.yml vendored

@@ -18,8 +18,8 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
175 .github/workflows/windows_python.yml vendored

@@ -10,15 +10,15 @@ concurrency:
 defaults:
   run:
     shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+      powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
 jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -26,11 +26,13 @@ jobs:
           python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
       - name: Create local develop
         run: |
-          ./.github/workflows/setup_git.ps1
+          .\spack\.github\workflows\setup_git.ps1
       - name: Unit Test
         run: |
+          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
+          cd spack
           dir
           spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
           ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -39,10 +41,10 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -50,11 +52,12 @@ jobs:
           python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
       - name: Create local develop
         run: |
-          ./.github/workflows/setup_git.ps1
+          .\spack\.github\workflows\setup_git.ps1
       - name: Command Unit Test
         run: |
+          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
           cd spack
           spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
           ./share/spack/qa/validate_last_exit.ps1
           coverage combine -a
           coverage xml
       - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -63,10 +66,10 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
         with:
           python-version: 3.9
       - name: Install Python packages
@@ -75,81 +78,81 @@ jobs:
       - name: Build Test
         run: |
           spack compiler find
+          echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
           spack external find cmake
           spack external find ninja
           spack -d install abseil-cpp
-  # TODO: johnwparent - reduce the size of the installer operations
-  # make-installer:
-  #   runs-on: windows-latest
-  #   steps:
-  #     - name: Disable Windows Symlinks
-  #       run: |
-  #         git config --global core.symlinks false
-  #       shell:
-  #         powershell
-  #     - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
-  #       with:
-  #         fetch-depth: 0
-  #     - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9
-  #       with:
-  #         python-version: 3.9
-  #     - name: Install Python packages
-  #       run: |
-  #         python -m pip install --upgrade pip six pywin32 setuptools
-  #     - name: Add Light and Candle to Path
-  #       run: |
-  #         $env:WIX >> $GITHUB_PATH
-  #     - name: Run Installer
-  #       run: |
-  #         ./share/spack/qa/setup_spack_installer.ps1
-  #         spack make-installer -s . -g SILENT pkg
-  #         echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-  #       env:
-  #         ProgressPreference: SilentlyContinue
-  #     - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-  #       with:
-  #         name: Windows Spack Installer Bundle
-  #         path: ${{ env.installer_root }}\pkg\Spack.exe
-  #     - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-  #       with:
-  #         name: Windows Spack Installer
-  #         path: ${{ env.installer_root}}\pkg\Spack.msi
-  # execute-installer:
-  #   needs: make-installer
-  #   runs-on: windows-latest
-  #   defaults:
-  #     run:
-  #       shell: pwsh
-  #   steps:
-  #     - uses: actions/setup-python@2c3dd9e7e29afd70cc0950079bde6c979d1f69f9
-  #       with:
-  #         python-version: 3.9
-  #     - name: Install Python packages
-  #       run: |
-  #         python -m pip install --upgrade pip six pywin32 setuptools
-  #     - name: Setup installer directory
-  #       run: |
-  #         mkdir -p spack_installer
-  #         echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-  #     - uses: actions/download-artifact@v3
-  #       with:
-  #         name: Windows Spack Installer Bundle
-  #         path: ${{ env.spack_installer }}
-  #     - name: Execute Bundled Installer
-  #       run: |
-  #         $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
-  #         $handle = $proc.Handle # cache proc.Handle
-  #         $proc.WaitForExit();
-  #         $LASTEXITCODE
-  #       env:
-  #         ProgressPreference: SilentlyContinue
-  #     - uses: actions/download-artifact@v3
-  #       with:
-  #         name: Windows Spack Installer
-  #         path: ${{ env.spack_installer }}
-  #     - name: Execute MSI
-  #       run: |
-  #         $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
-  #         $handle = $proc.Handle # cache proc.Handle
-  #         $proc.WaitForExit();
-  #         $LASTEXITCODE
+  make-installer:
+    runs-on: windows-latest
+    steps:
+      - name: Disable Windows Symlinks
+        run: |
+          git config --global core.symlinks false
+        shell:
+          powershell
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip six pywin32 setuptools
+      - name: Add Light and Candle to Path
+        run: |
+          $env:WIX >> $GITHUB_PATH
+      - name: Run Installer
+        run: |
+          .\spack\share\spack\qa\setup_spack.ps1
+          spack make-installer -s spack -g SILENT pkg
+          echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+        env:
+          ProgressPreference: SilentlyContinue
+      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+        with:
+          name: Windows Spack Installer Bundle
+          path: ${{ env.installer_root }}\pkg\Spack.exe
+      - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+        with:
+          name: Windows Spack Installer
+          path: ${{ env.installer_root}}\pkg\Spack.msi
+  execute-installer:
+    needs: make-installer
+    runs-on: windows-latest
+    defaults:
+      run:
+        shell: pwsh
+    steps:
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip six pywin32 setuptools
+      - name: Setup installer directory
+        run: |
+          mkdir -p spack_installer
+          echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+      - uses: actions/download-artifact@v3
+        with:
+          name: Windows Spack Installer Bundle
+          path: ${{ env.spack_installer }}
+      - name: Execute Bundled Installer
+        run: |
+          $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+          $handle = $proc.Handle # cache proc.Handle
+          $proc.WaitForExit();
+          $LASTEXITCODE
+        env:
+          ProgressPreference: SilentlyContinue
+      - uses: actions/download-artifact@v3
+        with:
+          name: Windows Spack Installer
+          path: ${{ env.spack_installer }}
+      - name: Execute MSI
+        run: |
+          $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+          $handle = $proc.Handle # cache proc.Handle
+          $proc.WaitForExit();
+          $LASTEXITCODE
@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
 Spack-specific variables
 ^^^^^^^^^^^^^^^^^^^^^^^^

-Spack understands over a dozen special variables. These are:
+Spack understands several special variables. These are:

 * ``$env``: name of the currently active :ref:`environment <environments>`
 * ``$spack``: path to the prefix of this Spack installation
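A minimal standalone sketch of the substitution these two documented variables imply, with hypothetical values standing in for the real Spack prefix and active environment (Spack itself performs this expansion in spack.util.path.canonicalize_path):

```python
import os

# Hypothetical stand-ins; Spack derives these from the running installation.
SUBSTITUTIONS = {
    "$spack": "/opt/spack",  # path to the prefix of this Spack installation
    "$env": "myenv",         # name of the currently active environment
}


def expand_config_path(path):
    """Expand Spack-style variables, then ~user and $ENV_VARS."""
    for var, value in SUBSTITUTIONS.items():
        path = path.replace(var, value)
    return os.path.expanduser(os.path.expandvars(path))


print(expand_config_path("$spack/var/spack/environments/$env"))
# -> /opt/spack/var/spack/environments/myenv
```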
@@ -175,11 +175,14 @@ Spec-related modules
 ^^^^^^^^^^^^^^^^^^^^

 :mod:`spack.spec`
-  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
+  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
+  Also implements most of the logic for normalization and concretization
   of specs.

-:mod:`spack.parser`
-  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
+:mod:`spack.parse`
+  Contains some base classes for implementing simple recursive descent
+  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
+  Used by :class:`~spack.spec.SpecParser`.

 :mod:`spack.concretize`
   Contains :class:`~spack.concretize.Concretizer` implementation,
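As a rough illustration of what these modules provide (a sketch assuming a Spack checkout on sys.path, not tied to either side of the diff):

```python
# Spec() parses the spec syntax; concretization fills in whatever the
# user left unspecified (versions, targets, dependencies, ...).
from spack.spec import Spec

s = Spec("zlib@1.2.13 %gcc +shared")
print(s.name, s.versions)    # requested name and version range
print(s.variants["shared"])  # the +shared variant from the parse

s.concretize()               # delegates to spack.concretize internally
print(s.dag_hash())          # hash of the fully concrete spec
```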
@@ -2397,15 +2397,13 @@ this because uninstalling the dependency would break the package.

 ``build``, ``link``, and ``run`` dependencies all affect the hash of Spack
 packages (along with ``sha256`` sums of patches and archives used to build the
-package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
+package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of
 the ``package.py`` recipes). ``test`` dependencies do not affect the package
 hash, as they are only used to construct a test environment *after* building and
 installing a given package installation. Older versions of Spack did not include
 build dependencies in the hash, but this has been
-`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.
-
-.. |Spack v0.18| replace:: Spack ``v0.18``
-.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0
+[fixed](https://github.com/spack/spack/pull/28504) as of [Spack
+``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0)

 If the dependency type is not specified, Spack uses a default of
 ``('build', 'link')``. This is the common case for compiler languages.
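To make the dependency-type rules concrete, a hedged sketch of a hypothetical package.py (every name and the checksum are placeholders, not a real package):

```python
from spack.package import *


class Example(CMakePackage):
    """Hypothetical package illustrating dependency types."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    depends_on("cmake", type="build")            # build-time only
    depends_on("zlib")                           # default ("build", "link"): affects the hash
    depends_on("python", type=("build", "run"))  # needed to build and at runtime
    depends_on("py-pytest", type="test")         # test-only: does not affect the hash
```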
@@ -99,9 +99,7 @@ def getuid():

 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
     if is_windows:
-        # Windows path existence checks will sometimes fail on junctions/links/symlinks
-        # so check for that case
-        if os.path.exists(dst) or os.path.islink(dst):
+        if os.path.exists(dst):
             os.remove(dst)
     os.rename(src, dst)
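An aside on the hunk above: the explicit exists/remove dance is needed because os.rename raises on Windows when the destination already exists. A minimal alternative sketch (not the Spack implementation) reaches for os.replace, which overwrites an existing destination file on both POSIX and Windows:

```python
import os


def replace_file(src, dst):
    # os.replace overwrites an existing destination file on every platform,
    # though it still raises if dst is an existing directory.
    os.replace(src, dst)
```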
@@ -290,10 +288,7 @@ def groupid_to_group(x):
     shutil.copy(filename, tmp_filename)

     try:
-        # To avoid translating line endings (\n to \r\n and vis versa)
-        # we force os.open to ignore translations and use the line endings
-        # the file comes with
-        extra_kwargs = {"errors": "surrogateescape", "newline": ""}
+        extra_kwargs = {"errors": "surrogateescape"}

         # Open as a text file and filter until the end of the file is
         # reached or we found a marker in the line if it was specified
@@ -2283,17 +2278,10 @@ def add_rpath(self, *paths):
         """
         self._addl_rpaths = self._addl_rpaths | set(paths)

-    def _link(self, path, dest_dir):
-        """Perform link step of simulated rpathing, installing
-        simlinks of file in path to the dest_dir
-        location. This method deliberately prevents
-        the case where a path points to a file inside the dest_dir.
-        This is because it is both meaningless from an rpath
-        perspective, and will cause an error when Developer
-        mode is not enabled"""
+    def _link(self, path, dest):
         file_name = os.path.basename(path)
-        dest_file = os.path.join(dest_dir, file_name)
-        if os.path.exists(dest_dir) and not dest_file == path:
+        dest_file = os.path.join(dest, file_name)
+        if os.path.exists(dest):
             try:
                 symlink(path, dest_file)
                 # For py2 compatibility, we have to catch the specific Windows error code
@@ -2307,7 +2295,7 @@ def _link(self, path, dest_dir):
                     "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
                     if already_linked
                     else "library with name %s already exists at location %s."
-                    % (file_name, dest_dir)
+                    % (file_name, dest)
                 )
                 pass
             else:
@@ -23,10 +23,7 @@ def symlink(real_path, link_path):

     On Windows, use junctions if os.symlink fails.
     """
-    if not is_windows:
-        os.symlink(real_path, link_path)
-    elif _win32_can_symlink():
-        # Windows requires target_is_directory=True when the target is a dir.
+    if not is_windows or _win32_can_symlink():
         os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
     else:
         try:
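A standalone sketch of the capability probe behind a check like _win32_can_symlink, assuming only the standard library: attempt a symlink in a scratch directory and report whether it succeeded (on Windows this typically requires Developer Mode or an elevated process):

```python
import os
import tempfile


def can_symlink():
    """Return True if this process is allowed to create symlinks."""
    with tempfile.TemporaryDirectory() as tmp:
        target = os.path.join(tmp, "target")
        link = os.path.join(tmp, "link")
        open(target, "w").close()  # file to point the link at
        try:
            os.symlink(target, link)
            return True
        except (OSError, NotImplementedError):
            return False
```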
@@ -266,7 +266,10 @@ def find_by_hash(self, find_hash, mirrors_to_check=None):
             None, just assumes all configured mirrors.
         """
         if find_hash not in self._mirrors_for_spec:
-            return []
+            # Not found in the cached index, pull the latest from the server.
+            self.update(with_cooldown=True)
+        if find_hash not in self._mirrors_for_spec:
+            return None
         results = self._mirrors_for_spec[find_hash]
         if not mirrors_to_check:
             return results
@@ -415,12 +418,7 @@ def update(self, with_cooldown=False):

         if all_methods_failed:
             raise FetchCacheError(fetch_errors)
-        if fetch_errors:
-            tty.warn(
-                "The following issues were ignored while updating the indices of binary caches",
-                FetchCacheError(fetch_errors),
-            )
-        if spec_cache_regenerate_needed:
+        elif spec_cache_regenerate_needed:
             self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed)

     def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
@@ -506,9 +504,9 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):

         if fetched_hash is not None and locally_computed_hash != fetched_hash:
             msg = (
-                "Computed index hash [{0}] did not match remote [{1}, url:{2}] "
+                "Computed hash ({0}) did not match remote ({1}), "
                 "indicating error in index transmission"
-            ).format(locally_computed_hash, fetched_hash, hash_fetch_url)
+            ).format(locally_computed_hash, expect_hash)
             errors.append(RuntimeError(msg))
             # We somehow got an index that doesn't match the remote one, maybe
             # the next time we try we'll be successful.
@@ -916,6 +914,8 @@ def _fetch_spec_from_mirror(spec_url):
             return Spec.from_dict(specfile_json)
         if spec_url.endswith(".json"):
             return Spec.from_json(spec_file_contents)
+        if spec_url.endswith(".yaml"):
+            return Spec.from_yaml(spec_file_contents)

     tp = multiprocessing.pool.ThreadPool(processes=concurrency)
     try:
@@ -990,6 +990,8 @@ def file_read_method(file_path):
                 "*.spec.json.sig",
                 "--include",
                 "*.spec.json",
+                "--include",
+                "*.spec.yaml",
                 cache_prefix,
                 tmpspecsdir,
             ]
@@ -999,7 +1001,7 @@ def file_read_method(file_path):
                 "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
             )
             aws(*sync_command_args, output=os.devnull, error=os.devnull)
-            file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"])
+            file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"])
             read_fn = file_read_method
         except Exception:
             tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
@@ -1035,7 +1037,9 @@ def url_read_method(url):
             file_list = [
                 url_util.join(cache_prefix, entry)
                 for entry in web_util.list_url(cache_prefix)
-                if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
+                if entry.endswith(".yaml")
+                or entry.endswith("spec.json")
+                or entry.endswith("spec.json.sig")
             ]
             read_fn = url_read_method
         except KeyError as inst:
@@ -1097,6 +1101,14 @@ def generate_package_index(cache_prefix, concurrency=32):
         tty.error("Unabled to generate package index, {0}".format(err))
         return

+    if any(x.endswith(".yaml") for x in file_list):
+        msg = (
+            "The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for "
+            "this format will be removed in v0.20, please regenerate the build cache with a "
+            "recent Spack\n"
+        ).format(cache_prefix)
+        warnings.warn(msg)
+
     tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))

     tmpdir = tempfile.mkdtemp()
@@ -1183,7 +1195,7 @@ def generate_key_index(key_prefix, tmpdir=None):

 def _build_tarball(
     spec,
-    out_url,
+    outdir,
     force=False,
     relative=False,
     unsigned=False,
@@ -1206,7 +1218,8 @@ def _build_tarball(
     tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
     tarfile_path = os.path.join(tarfile_dir, tarfile_name)
     spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
-    remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, tmpdir))
+
+    remote_spackfile_path = url_util.join(outdir, os.path.relpath(spackfile_path, tmpdir))

     mkdirp(tarfile_dir)
     if web_util.url_exists(remote_spackfile_path):
@@ -1223,11 +1236,15 @@ def _build_tarball(
     specfile_name = tarball_name(spec, ".spec.json")
     specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
     signed_specfile_path = "{0}.sig".format(specfile_path)
+    deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml")

     remote_specfile_path = url_util.join(
-        out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
+        outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
     )
     remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
+    remote_specfile_path_deprecated = url_util.join(
+        outdir, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir))
+    )

     # If force and exists, overwrite. Otherwise raise exception on collision.
     if force:
@@ -1235,8 +1252,12 @@ def _build_tarball(
             web_util.remove_url(remote_specfile_path)
         if web_util.url_exists(remote_signed_specfile_path):
             web_util.remove_url(remote_signed_specfile_path)
-    elif web_util.url_exists(remote_specfile_path) or web_util.url_exists(
-        remote_signed_specfile_path
+        if web_util.url_exists(remote_specfile_path_deprecated):
+            web_util.remove_url(remote_specfile_path_deprecated)
+    elif (
+        web_util.url_exists(remote_specfile_path)
+        or web_util.url_exists(remote_signed_specfile_path)
+        or web_util.url_exists(remote_specfile_path_deprecated)
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

|
||||
|
||||
with open(spec_file, "r") as inputfile:
|
||||
content = inputfile.read()
|
||||
if spec_file.endswith(".json"):
|
||||
if spec_file.endswith(".yaml"):
|
||||
spec_dict = yaml.load(content)
|
||||
elif spec_file.endswith(".json"):
|
||||
spec_dict = sjson.load(content)
|
||||
else:
|
||||
raise ValueError("{0} not a valid spec file type".format(spec_file))
|
||||
raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file))
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
bchecksum = {}
|
||||
bchecksum["hash_algorithm"] = "sha256"
|
||||
@@ -1330,12 +1353,12 @@ def _build_tarball(
|
||||
# push the key to the build cache's _pgp directory so it can be
|
||||
# imported
|
||||
if not unsigned:
|
||||
push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
|
||||
push_keys(outdir, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
|
||||
|
||||
# create an index.json for the build_cache directory so specs can be
|
||||
# found
|
||||
if regenerate_index:
|
||||
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, tmpdir)))
|
||||
generate_package_index(url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir)))
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
@@ -1516,7 +1539,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
# Assumes we care more about finding a spec file by preferred ext
|
||||
# than by mirrory priority. This can be made less complicated as
|
||||
# we remove support for deprecated spec formats and buildcache layouts.
|
||||
for ext in ["json.sig", "json"]:
|
||||
for ext in ["json.sig", "json", "yaml"]:
|
||||
for mirror_to_try in mirrors_to_try:
|
||||
specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
|
||||
spackfile_url = mirror_to_try["spackfile"]
|
||||
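A distilled sketch of the lookup order the loop above implements, with a hypothetical fetch callable and mirror list standing in for the real web utilities: every mirror is tried at the preferred extension before falling back to the next format (signed JSON, then JSON, then the deprecated YAML):

```python
def find_specfile(mirrors, fetch):
    """fetch(url) is assumed to return the payload, or None on a miss."""
    for ext in ["json.sig", "json", "yaml"]:  # preference order
        for mirror in mirrors:
            url = "{0}/build_cache/spec.{1}".format(mirror, ext)
            result = fetch(url)
            if result is not None:
                return ext, result
    return None, None
```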
@@ -1553,6 +1576,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
             # the remaining mirrors, looking for one we can use.
             tarball_stage = try_fetch(spackfile_url)
             if tarball_stage:
+                if ext == "yaml":
+                    msg = (
+                        "Reading {} from mirror.\n\n\tThe YAML format for buildcaches is "
+                        "deprecated and will be removed in v0.20\n"
+                    ).format(spackfile_url)
+                    warnings.warn(msg)
+
                 return {
                     "tarball_stage": tarball_stage,
                     "specfile_stage": local_specfile_stage,
@@ -1796,6 +1826,8 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
     spackfile_path = os.path.join(stagepath, spackfile_name)
     tarfile_name = tarball_name(spec, ".tar.gz")
     tarfile_path = os.path.join(extract_to, tarfile_name)
+    deprecated_yaml_name = tarball_name(spec, ".spec.yaml")
+    deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
     json_name = tarball_name(spec, ".spec.json")
     json_path = os.path.join(extract_to, json_name)
     with closing(tarfile.open(spackfile_path, "r")) as tar:
@@ -1807,6 +1839,8 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum

     if os.path.exists(json_path):
         specfile_path = json_path
+    elif os.path.exists(deprecated_yaml_path):
+        specfile_path = deprecated_yaml_path
     else:
         raise ValueError("Cannot find spec file for {0}.".format(extract_to))

@@ -1853,8 +1887,10 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
         content = inputfile.read()
         if specfile_path.endswith(".json.sig"):
             spec_dict = Spec.extract_json_from_clearsig(content)
-        else:
+        elif specfile_path.endswith(".json"):
             spec_dict = sjson.load(content)
+        else:
+            spec_dict = syaml.load(content)

     bchecksum = spec_dict["binary_cache_checksum"]
     filename = download_result["tarball_stage"].save_filename
@@ -1866,7 +1902,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
         or int(spec_dict["buildcache_layout_version"]) < 1
     ):
         # Handle the older buildcache layout where the .spack file
-        # contains a spec json, maybe an .asc file (signature),
+        # contains a spec json/yaml, maybe an .asc file (signature),
         # and another tarball containing the actual install tree.
         tmpdir = tempfile.mkdtemp()
         try:
@@ -2017,12 +2053,17 @@ def try_direct_fetch(spec, mirrors=None):
     """
     Try to find the spec directly on the configured mirrors
     """
+    deprecated_specfile_name = tarball_name(spec, ".spec.yaml")
     specfile_name = tarball_name(spec, ".spec.json")
     signed_specfile_name = tarball_name(spec, ".spec.json.sig")
     specfile_is_signed = False
+    specfile_is_json = True
     found_specs = []

     for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
+        buildcache_fetch_url_yaml = url_util.join(
+            mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name
+        )
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name
         )
@@ -2036,19 +2077,28 @@ def try_direct_fetch(spec, mirrors=None):
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
             except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
-                tty.debug(
-                    "Did not find {0} on {1}".format(
-                        specfile_name, buildcache_fetch_url_signed_json
-                    ),
-                    url_err,
-                    level=2,
-                )
-                tty.debug(
-                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
-                    url_err_x,
-                    level=2,
-                )
-                continue
+                try:
+                    _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
+                    specfile_is_json = False
+                except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
+                    tty.debug(
+                        "Did not find {0} on {1}".format(
+                            specfile_name, buildcache_fetch_url_signed_json
+                        ),
+                        url_err,
+                        level=2,
+                    )
+                    tty.debug(
+                        "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+                        url_err_x,
+                        level=2,
+                    )
+                    tty.debug(
+                        "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml),
+                        url_err_y,
+                        level=2,
+                    )
+                    continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()

         # read the spec from the build cache file. All specs in build caches
@@ -2057,8 +2107,10 @@ def try_direct_fetch(spec, mirrors=None):
         if specfile_is_signed:
             specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
             fetched_spec = Spec.from_dict(specfile_json)
-        else:
+        elif specfile_is_json:
             fetched_spec = Spec.from_json(specfile_contents)
+        else:
+            fetched_spec = Spec.from_yaml(specfile_contents)
         fetched_spec._mark_concrete()

         found_specs.append(
@@ -2080,8 +2132,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
         spec (spack.spec.Spec): The spec to look for in binary mirrors
         mirrors_to_check (dict): Optionally override the configured mirrors
             with the mirrors in this dictionary.
-        index_only (bool): When ``index_only`` is set to ``True``, only the local
-            cache is checked, no requests are made.
+        index_only (bool): Do not attempt direct fetching of ``spec.json``
+            files from remote mirrors, only consider the indices.

     Return:
         A list of objects, each containing a ``mirror_url`` and ``spec`` key
@@ -2269,7 +2321,7 @@ def needs_rebuild(spec, mirror_url):
     specfile_path = os.path.join(cache_prefix, specfile_name)

     # Only check for the presence of the json version of the spec. If the
-    # mirror only has the json version, or doesn't have the spec at all, we
+    # mirror only has the yaml version, or doesn't have the spec at all, we
     # need to rebuild.
     return not web_util.url_exists(specfile_path)

@@ -2377,6 +2429,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
         "url": [
             tarball_name(concrete_spec, ".spec.json.sig"),
             tarball_name(concrete_spec, ".spec.json"),
+            tarball_name(concrete_spec, ".spec.yaml"),
         ],
         "path": destination,
         "required": True,
1065 lib/spack/spack/bootstrap.py Normal file

File diff suppressed because it is too large
@@ -1,25 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Function and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping
from .core import (
    all_core_root_specs,
    ensure_core_dependencies,
    ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message

__all__ = [
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
    "ensure_patchelf_in_path_or_raise",
    "all_core_root_specs",
    "ensure_environment_dependencies",
    "BootstrapEnvironment",
    "status_message",
]
@@ -1,218 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import os.path
import re
import sys
import sysconfig
import warnings

import archspec.cpu

import llnl.util.filesystem as fs
from llnl.util import tty

import spack.store
import spack.util.environment
import spack.util.executable

from .config import spec_for_current_python


def _python_import(module):
    try:
        __import__(module)
    except ImportError:
        return False
    return True


def _try_import_from_store(module, query_spec, query_info=None):
    """Return True if the module can be imported from an already
    installed spec, False otherwise.

    Args:
        module: Python module to be imported
        query_spec: spec that may provide the module
        query_info (dict or None): if a dict is passed it is populated with the
            command found and the concrete spec providing it
    """
    # If it is a string assume it's one of the root specs by this module
    if isinstance(query_spec, str):
        # We have to run as part of this python interpreter
        query_spec += " ^" + spec_for_current_python()

    installed_specs = spack.store.db.query(query_spec, installed=True)

    for candidate_spec in installed_specs:
        pkg = candidate_spec["python"].package
        module_paths = [
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
        ]  # type: list[str]
        path_before = list(sys.path)

        # NOTE: try module_paths first and last, last allows an existing version in path
        # to be picked up and used, possibly depending on something in the store, first
        # allows the bootstrap version to work when an incompatible version is in
        # sys.path
        orders = [
            module_paths + sys.path,
            sys.path + module_paths,
        ]
        for path in orders:
            sys.path = path
            try:
                _fix_ext_suffix(candidate_spec)
                if _python_import(module):
                    msg = (
                        f"[BOOTSTRAP MODULE {module}] The installed spec "
                        f'"{query_spec}/{candidate_spec.dag_hash()}" '
                        f'provides the "{module}" Python module'
                    )
                    tty.debug(msg)
                    if query_info is not None:
                        query_info["spec"] = candidate_spec
                    return True
            except Exception as exc:  # pylint: disable=broad-except
                msg = (
                    "unexpected error while trying to import module "
                    f'"{module}" from spec "{candidate_spec}" [error="{str(exc)}"]'
                )
                warnings.warn(msg)
            else:
                msg = "Spec {0} did not provide module {1}"
                warnings.warn(msg.format(candidate_spec, module))

            sys.path = path_before

    return False


def _fix_ext_suffix(candidate_spec):
    """Fix the external suffixes of Python extensions on the fly for
    platforms that may need it

    Args:
        candidate_spec (Spec): installed spec with a Python module
            to be checked.
    """
    # Here we map target families to the patterns expected
    # by pristine CPython. Only architectures with known issues
    # are included. Known issues:
    #
    # [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
    #
    _suffix_to_be_checked = {
        "ppc64le": {
            "glob": "*.cpython-*-powerpc64le-linux-gnu.so",
            "re": r".cpython-[\w]*-powerpc64le-linux-gnu.so",
            "fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so",
        }
    }

    # If the current architecture is not problematic return
    generic_target = archspec.cpu.host().family
    if str(generic_target) not in _suffix_to_be_checked:
        return

    # If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
    # the expectations, return since the package is surely good
    ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
    if ext_suffix is None:
        return

    expected = _suffix_to_be_checked[str(generic_target)]
    if fnmatch.fnmatch(ext_suffix, expected["glob"]):
        return

    # If we are here it means the current interpreter expects different names
    # than pristine CPython. So:
    # 1. Find what we have installed
    # 2. Create symbolic links for the other names, it they're not there already

    # Check if standard names are installed and if we have to create
    # link for this interpreter
    standard_extensions = fs.find(candidate_spec.prefix, expected["glob"])
    link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions]
    for file_name, link_name in zip(standard_extensions, link_names):
        if os.path.exists(link_name):
            continue
        os.symlink(file_name, link_name)

    # Check if this interpreter installed something and we have to create
    # links for a standard CPython interpreter
    non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix)
    for abs_path in non_standard_extensions:
        directory, filename = os.path.split(abs_path)
        module = filename.split(".")[0]
        link_name = os.path.join(
            directory,
            expected["fmt"].format(
                module=module, major=sys.version_info[0], minor=sys.version_info[1]
            ),
        )
        if os.path.exists(link_name):
            continue
        os.symlink(abs_path, link_name)


def _executables_in_store(executables, query_spec, query_info=None):
    """Return True if at least one of the executables can be retrieved from
    a spec in store, False otherwise.

    The different executables must provide the same functionality and are
    "alternate" to each other, i.e. the function will exit True on the first
    executable found.

    Args:
        executables: list of executables to be searched
        query_spec: spec that may provide the executable
        query_info (dict or None): if a dict is passed it is populated with the
            command found and the concrete spec providing it
    """
    executables_str = ", ".join(executables)
    msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
    tty.debug(msg.format(executables_str, query_spec))
    installed_specs = spack.store.db.query(query_spec, installed=True)
    if installed_specs:
        for concrete_spec in installed_specs:
            bin_dir = concrete_spec.prefix.bin
            # IF we have a "bin" directory and it contains
            # the executables we are looking for
            if (
                os.path.exists(bin_dir)
                and os.path.isdir(bin_dir)
                and spack.util.executable.which_string(*executables, path=bin_dir)
            ):
                spack.util.environment.path_put_first("PATH", [bin_dir])
                if query_info is not None:
                    query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
                    query_info["spec"] = concrete_spec
                return True
    return False


def _root_spec(spec_str):
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS and Windows.
    if str(spack.platforms.host()) == "darwin":
        spec_str += " %apple-clang"
    elif str(spack.platforms.host()) == "windows":
        spec_str += " %msvc"
    else:
        spec_str += " %gcc"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"

    tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}")
    return spec_str
@@ -1,169 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Manage configuration swapping for bootstrapping purposes"""

import contextlib
import os.path
import sys

from llnl.util import tty

import spack.compilers
import spack.config
import spack.environment
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.store
import spack.util.path

#: Reference counter for the bootstrapping configuration context manager
_REF_COUNT = 0


def is_bootstrapping():
    """Return True if we are in a bootstrapping context, False otherwise."""
    return _REF_COUNT > 0


def spec_for_current_python():
    """For bootstrapping purposes we are just interested in the Python
    minor version (all patches are ABI compatible with the same minor).

    See:
      https://www.python.org/dev/peps/pep-0513/
      https://stackoverflow.com/a/35801395/771663
    """
    version_str = ".".join(str(x) for x in sys.version_info[:2])
    return f"python@{version_str}"


def root_path():
    """Root of all the bootstrap related folders"""
    return spack.util.path.canonicalize_path(
        spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
    )


def store_path():
    """Path to the store used for bootstrapped software"""
    enabled = spack.config.get("bootstrap:enable", True)
    if not enabled:
        msg = 'bootstrapping is currently disabled. Use "spack bootstrap enable" to enable it'
        raise RuntimeError(msg)

    return _store_path()


@contextlib.contextmanager
def spack_python_interpreter():
    """Override the current configuration to set the interpreter under
    which Spack is currently running as the only Python external spec
    available.
    """
    python_prefix = sys.exec_prefix
    external_python = spec_for_current_python()

    entry = {
        "buildable": False,
        "externals": [{"prefix": python_prefix, "spec": str(external_python)}],
    }

    with spack.config.override("packages:python::", entry):
        yield


def _store_path():
    bootstrap_root_path = root_path()
    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))


def _config_path():
    bootstrap_root_path = root_path()
    return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))


@contextlib.contextmanager
def ensure_bootstrap_configuration():
    """Swap the current configuration for the one used to bootstrap Spack.

    The context manager is reference counted to ensure we don't swap multiple
    times if there's nested use of it in the stack. One compelling use case
    is bootstrapping patchelf during the bootstrap of clingo.
    """
    global _REF_COUNT  # pylint: disable=global-statement
    already_swapped = bool(_REF_COUNT)
    _REF_COUNT += 1
    try:
        if already_swapped:
            yield
        else:
            with _ensure_bootstrap_configuration():
                yield
    finally:
        _REF_COUNT -= 1


def _read_and_sanitize_configuration():
    """Read the user configuration that needs to be reused for bootstrapping
    and remove the entries that should not be copied over.
    """
    # Read the "config" section but pop the install tree (the entry will not be
    # considered due to the use_store context manager, so it will be confusing
    # to have it in the configuration).
    config_yaml = spack.config.get("config")
    config_yaml.pop("install_tree", None)
    user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml}
    return user_configuration


def _bootstrap_config_scopes():
    tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
    config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.ConfigScope(
            "/".join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
        tty.debug(msg.format(platform_scope.name, platform_scope.path))
    return config_scopes


def _add_compilers_if_missing():
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        new_compilers = spack.compilers.find_new_compilers()
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


@contextlib.contextmanager
def _ensure_bootstrap_configuration():
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
            spack.platforms.real_host()
        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
            bootstrap_store_path
        ):
            # Default configuration scopes excluding command line
            # and builtin but accounting for platform specific scopes
            config_scopes = _bootstrap_config_scopes()
            with spack.config.use_configuration(*config_scopes):
                # We may need to compile code from sources, so ensure we
                # have compilers for the current platform
                _add_compilers_if_missing()
                spack.config.set("bootstrap", user_configuration["bootstrap"])
                spack.config.set("config", user_configuration["config"])
                with spack.modules.disable_modules():
                    with spack_python_interpreter():
                        yield
@@ -1,575 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap Spack core dependencies from binaries.

This module contains logic to bootstrap software required by Spack from binaries served in the
bootstrapping mirrors. The logic is quite different from an installation done from a Spack user,
because of the following reasons:

1. The binaries are all compiled on the same OS for a given platform (e.g. they are compiled on
   ``centos7`` on ``linux``), but they will be installed and used on the host OS. They are also
   targeted at the most generic architecture possible. That makes the binaries difficult to reuse
   with other specs in an environment without ad-hoc logic.
2. Bootstrapping has a fallback procedure where we try to install software by default from the
   most recent binaries, and proceed to older versions of the mirror, until we try building from
   sources as a last resort. This allows us not to be blocked on architectures where we don't
   have binaries readily available, but is also not compatible with the working of environments
   (they don't have fallback procedures).
3. Among the binaries we have clingo, so we can't concretize that with clingo :-)
4. clingo, GnuPG and patchelf binaries need to be verified by sha256 sum (all the other binaries
   we might add on top of that in principle can be verified with GPG signatures).
"""

import copy
import functools
import json
import os
import os.path
import sys
import uuid

from llnl.util import tty
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.config
import spack.detection
import spack.environment
import spack.modules
import spack.paths
import spack.platforms
import spack.platforms.linux
import spack.repo
import spack.spec
import spack.store
import spack.user_environment
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version

from ._common import (
    _executables_in_store,
    _python_import,
    _root_spec,
    _try_import_from_store,
)
from .config import spack_python_interpreter, spec_for_current_python

#: Name of the file containing metadata about the bootstrapping source
METADATA_YAML_FILENAME = "metadata.yaml"

#: Whether the current platform is Windows
IS_WINDOWS = sys.platform == "win32"

#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}


def bootstrapper(bootstrapper_type):
    """Decorator to register classes implementing bootstrapping
    methods.

    Args:
        bootstrapper_type (str): string identifying the class
    """

    def _register(cls):
        _bootstrap_methods[bootstrapper_type] = cls
        return cls

    return _register
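
# Editorial sketch (hypothetical, not in the original module): a class opts into the
# registry through the decorator, after which ``create_bootstrapper`` further below can
# instantiate it from a configuration entry whose ``type`` field matches:
#
#     @bootstrapper(bootstrapper_type="my-source")
#     class MyBootstrapper(Bootstrapper):
#         ...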


class Bootstrapper:
    """Interface for "core" software bootstrappers"""

    config_scope_name = ""

    def __init__(self, conf):
        self.conf = conf
        self.name = conf["name"]
        self.url = conf["info"]["url"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

    @property
    def mirror_url(self):
        """Mirror URL associated with this bootstrapper"""
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's a URL if we find it
        if "://" in self.url:
            return self.url

        # Otherwise, it's a relative path
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        """Mirror scope to be pushed onto the bootstrapping configuration when using
        this bootstrapper.
        """
        return spack.config.InternalConfigScope(
            self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
        )

    def try_import(self, module: str, abstract_spec_str: str):  # pylint: disable=unused-argument
        """Try to import a Python module from a spec satisfying the abstract spec
        passed as argument.

        Args:
            module (str): Python module name to try importing
            abstract_spec_str (str): abstract spec that can provide the Python module

        Return:
            True if the Python module could be imported, False otherwise
        """
        return False

    def try_search_path(self, executables, abstract_spec_str):  # pylint: disable=unused-argument
        """Try to search some executables in the prefix of specs satisfying the abstract
        spec passed as argument.

        Args:
            executables (list of str): executables to be found
            abstract_spec_str (str): abstract spec that can provide the executables

        Return:
            True if the executables are found, False otherwise
        """
        return False
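
    # Editorial note: both hooks above are deliberate no-ops; the base class acts as a
    # null object and each concrete bootstrapper overrides only the lookups it supports.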


@bootstrapper(bootstrapper_type="buildcache")
class BuildcacheBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from a buildcache."""

    def __init__(self, conf):
        super().__init__(conf)
        self.last_search = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
    def _spec_and_platform(abstract_spec_str):
        """Return the spec object and platform we need to use when
        querying the buildcache.

        Args:
            abstract_spec_str: abstract spec string we are looking for
        """
        # Try to install from an unsigned binary cache
        abstract_spec = spack.spec.Spec(abstract_spec_str)
        # On Cray we want to use Linux binaries if available from mirrors
        bincache_platform = spack.platforms.real_host()
        return abstract_spec, bincache_platform

    def _read_metadata(self, package_name):
        """Return metadata about the given package."""
        json_filename = f"{package_name}.json"
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        with open(json_path, encoding="utf-8") as stream:
            data = json.load(stream)
        return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
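        # (Editorial note: this is a stub entry; nothing is compiled when installing
        # from a build cache, so /dev/null placeholders are enough for the schema.)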
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null",
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family),
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
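                # Editorial note: a spec string starting with "/" is looked up by DAG hash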
                spec_str = "/" + pkg_hash
                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
                for match in matches:
                    spack.binary_distribution.install_root_node(
                        match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
                    )

    def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
        # Ensure we see only the buildcache being used to bootstrap
        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
                raise RuntimeError("The binary index is empty")

            for item in bincache_data["verified"]:
                candidate_spec = item["spec"]
                # This will be None for things that don't depend on python
                python_spec = item.get("python", None)
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                    continue

                for _, pkg_hash, pkg_sha256 in item["binaries"]:
                    self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)

                info = {}
                if test_fn(query_spec=abstract_spec, query_info=info):
                    self.last_search = info
                    return True
        return False

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True

        tty.debug(f"Bootstrapping {module} from pre-built binaries")
        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str + " ^" + spec_for_current_python()
        )
        data = self._read_metadata(module)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables, abstract_spec_str):
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str)
        tty.debug(f"Bootstrapping {abstract_spec.name} from pre-built binaries")
        data = self._read_metadata(abstract_spec.name)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)


@bootstrapper(bootstrapper_type="install")
class SourceBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from sources."""

    def __init__(self, conf):
        super().__init__(conf)
        self.last_search = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module, abstract_spec_str):
        info = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {module} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False

    def try_search_path(self, executables, abstract_spec_str):
        info = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {abstract_spec_str} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False


def create_bootstrapper(conf):
    """Return a bootstrap object built according to the configuration argument"""
    btype = conf["type"]
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")


def ensure_module_importable_or_raise(module, abstract_spec=None):
    """Make the requested module available for import, or raise.

    This function tries to import a Python module in the current interpreter
    using, in order, the methods configured in bootstrap.yaml.

    If none of the methods succeed, an exception is raised. The function exits
    on first success.

    Args:
        module (str): module to be imported in the current interpreter
        abstract_spec (str): abstract spec that might provide the module. If not
            given it defaults to the module name

    Raises:
        ImportError: if the module couldn't be imported
    """
    # If we can import it already, that's great
    tty.debug(f"[BOOTSTRAP MODULE {module}] Try importing from Python")
    if _python_import(module):
        return

    abstract_spec = abstract_spec or module

    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
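        # Editorial note: forward() is assumed to record an exception raised in its
        # block under this source's name, so remaining sources are still tried.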
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
                return

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {module}"
    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
    msg += "\nRun `spack --debug ...` for more detailed errors"
    raise ImportError(msg)


def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None):
    """Ensure that some executables are in path or raise.

    Args:
        executables (list): list of executables to be searched in the PATH,
            in order. The function exits on the first one found.
        abstract_spec (str): abstract spec that provides the executables
        cmd_check (object): callable predicate that takes a
            ``spack.util.executable.Executable`` command and validates it. Should return
            ``True`` if the executable is acceptable, ``False`` otherwise.
            Can be used to, e.g., ensure a suitable version of the command before
            accepting it for bootstrapping.

    Raises:
        RuntimeError: if the executables cannot be ensured to be in PATH

    Return:
        Executable object

    """
    cmd = spack.util.executable.which(*executables)
    if cmd:
        if not cmd_check or cmd_check(cmd):
            return cmd

    executables_str = ", ".join(executables)

    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = (
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
                env_mods = spack.util.environment.EnvironmentModifications()
                for dep in concrete_spec.traverse(
                    root=True, order="post", deptype=("link", "run")
                ):
                    env_mods.extend(
                        spack.user_environment.environment_modifications_for_spec(
                            dep, set_package_py_globals=False
                        )
                    )
                cmd.add_default_envmod(env_mods)
                return cmd

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {executables_str}"
    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
    msg += "\nRun `spack --debug ...` for more detailed errors"
    raise RuntimeError(msg)


def _add_externals_if_missing():
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class("cmake"),
        spack.repo.path.get_pkg_class("bison"),
        # GnuPG
        spack.repo.path.get_pkg_class("gawk"),
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")


def clingo_root_spec():
    """Return the root spec used to bootstrap clingo"""
    return _root_spec("clingo-bootstrap@spack+python")


def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec())


def gnupg_root_spec():
    """Return the root spec used to bootstrap GnuPG"""
    return _root_spec("gnupg@2.3:")


def ensure_gpg_in_path_or_raise():
    """Ensure gpg or gpg2 are in the PATH or raise."""
    return ensure_executables_in_path_or_raise(
        executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec()
    )


def patchelf_root_spec():
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
    return _root_spec("patchelf@0.13.1:")


def verify_patchelf(patchelf):
    """Older patchelf versions can produce broken binaries, so we
    verify the version here.

    Arguments:

        patchelf (spack.util.executable.Executable): patchelf executable
    """
    out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip()
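    # Editorial note: the expected output looks like "patchelf 0.14.3" (version number
    # illustrative), so the version is the second whitespace-separated token.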
    if patchelf.returncode != 0:
        return False
    parts = out.split(" ")
    if len(parts) < 2:
        return False
    try:
        version = spack.version.Version(parts[1])
    except ValueError:
        return False
    return version >= spack.version.Version("0.13.1")


def ensure_patchelf_in_path_or_raise():
    """Ensure patchelf is in the PATH or raise."""
    # The old concretizer is not smart and we're doing its job: if the latest patchelf
    # does not concretize because the compiler doesn't support C++17, we try to
    # concretize again with an upperbound @:13.
    try:
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"], abstract_spec=patchelf_root_spec(), cmd_check=verify_patchelf
        )
    except RuntimeError:
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"],
            abstract_spec=_root_spec("patchelf@0.13.1:0.13"),
            cmd_check=verify_patchelf,
        )


def ensure_core_dependencies():
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
    if not IS_WINDOWS:
        ensure_gpg_in_path_or_raise()
    ensure_clingo_importable_or_raise()


def all_core_root_specs():
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]


def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope)
    source_configs = source_configs or []
    list_of_sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
    return list_of_sources
@@ -1,191 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
import warnings

import archspec.cpu

from llnl.util import tty

import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.executable

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path


class BootstrapEnvironment(spack.environment.Environment):
    """Environment to install dependencies of Spack for a given interpreter and architecture"""

    @classmethod
    def spack_dev_requirements(cls):
        """Spack development requirements"""
        return [
            isort_root_spec(),
            mypy_root_spec(),
            black_root_spec(),
            flake8_root_spec(),
            pytest_root_spec(),
        ]

    @classmethod
    def environment_root(cls):
        """Environment root directory"""
        bootstrap_root_path = root_path()
        python_part = spec_for_current_python().replace("@", "")
        arch_part = archspec.cpu.host().family
        interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5]
        environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
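        # Editorial example: "python3.9-x86_64-0f3c2" (illustrative values only)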
        return pathlib.Path(
            spack.util.path.canonicalize_path(
                os.path.join(bootstrap_root_path, "environments", environment_dir)
            )
        )

    @classmethod
    def view_root(cls):
        """Location of the view"""
        return cls.environment_root().joinpath("view")

    @classmethod
    def pythonpaths(cls):
        """Paths to be added to sys.path or PYTHONPATH"""
        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
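        # Editorial example: "python3.10" when running under CPython 3.10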
        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
        result = glob.glob(glob_expr)
        if not result:
            msg = f"Cannot find any Python path in {cls.view_root()}"
            warnings.warn(msg)
        return result

    @classmethod
    def bin_dirs(cls):
        """Paths to be added to PATH"""
        return [cls.view_root().joinpath("bin")]

    @classmethod
    def spack_yaml(cls):
        """Environment spack.yaml file"""
        return cls.environment_root().joinpath("spack.yaml")

    def __init__(self):
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

    def update_installations(self):
        """Update the installations of this environment.

        The update is done using a depfile on Linux and macOS, and using the ``install_all``
        method of environments on Windows.
        """
        with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
            specs = self.concretize()
        if specs:
            colorized_specs = [
                spack.spec.Spec(x).cformat("{name}{@version}")
                for x in self.spack_dev_requirements()
            ]
            tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
            self.write(regenerate=False)
            if sys.platform == "win32":
                self.install_all()
            else:
                self._install_with_depfile()
            self.write(regenerate=True)

    def update_syspath_and_environ(self):
        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
        the environment view.
        """
        # Do minimal modifications to sys.path and environment variables. In particular, pay
        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
        # the performance of the current interpreter
        sys.path.extend(self.pythonpaths())
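        # Editorial note: the view's bin directories are prepended below, so
        # bootstrapped executables shadow same-named system ones in this process.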
        os.environ["PATH"] = os.pathsep.join(
            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
        )
        os.environ["PYTHONPATH"] = os.pathsep.join(
            os.environ.get("PYTHONPATH", "").split(os.pathsep)
            + [str(x) for x in self.pythonpaths()]
        )

    def _install_with_depfile(self):
        spackcmd = spack.util.executable.which("spack")
        spackcmd(
            "-e",
            str(self.environment_root()),
            "env",
            "depfile",
            "-o",
            str(self.environment_root().joinpath("Makefile")),
        )
        make = spack.util.executable.which("make")
        kwargs = {}
        if not tty.is_debug():
            kwargs = {"output": os.devnull, "error": os.devnull}
        make(
            "-C",
            str(self.environment_root()),
            "-j",
            str(spack.build_environment.determine_number_of_jobs(parallel=True)),
            **kwargs,
        )

    def _write_spack_yaml_file(self):
        tty.msg(
            "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
        )
        env = spack.tengine.make_environment()
        template = env.get_template("bootstrap/spack.yaml")
        context = {
            "python_spec": spec_for_current_python(),
            "python_prefix": sys.exec_prefix,
            "architecture": archspec.cpu.host().family,
            "environment_path": self.environment_root(),
            "environment_specs": self.spack_dev_requirements(),
            "store_path": store_path(),
        }
        self.environment_root().mkdir(parents=True, exist_ok=True)
        self.spack_yaml().write_text(template.render(context), encoding="utf-8")


def isort_root_spec():
    """Return the root spec used to bootstrap isort"""
    return _root_spec("py-isort@4.3.5:")


def mypy_root_spec():
    """Return the root spec used to bootstrap mypy"""
    return _root_spec("py-mypy@0.900:")


def black_root_spec():
    """Return the root spec used to bootstrap black"""
    return _root_spec("py-black")


def flake8_root_spec():
    """Return the root spec used to bootstrap flake8"""
    return _root_spec("py-flake8")


def pytest_root_spec():
    """Return the root spec used to bootstrap pytest"""
    return _root_spec("py-pytest")


def ensure_environment_dependencies():
    """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()
@@ -1,169 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Query the status of bootstrapping on this machine"""
import platform

import spack.util.executable

from ._common import _executables_in_store, _python_import, _try_import_from_store
from .config import ensure_bootstrap_configuration
from .core import clingo_root_spec, patchelf_root_spec
from .environment import (
    BootstrapEnvironment,
    black_root_spec,
    flake8_root_spec,
    isort_root_spec,
    mypy_root_spec,
    pytest_root_spec,
)


def _required_system_executable(exes, msg):
    """Search for an executable in the system path only."""
    if isinstance(exes, str):
        exes = (exes,)
    if spack.util.executable.which_string(*exes):
        return True, None
    return False, msg


def _required_executable(exes, query_spec, msg):
    """Search for an executable in the system path or in the bootstrap store."""
    if isinstance(exes, str):
        exes = (exes,)
    if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
        return True, None
    return False, msg


def _required_python_module(module, query_spec, msg):
    """Check if a Python module is available in the current interpreter or
    if it can be loaded from the bootstrap store
    """
    if _python_import(module) or _try_import_from_store(module, query_spec):
        return True, None
    return False, msg


def _missing(name, purpose, system_only=True):
    """Message to be printed if an executable is not found"""
    msg = '[{2}] MISSING "{0}": {1}'
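    # Editorial note: the yellow marker encodes how a missing dependency can be
    # satisfied: "B" when Spack can bootstrap it itself, "-" when it must come
    # from the system.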
    if not system_only:
        return msg.format(name, purpose, "@*y{{B}}")
    return msg.format(name, purpose, "@*y{{-}}")


def _core_requirements():
    _core_system_exes = {
        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "bash": _missing("bash", "required for Spack compiler wrapper"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
        "unzip": _missing("unzip", "required to compress/decompress code archives"),
        "bzip2": _missing("bzip2", "required to compress/decompress code archives"),
        "git": _missing("git", "required to fetch/manage git repositories"),
    }
    if platform.system().lower() == "linux":
        _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")

    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _core_system_exes.items()]
    # Python modules
    result.append(
        _required_python_module(
            "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False)
        )
    )
    return result


def _buildcache_requirements():
    _buildcache_exes = {
        "file": _missing("file", "required to analyze files for buildcaches"),
        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
    }
    if platform.system().lower() == "darwin":
        _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")

    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]

    if platform.system().lower() == "linux":
        result.append(
            _required_executable(
                "patchelf",
                patchelf_root_spec(),
                _missing("patchelf", "required to relocate binaries", False),
            )
        )

    return result


def _optional_requirements():
    _optional_exes = {
        "zstd": _missing("zstd", "required to compress/decompress code archives"),
        "svn": _missing("svn", "required to manage subversion repositories"),
        "hg": _missing("hg", "required to manage mercurial repositories"),
    }
    # Executables that are not bootstrapped yet
    result = [_required_system_executable(exe, msg) for exe, msg in _optional_exes.items()]
    return result


def _development_requirements():
    # Ensure we trigger environment modifications if we have an environment
    if BootstrapEnvironment.spack_yaml().exists():
        with BootstrapEnvironment() as env:
            env.update_syspath_and_environ()

    return [
        _required_executable(
            "isort", isort_root_spec(), _missing("isort", "required for style checks", False)
        ),
        _required_executable(
            "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False)
        ),
        _required_executable(
            "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False)
        ),
        _required_executable(
            "black", black_root_spec(), _missing("black", "required for code formatting", False)
        ),
        _required_python_module(
            "pytest", pytest_root_spec(), _missing("pytest", "required to run unit tests", False)
        ),
    ]


def status_message(section):
    """Return a status message, to be printed to screen, that refers to the
    section passed as argument, and a bool that is True if there are missing
    dependencies.

    Args:
        section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
    """
    pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}"

    # Contains the header of the section and a list of requirements
    spack_sections = {
        "core": ("{0} @*{{Core Functionalities}}", _core_requirements),
        "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements),
        "optional": ("{0} @*{{Optional Features}}", _optional_requirements),
        "develop": ("{0} @*{{Development Dependencies}}", _development_requirements),
    }
    msg, required_software = spack_sections[section]

    with ensure_bootstrap_configuration():
        missing_software = False
        for found, err_msg in required_software():
            if not found:
                missing_software = True
                msg += "\n " + err_msg
        msg += "\n"
    msg = msg.format(pass_token if not missing_software else fail_token)
    return msg, missing_software
@@ -37,12 +37,13 @@
import multiprocessing
import os
import re
import shutil
import sys
import traceback
import types
from typing import List, Tuple

import llnl.util.tty as tty
from llnl.util.filesystem import install, install_tree, mkdirp
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
@@ -50,7 +51,6 @@

import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.config
import spack.install_test
@@ -287,7 +287,7 @@ def clean_environment():
def _add_werror_handling(keep_werror, env):
    keep_flags = set()
    # set of pairs
    replace_flags: List[Tuple[str, str]] = []
    replace_flags = []  # type: List[Tuple[str,str]]
    if keep_werror == "all":
        keep_flags.add("-Werror*")
    else:
@@ -585,6 +585,9 @@ def set_module_variables_for_package(pkg):
    m.gmake = MakeExecutable("gmake", jobs)
    m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)

    # easy shortcut to os.environ
    m.env = os.environ

    # Find the configure script in the archive path
    # Don't use which for this; we want to find it in the current dir.
    m.configure = Executable("./configure")
@@ -604,6 +607,21 @@ def set_module_variables_for_package(pkg):
    m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
    m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])

    # Emulate some shell commands for convenience
    m.pwd = os.getcwd
    m.cd = os.chdir
    m.mkdir = os.mkdir
    m.makedirs = os.makedirs
    m.remove = os.remove
    m.removedirs = os.removedirs
    m.symlink = symlink

    m.mkdirp = mkdirp
    m.install = install
    m.install_tree = install_tree
    m.rmtree = shutil.rmtree
    m.move = shutil.move

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
    m.prefix = pkg.prefix

@@ -7,7 +7,7 @@
import os.path
import stat
import subprocess
from typing import List
from typing import List  # novm

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -138,7 +138,7 @@ class AutotoolsBuilder(BaseBuilder):
    patch_libtool = True

    #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
    build_targets: List[str] = []
    build_targets = []  # type: List[str]
    #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
    install_targets = ["install"]

@@ -152,7 +152,7 @@ class AutotoolsBuilder(BaseBuilder):
    force_autoreconf = False

    #: Options to be passed to autoreconf when using the default implementation
    autoreconf_extra_args: List[str] = []
    autoreconf_extra_args = []  # type: List[str]

    #: If False deletes all the .la files in the prefix folder after the installation.
    #: If True instead it installs them.

@@ -34,22 +34,22 @@ class CachedCMakeBuilder(CMakeBuilder):

    #: Phases of a Cached CMake package
    #: Note: the initconfig phase is used for developer builds as a final phase to stop on
    phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")
    phases = ("initconfig", "cmake", "build", "install")  # type: Tuple[str, ...]

    #: Names associated with package methods in the old build-system format
    legacy_methods: Tuple[str, ...] = CMakeBuilder.legacy_methods + (
    legacy_methods = CMakeBuilder.legacy_methods + (
        "initconfig_compiler_entries",
        "initconfig_mpi_entries",
        "initconfig_hardware_entries",
        "std_initconfig_entries",
        "initconfig_package_entries",
    )
    )  # type: Tuple[str, ...]

    #: Names associated with package attributes in the old build-system format
    legacy_attributes: Tuple[str, ...] = CMakeBuilder.legacy_attributes + (
    legacy_attributes = CMakeBuilder.legacy_attributes + (
        "cache_name",
        "cache_path",
    )
    )  # type: Tuple[str, ...]

    @property
    def cache_name(self):

@@ -153,13 +153,13 @@ class CMakeBuilder(BaseBuilder):
    """

    #: Phases of a CMake package
    phases: Tuple[str, ...] = ("cmake", "build", "install")
    phases = ("cmake", "build", "install")  # type: Tuple[str, ...]

    #: Names associated with package methods in the old build-system format
    legacy_methods: Tuple[str, ...] = ("cmake_args", "check")
    legacy_methods = ("cmake_args", "check")  # type: Tuple[str, ...]

    #: Names associated with package attributes in the old build-system format
    legacy_attributes: Tuple[str, ...] = (
    legacy_attributes = (
        "generator",
        "build_targets",
        "install_targets",
@@ -169,7 +169,7 @@ class CMakeBuilder(BaseBuilder):
        "std_cmake_args",
        "build_dirname",
        "build_directory",
    )
    )  # type: Tuple[str, ...]

    #: The build system generator to use.
    #:
@@ -182,7 +182,7 @@ class CMakeBuilder(BaseBuilder):
    generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"

    #: Targets to be used during the build phase
    build_targets: List[str] = []
    build_targets = []  # type: List[str]
    #: Targets to be used during the install phase
    install_targets = ["install"]
    #: Callback names for build-time test

@@ -35,10 +35,10 @@ class GenericBuilder(BaseBuilder):
    phases = ("install",)

    #: Names associated with package methods in the old build-system format
    legacy_methods: Tuple[str, ...] = ()
    legacy_methods = ()  # type: Tuple[str, ...]

    #: Names associated with package attributes in the old build-system format
    legacy_attributes: Tuple[str, ...] = ("archive_files",)
    legacy_attributes = ("archive_files",)  # type: Tuple[str, ...]

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

@@ -13,7 +13,7 @@ class GNUMirrorPackage(spack.package_base.PackageBase):
    """Mixin that takes care of setting url and mirrors for GNU packages."""

    #: Path of the package in a GNU mirror
    gnu_mirror_path: Optional[str] = None
    gnu_mirror_path = None  # type: Optional[str]

    #: List of GNU mirrors used by Spack
    base_mirrors = [

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List
from typing import List  # novm

import llnl.util.filesystem as fs

@@ -77,7 +77,7 @@ class MakefileBuilder(BaseBuilder):
    )

    #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
    build_targets: List[str] = []
    build_targets = []  # type: List[str]
    #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
    install_targets = ["install"]


@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
from typing import List
from typing import List  # novm

import llnl.util.filesystem as fs

@@ -95,7 +95,7 @@ class MesonBuilder(BaseBuilder):
        "build_directory",
    )

    build_targets: List[str] = []
    build_targets = []  # type: List[str]
    install_targets = ["install"]

    build_time_test_callbacks = ["check"]

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List
from typing import List  # novm

import llnl.util.filesystem as fs

@@ -72,7 +72,7 @@ class NMakeBuilder(BaseBuilder):
    )

    #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
    build_targets: List[str] = []
    build_targets = []  # type: List[str]
    #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
    install_targets = ["install"]


@@ -10,7 +10,6 @@

from llnl.util.filesystem import find_headers, find_libraries, join_path

from spack.directives import conflicts
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

@@ -26,16 +25,6 @@ class IntelOneApiPackage(Package):
    # organization (e.g. University/Company).
    redistribute_source = False

    for c in [
        "target=ppc64:",
        "target=ppc64le:",
        "target=aarch64:",
        "platform=darwin:",
        "platform=cray:",
        "platform=windows:",
    ]:
        conflicts(c, msg="This package is only available for x86_64 and Linux")
|
||||
|
||||
@staticmethod
|
||||
def update_description(cls):
|
||||
"""Updates oneapi package descriptions with common text."""
|
||||
|
||||
@@ -177,7 +177,7 @@ class PythonPackage(PythonExtension):
|
||||
"""Specialized class for packages that are built using pip."""
|
||||
|
||||
#: Package name, version, and extension on PyPI
|
||||
pypi: Optional[str] = None
|
||||
pypi = None # type: Optional[str]
|
||||
|
||||
maintainers = ["adamjstewart", "pradyunsg"]
|
||||
|
||||
@@ -200,7 +200,7 @@ class PythonPackage(PythonExtension):
|
||||
# package manually
|
||||
depends_on("py-wheel", type="build")
|
||||
|
||||
py_namespace: Optional[str] = None
|
||||
py_namespace = None # type: Optional[str]
|
||||
|
||||
@lang.classproperty
|
||||
def homepage(cls):
|
||||
|
||||
@@ -22,10 +22,10 @@ class RBuilder(GenericBuilder):
|
||||
"""
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods: Tuple[str, ...] = (
|
||||
legacy_methods = (
|
||||
"configure_args",
|
||||
"configure_vars",
|
||||
) + GenericBuilder.legacy_methods
|
||||
) + GenericBuilder.legacy_methods # type: Tuple[str, ...]
|
||||
|
||||
def configure_args(self):
|
||||
"""Arguments to pass to install via ``--configure-args``."""
|
||||
@@ -64,10 +64,10 @@ class RPackage(Package):
|
||||
# package attributes that can be expanded to set the homepage, url,
|
||||
# list_url, and git values
|
||||
# For CRAN packages
|
||||
cran: Optional[str] = None
|
||||
cran = None # type: Optional[str]
|
||||
|
||||
# For Bioconductor packages
|
||||
bioc: Optional[str] = None
|
||||
bioc = None # type: Optional[str]
|
||||
|
||||
GenericBuilder = RBuilder
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ class RacketPackage(PackageBase):
|
||||
|
||||
extends("racket", when="build_system=racket")
|
||||
|
||||
racket_name: Optional[str] = None
|
||||
racket_name = None # type: Optional[str]
|
||||
parallel = True
|
||||
|
||||
@lang.classproperty
|
||||
@@ -51,7 +51,7 @@ class RacketBuilder(spack.builder.Builder):
|
||||
phases = ("install",)
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods: Tuple[str, ...] = tuple()
|
||||
legacy_methods = tuple() # type: Tuple[str, ...]
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
|
||||
@@ -59,7 +59,7 @@ class RacketBuilder(spack.builder.Builder):
|
||||
#: Callback names for build-time test
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
racket_name: Optional[str] = None
|
||||
racket_name = None # type: Optional[str]
|
||||
|
||||
@property
|
||||
def subdirectory(self):
|
||||
|
||||
@@ -46,10 +46,10 @@ class SConsBuilder(BaseBuilder):
|
||||
phases = ("build", "install")
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
legacy_methods = ("build_test",)
|
||||
legacy_methods = ("install_args", "build_test")
|
||||
|
||||
#: Same as legacy_methods, but the signature is different
|
||||
legacy_long_methods = ("build_args", "install_args")
|
||||
legacy_long_methods = ("build_args",)
|
||||
|
||||
#: Names associated with package attributes in the old build-system format
|
||||
legacy_attributes = ("build_time_test_callbacks",)
|
||||
@@ -66,13 +66,13 @@ def build(self, pkg, spec, prefix):
|
||||
args = self.build_args(spec, prefix)
|
||||
inspect.getmodule(self.pkg).scons(*args)
|
||||
|
||||
def install_args(self, spec, prefix):
|
||||
def install_args(self):
|
||||
"""Arguments to pass to install."""
|
||||
return []
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install the package."""
|
||||
args = self.install_args(spec, prefix)
|
||||
args = self.install_args()
|
||||
|
||||
inspect.getmodule(self.pkg).scons("install", *args)
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ class SourceforgePackage(spack.package_base.PackageBase):
|
||||
packages."""
|
||||
|
||||
#: Path of the package in a Sourceforge mirror
|
||||
sourceforge_mirror_path: Optional[str] = None
|
||||
sourceforge_mirror_path = None # type: Optional[str]
|
||||
|
||||
#: List of Sourceforge mirrors used by Spack
|
||||
base_mirrors = [
|
||||
|
||||
@@ -13,7 +13,7 @@ class SourcewarePackage(spack.package_base.PackageBase):
|
||||
packages."""
|
||||
|
||||
#: Path of the package in a Sourceware mirror
|
||||
sourceware_mirror_path: Optional[str] = None
|
||||
sourceware_mirror_path = None # type: Optional[str]
|
||||
|
||||
#: List of Sourceware mirrors used by Spack
|
||||
base_mirrors = [
|
||||
|
||||
@@ -14,7 +14,7 @@ class XorgPackage(spack.package_base.PackageBase):
|
||||
packages."""
|
||||
|
||||
#: Path of the package in a x.org mirror
|
||||
xorg_mirror_path: Optional[str] = None
|
||||
xorg_mirror_path = None # type: Optional[str]
|
||||
|
||||
#: List of x.org mirrors used by Spack
|
||||
# Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
|
||||
|
||||
@@ -466,19 +466,19 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
|
||||
"""
|
||||
|
||||
#: Sequence of phases. Must be defined in derived classes
|
||||
phases: Tuple[str, ...] = ()
|
||||
phases = () # type: Tuple[str, ...]
|
||||
#: Build system name. Must also be defined in derived classes.
|
||||
build_system: Optional[str] = None
|
||||
build_system = None # type: Optional[str]
|
||||
|
||||
legacy_methods: Tuple[str, ...] = ()
|
||||
legacy_attributes: Tuple[str, ...] = ()
|
||||
legacy_methods = () # type: Tuple[str, ...]
|
||||
legacy_attributes = () # type: Tuple[str, ...]
|
||||
|
||||
#: List of glob expressions. Each expression must either be
|
||||
#: absolute or relative to the package source path.
|
||||
#: Matching artifacts found at the end of the build process will be
|
||||
#: copied in the same directory tree as _spack_build_logfile and
|
||||
#: _spack_build_envfile.
|
||||
archive_files: List[str] = []
|
||||
archive_files = [] # type: List[str]
|
||||
|
||||
def __init__(self, pkg):
|
||||
self.pkg = pkg
|
||||
|
||||
@@ -49,7 +49,6 @@
|
||||
|
||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
|
||||
|
||||
spack_gpg = spack.main.SpackCommand("gpg")
|
||||
spack_compiler = spack.main.SpackCommand("compiler")
|
||||
@@ -730,12 +729,6 @@ def generate_gitlab_ci_yaml(
|
||||
# won't fetch its index and include in our local cache.
|
||||
spack.mirror.add("ci_pr_mirror", remote_mirror_override, cfg.default_modify_scope())
|
||||
|
||||
shared_pr_mirror = None
|
||||
if spack_pipeline_type == "spack_pull_request":
|
||||
stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
|
||||
shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
|
||||
spack.mirror.add("ci_shared_pr_mirror", shared_pr_mirror, cfg.default_modify_scope())
|
||||
|
||||
pipeline_artifacts_dir = artifacts_root
|
||||
if not pipeline_artifacts_dir:
|
||||
proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
|
||||
@@ -810,8 +803,6 @@ def generate_gitlab_ci_yaml(
|
||||
# Clean up remote mirror override if enabled
|
||||
if remote_mirror_override:
|
||||
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
||||
if spack_pipeline_type == "spack_pull_request":
|
||||
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
|
||||
|
||||
all_job_names = []
|
||||
output_object = {}
|
||||
@@ -1264,7 +1255,7 @@ def generate_gitlab_ci_yaml(
|
||||
|
||||
final_job["stage"] = "stage-rebuild-index"
|
||||
final_job["script"] = [
|
||||
"spack buildcache update-index --keys --mirror-url {0}".format(index_target_mirror)
|
||||
"spack buildcache update-index --keys -d {0}".format(index_target_mirror)
|
||||
]
|
||||
final_job["when"] = "always"
|
||||
final_job["retry"] = service_job_retries
|
||||
@@ -1301,7 +1292,6 @@ def generate_gitlab_ci_yaml(
|
||||
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
|
||||
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
|
||||
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
||||
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
|
||||
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
|
||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
import shlex
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
from typing import List, Match, Tuple
|
||||
from typing import List, Tuple
|
||||
|
||||
import ruamel.yaml as yaml
|
||||
from ruamel.yaml.error import MarkedYAMLError
|
||||
@@ -26,7 +26,6 @@
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.extensions
|
||||
import spack.parser
|
||||
import spack.paths
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -166,15 +165,18 @@ class _UnquotedFlags(object):
|
||||
)
|
||||
)
|
||||
|
||||
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||
def __init__(self, all_unquoted_flag_pairs):
|
||||
# type: (List[Tuple[re.Match, str]]) -> None
|
||||
self._flag_pairs = all_unquoted_flag_pairs
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
def __bool__(self):
|
||||
# type: () -> bool
|
||||
return bool(self._flag_pairs)
|
||||
|
||||
@classmethod
|
||||
def extract(cls, sargs: str) -> "_UnquotedFlags":
|
||||
all_unquoted_flag_pairs: List[Tuple[Match[str], str]] = []
|
||||
def extract(cls, sargs):
|
||||
# type: (str) -> _UnquotedFlags
|
||||
all_unquoted_flag_pairs = [] # type: List[Tuple[re.Match, str]]
|
||||
prev_flags_arg = None
|
||||
for arg in shlex.split(sargs):
|
||||
if prev_flags_arg is not None:
|
||||
@@ -182,7 +184,8 @@ def extract(cls, sargs: str) -> "_UnquotedFlags":
|
||||
prev_flags_arg = cls.flags_arg_pattern.match(arg)
|
||||
return cls(all_unquoted_flag_pairs)
|
||||
|
||||
def report(self) -> str:
|
||||
def report(self):
|
||||
# type: () -> str
|
||||
single_errors = [
|
||||
"({0}) {1} {2} => {3}".format(
|
||||
i + 1,
|
||||
@@ -218,7 +221,7 @@ def parse_specs(args, **kwargs):
|
||||
unquoted_flags = _UnquotedFlags.extract(sargs)
|
||||
|
||||
try:
|
||||
specs = spack.parser.parse(sargs)
|
||||
specs = spack.spec.parse(sargs)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests) # implies normalize
|
||||
|
@@ -5,6 +5,7 @@
from __future__ import print_function

import os.path
import platform
import shutil
import tempfile

@@ -14,8 +15,6 @@

import spack
import spack.bootstrap
import spack.bootstrap.config
import spack.bootstrap.core
import spack.cmd.common.arguments
import spack.config
import spack.main
@@ -76,8 +75,7 @@ def _add_scope_option(parser):
def setup_parser(subparser):
    sp = subparser.add_subparsers(dest="subcommand")

    now = sp.add_parser("now", help="Spack ready, right now!")
    now.add_argument("--dev", action="store_true", help="bootstrap dev dependencies too")
    sp.add_parser("now", help="Spack ready, right now!")

    status = sp.add_parser("status", help="get the status of Spack")
    status.add_argument(
@@ -196,7 +194,7 @@ def _root(args):


def _list(args):
    sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope)
    sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
    if not sources:
        llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
        return
@@ -300,7 +298,7 @@ def _status(args):
        sections.append("develop")

    header = "@*b{{Spack v{0} - {1}}}".format(
        spack.spack_version, spack.bootstrap.config.spec_for_current_python()
        spack.spack_version, spack.bootstrap.spec_for_current_python()
    )
    print(llnl.util.tty.color.colorize(header))
    print()
@@ -325,7 +323,7 @@ def _status(args):


def _add(args):
    initial_sources = spack.bootstrap.core.bootstrapping_sources()
    initial_sources = spack.bootstrap.bootstrapping_sources()
    names = [s["name"] for s in initial_sources]

    # If the name is already used error out
@@ -355,7 +353,7 @@ def _add(args):


def _remove(args):
    initial_sources = spack.bootstrap.core.bootstrapping_sources()
    initial_sources = spack.bootstrap.bootstrapping_sources()
    names = [s["name"] for s in initial_sources]
    if args.name not in names:
        msg = (
@@ -388,10 +386,7 @@ def _mirror(args):
    # TODO: Here we are adding gnuconfig manually, but this can be fixed
    # TODO: as soon as we have an option to add to a mirror all the possible
    # TODO: dependencies of a spec
    root_specs = spack.bootstrap.all_core_root_specs() + ["gnuconfig"]
    if args.dev:
        root_specs += spack.bootstrap.BootstrapEnvironment.spack_dev_requirements()

    root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ["gnuconfig"]
    for spec_str in root_specs:
        msg = 'Adding "{0}" and dependencies to the mirror at {1}'
        llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
@@ -441,9 +436,10 @@ def write_metadata(subdir, metadata):

def _now(args):
    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_core_dependencies()
        if args.dev:
            spack.bootstrap.ensure_environment_dependencies()
        if platform.system().lower() == "linux":
            spack.bootstrap.ensure_patchelf_in_path_or_raise()
        spack.bootstrap.ensure_clingo_importable_or_raise()
        spack.bootstrap.ensure_gpg_in_path_or_raise()


def bootstrap(parser, args):
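The `now` subparser change above is easier to follow with a runnable argparse sketch; the program name and flag wiring here are illustrative only, not the actual `spack bootstrap` implementation:

```python
import argparse

parser = argparse.ArgumentParser(prog="spack-bootstrap-demo")
sp = parser.add_subparsers(dest="subcommand")

# The newer code registers "now" with an extra --dev flag; the revert drops it.
now = sp.add_parser("now", help="Spack ready, right now!")
now.add_argument("--dev", action="store_true", help="bootstrap dev dependencies too")

args = parser.parse_args(["now", "--dev"])
print(args.subcommand, args.dev)  # now True
```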
@@ -8,7 +8,6 @@
import shutil
import sys
import tempfile
import urllib.parse

import llnl.util.tty as tty

@@ -46,7 +45,7 @@ def setup_parser(subparser):
        "-r",
        "--rel",
        action="store_true",
        help="make all rpaths relative before creating tarballs.",
        help="make all rpaths relative" + " before creating tarballs.",
    )
    create.add_argument(
        "-f", "--force", action="store_true", help="overwrite tarball if it exists."
@@ -55,13 +54,13 @@ def setup_parser(subparser):
        "-u",
        "--unsigned",
        action="store_true",
        help="create unsigned buildcache tarballs for testing",
        help="create unsigned buildcache" + " tarballs for testing",
    )
    create.add_argument(
        "-a",
        "--allow-root",
        action="store_true",
        help="allow install root string in binary files after RPATH substitution",
        help="allow install root string in binary files " + "after RPATH substitution",
    )
    create.add_argument(
        "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
@@ -72,31 +71,31 @@ def setup_parser(subparser):
        "--directory",
        metavar="directory",
        type=str,
        help="local directory where buildcaches will be written.",
        help="local directory where " + "buildcaches will be written.",
    )
    output.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        type=str,
        help="name of the mirror where buildcaches will be written.",
        help="name of the mirror where " + "buildcaches will be written.",
    )
    output.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        type=str,
        help="URL of the mirror where buildcaches will be written.",
        help="URL of the mirror where " + "buildcaches will be written.",
    )
    create.add_argument(
        "--rebuild-index",
        action="store_true",
        default=False,
        help="Regenerate buildcache index after building package(s)",
        help="Regenerate buildcache index " + "after building package(s)",
    )
    create.add_argument(
        "--spec-file",
        default=None,
        help="Create buildcache entry for spec from json or yaml file",
        help=("Create buildcache entry for spec from json or " + "yaml file"),
    )
    create.add_argument(
        "--only",
@@ -125,19 +124,19 @@ def setup_parser(subparser):
        "-a",
        "--allow-root",
        action="store_true",
        help="allow install root string in binary files after RPATH substitution",
        help="allow install root string in binary files " + "after RPATH substitution",
    )
    install.add_argument(
        "-u",
        "--unsigned",
        action="store_true",
        help="install unsigned buildcache tarballs for testing",
        help="install unsigned buildcache" + " tarballs for testing",
    )
    install.add_argument(
        "-o",
        "--otherarch",
        action="store_true",
        help="install specs from other architectures instead of default platform and OS",
        help="install specs from other architectures" + " instead of default platform and OS",
    )

    arguments.add_common_arguments(install, ["specs"])
@@ -156,7 +155,7 @@ def setup_parser(subparser):
        "-a",
        "--allarch",
        action="store_true",
        help="list specs for all available architectures instead of default platform and OS",
        help="list specs for all available architectures" + " instead of default platform and OS",
    )
    arguments.add_common_arguments(listcache, ["specs"])
    listcache.set_defaults(func=list_fn)
@@ -205,7 +204,7 @@ def setup_parser(subparser):
    check.add_argument(
        "--spec-file",
        default=None,
        help=("Check single spec from json or yaml file instead of release specs file"),
        help=("Check single spec from json or yaml file instead of release " + "specs file"),
    )

    check.set_defaults(func=check_fn)
@@ -218,7 +217,7 @@ def setup_parser(subparser):
    download.add_argument(
        "--spec-file",
        default=None,
        help=("Download built tarball for spec (from json or yaml file) from mirror"),
        help=("Download built tarball for spec (from json or yaml file) " + "from mirror"),
    )
    download.add_argument(
        "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
@@ -235,7 +234,7 @@ def setup_parser(subparser):
    getbuildcachename.add_argument(
        "--spec-file",
        default=None,
        help=("Path to spec json or yaml file for which buildcache name is desired"),
        help=("Path to spec json or yaml file for which buildcache name is " + "desired"),
    )
    getbuildcachename.set_defaults(func=get_buildcache_name_fn)

@@ -295,27 +294,7 @@ def setup_parser(subparser):

    # Update buildcache index without copying any additional packages
    update_index = subparsers.add_parser("update-index", help=update_index_fn.__doc__)
    update_index_out = update_index.add_mutually_exclusive_group(required=True)
    update_index_out.add_argument(
        "-d",
        "--directory",
        metavar="directory",
        type=str,
        help="local directory where buildcaches will be written.",
    )
    update_index_out.add_argument(
        "-m",
        "--mirror-name",
        metavar="mirror-name",
        type=str,
        help="name of the mirror where buildcaches will be written.",
    )
    update_index_out.add_argument(
        "--mirror-url",
        metavar="mirror-url",
        type=str,
        help="URL of the mirror where buildcaches will be written.",
    )
    update_index.add_argument("-d", "--mirror-url", default=None, help="Destination mirror url")
    update_index.add_argument(
        "-k",
        "--keys",
@@ -326,15 +305,6 @@ def setup_parser(subparser):
    update_index.set_defaults(func=update_index_fn)

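The removed `update-index` options above used a required mutually exclusive group, so exactly one of `--directory`, `--mirror-name`, or `--mirror-url` had to be given. A minimal sketch of that argparse pattern, with a hypothetical program name:

```python
import argparse

parser = argparse.ArgumentParser(prog="update-index-demo")
out = parser.add_mutually_exclusive_group(required=True)
out.add_argument("-d", "--directory", metavar="directory", type=str)
out.add_argument("-m", "--mirror-name", metavar="mirror-name", type=str)
out.add_argument("--mirror-url", metavar="mirror-url", type=str)

# Exactly one destination must be given; passing two at once is a usage error.
args = parser.parse_args(["-m", "my-mirror"])
print(args.mirror_name)  # my-mirror
```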
def _mirror_url_from_args(args):
    if args.directory:
        return spack.mirror.push_url_from_directory(args.directory)
    if args.mirror_name:
        return spack.mirror.push_url_from_mirror_name(args.mirror_name)
    if args.mirror_url:
        return spack.mirror.push_url_from_mirror_url(args.mirror_url)


def _matching_specs(args):
    """Return a list of matching specs read from either a spec file (JSON or YAML),
    a query over the store or a query over the active environment.
@@ -353,9 +323,9 @@ def _matching_specs(args):

    tty.die(
        "build cache file creation requires at least one"
        " installed package spec, an active environment,"
        " or else a path to a json or yaml file containing a spec"
        " to install"
        + " installed package spec, an active environment,"
        + " or else a path to a json or yaml file containing a spec"
        + " to install"
    )


@@ -383,7 +353,15 @@ def _concrete_spec_from_args(args):

def create_fn(args):
    """create a binary package and push it to a mirror"""
    push_url = _mirror_url_from_args(args)
    if args.directory:
        push_url = spack.mirror.push_url_from_directory(args.directory)

    if args.mirror_name:
        push_url = spack.mirror.push_url_from_mirror_name(args.mirror_name)

    if args.mirror_url:
        push_url = spack.mirror.push_url_from_mirror_url(args.mirror_url)

    matches = _matching_specs(args)

    msg = "Pushing binary packages to {0}/build_cache".format(push_url)
@@ -597,11 +575,11 @@ def sync_fn(args):
    source_location = None
    if args.src_directory:
        source_location = args.src_directory
        scheme = urllib.parse.urlparse(source_location, scheme="<missing>").scheme
        scheme = url_util.parse(source_location, scheme="<missing>").scheme
        if scheme != "<missing>":
            raise ValueError('"--src-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        source_location = url_util.path_to_file_url(source_location)
        source_location = "file://" + source_location
    elif args.src_mirror_name:
        source_location = args.src_mirror_name
        result = spack.mirror.MirrorCollection().lookup(source_location)
@@ -609,7 +587,7 @@ def sync_fn(args):
            raise ValueError('no configured mirror named "{name}"'.format(name=source_location))
    elif args.src_mirror_url:
        source_location = args.src_mirror_url
        scheme = urllib.parse.urlparse(source_location, scheme="<missing>").scheme
        scheme = url_util.parse(source_location, scheme="<missing>").scheme
        if scheme == "<missing>":
            raise ValueError('"{url}" is not a valid URL'.format(url=source_location))

@@ -620,11 +598,11 @@ def sync_fn(args):
    dest_location = None
    if args.dest_directory:
        dest_location = args.dest_directory
        scheme = urllib.parse.urlparse(dest_location, scheme="<missing>").scheme
        scheme = url_util.parse(dest_location, scheme="<missing>").scheme
        if scheme != "<missing>":
            raise ValueError('"--dest-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        dest_location = url_util.path_to_file_url(dest_location)
        dest_location = "file://" + dest_location
    elif args.dest_mirror_name:
        dest_location = args.dest_mirror_name
        result = spack.mirror.MirrorCollection().lookup(dest_location)
@@ -632,7 +610,7 @@ def sync_fn(args):
            raise ValueError('no configured mirror named "{name}"'.format(name=dest_location))
    elif args.dest_mirror_url:
        dest_location = args.dest_mirror_url
        scheme = urllib.parse.urlparse(dest_location, scheme="<missing>").scheme
        scheme = url_util.parse(dest_location, scheme="<missing>").scheme
        if scheme == "<missing>":
            raise ValueError('"{url}" is not a valid URL'.format(url=dest_location))

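Both sides of the `sync_fn` hunks rely on the same trick to tell local paths from URLs: parse with a sentinel default scheme and see whether it survives. A small stdlib-only sketch of the idea:

```python
from urllib.parse import urlparse


def looks_like_url(location):
    """True when the string carries an explicit scheme (http://, s3://, ...).

    Passing scheme="<missing>" makes urlparse report that sentinel for plain
    local paths, which have no scheme of their own.
    """
    return urlparse(location, scheme="<missing>").scheme != "<missing>"


print(looks_like_url("/tmp/mirror"))          # False
print(looks_like_url("https://example.com"))  # True
```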
@@ -714,8 +692,11 @@ def update_index(mirror_url, update_keys=False):

def update_index_fn(args):
    """Update a buildcache index."""
    push_url = _mirror_url_from_args(args)
    update_index(push_url, update_keys=args.keys)
    outdir = "file://."
    if args.mirror_url:
        outdir = args.mirror_url

    update_index(outdir, update_keys=args.keys)


def buildcache(parser, args):

@@ -284,7 +284,6 @@ def ci_rebuild(args):
    remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
    remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
    spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
    shared_pr_mirror_url = get_env_var("SPACK_CI_SHARED_PR_MIRROR_URL")
    rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")

    # Construct absolute paths relative to current $CI_PROJECT_DIR
@@ -356,7 +355,7 @@ def ci_rebuild(args):
        # dependencies from previous stages available since we do not
        # allow pushing binaries to the remote mirror during PR pipelines.
        enable_artifacts_mirror = True
        pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
        pipeline_mirror_url = "file://" + local_mirror_dir
        mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
        tty.debug(mirror_msg)

@@ -472,10 +471,6 @@ def ci_rebuild(args):
        spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
        pipeline_mirrors.append(remote_mirror_override)

    if spack_pipeline_type == "spack_pull_request":
        if shared_pr_mirror_url != "None":
            pipeline_mirrors.append(shared_pr_mirror_url)

    matches = (
        None
        if full_rebuild

@@ -7,7 +7,6 @@

import os
import re
import urllib.parse

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -828,8 +827,8 @@ def get_versions(args, name):

    valid_url = True
    try:
        parsed = urllib.parse.urlparse(args.url)
        if not parsed.scheme or parsed.scheme != "file":
        spack.util.url.require_url_format(args.url)
        if args.url.startswith("file://"):
            valid_url = False  # No point in spidering these
    except (ValueError, TypeError):
        valid_url = False

@@ -11,7 +11,6 @@
import spack.mirror
import spack.paths
import spack.util.gpg
import spack.util.url

description = "handle GPG actions for spack"
section = "packaging"
@@ -99,7 +98,7 @@ def setup_parser(subparser):
        "--directory",
        metavar="directory",
        type=str,
        help="local directory where keys will be published.",
        help="local directory where " + "keys will be published.",
    )
    output.add_argument(
        "-m",
@@ -213,8 +212,7 @@ def gpg_publish(args):

    mirror = None
    if args.directory:
        url = spack.util.url.path_to_file_url(args.directory)
        mirror = spack.mirror.Mirror(url, url)
        mirror = spack.mirror.Mirror(args.directory, args.directory)
    elif args.mirror_name:
        mirror = spack.mirror.MirrorCollection().lookup(args.mirror_name)
    elif args.mirror_url:

@@ -357,10 +357,11 @@ def versions_per_spec(args):


def create_mirror_for_individual_specs(mirror_specs, directory_hint, skip_unstable_versions):
    local_push_url = local_mirror_url_from_user(directory_hint)
    present, mirrored, error = spack.mirror.create(
        directory_hint, mirror_specs, skip_unstable_versions
        local_push_url, mirror_specs, skip_unstable_versions
    )
    tty.msg("Summary for mirror in {}".format(directory_hint))
    tty.msg("Summary for mirror in {}".format(local_push_url))
    process_mirror_stats(present, mirrored, error)


@@ -388,7 +389,9 @@ def local_mirror_url_from_user(directory_hint):
    mirror_directory = spack.util.path.canonicalize_path(
        directory_hint or spack.config.get("config:source_cache")
    )
    return url_util.path_to_file_url(mirror_directory)
    tmp_mirror = spack.mirror.Mirror(mirror_directory)
    local_url = url_util.format(tmp_mirror.push_url)
    return local_url


def mirror_create(args):

@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from typing import Callable, Dict
from typing import Callable, Dict  # novm

import spack.cmd.modules.lmod
import spack.cmd.modules.tcl
@@ -13,7 +13,7 @@
level = "short"


_subcommands: Dict[str, Callable] = {}
_subcommands = {}  # type: Dict[str, Callable]

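The `# novm` and type-comment churn running through this diff swaps PEP 526 variable annotations for comment-style annotations that older interpreters could still parse. Both spell exactly the same type to a checker; a side-by-side sketch:

```python
from typing import Callable, Dict

# PEP 526 variable annotation: requires a Python 3.6+ parser.
_subcommands: Dict[str, Callable] = {}

# Comment-style annotation: a plain assignment at runtime, so it parses on
# older interpreters too; type checkers read the trailing "# type:" comment.
_subcommands_compat = {}  # type: Dict[str, Callable]
```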
def setup_parser(subparser):

@@ -16,7 +16,6 @@

def setup_parser(subparser):
    arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
    arguments.add_concretizer_args(subparser)


def patch(parser, args):

@@ -5,9 +5,6 @@

from __future__ import print_function

import argparse
import itertools
import os
import sys

import llnl.util.tty as tty
@@ -17,7 +14,6 @@
import spack.cmd.common.arguments as arguments
import spack.paths
import spack.repo
import spack.util.executable as exe
import spack.util.package_hash as ph

description = "query packages associated with particular git revisions"
@@ -69,14 +65,6 @@ def setup_parser(subparser):
        "rev2", nargs="?", default="HEAD", help="revision to compare to rev1 (default is HEAD)"
    )

    # explicitly add help for `spack pkg grep` with just `--help` and NOT `-h`. This is so
    # that the very commonly used -h (no filename) argument can be passed through to grep
    grep_parser = sp.add_parser("grep", help=pkg_grep.__doc__, add_help=False)
    grep_parser.add_argument(
        "grep_args", nargs=argparse.REMAINDER, default=None, help="arguments for grep"
    )
    grep_parser.add_argument("--help", action="help", help="show this help message and exit")

    source_parser = sp.add_parser("source", help=pkg_source.__doc__)
    source_parser.add_argument(
        "-c",
@@ -169,88 +157,18 @@ def pkg_hash(args):
        print(ph.package_hash(spec))


def get_grep(required=False):
    """Get a grep command to use with ``spack pkg grep``."""
    return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)


def pkg_grep(args, unknown_args):
    """grep for strings in package.py files from all repositories"""
    grep = get_grep(required=True)

    # add a little color to the output if we can
    if "GNU" in grep("--version", output=str):
        grep.add_default_arg("--color=auto")

    # determines number of files to grep at a time
    grouper = lambda e: e[0] // 500

    # set up iterator and save the first group to ensure we don't end up with a group of size 1
    groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
    if not groups:
        return 0  # no packages to search

    # You can force GNU grep to show filenames on every line with -H, but not POSIX grep.
    # POSIX grep only shows filenames when you're grepping 2 or more files. Since we
    # don't know which one we're running, we ensure there are always >= 2 files by
    # saving the prior group of paths and adding it to a straggling group of 1 if needed.
    # This works unless somehow there is only one package in all of Spack.
    _, first_group = next(groups)
    prior_paths = [path for _, path in first_group]

    # grep returns 1 for nothing found, 0 for something found, and > 1 for error
    return_code = 1

    # assemble args and run grep on a group of paths
    def grep_group(paths):
        all_args = args.grep_args + unknown_args + paths
        grep(*all_args, fail_on_error=False)
        return grep.returncode

    for _, group in groups:
        paths = [path for _, path in group]  # extract current path group

        if len(paths) == 1:
            # Only the very last group can have length 1. If it does, combine
            # it with the prior group to ensure more than one path is grepped.
            prior_paths += paths
        else:
            # otherwise run grep on the prior group
            error = grep_group(prior_paths)
            if error != 1:
                return_code = error
                if error > 1:  # fail fast on error
                    return error

            prior_paths = paths

    # Handle the last remaining group after the loop
    error = grep_group(prior_paths)
    if error != 1:
        return_code = error

    return return_code

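In the removed `pkg_grep` above, package files are searched in fixed-size batches built with `itertools.groupby` over enumerated paths, and a straggling batch of one is merged into the previous batch so POSIX grep always sees at least two files and therefore prints filenames. The batching idea in isolation (the path names below are made up for illustration):

```python
import itertools

paths = ["pkg{:03d}/package.py".format(i) for i in range(1203)]

# Group consecutive items into batches of 500 by integer-dividing the index.
batches = [
    [path for _, path in group]
    for _, group in itertools.groupby(enumerate(paths), key=lambda e: e[0] // 500)
]
print([len(b) for b in batches])  # [500, 500, 203]
```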
def pkg(parser, args, unknown_args):
def pkg(parser, args):
    if not spack.cmd.spack_is_git_repo():
        tty.die("This spack is not a git clone. Can't use 'spack pkg'")

    action = {
        "add": pkg_add,
        "added": pkg_added,
        "changed": pkg_changed,
        "diff": pkg_diff,
        "hash": pkg_hash,
        "list": pkg_list,
        "removed": pkg_removed,
        "added": pkg_added,
        "changed": pkg_changed,
        "source": pkg_source,
        "hash": pkg_hash,
    }

    # grep is special as it passes unknown arguments through
    if args.pkg_command == "grep":
        return pkg_grep(args, unknown_args)
    elif unknown_args:
        tty.die("unrecognized arguments: %s" % " ".join(unknown_args))
    else:
        return action[args.pkg_command](args)
    action[args.pkg_command](args)

@@ -2,6 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import os
import re
@@ -12,6 +15,7 @@
import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir

import spack.bootstrap
import spack.paths
from spack.util.executable import which

@@ -21,7 +25,7 @@


def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks or blocks"""
    "Collect data into fixed-length chunks or blocks"
    # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
    args = [iter(iterable)] * n
    for group in zip_longest(*args, fillvalue=fillvalue):
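The `grouper` docstring hunk above refers to the classic `zip_longest` chunking recipe: one shared iterator repeated `n` times, so each zipped tuple consumes `n` consecutive items. As a runnable reference:

```python
from itertools import zip_longest


def grouper(iterable, n, fillvalue=None):
    # One shared iterator repeated n times: zip pulls from it round-robin,
    # so each emitted tuple consumes n consecutive items of the input.
    args = [iter(iterable)] * n
    return zip_longest(*args, fillvalue=fillvalue)


print(["".join(g) for g in grouper("ABCDEFG", 3, "x")])  # ['ABC', 'DEF', 'Gxx']
```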
@@ -37,13 +41,16 @@ def grouper(iterable, n, fillvalue=None):
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = [
    "isort",
    "mypy",
    "black",
    "flake8",
tool_order = [
    ("isort", spack.bootstrap.ensure_isort_in_path_or_raise),
    ("mypy", spack.bootstrap.ensure_mypy_in_path_or_raise),
    ("black", spack.bootstrap.ensure_black_in_path_or_raise),
    ("flake8", spack.bootstrap.ensure_flake8_in_path_or_raise),
]

#: list of just the tool names -- for argparse
tool_names = [k for k, _ in tool_order]

#: tools we run in spack style
tools = {}

@@ -215,8 +222,10 @@ def translate(match):
    print(line)


def print_style_header(file_list, args, tools_to_run):
    tty.msg("Running style checks on spack", "selected: " + ", ".join(tools_to_run))
def print_style_header(file_list, args, selected):
    tools = [tool for tool in tool_names if tool in selected]
    tty.msg("Running style checks on spack", "selected: " + ", ".join(tools))

    # translate modified paths to cwd_relative if needed
    paths = [filename.strip() for filename in file_list]
    if not args.root_relative:
@@ -375,17 +384,6 @@ def validate_toolset(arg_value):
    return tools


def missing_tools(tools_to_run):
    return [t for t in tools_to_run if which(t) is None]


def _bootstrap_dev_dependencies():
    import spack.bootstrap

    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_environment_dependencies()


def style(parser, args):
    # save initial working directory for relativizing paths later
    args.initial_working_dir = os.getcwd()
@@ -420,20 +418,25 @@ def prefix_relative(path):
        tty.msg("Nothing to run.")
        return

    tools_to_run = [t for t in tool_names if t in selected]
    if missing_tools(tools_to_run):
        _bootstrap_dev_dependencies()

    return_code = 0
    with working_dir(args.root):
        if not file_list:
            file_list = changed_files(args.base, args.untracked, args.all)

        print_style_header(file_list, args, tools_to_run)
        for tool_name in tools_to_run:
            run_function, required = tools[tool_name]
            print_tool_header(tool_name)
            return_code |= run_function(which(tool_name), file_list, args)
        print_style_header(file_list, args, selected)

        tools_to_run = [(tool, fn) for tool, fn in tool_order if tool in selected]
        commands = {}
        with spack.bootstrap.ensure_bootstrap_configuration():
            # bootstrap everything first to get commands
            for tool_name, bootstrap_fn in tools_to_run:
                commands[tool_name] = bootstrap_fn()

            # run tools once bootstrapping is done
            for tool_name, bootstrap_fn in tools_to_run:
                run_function, required = tools[tool_name]
                print_tool_header(tool_name)
                return_code |= run_function(commands[tool_name], file_list, args)

    if return_code == 0:
        tty.msg(color.colorize("@*{spack style checks were clean}"))

@@ -21,6 +21,7 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify

import spack.bootstrap
import spack.paths

description = "run spack's unit tests (wrapper around pytest)"
@@ -206,7 +207,6 @@ def add_back_pytest_args(args, unknown_args):

def unit_test(parser, args, unknown_args):
    global pytest
    import spack.bootstrap

    # Ensure clingo is available before switching to the
    # mock configuration used by unit tests
@@ -214,10 +214,12 @@ def unit_test(parser, args, unknown_args):
    # clingo is wholly unsupported from bootstrap
    if not is_windows:
        with spack.bootstrap.ensure_bootstrap_configuration():
            spack.bootstrap.ensure_core_dependencies()
            if pytest is None:
                spack.bootstrap.ensure_environment_dependencies()
                import pytest
            spack.bootstrap.ensure_clingo_importable_or_raise()

    if pytest is None:
        vendored_pytest_dir = os.path.join(spack.paths.external_path, "pytest-fallback")
        sys.path.append(vendored_pytest_dir)
        import pytest

    if args.pytest_help:
        # make the pytest.main help output more accurate

@@ -9,9 +9,8 @@
import platform
import re
import shutil
import sys
import tempfile
from typing import List, Optional, Sequence
from typing import List, Sequence  # novm

import llnl.util.lang
import llnl.util.tty as tty
@@ -28,8 +27,6 @@

__all__ = ["Compiler"]

is_windows = sys.platform == "win32"


@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
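`_get_compiler_version_output` is wrapped in `@llnl.util.lang.memoized` so each compiler binary is probed only once per process. The same effect can be sketched with the standard library; the function below is an illustration of the idea, not Spack's implementation:

```python
import functools
import subprocess


@functools.lru_cache(maxsize=None)
def compiler_version_output(compiler_path, version_arg):
    """Probe a compiler once per (path, flag) pair and cache the result."""
    return subprocess.run(
        [compiler_path, version_arg], capture_output=True, text=True, check=False
    ).stdout


# Repeated lookups of the same compiler hit the cache instead of re-running it:
# print(compiler_version_output("/usr/bin/gcc", "-dumpversion"))
```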
@@ -198,20 +195,20 @@ class Compiler(object):
    and how to identify the particular type of compiler."""

    # Subclasses use possible names of C compiler
    cc_names: List[str] = []
    cc_names = []  # type: List[str]

    # Subclasses use possible names of C++ compiler
    cxx_names: List[str] = []
    cxx_names = []  # type: List[str]

    # Subclasses use possible names of Fortran 77 compiler
    f77_names: List[str] = []
    f77_names = []  # type: List[str]

    # Subclasses use possible names of Fortran 90 compiler
    fc_names: List[str] = []
    fc_names = []  # type: List[str]

    # Optional prefix regexes for searching for this type of compiler.
    # Prefixes are sometimes used for toolchains
    prefixes: List[str] = []
    prefixes = []  # type: List[str]

    # Optional suffix regexes for searching for this type of compiler.
    # Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
@@ -222,7 +219,7 @@ class Compiler(object):
    version_argument = "-dumpversion"

    #: Return values to ignore when invoking the compiler to get its version
    ignore_version_errors: Sequence[int] = ()
    ignore_version_errors = ()  # type: Sequence[int]

    #: Regex used to extract version from compiler's output
    version_regex = "(.*)"
@@ -274,9 +271,9 @@ def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3"]

    # Cray PrgEnv name that can be used to load this compiler
    PrgEnv: Optional[str] = None
    PrgEnv = None  # type: str
    # Name of module used to switch versions of this compiler
    PrgEnv_compiler: Optional[str] = None
    PrgEnv_compiler = None  # type: str

    def __init__(
        self,
@@ -289,7 +286,7 @@ def __init__(
        environment=None,
        extra_rpaths=None,
        enable_implicit_rpaths=None,
        **kwargs,
        **kwargs
    ):
        self.spec = cspec
        self.operating_system = str(operating_system)
@@ -595,16 +592,7 @@ def search_regexps(cls, language):
        # defined for the compiler
        compiler_names = getattr(cls, "{0}_names".format(language))
        prefixes = [""] + cls.prefixes
        suffixes = [""]
        # Windows compilers generally have an extension of some sort
        # as do most files on Windows, handle that case here
        if is_windows:
            ext = r"\.(?:exe|bat)"
            cls_suf = [suf + ext for suf in cls.suffixes]
            ext_suf = [ext]
            suffixes = suffixes + cls.suffixes + cls_suf + ext_suf
        else:
            suffixes = suffixes + cls.suffixes
        suffixes = [""] + cls.suffixes
        regexp_fmt = r"^({0}){1}({2})$"
        return [
            re.compile(regexp_fmt.format(prefix, re.escape(name), suffix))
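The removed Windows branch in `search_regexps` extends the suffix alternatives with `.exe`/`.bat` so executables found on Windows still match. A standalone sketch of that regex construction; the helper name and default arguments are hypothetical:

```python
import re


def search_regexps(names, prefixes=("",), suffixes=("",), windows=False):
    """Build anchored regexes matching <prefix><name><suffix>, with optional
    Windows executable extensions appended to each suffix alternative."""
    sufs = list(suffixes)
    if windows:
        ext = r"\.(?:exe|bat)"
        sufs = sufs + [s + ext for s in suffixes if s] + [ext]
    fmt = r"^({0}){1}({2})$"
    return [
        re.compile(fmt.format(p, re.escape(n), s))
        for p in prefixes
        for n in names
        for s in sufs
    ]


rs = search_regexps(["gcc"], prefixes=("", "mingw-"), windows=True)
print(any(r.match("gcc.exe") for r in rs))  # True
```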
@@ -10,7 +10,7 @@
import itertools
import multiprocessing.pool
import os
from typing import Dict
from typing import Dict  # novm

import archspec.cpu

@@ -41,7 +41,7 @@
# TODO: Caches at module level make it difficult to mock configurations in
# TODO: unit tests. It might be worth reworking their implementation.
#: cache of compilers constructed from config data, keyed by config entry id.
_compiler_cache: Dict[str, "spack.compiler.Compiler"] = {}
_compiler_cache = {}  # type: Dict[str, spack.compiler.Compiler]

_compiler_to_pkg = {
    "clang": "llvm+clang",
@@ -722,8 +722,6 @@ def _default_make_compilers(cmp_id, paths):
    compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
    spec = spack.spec.CompilerSpec(compiler_cls.name, version)
    paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
    # TODO: johnwparent - revist the following line as per discussion at:
    # https://github.com/spack/spack/pull/33385/files#r1040036318
    target = archspec.cpu.host()
    compiler = compiler_cls(spec, operating_system, str(target.family), paths)
    return [compiler]

@@ -8,7 +8,7 @@
import subprocess
import sys
from distutils.version import StrictVersion
from typing import Dict, List, Set
from typing import Dict, List, Set  # novm

import spack.compiler
import spack.operating_systems.windows_os
@@ -18,8 +18,8 @@
from spack.error import SpackError
from spack.version import Version

avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()
avail_fc_version = set()  # type: Set[str]
fc_path = dict()  # type: Dict[str, str]

fortran_mapping = {
    "2021.3.0": "19.29.30133",
@@ -42,16 +42,16 @@ def get_valid_fortran_pth(comp_ver):

class Msvc(Compiler):
    # Subclasses use possible names of C compiler
    cc_names: List[str] = ["cl"]
    cc_names = ["cl.exe"]  # type: List[str]

    # Subclasses use possible names of C++ compiler
    cxx_names: List[str] = ["cl"]
    cxx_names = ["cl.exe"]  # type: List[str]

    # Subclasses use possible names of Fortran 77 compiler
    f77_names: List[str] = ["ifx"]
    f77_names = ["ifx.exe"]  # type: List[str]

    # Subclasses use possible names of Fortran 90 compiler
    fc_names: List[str] = ["ifx"]
    fc_names = ["ifx.exe"]  # type: List[str]

    # Named wrapper links within build_env_path
    # Due to the challenges of supporting compiler wrappers

@@ -4,17 +4,17 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
from typing import List
from typing import List  # novm

import spack.compiler


class Nag(spack.compiler.Compiler):
    # Subclasses use possible names of C compiler
    cc_names: List[str] = []
    cc_names = []  # type: List[str]

    # Subclasses use possible names of C++ compiler
    cxx_names: List[str] = []
    cxx_names = []  # type: List[str]

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ["nagfor"]

@@ -36,7 +36,7 @@
import re
import sys
from contextlib import contextmanager
from typing import List
from typing import List  # novm

import ruamel.yaml as yaml
from ruamel.yaml.error import MarkedYAMLError

@@ -61,16 +61,9 @@ def compiler_from_entry(entry):
def spec_from_entry(entry):
    arch_str = ""
    if "arch" in entry:
        local_platform = spack.platforms.host()
        spec_platform = entry["arch"]["platform"]
        # Note that Cray systems are now treated as Linux. Specs
        # in the manifest which specify "cray" as the platform
        # should be registered in the DB as "linux"
        if local_platform.name == "linux" and spec_platform.lower() == "cray":
            spec_platform = "linux"
        arch_format = "arch={platform}-{os}-{target}"
        arch_str = arch_format.format(
            platform=spec_platform,
            platform=entry["arch"]["platform"],
            os=entry["arch"]["platform_os"],
            target=entry["arch"]["target"]["name"],
        )

@@ -26,7 +26,7 @@
import socket
import sys
import time
from typing import Dict
from typing import Dict  # novm

try:
    import uuid
@@ -304,10 +304,10 @@ class Database(object):

    """Per-process lock objects for each install prefix."""

    _prefix_locks: Dict[str, lk.Lock] = {}
    _prefix_locks = {}  # type: Dict[str, lk.Lock]

    """Per-process failure (lock) objects for each install prefix."""
    _prefix_failures: Dict[str, lk.Lock] = {}
    _prefix_failures = {}  # type: Dict[str, lk.Lock]

    def __init__(
        self,

@@ -32,7 +32,7 @@ class OpenMpi(Package):
import functools
import os.path
import re
from typing import List, Set
from typing import List, Set  # novm

import llnl.util.lang
import llnl.util.tty.color
@@ -122,9 +122,9 @@ class DirectiveMeta(type):
    """

    # Set of all known directives
    _directive_dict_names: Set[str] = set()
    _directives_to_be_executed: List[str] = []
    _when_constraints_from_context: List[str] = []
    _directive_dict_names = set()  # type: Set[str]
    _directives_to_be_executed = []  # type: List[str]
    _when_constraints_from_context = []  # type: List[str]

    def __new__(cls, name, bases, attr_dict):
        # Initialize the attribute containing the list of directives
@@ -361,8 +361,6 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
        return

    dep_spec = spack.spec.Spec(spec)
    if not dep_spec.name:
        raise DependencyError("Invalid dependency specification in package '%s':" % pkg.name, spec)
    if pkg.name == dep_spec.name:
        raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)

@@ -497,8 +495,6 @@ def provides(*specs, **kwargs):
    """

    def _execute_provides(pkg):
        import spack.parser  # Avoid circular dependency

        when = kwargs.get("when")
        when_spec = make_when_spec(when)
        if not when_spec:
@@ -509,7 +505,7 @@ def _execute_provides(pkg):
        when_spec.name = pkg.name

        for string in specs:
            for provided_spec in spack.parser.parse(string):
            for provided_spec in spack.spec.parse(string):
                if pkg.name == provided_spec.name:
                    raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)

@@ -771,11 +767,7 @@ class DirectiveError(spack.error.SpackError):
    """This is raised when something is wrong with a package directive."""


class DependencyError(DirectiveError):
    """This is raised when a dependency specification is invalid."""


class CircularReferenceError(DependencyError):
class CircularReferenceError(DirectiveError):
    """This is raised when something depends on itself."""

@@ -11,8 +11,6 @@
import stat
import sys
import time
import urllib.parse
import urllib.request

import ruamel.yaml as yaml

@@ -21,6 +19,7 @@
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink

import spack.bootstrap
import spack.compilers
import spack.concretize
import spack.config
@@ -44,7 +43,6 @@
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url
from spack.filesystem_view import (
    SimpleFilesystemView,
    inverse_view_func_parser,
@@ -929,54 +927,46 @@ def included_config_scopes(self):
            # allow paths to contain spack config/environment variables, etc.
            config_path = substitute_path_variables(config_path)

            include_url = urllib.parse.urlparse(config_path)
            # strip file URL prefix, if needed, to avoid unnecessary remote
            # config processing for local files
            config_path = config_path.replace("file://", "")

            # Transform file:// URLs to direct includes.
            if include_url.scheme == "file":
                config_path = urllib.request.url2pathname(include_url.path)

            # Any other URL should be fetched.
            elif include_url.scheme in ("http", "https", "ftp"):
            if not os.path.exists(config_path):
                # Stage any remote configuration file(s)
                staged_configs = (
                    os.listdir(self.config_stage_dir)
                    if os.path.exists(self.config_stage_dir)
                    else []
                )
                remote_path = urllib.request.url2pathname(include_url.path)
                basename = os.path.basename(remote_path)
                if basename in staged_configs:
                    # Do NOT re-stage configuration files over existing
                    # ones with the same name since there is a risk of
                    # losing changes (e.g., from 'spack config update').
                    tty.warn(
                        "Will not re-stage configuration from {0} to avoid "
                        "losing changes to the already staged file of the "
                        "same name.".format(remote_path)
                if spack.util.url.is_url_format(config_path):
                    staged_configs = (
                        os.listdir(self.config_stage_dir)
                        if os.path.exists(self.config_stage_dir)
                        else []
                    )

                    # Recognize the configuration stage directory
                    # is flattened to ensure a single copy of each
                    # configuration file.
                    config_path = self.config_stage_dir
                    if basename.endswith(".yaml"):
                        config_path = os.path.join(config_path, basename)
                else:
                    staged_path = spack.config.fetch_remote_configs(
                        config_path,
                        self.config_stage_dir,
                        skip_existing=True,
                    )
                    if not staged_path:
                        raise SpackEnvironmentError(
                            "Unable to fetch remote configuration {0}".format(config_path)
                    basename = os.path.basename(config_path)
                    if basename in staged_configs:
                        # Do NOT re-stage configuration files over existing
                        # ones with the same name since there is a risk of
                        # losing changes (e.g., from 'spack config update').
                        tty.warn(
                            "Will not re-stage configuration from {0} to avoid "
                            "losing changes to the already staged file of the "
                            "same name.".format(config_path)
                        )
                        config_path = staged_path

            elif include_url.scheme:
                raise ValueError(
                    "Unsupported URL scheme for environment include: {}".format(config_path)
                )
                        # Recognize the configuration stage directory
                        # is flattened to ensure a single copy of each
                        # configuration file.
                        config_path = self.config_stage_dir
                        if basename.endswith(".yaml"):
                            config_path = os.path.join(config_path, basename)
                    else:
                        staged_path = spack.config.fetch_remote_configs(
                            config_path,
                            self.config_stage_dir,
                            skip_existing=True,
                        )
                        if not staged_path:
                            raise SpackEnvironmentError(
                                "Unable to fetch remote configuration {0}".format(config_path)
                            )
                        config_path = staged_path

            # treat relative paths as relative to the environment
            if not os.path.isabs(config_path):
@@ -1006,7 +996,7 @@ def included_config_scopes(self):
        if missing:
            msg = "Detected {0} missing include path(s):".format(len(missing))
            msg += "\n   {0}".format("\n   ".join(missing))
            raise spack.config.ConfigFileError(msg)
            tty.die("{0}\nPlease correct and try again.".format(msg))

        return scopes

@@ -1354,8 +1344,6 @@ def _concretize_separately(self, tests=False):
        """Concretization strategy that concretizes separately one
        user spec after the other.
        """
        import spack.bootstrap

        # keep any concretized specs whose user specs are still in the manifest
        old_concretized_user_specs = self.concretized_user_specs
        old_concretized_order = self.concretized_order
@@ -1380,7 +1368,7 @@ def _concretize_separately(self, tests=False):
        # Ensure we don't try to bootstrap clingo in parallel
        if spack.config.get("config:concretizer", "clingo") == "clingo":
            with spack.bootstrap.ensure_bootstrap_configuration():
                spack.bootstrap.ensure_core_dependencies()
                spack.bootstrap.ensure_clingo_importable_or_raise()

        # Ensure all the indexes have been built or updated, since
        # otherwise the processes in the pool may timeout on waiting

@@ -30,7 +30,7 @@
import shutil
import sys
import urllib.parse
from typing import List, Optional
from typing import List, Optional  # novm

import llnl.util
import llnl.util.filesystem as fs
@@ -314,7 +314,17 @@ def mirror_id(self):

    @property
    def candidate_urls(self):
        return [self.url] + (self.mirrors or [])
        urls = []

        for url in [self.url] + (self.mirrors or []):
            # This must be skipped on Windows due to URL encoding
            # of ':' characters on filepaths on Windows
            if sys.platform != "win32" and url.startswith("file://"):
                path = urllib.parse.quote(url[len("file://") :])
                url = "file://" + path
            urls.append(url)

        return urls
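The new `candidate_urls` above percent-encodes the path portion of `file://` mirrors, except on Windows where drive letters contain `:` and would be mangled. The core of that transformation in isolation:

```python
import sys
import urllib.parse


def quote_file_url(url):
    # Percent-encode the path part of a file:// URL so characters like spaces
    # survive later URL parsing; skipped on Windows, where drive letters
    # contain ':' and would be mangled by quoting.
    if sys.platform != "win32" and url.startswith("file://"):
        return "file://" + urllib.parse.quote(url[len("file://"):])
    return url


print(quote_file_url("file:///tmp/my mirror"))  # file:///tmp/my%20mirror
```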
    @_needs_stage
    def fetch(self):
@@ -486,9 +496,7 @@ def archive(self, destination):
        if not self.archive_file:
            raise NoArchiveFileError("Cannot call archive() before fetching.")

        web_util.push_to_url(
            self.archive_file, url_util.path_to_file_url(destination), keep_original=True
        )
        web_util.push_to_url(self.archive_file, destination, keep_original=True)

    @_needs_stage
    def check(self):
@@ -541,7 +549,8 @@ class CacheURLFetchStrategy(URLFetchStrategy):

    @_needs_stage
    def fetch(self):
        path = url_util.file_url_string_to_path(self.url)
        reg_str = r"^file://"
        path = re.sub(reg_str, "", self.url)

        # check whether the cache file exists.
        if not os.path.isfile(path):
@@ -790,7 +799,7 @@ def source_id(self):
    def mirror_id(self):
        repo_ref = self.commit or self.tag or self.branch
        if repo_ref:
            repo_path = urllib.parse.urlparse(self.url).path
            repo_path = url_util.parse(self.url).path
            result = os.path.sep.join(["git", repo_path, repo_ref])
            return result

@@ -1136,7 +1145,7 @@ def source_id(self):

    def mirror_id(self):
        if self.revision:
            repo_path = urllib.parse.urlparse(self.url).path
            repo_path = url_util.parse(self.url).path
            result = os.path.sep.join(["svn", repo_path, self.revision])
            return result

@@ -1247,7 +1256,7 @@ def source_id(self):

    def mirror_id(self):
        if self.revision:
            repo_path = urllib.parse.urlparse(self.url).path
            repo_path = url_util.parse(self.url).path
            result = os.path.sep.join(["hg", repo_path, self.revision])
            return result

@@ -1319,7 +1328,7 @@ def fetch(self):
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return

        parsed_url = urllib.parse.urlparse(self.url)
        parsed_url = url_util.parse(self.url)
        if parsed_url.scheme != "s3":
            raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

@@ -1366,7 +1375,7 @@ def fetch(self):
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return

        parsed_url = urllib.parse.urlparse(self.url)
        parsed_url = url_util.parse(self.url)
        if parsed_url.scheme != "gs":
            raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

@@ -1671,8 +1680,7 @@ def store(self, fetcher, relative_dest):

    def fetcher(self, target_path, digest, **kwargs):
        path = os.path.join(self.root, target_path)
        url = url_util.path_to_file_url(path)
        return CacheURLFetchStrategy(url, digest, **kwargs)
        return CacheURLFetchStrategy(path, digest, **kwargs)

    def destroy(self):
        shutil.rmtree(self.root, ignore_errors=True)

@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.parse
import urllib.response

import spack.util.url as url_util
import spack.util.web as web_util


@@ -12,7 +12,7 @@ def gcs_open(req, *args, **kwargs):
    """Open a reader stream to a blob object on GCS"""
    import spack.util.gcs as gcs_util

    url = urllib.parse.urlparse(req.get_full_url())
    url = url_util.parse(req.get_full_url())
    gcsblob = gcs_util.GCSBlob(url)

    if not gcsblob.exists():

@@ -48,7 +48,6 @@
import spack.compilers
import spack.error
import spack.hooks
import spack.mirror
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
@@ -420,24 +419,18 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
            otherwise, ``False``
        timer (Timer):
    """
    # Early exit if no mirrors are configured.
    if not spack.mirror.MirrorCollection():
        return False

    pkg_id = package_id(pkg)
    tty.debug("Searching for binary cache of {0}".format(pkg_id))

    timer.start("search")
    matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
    matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
    timer.stop("search")

    if not matches:
        return False

    return _process_binary_cache_tarball(
        pkg,
        pkg.spec,
        explicit,
        unsigned,
        mirrors_for_spec=matches,
        timer=timer,
        pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches, timer=timer
    )


@@ -17,18 +17,15 @@
import os.path
import sys
import traceback
import urllib.parse

import ruamel.yaml.error as yaml_error

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp

import spack.caches
import spack.config
import spack.error
import spack.fetch_strategy as fs
import spack.mirror
import spack.spec
import spack.url as url
import spack.util.spack_json as sjson
@@ -510,13 +507,19 @@ def mirror_cache_and_stats(path, skip_unstable_versions=False):
    they do not have a stable archive checksum (as determined by
    ``fetch_strategy.stable_target``)
    """
    parsed = url_util.parse(path)
    mirror_root = url_util.local_file_path(parsed)
    if not mirror_root:
        raise spack.error.SpackError("MirrorCaches only work with file:// URLs")
    # Get the absolute path of the root before we start jumping around.
    if not os.path.isdir(path):
    if not os.path.isdir(mirror_root):
        try:
            mkdirp(path)
            mkdirp(mirror_root)
        except OSError as e:
            raise MirrorError("Cannot create directory '%s':" % path, str(e))
    mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
            raise MirrorError("Cannot create directory '%s':" % mirror_root, str(e))
    mirror_cache = spack.caches.MirrorCache(
        mirror_root, skip_unstable_versions=skip_unstable_versions
    )
    mirror_stats = MirrorStats()
    return mirror_cache, mirror_stats

@@ -667,10 +670,10 @@ def push_url_from_directory(output_directory):
    """Given a directory in the local filesystem, return the URL on
    which to push binary packages.
    """
    scheme = urllib.parse.urlparse(output_directory, scheme="<missing>").scheme
    scheme = url_util.parse(output_directory, scheme="<missing>").scheme
    if scheme != "<missing>":
        raise ValueError("expected a local path, but got a URL instead")
    mirror_url = url_util.path_to_file_url(output_directory)
    mirror_url = "file://" + output_directory
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
    return url_util.format(mirror.push_url)
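`url_util.path_to_file_url`, removed above in favor of plain `"file://" + path` concatenation, can be approximated with the standard library. Unlike the string concatenation, this also absolutizes the path and percent-encodes characters that would break later URL parsing; the helper below is a sketch, not Spack's implementation:

```python
import os
import urllib.parse
import urllib.request


def path_to_file_url(path):
    """Turn a local path into a file:// URL, the portable way."""
    return urllib.parse.urljoin(
        "file:", urllib.request.pathname2url(os.path.abspath(path))
    )


print(path_to_file_url("/tmp/mirror"))  # file:///tmp/mirror
```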
@@ -685,7 +688,7 @@ def push_url_from_mirror_name(mirror_name):

def push_url_from_mirror_url(mirror_url):
    """Given a mirror URL, return the URL on which to push binary packages."""
    scheme = urllib.parse.urlparse(mirror_url, scheme="<missing>").scheme
    scheme = url_util.parse(mirror_url, scheme="<missing>").scheme
    if scheme == "<missing>":
        raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)

@@ -34,7 +34,7 @@
import inspect
import os.path
import re
from typing import Optional
from typing import Optional  # novm

import llnl.util.filesystem
import llnl.util.tty as tty
@@ -588,7 +588,7 @@ class BaseFileLayout(object):
    """

    #: This needs to be redefined
    extension: Optional[str] = None
    extension = None  # type: Optional[str]

    def __init__(self, configuration):
        self.conf = configuration

@@ -7,7 +7,7 @@
import itertools
import os.path
import posixpath
from typing import Any, Dict
from typing import Any, Dict  # novm

import llnl.util.lang as lang

@@ -30,7 +30,7 @@ def configuration(module_set_name):


# Caches the configuration {spec_hash: configuration}
configuration_registry: Dict[str, Any] = {}
configuration_registry = {}  # type: Dict[str, Any]


def make_configuration(spec, module_set_name):

@@ -8,7 +8,7 @@
"""
import posixpath
import string
from typing import Any, Dict
from typing import Any, Dict  # novm

import llnl.util.tty as tty

@@ -27,7 +27,7 @@ def configuration(module_set_name):


# Caches the configuration {spec_hash: configuration}
configuration_registry: Dict[str, Any] = {}
configuration_registry = {}  # type: Dict[str, Any]


def make_configuration(spec, module_set_name):

@@ -8,25 +8,13 @@

Everything in this module is automatically imported into Spack package files.
"""
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
from shutil import move, rmtree

# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd

# import most common types used in packages
from typing import Dict, List, Optional

import llnl.util.filesystem
from llnl.util.filesystem import *
from llnl.util.symlink import symlink

import spack.util.executable

# These props will be overridden when the build env is set up.
from spack.build_environment import MakeExecutable
from spack.build_systems.aspell_dict import AspellDictPackage
from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.bundle import BundlePackage
@@ -95,10 +83,3 @@
    disjoint_sets,
)
from spack.version import Version, ver

# These are just here for editor support; they will be replaced when the build env
# is set up.
make = MakeExecutable("make", jobs=1)
gmake = MakeExecutable("gmake", jobs=1)
ninja = MakeExecutable("ninja", jobs=1)
configure = Executable(join_path(".", "configure"))

@@ -27,7 +27,7 @@
import traceback
import types
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type  # novm

import llnl.util.filesystem as fsys
import llnl.util.tty as tty
@@ -548,7 +548,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):

    #: Keep -Werror flags, matches config:flags:keep_werror to override config
    # NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
    keep_werror: Optional[str] = None
    keep_werror = None  # type: Optional[str]

    #: Most packages are NOT extendable. Set to True if you want extensions.
    extendable = False
@@ -564,17 +564,17 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    #: for it. Note: accepts both file names and directory names, for example
    #: ``["libcuda.so", "stubs"]`` will ensure libcuda.so and all libraries in the
    #: stubs directory are not bound by path."""
    non_bindable_shared_objects: List[str] = []
    non_bindable_shared_objects = []  # type: List[str]

    #: List of prefix-relative file paths (or a single path). If these do
    #: not exist after install, or if they exist but are not files,
    #: sanity checks fail.
    sanity_check_is_file: List[str] = []
    sanity_check_is_file = []  # type: List[str]

    #: List of prefix-relative directory paths (or a single path). If
    #: these do not exist after install, or if they exist but are not
    #: directories, sanity checks will fail.
    sanity_check_is_dir: List[str] = []
    sanity_check_is_dir = []  # type: List[str]

    #: Boolean. Set to ``True`` for packages that require a manual download.
    #: This is currently used by package sanity tests and generation of a
@@ -582,7 +582,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    manual_download = False

    #: Set of additional options used when fetching package versions.
    fetch_options: Dict[str, Any] = {}
    fetch_options = {}  # type: Dict[str, Any]

    #
    # Set default licensing information
@@ -600,12 +600,12 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    #: looking for a license. All file paths must be relative to the
    #: installation directory. More complex packages like Intel may require
    #: multiple licenses for individual components. Defaults to the empty list.
    license_files: List[str] = []
    license_files = []  # type: List[str]

    #: List of strings. Environment variables that can be set to tell the
    #: software where to look for a license if it is not in the usual location.
    #: Defaults to the empty list.
    license_vars: List[str] = []
    license_vars = []  # type: List[str]

    #: String. A URL pointing to license setup instructions for the software.
    #: Defaults to the empty string.
@@ -618,17 +618,17 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    _patches_by_hash = None

    #: Package homepage where users can find more information about the package
    homepage: Optional[str] = None
    homepage = None  # type: str

    #: Default list URL (place to find available versions)
    list_url: Optional[str] = None
    list_url = None  # type: str

    #: Link depth to which list_url should be searched for new versions
    list_depth = 0

    #: List of strings which contains GitHub usernames of package maintainers.
    #: Do not include @ here in order not to unnecessarily ping the users.
    maintainers: List[str] = []
    maintainers = []  # type: List[str]

    #: List of attributes to be excluded from a package's hash.
    metadata_attrs = [
@@ -2073,21 +2073,24 @@ def build_log_path(self):
        return self.install_log_path if self.spec.installed else self.log_path

    @classmethod
    def inject_flags(cls: Type, name: str, flags: Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE:
    def inject_flags(cls, name, flags):
        # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
        """
        flag_handler that injects all flags through the compiler wrapper.
        """
        return flags, None, None

    @classmethod
    def env_flags(cls: Type, name: str, flags: Iterable[str]):
    def env_flags(cls, name, flags):
        # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
        """
        flag_handler that adds all flags to canonical environment variables.
        """
        return None, flags, None

    @classmethod
    def build_system_flags(cls: Type, name: str, flags: Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE:
    def build_system_flags(cls, name, flags):
        # type: (Type, str, Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE
        """
        flag_handler that passes flags to the build system arguments. Any
        package using `build_system_flags` must also implement
@@ -2166,16 +2169,18 @@ def setup_dependent_package(self, module, dependent_spec):
        """
        pass

    _flag_handler: Optional[FLAG_HANDLER_TYPE] = None
    _flag_handler = None  # type: Optional[FLAG_HANDLER_TYPE]

    @property
    def flag_handler(self) -> FLAG_HANDLER_TYPE:
    def flag_handler(self):
        # type: () -> FLAG_HANDLER_TYPE
        if self._flag_handler is None:
            self._flag_handler = PackageBase.inject_flags
        return self._flag_handler

    @flag_handler.setter
    def flag_handler(self, var: FLAG_HANDLER_TYPE):
    def flag_handler(self, var):
|
||||
# type: (FLAG_HANDLER_TYPE) -> None
|
||||
self._flag_handler = var
|
||||
|
||||
# The flag handler method is called for each of the allowed compiler flags.
|
||||
|
||||
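The three class methods above are the stock flag handlers a recipe can choose between. As a hedged sketch of how a package opts in (the package class `MyLib` is hypothetical; only the handler API shown in this diff is assumed):

    # Hypothetical recipe, for illustration only.
    class MyLib(AutotoolsPackage):
        # Route compiler flags into canonical environment variables
        # (CFLAGS, CXXFLAGS, ...) instead of injecting them through
        # the compiler wrapper.
        flag_handler = PackageBase.env_flags
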
174 lib/spack/spack/parse.py Normal file
@@ -0,0 +1,174 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import itertools
import re
import shlex
import sys

import spack.error
import spack.util.path as sp


class Token(object):
    """Represents tokens; generated from input by lexer and fed to parse()."""

    __slots__ = "type", "value", "start", "end"

    def __init__(self, type, value="", start=0, end=0):
        self.type = type
        self.value = value
        self.start = start
        self.end = end

    def __repr__(self):
        return str(self)

    def __str__(self):
        return "<%d: '%s'>" % (self.type, self.value)

    def is_a(self, type):
        return self.type == type

    def __eq__(self, other):
        return (self.type == other.type) and (self.value == other.value)


class Lexer(object):
    """Base class for Lexers that keep track of line numbers."""

    __slots__ = "scanner0", "scanner1", "mode", "mode_switches_01", "mode_switches_10"

    def __init__(self, lexicon0, mode_switches_01=[], lexicon1=[], mode_switches_10=[]):
        self.scanner0 = re.Scanner(lexicon0)
        self.mode_switches_01 = mode_switches_01
        self.scanner1 = re.Scanner(lexicon1)
        self.mode_switches_10 = mode_switches_10
        self.mode = 0

    def token(self, type, value=""):
        if self.mode == 0:
            return Token(type, value, self.scanner0.match.start(0), self.scanner0.match.end(0))
        else:
            return Token(type, value, self.scanner1.match.start(0), self.scanner1.match.end(0))

    def lex_word(self, word):
        scanner = self.scanner0
        mode_switches = self.mode_switches_01
        if self.mode == 1:
            scanner = self.scanner1
            mode_switches = self.mode_switches_10

        tokens, remainder = scanner.scan(word)
        remainder_used = 0

        for i, t in enumerate(tokens):
            if t.type in mode_switches:
                # Combine post-switch tokens with remainder and
                # scan in other mode
                self.mode = 1 - self.mode  # swap 0/1
                remainder_used = 1
                tokens = tokens[: i + 1] + self.lex_word(
                    word[word.index(t.value) + len(t.value) :]
                )
                break

        if remainder and not remainder_used:
            msg = "Invalid character, '{0}',".format(remainder[0])
            msg += " in '{0}' at index {1}".format(word, word.index(remainder))
            raise LexError(msg, word, word.index(remainder))

        return tokens

    def lex(self, text):
        lexed = []
        for word in text:
            tokens = self.lex_word(word)
            lexed.extend(tokens)
        return lexed


class Parser(object):
    """Base class for simple recursive descent parsers."""

    __slots__ = "tokens", "token", "next", "lexer", "text"

    def __init__(self, lexer):
        self.tokens = iter([])  # iterators over tokens, handled in order.
        self.token = Token(None)  # last accepted token
        self.next = None  # next token
        self.lexer = lexer
        self.text = None

    def gettok(self):
        """Puts the next token in the input stream into self.next."""
        try:
            self.next = next(self.tokens)
        except StopIteration:
            self.next = None

    def push_tokens(self, iterable):
        """Adds all tokens in some iterable to the token stream."""
        self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
        self.gettok()

    def accept(self, id):
        """Put the next symbol in self.token if accepted, then call gettok()"""
        if self.next and self.next.is_a(id):
            self.token = self.next
            self.gettok()
            return True
        return False

    def next_token_error(self, message):
        """Raise an error about the next token in the stream."""
        raise ParseError(message, self.text[0], self.token.end)

    def last_token_error(self, message):
        """Raise an error about the previous token in the stream."""
        raise ParseError(message, self.text[0], self.token.start)

    def unexpected_token(self):
        self.next_token_error("Unexpected token: '%s'" % self.next.value)

    def expect(self, id):
        """Like accept(), but fails if we don't like the next token."""
        if self.accept(id):
            return True
        else:
            if self.next:
                self.unexpected_token()
            else:
                self.next_token_error("Unexpected end of input")
            sys.exit(1)

    def setup(self, text):
        if isinstance(text, str):
            # shlex does not handle Windows path
            # separators, so we must normalize to posix
            text = sp.convert_to_posix_path(text)
            text = shlex.split(str(text))
        self.text = text
        self.push_tokens(self.lexer.lex(text))

    def parse(self, text):
        self.setup(text)
        return self.do_parse()


class ParseError(spack.error.SpackError):
    """Raised when we hit an error while parsing."""

    def __init__(self, message, string, pos):
        super(ParseError, self).__init__(message)
        self.string = string
        self.pos = pos


class LexError(ParseError):
    """Raised when we don't know how to lex something."""

    def __init__(self, message, string, pos):
        super(LexError, self).__init__(message, string, pos)
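To see how the two scanners and the mode-switch lists cooperate, here is a minimal, self-contained sketch (editor's illustration, not part of the diff): token type `EQ` flips the lexer into a value-scanning mode, the same trick `SpecLexer` uses further down for `cflags=-O2`-style assignments.

    ID, EQ, VAL = 0, 1, 2

    class KeyValueLexer(Lexer):
        def __init__(self):
            super(KeyValueLexer, self).__init__(
                [
                    (r"\w+", lambda scanner, val: self.token(ID, val)),
                    (r"=", lambda scanner, val: self.token(EQ, val)),
                ],
                [EQ],  # seeing EQ switches scanner 0 -> 1
                [(r"\S+", lambda scanner, val: self.token(VAL, val))],
                [VAL],  # emitting VAL switches scanner 1 -> 0
            )

    print(KeyValueLexer().lex(["cflags=-O2"]))
    # -> [<0: 'cflags'>, <1: '='>, <2: '-O2'>]
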
@@ -1,522 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Parser for spec literals

Here is the EBNF grammar for a spec::

    spec          = [name] [node_options] { ^ node } |
                    [name] [node_options] hash |
                    filename

    node          = name [node_options] |
                    [name] [node_options] hash |
                    filename

    node_options  = [@(version_list|version_pair)] [%compiler] { variant }

    hash          = / id
    filename      = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml)

    name          = id | namespace id
    namespace     = { id . }

    variant       = bool_variant | key_value | propagated_bv | propagated_kv
    bool_variant  = +id | ~id | -id
    propagated_bv = ++id | ~~id | --id
    key_value     = id=id | id=quoted_id
    propagated_kv = id==id | id==quoted_id

    compiler      = id [@version_list]

    version_pair  = git_version=vid
    version_list  = (version|version_range) [ { , (version|version_range)} ]
    version_range = vid:vid | vid: | :vid | :
    version       = vid

    git_version   = git.(vid) | git_hash
    git_hash      = [A-Fa-f0-9]{40}

    quoted_id     = " id_with_ws " | ' id_with_ws '
    id_with_ws    = [a-zA-Z0-9_][a-zA-Z_0-9-.\\s]*
    vid           = [a-zA-Z0-9_][a-zA-Z_0-9-.]*
    id            = [a-zA-Z0-9_][a-zA-Z_0-9-]*

Identifiers using the <name>=<value> command, such as architectures and
compiler flags, require a space before the name.

There is one context-sensitive part: ids in versions may contain '.', while
other ids may not.

There is one ambiguity: since '-' is allowed in an id, you need to put
whitespace before -variant for it to be tokenized properly. You can
either use whitespace, or you can just use ~variant since it means the same
thing. Spack uses ~variant in directory names and in the canonical form of
specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import enum
import pathlib
import re
from typing import Iterator, List, Match, Optional

from llnl.util.tty import color

import spack.error
import spack.spec
import spack.variant
import spack.version

#: Valid name for specs and variants. Here we are not using
#: the previous "w[\w.-]*" since that would match most
#: characters that can be part of a word in any language
IDENTIFIER = r"([a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
DOTTED_IDENTIFIER = rf"({IDENTIFIER}(\.{IDENTIFIER})+)"
GIT_HASH = r"([A-Fa-f0-9]{40})"
GIT_VERSION = rf"((git\.({DOTTED_IDENTIFIER}|{IDENTIFIER}))|({GIT_HASH}))"

NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*"

HASH = r"[a-zA-Z_0-9]+"

#: A filename starts either with a "." or a "/" or a "{name}/"
FILENAME = r"(\.|\/|[a-zA-Z0-9-_]*\/)([a-zA-Z0-9-_\.\/]*)(\.json|\.yaml)"

VALUE = r"([a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+([a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"

VERSION = r"([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)"
VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*"


class TokenBase(enum.Enum):
    """Base class for an enum type with a regex value"""

    def __new__(cls, *args, **kwargs):
        # See
        value = len(cls.__members__) + 1
        obj = object.__new__(cls)
        obj._value_ = value
        return obj

    def __init__(self, regex):
        self.regex = regex

    def __str__(self):
        return f"{self._name_}"


class TokenType(TokenBase):
    """Enumeration of the different token kinds in the spec grammar.

    Order of declaration is extremely important, since text containing specs is parsed with a
    single regex obtained by ``"|".join(...)`` of all the regex in the order of declaration.
    """

    # Dependency
    DEPENDENCY = r"(\^)"
    # Version
    VERSION_HASH_PAIR = rf"(@({GIT_VERSION})=({VERSION}))"
    VERSION = rf"(@\s*({VERSION_LIST}))"
    # Variants
    PROPAGATED_BOOL_VARIANT = rf"((\+\+|~~|--)\s*{NAME})"
    BOOL_VARIANT = rf"([~+-]\s*{NAME})"
    PROPAGATED_KEY_VALUE_PAIR = rf"({NAME}\s*==\s*({VALUE}|{QUOTED_VALUE}))"
    KEY_VALUE_PAIR = rf"({NAME}\s*=\s*({VALUE}|{QUOTED_VALUE}))"
    # Compilers
    COMPILER_AND_VERSION = rf"(%\s*({NAME})([\s]*)@\s*({VERSION_LIST}))"
    COMPILER = rf"(%\s*({NAME}))"
    # FILENAME
    FILENAME = rf"({FILENAME})"
    # Package name
    FULLY_QUALIFIED_PACKAGE_NAME = rf"({DOTTED_IDENTIFIER})"
    UNQUALIFIED_PACKAGE_NAME = rf"({IDENTIFIER})"
    # DAG hash
    DAG_HASH = rf"(/({HASH}))"
    # White spaces
    WS = r"(\s+)"
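
# Illustrative matches for the token kinds above (editor's note, not part of
# the original file):
#   "^mpich"      -> DEPENDENCY, then UNQUALIFIED_PACKAGE_NAME
#   "@1.2:1.4"    -> VERSION             "@git.develop=1.0" -> VERSION_HASH_PAIR
#   "+shared"     -> BOOL_VARIANT        "++shared"         -> PROPAGATED_BOOL_VARIANT
#   "cflags=-O2"  -> KEY_VALUE_PAIR      "%gcc@9.4"         -> COMPILER_AND_VERSION
#   "/abc123"     -> DAG_HASH
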
class ErrorTokenType(TokenBase):
    """Enum with regexes for error analysis"""

    # Unexpected character
    UNEXPECTED = r"(.[\s]*)"


class Token:
    """Represents tokens; generated from input by lexer and fed to parse()."""

    __slots__ = "kind", "value", "start", "end"

    def __init__(
        self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None
    ):
        self.kind = kind
        self.value = value
        self.start = start
        self.end = end

    def __repr__(self):
        return str(self)

    def __str__(self):
        return f"({self.kind}, {self.value})"

    def __eq__(self, other):
        return (self.kind == other.kind) and (self.value == other.value)


#: List of all the regexes used to match spec parts, in order of precedence
TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
#: List of all valid regexes followed by error analysis regexes
ERROR_HANDLING_REGEXES = TOKEN_REGEXES + [
    rf"(?P<{token}>{token.regex})" for token in ErrorTokenType
]
#: Regex to scan a valid text
ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
#: Regex to analyze an invalid text
ANALYSIS_REGEX = re.compile("|".join(ERROR_HANDLING_REGEXES))


def tokenize(text: str) -> Iterator[Token]:
    """Return a token generator from the text passed as input.

    Raises:
        SpecTokenizationError: if we can't tokenize anymore, but didn't reach the
            end of the input text.
    """
    scanner = ALL_TOKENS.scanner(text)  # type: ignore[attr-defined]
    match: Optional[Match] = None
    for match in iter(scanner.match, None):
        yield Token(
            TokenType.__members__[match.lastgroup],  # type: ignore[attr-defined]
            match.group(),  # type: ignore[attr-defined]
            match.start(),  # type: ignore[attr-defined]
            match.end(),  # type: ignore[attr-defined]
        )

    if match is None and not text:
        # We just got an empty string
        return

    if match is None or match.end() != len(text):
        scanner = ANALYSIS_REGEX.scanner(text)  # type: ignore[attr-defined]
        matches = [m for m in iter(scanner.match, None)]  # type: ignore[var-annotated]
        raise SpecTokenizationError(matches, text)
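
# Example (editor's note, not part of the original file): tokenizing a simple
# spec literal yields one Token per syntactic element, whitespace included:
#
#   >>> [str(t) for t in tokenize("zlib@1.2.13 +shared")]
#   ['(UNQUALIFIED_PACKAGE_NAME, zlib)', '(VERSION, @1.2.13)',
#    '(WS,  )', '(BOOL_VARIANT, +shared)']
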

class TokenContext:
    """Token context passed around by parsers"""

    __slots__ = "token_stream", "current_token", "next_token"

    def __init__(self, token_stream: Iterator[Token]):
        self.token_stream = token_stream
        self.current_token = None
        self.next_token = None
        self.advance()

    def advance(self):
        """Advance one token"""
        self.current_token, self.next_token = self.next_token, next(self.token_stream, None)

    def accept(self, kind: TokenType):
        """If the next token is of the specified kind, advance the stream and return True.
        Otherwise return False.
        """
        if self.next_token and self.next_token.kind == kind:
            self.advance()
            return True
        return False


class SpecParser:
    """Parse text into specs"""

    __slots__ = "literal_str", "ctx"

    def __init__(self, literal_str: str):
        self.literal_str = literal_str
        self.ctx = TokenContext(filter(lambda x: x.kind != TokenType.WS, tokenize(literal_str)))

    def tokens(self) -> List[Token]:
        """Return the entire list of tokens from the initial text. White spaces are
        filtered out.
        """
        return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))

    def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spec.Spec:
        """Return the next spec parsed from text.

        Args:
            initial_spec: object where to parse the spec. If None a new one
                will be created.

        Return
            The spec that was parsed
        """
        initial_spec = initial_spec or spack.spec.Spec()
        root_spec = SpecNodeParser(self.ctx).parse(initial_spec)
        while True:
            if self.ctx.accept(TokenType.DEPENDENCY):
                dependency = SpecNodeParser(self.ctx).parse(spack.spec.Spec())

                if dependency == spack.spec.Spec():
                    msg = (
                        "this dependency sigil needs to be followed by a package name "
                        "or a node attribute (version, variant, etc.)"
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

                if root_spec.concrete:
                    raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

                root_spec._add_dependency(dependency, ())

            else:
                break

        return root_spec

    def all_specs(self) -> List[spack.spec.Spec]:
        """Return all the specs that remain to be parsed"""
        return list(iter(self.next_spec, spack.spec.Spec()))


class SpecNodeParser:
    """Parse a single spec node from a stream of tokens"""

    __slots__ = "ctx", "has_compiler", "has_version", "has_hash"

    def __init__(self, ctx):
        self.ctx = ctx
        self.has_compiler = False
        self.has_version = False
        self.has_hash = False

    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        """Parse a single spec node from a stream of tokens

        Args:
            initial_spec: object to be constructed

        Return
            The object passed as argument
        """
        import spack.environment  # Needed to retrieve by hash

        # If we start with a package name we have a named spec, we cannot
        # accept another package name afterwards in a node
        if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME):
            initial_spec.name = self.ctx.current_token.value
        elif self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME):
            parts = self.ctx.current_token.value.split(".")
            name = parts[-1]
            namespace = ".".join(parts[:-1])
            initial_spec.name = name
            initial_spec.namespace = namespace
        elif self.ctx.accept(TokenType.FILENAME):
            return FileParser(self.ctx).parse(initial_spec)

        while True:
            if self.ctx.accept(TokenType.COMPILER):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_compiler:
                    raise spack.spec.DuplicateCompilerSpecError(
                        f"{initial_spec} cannot have multiple compilers"
                    )

                compiler_name = self.ctx.current_token.value[1:]
                initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                self.has_compiler = True
            elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_compiler:
                    raise spack.spec.DuplicateCompilerSpecError(
                        f"{initial_spec} cannot have multiple compilers"
                    )

                compiler_name, compiler_version = self.ctx.current_token.value[1:].split("@")
                initial_spec.compiler = spack.spec.CompilerSpec(
                    compiler_name.strip(), compiler_version
                )
                self.has_compiler = True
            elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                TokenType.VERSION_HASH_PAIR
            ):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_version:
                    raise spack.spec.MultipleVersionError(
                        f"{initial_spec} cannot have multiple versions"
                    )

                version_list = spack.version.VersionList()
                version_list.add(spack.version.from_string(self.ctx.current_token.value[1:]))
                initial_spec.versions = version_list

                # Add a git lookup method for GitVersions
                if (
                    initial_spec.name
                    and initial_spec.versions.concrete
                    and isinstance(initial_spec.version, spack.version.GitVersion)
                ):
                    initial_spec.version.generate_git_lookup(initial_spec.fullname)

                self.has_version = True
            elif self.ctx.accept(TokenType.BOOL_VARIANT):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                variant_value = self.ctx.current_token.value[0] == "+"
                initial_spec._add_flag(
                    self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                )
            elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                variant_value = self.ctx.current_token.value[0:2] == "++"
                initial_spec._add_flag(
                    self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                )
            elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ")
                initial_spec._add_flag(name, value, propagate=False)
            elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ")
                initial_spec._add_flag(name, value, propagate=True)
            elif not self.has_hash and self.ctx.accept(TokenType.DAG_HASH):
                dag_hash = self.ctx.current_token.value[1:]
                matches = []
                if spack.environment.active_environment():
                    matches = spack.environment.active_environment().get_by_hash(dag_hash)
                if not matches:
                    matches = spack.store.db.get_by_hash(dag_hash)
                if not matches:
                    raise spack.spec.NoSuchHashError(dag_hash)

                if len(matches) != 1:
                    raise spack.spec.AmbiguousHashError(
                        f"Multiple packages specify hash beginning '{dag_hash}'.", *matches
                    )
                spec_by_hash = matches[0]
                if not spec_by_hash.satisfies(initial_spec):
                    raise spack.spec.InvalidHashError(initial_spec, spec_by_hash.dag_hash())
                initial_spec._dup(spec_by_hash)

                self.has_hash = True
            else:
                break

        return initial_spec

    def hash_not_parsed_or_raise(self, spec, addition):
        if not self.has_hash:
            return

        raise spack.spec.RedundantSpecError(spec, addition)


class FileParser:
    """Parse a single spec from a JSON or YAML file"""

    __slots__ = ("ctx",)

    def __init__(self, ctx):
        self.ctx = ctx

    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        """Parse a spec tree from a specfile.

        Args:
            initial_spec: object where to parse the spec

        Return
            The initial_spec passed as argument, once constructed
        """
        file = pathlib.Path(self.ctx.current_token.value)

        if not file.exists():
            raise spack.spec.NoSuchSpecFileError(f"No such spec file: '{file}'")

        with file.open("r", encoding="utf-8") as stream:
            if str(file).endswith(".json"):
                spec_from_file = spack.spec.Spec.from_json(stream)
            else:
                spec_from_file = spack.spec.Spec.from_yaml(stream)
        initial_spec._dup(spec_from_file)
        return initial_spec


def parse(text: str) -> List[spack.spec.Spec]:
    """Parse text into a list of specs

    Args:
        text (str): text to be parsed

    Return:
        List of specs
    """
    return SpecParser(text).all_specs()


def parse_one_or_raise(
    text: str, initial_spec: Optional[spack.spec.Spec] = None
) -> spack.spec.Spec:
    """Parse exactly one spec from text and return it, or raise

    Args:
        text (str): text to be parsed
        initial_spec: buffer where to parse the spec. If None a new one will be created.
    """
    stripped_text = text.strip()
    parser = SpecParser(stripped_text)
    result = parser.next_spec(initial_spec)
    last_token = parser.ctx.current_token

    if last_token is not None and last_token.end != len(stripped_text):
        message = "a single spec was requested, but parsed more than one:"
        message += f"\n{text}"
        if last_token is not None:
            underline = f"\n{' ' * last_token.end}{'^' * (len(text) - last_token.end)}"
            message += color.colorize(f"@*r{{{underline}}}")
        raise ValueError(message)

    return result


class SpecSyntaxError(Exception):
    """Base class for Spec syntax errors"""


class SpecTokenizationError(SpecSyntaxError):
    """Syntax error in a spec string"""

    def __init__(self, matches, text):
        message = "unexpected tokens in the spec string\n"
        message += f"{text}"

        underline = "\n"
        for match in matches:
            if match.lastgroup == str(ErrorTokenType.UNEXPECTED):
                underline += f"{'^' * (match.end() - match.start())}"
                continue
            underline += f"{' ' * (match.end() - match.start())}"

        message += color.colorize(f"@*r{{{underline}}}")
        super().__init__(message)


class SpecParsingError(SpecSyntaxError):
    """Error when parsing tokens"""

    def __init__(self, message, token, text):
        message += f"\n{text}"
        underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
        message += color.colorize(f"@*r{{{underline}}}")
        super().__init__(message)
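For orientation, this is how the module deleted above was driven before the revert; a minimal sketch assuming a pre-revert Spack checkout on `sys.path`:

    import spack.parser

    # Inspect the token stream without building specs:
    for token in spack.parser.SpecParser("zlib@1.2:1.4 +shared %gcc").tokens():
        print(token)

    # Parse, raising if the string parses to more than one spec:
    spec = spack.parser.parse_one_or_raise("hdf5@1.12 +mpi ^zlib@1.2.13")
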
@@ -2,8 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Optional

import llnl.util.lang

import spack.error
@@ -39,18 +37,18 @@ class attributes such as priority, front_target, back_target, front_os, back_os.
    """

    # Subclass sets number. Controls detection order
    priority: Optional[int] = None
    priority = None  # type: int

    #: binary formats used on this platform; used by relocation logic
    binary_formats = ["elf"]

    front_end: Optional[str] = None
    back_end: Optional[str] = None
    default: Optional[str] = None  # The default back end target.
    front_end = None  # type: str
    back_end = None  # type: str
    default = None  # type: str  # The default back end target.

    front_os: Optional[str] = None
    back_os: Optional[str] = None
    default_os: Optional[str] = None
    front_os = None  # type: str
    back_os = None  # type: str
    default_os = None  # type: str

    reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
    reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
@@ -29,9 +29,8 @@ class Test(Platform):
    back_os = "debian6"
    default_os = "debian6"

    def __init__(self, name=None):
        name = name or "test"
        super(Test, self).__init__(name)
    def __init__(self):
        super(Test, self).__init__("test")
        self.add_target(self.default, spack.target.Target(self.default))
        self.add_target(self.front_end, spack.target.Target(self.front_end))

@@ -18,6 +18,7 @@
from llnl.util.lang import memoized
from llnl.util.symlink import symlink

import spack.bootstrap
import spack.paths
import spack.platforms
import spack.repo
@@ -91,8 +92,6 @@ def __init__(self, old, new, full_old_string):
@memoized
def _patchelf():
    """Return the full path to the patchelf binary, if available, else None."""
    import spack.bootstrap

    if is_macos:
        return None

@@ -9,7 +9,7 @@
import errno
import functools
import importlib
import importlib.machinery
import importlib.machinery  # novm
import importlib.util
import inspect
import itertools
@@ -24,7 +24,7 @@
import traceback
import types
import uuid
from typing import Dict
from typing import Dict  # novm

import ruamel.yaml as yaml

@@ -79,7 +79,7 @@ def namespace_from_fullname(fullname):
    return namespace


class _PrependFileLoader(importlib.machinery.SourceFileLoader):
class _PrependFileLoader(importlib.machinery.SourceFileLoader):  # novm
    def __init__(self, fullname, path, prepend=None):
        super(_PrependFileLoader, self).__init__(fullname, path)
        self.prepend = prepend
@@ -144,7 +144,7 @@ def find_spec(self, fullname, python_path, target=None):
        loader = self.compute_loader(fullname)
        if loader is None:
            return None
        return importlib.util.spec_from_loader(fullname, loader)
        return importlib.util.spec_from_loader(fullname, loader)  # novm

    def compute_loader(self, fullname):
        # namespaces are added to repo, and package modules are leaves.
@@ -366,7 +366,7 @@ class FastPackageChecker(collections.abc.Mapping):
    """

    #: Global cache, reused by every instance
    _paths_cache: Dict[str, Dict[str, os.stat_result]] = {}
    _paths_cache = {}  # type: Dict[str, Dict[str, os.stat_result]]

    def __init__(self, packages_path):
        # The path of the repository managed by this instance
@@ -384,7 +384,7 @@ def invalidate(self):
        self._paths_cache[self.packages_path] = self._create_new_cache()
        self._packages_to_stats = self._paths_cache[self.packages_path]

    def _create_new_cache(self) -> Dict[str, os.stat_result]:
    def _create_new_cache(self):  # type: () -> Dict[str, os.stat_result]
        """Create a new cache for packages in a repo.

        The implementation here should try to minimize filesystem
@@ -394,7 +394,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
        """
        # Create a dictionary that will store the mapping between a
        # package name and its stat info
        cache: Dict[str, os.stat_result] = {}
        cache = {}  # type: Dict[str, os.stat_result]
        for pkg_name in os.listdir(self.packages_path):
            # Skip non-directories in the package root.
            pkg_dir = os.path.join(self.packages_path, pkg_name)
@@ -754,14 +754,6 @@ def _all_package_names(self, include_virtuals):
    def all_package_names(self, include_virtuals=False):
        return self._all_package_names(include_virtuals)

    def package_path(self, name):
        """Get path to package.py file for this repo."""
        return self.repo_for_pkg(name).package_path(name)

    def all_package_paths(self):
        for name in self.all_package_names():
            yield self.package_path(name)

    def packages_with_tags(self, *tags):
        r = set()
        for repo in self.repos:
@@ -1161,14 +1153,6 @@ def all_package_names(self, include_virtuals=False):
            return names
        return [x for x in names if not self.is_virtual(x)]

    def package_path(self, name):
        """Get path to package.py file for this repo."""
        return os.path.join(self.root, packages_dir_name, name, package_file_name)

    def all_package_paths(self):
        for name in self.all_package_names():
            yield self.package_path(name)

    def packages_with_tags(self, *tags):
        v = set(self.all_package_names())
        index = self.tag_index

@@ -4,12 +4,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import urllib.error
import urllib.parse
import urllib.request
import urllib.response
from io import BufferedReader, IOBase

import spack.util.s3 as s3_util
import spack.util.url as url_util


# NOTE(opadron): Workaround issue in boto where its StreamingBody
@@ -43,8 +43,8 @@ def __getattr__(self, key):


def _s3_open(url):
    parsed = urllib.parse.urlparse(url)
    s3 = s3_util.get_s3_session(url, method="fetch")
    parsed = url_util.parse(url)
    s3 = s3_util.create_s3_session(parsed, connection=s3_util.get_mirror_connection(parsed))

    bucket = parsed.netloc
    key = parsed.path
@@ -70,6 +70,20 @@ def s3_open(self, req):
            url, headers, stream = _s3_open(orig_url)
            return urllib.response.addinfourl(stream, headers, url)
        except ClientError as err:
            # if no such [KEY], but [KEY]/index.html exists,
            # return that, instead.
            if err.response["Error"]["Code"] == "NoSuchKey":
                try:
                    _, headers, stream = _s3_open(url_util.join(orig_url, "index.html"))
                    return urllib.response.addinfourl(stream, headers, orig_url)

                except ClientError as err2:
                    if err2.response["Error"]["Code"] == "NoSuchKey":
                        # raise original error
                        raise urllib.error.URLError(err) from err

                    raise urllib.error.URLError(err2) from err2

            raise urllib.error.URLError(err) from err


@@ -8,14 +8,14 @@
import llnl.util.lang
import llnl.util.tty

import spack.spec


# jsonschema is imported lazily as it is heavy to import
# and increases the start-up time
def _make_validator():
    import jsonschema

    import spack.parser

    def _validate_spec(validator, is_spec, instance, schema):
        """Check if the attributes on instance are valid specs."""
        import jsonschema
@@ -25,9 +25,11 @@ def _validate_spec(validator, is_spec, instance, schema):

        for spec_str in instance:
            try:
                spack.parser.parse(spec_str)
            except spack.parser.SpecSyntaxError as e:
                yield jsonschema.ValidationError(str(e))
                spack.spec.parse(spec_str)
            except spack.spec.SpecParseError as e:
                yield jsonschema.ValidationError(
                    '"{0}" is an invalid spec [{1}]'.format(spec_str, str(e))
                )

    def _deprecated_properties(validator, deprecated, instance, schema):
        if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):

@@ -30,6 +30,7 @@

import spack
import spack.binary_distribution
import spack.bootstrap
import spack.cmd
import spack.compilers
import spack.config
@@ -496,12 +497,10 @@ def _compute_specs_from_answer_set(self):
        best = min(self.answers)
        opt, _, answer = best
        for input_spec in self.abstract_specs:
            key = (input_spec.name, "0")
            key = input_spec.name
            if input_spec.virtual:
                providers = [
                    spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
                ]
                key = (providers[0], "0")
                providers = [spec.name for spec in answer.values() if spec.package.provides(key)]
                key = providers[0]
            candidate = answer.get(key)

            if candidate and candidate.satisfies(input_spec):
@@ -542,10 +541,8 @@ def bootstrap_clingo():
    global clingo, ASTType, parse_files

    if not clingo:
        import spack.bootstrap

        with spack.bootstrap.ensure_bootstrap_configuration():
            spack.bootstrap.ensure_core_dependencies()
            spack.bootstrap.ensure_clingo_importable_or_raise()
            import clingo

    from clingo.ast import ASTType
@@ -1564,9 +1561,7 @@ class Body(object):
            for dtype in dspec.deptypes:
                # skip build dependencies of already-installed specs
                if concrete_build_deps or dtype != "build":
                    clauses.append(
                        fn.attr("depends_on_unknown", spec.name, dep.name, dtype)
                    )
                    clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))

            # Ensure Spack will not co-concretize this with another provider
            # for the same virtual
@@ -2175,102 +2170,89 @@ def __init__(self, specs, hash_lookup=None):
        # from this dictionary during reconstruction
        self._hash_lookup = hash_lookup or {}

    def hash(self, pkg, psid, h):
        key = (pkg, psid)
        if key not in self._specs:
            self._specs[key] = self._hash_lookup[h]
    def hash(self, pkg, h):
        if pkg not in self._specs:
            self._specs[pkg] = self._hash_lookup[h]

    def node(self, pkg, psid):
        key = (pkg, psid)
        if key not in self._specs:
            self._specs[key] = spack.spec.Spec(pkg)
    def node(self, pkg):
        if pkg not in self._specs:
            self._specs[pkg] = spack.spec.Spec(pkg)

    def _arch(self, pkg, psid):
        key = (pkg, psid)
        arch = self._specs[key].architecture
    def _arch(self, pkg):
        arch = self._specs[pkg].architecture
        if not arch:
            arch = spack.spec.ArchSpec()
            self._specs[key].architecture = arch
            self._specs[pkg].architecture = arch
        return arch

    def node_platform(self, pkg, psid, platform):
        self._arch(pkg, psid).platform = platform
    def node_platform(self, pkg, platform):
        self._arch(pkg).platform = platform

    def node_os(self, pkg, psid, os):
        self._arch(pkg, psid).os = os
    def node_os(self, pkg, os):
        self._arch(pkg).os = os

    def node_target(self, pkg, psid, target):
        self._arch(pkg, psid).target = target
    def node_target(self, pkg, target):
        self._arch(pkg).target = target

    def variant_value(self, pkg, psid, name, value):
    def variant_value(self, pkg, name, value):
        # FIXME: is there a way not to special case 'dev_path' everywhere?
        key = (pkg, psid)
        if name == "dev_path":
            self._specs[key].variants.setdefault(
            self._specs[pkg].variants.setdefault(
                name, spack.variant.SingleValuedVariant(name, value)
            )
            return

        if name == "patches":
            self._specs[key].variants.setdefault(
            self._specs[pkg].variants.setdefault(
                name, spack.variant.MultiValuedVariant(name, value)
            )
            return

        self._specs[key].update_variant_validate(name, value)
        self._specs[pkg].update_variant_validate(name, value)

    def version(self, pkg, psid, version):
        key = (pkg, psid)
        self._specs[key].versions = spack.version.ver([version])
    def version(self, pkg, version):
        self._specs[pkg].versions = spack.version.ver([version])

    def node_compiler(self, pkg, psid, compiler):
        key = (pkg, psid)
        self._specs[key].compiler = spack.spec.CompilerSpec(compiler)
    def node_compiler(self, pkg, compiler):
        self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)

    def node_compiler_version(self, pkg, psid, compiler, version):
        key = (pkg, psid)
        self._specs[key].compiler.versions = spack.version.VersionList([version])
    def node_compiler_version(self, pkg, compiler, version):
        self._specs[pkg].compiler.versions = spack.version.VersionList([version])

    def node_flag_compiler_default(self, pkg, psid):
        key = (pkg, psid)
        self._flag_compiler_defaults.add(key)
    def node_flag_compiler_default(self, pkg):
        self._flag_compiler_defaults.add(pkg)

    def node_flag(self, pkg, psid, flag_type, flag):
        key = (pkg, psid)
        self._specs[key].compiler_flags.add_flag(flag_type, flag, False)
    def node_flag(self, pkg, flag_type, flag):
        self._specs[pkg].compiler_flags.add_flag(flag_type, flag, False)

    def node_flag_source(self, pkg, psid, flag_type, source):
        self._flag_sources[(pkg, psid, flag_type)].add(source)
    def node_flag_source(self, pkg, flag_type, source):
        self._flag_sources[(pkg, flag_type)].add(source)

    def no_flags(self, pkg, psid, flag_type):
        key = (pkg, psid)
        self._specs[key].compiler_flags[flag_type] = []
    def no_flags(self, pkg, flag_type):
        self._specs[pkg].compiler_flags[flag_type] = []

    def external_spec_selected(self, pkg, psid, idx):
    def external_spec_selected(self, pkg, idx):
        """This means that the external spec and index idx
        has been selected for this package.
        """
        packages_yaml = spack.config.get("packages")
        packages_yaml = _normalize_packages_yaml(packages_yaml)
        spec_info = packages_yaml[pkg]["externals"][int(idx)]
        key = (pkg, psid)
        self._specs[key].external_path = spec_info.get("prefix", None)
        self._specs[key].external_modules = spack.spec.Spec._format_module_list(
        self._specs[pkg].external_path = spec_info.get("prefix", None)
        self._specs[pkg].external_modules = spack.spec.Spec._format_module_list(
            spec_info.get("modules", None)
        )
        self._specs[key].extra_attributes = spec_info.get("extra_attributes", {})
        self._specs[pkg].extra_attributes = spec_info.get("extra_attributes", {})

    def depends_on(self, pkg, psid1, dep, psid2, type):
        pkg_key = (pkg, psid1)
        dep_key = (dep, psid2)
        dependencies = self._specs[pkg_key].edges_to_dependencies(name=dep)
    def depends_on(self, pkg, dep, type):
        dependencies = self._specs[pkg].edges_to_dependencies(name=dep)

        # TODO: assertion to be removed when cross-compilation is handled correctly
        msg = "Current solver does not handle multiple dependency edges of the same name"
        assert len(dependencies) < 2, msg

        if not dependencies:
            self._specs[pkg_key].add_dependency_edge(self._specs[dep_key], (type,))
            self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
        else:
            # TODO: This assumes that each solve unifies dependencies
            dependencies[0].add_type(type)
@@ -2286,44 +2268,50 @@ def reorder_flags(self):
        The solver determines which flags are on nodes; this routine
        imposes order afterwards.
        """
        # nodes with no flags get flag order from compiler
        compilers = dict((c.spec, c) for c in all_compilers_in_config())
        for pkg in self._flag_compiler_defaults:
            spec = self._specs[pkg]
            compiler_flags = compilers[spec.compiler].flags
            for key in spec.compiler_flags:
                spec_compiler_flags_set = set(spec.compiler_flags.get(key, []))
                compiler_flags_set = set(compiler_flags.get(key, []))

                assert spec_compiler_flags_set == compiler_flags_set, "%s does not equal %s" % (
                    spec_compiler_flags_set,
                    compiler_flags_set,
                )

                spec.compiler_flags[key] = compiler_flags.get(key, [])
        # index of all specs (and deps) from the command line by name
        cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

        for key, spec in self._specs.items():
            name, psid = key
            # if bootstrapping, compiler is not in config and has no flags
            flagmap_from_compiler = {}
            if spec.compiler in compilers:
                flagmap_from_compiler = compilers[spec.compiler].flags
        # iterate through specs with specified flags
        for key, sources in self._flag_sources.items():
            pkg, flag_type = key
            spec = self._specs[pkg]
            compiler_flags = spec.compiler_flags.get(flag_type, [])

            for flag_type in spec.compiler_flags.valid_compiler_flags():
                from_compiler = flagmap_from_compiler.get(flag_type, [])
                from_sources = []
            # order is determined by the DAG. A spec's flags come after
            # any from its ancestors on the compile line.
            order = [s.name for s in spec.traverse(order="post", direction="parents")]

                # order is determined by the DAG. A spec's flags come after any of its ancestors
                # on the compile line
                source_key = (spec.name, psid, flag_type)
                if source_key in self._flag_sources:
                    order = [s.name for s in spec.traverse(order="post", direction="parents")]
                    sorted_sources = sorted(
                        self._flag_sources[source_key], key=lambda s: order.index(s)
                    )
            # sort the sources in our DAG order
            sorted_sources = sorted(sources, key=lambda s: order.index(s))

                    # add flags from each source, lowest to highest precedence
                    for source_name in sorted_sources:
                        source = cmd_specs[source_name]
                        extend_flag_list(from_sources, source.compiler_flags.get(flag_type, []))
            # add flags from each source, lowest to highest precedence
            flags = []
            for source_name in sorted_sources:
                source = cmd_specs[source_name]
                extend_flag_list(flags, source.compiler_flags.get(flag_type, []))

                # compiler flags from compilers config are lowest precedence
                ordered_compiler_flags = from_compiler + from_sources
            compiler_flags = spec.compiler_flags.get(flag_type, [])
            assert set(compiler_flags) == set(flags), "%s does not equal %s" % (
                set(compiler_flags),
                set(flags),
            )
            spec.compiler_flags.update({flag_type: source.compiler_flags[flag_type]})

                msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags))
                assert set(compiler_flags) == set(ordered_compiler_flags), msg

                spec.compiler_flags.update({flag_type: ordered_compiler_flags})

    def deprecated(self, pkg, psid, version):
    def deprecated(self, pkg, version):
        msg = 'using "{0}@{1}" which is a deprecated version'
        tty.warn(msg.format(pkg, version))

@@ -2371,14 +2359,12 @@ def build_specs(self, function_tuples):
            # predicates on virtual packages.
            if name != "error":
                pkg = args[0]
                psid = args[1]
                if spack.repo.path.is_virtual(pkg):
                    continue

                # if we've already gotten a concrete spec for this pkg,
                # do not bother calling actions on it.
                key = (pkg, psid)
                spec = self._specs.get(key)
                spec = self._specs.get(pkg)
                if spec and spec.concrete:
                    continue

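The ordering invariant in `reorder_flags` is easiest to see on a toy DAG; a hedged illustration (spec names are hypothetical) of the sort it performs:

    # A node's own flags come after those of its ancestors on the compile
    # line, so sorting sources by the post-order parent traversal puts
    # ancestor-supplied flags first (lowest precedence).
    order = ["mpileaks", "callpath"]    # ancestors first, the node last
    sources = {"callpath", "mpileaks"}  # command-line specs carrying cflags
    sorted_sources = sorted(sources, key=lambda s: order.index(s))
    print(sorted_sources)               # ['mpileaks', 'callpath']
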
File diff suppressed because it is too large
@@ -10,10 +10,9 @@
%==============================================================================

% Spec attributes
#show attr/2.
#show attr/3.
#show attr/4.
#show attr/5.
#show attr/6.

% names of optimization criteria
#show opt_criterion/2.

@@ -47,6 +47,37 @@

    6. The architecture to build with. This is needed on machines where
       cross-compilation is required

Here is the EBNF grammar for a spec::

    spec-list    = { spec [ dep-list ] }
    dep_list     = { ^ spec }
    spec         = id [ options ]
    options      = { @version-list | ++variant | +variant |
                     --variant | -variant | ~~variant | ~variant |
                     variant=value | variant==value | %compiler |
                     arch=architecture | [ flag ]==value | [ flag ]=value}
    flag         = { cflags | cxxflags | fcflags | fflags | cppflags |
                     ldflags | ldlibs }
    variant      = id
    architecture = id
    compiler     = id [ version-list ]
    version-list = version [ { , version } ]
    version      = id | id: | :id | id:id
    id           = [A-Za-z0-9_][A-Za-z0-9_.-]*

Identifiers using the <name>=<value> command, such as architectures and
compiler flags, require a space before the name.

There is one context-sensitive part: ids in versions may contain '.', while
other ids may not.

There is one ambiguity: since '-' is allowed in an id, you need to put
whitespace before -variant for it to be tokenized properly. You can
either use whitespace, or you can just use ~variant since it means the same
thing. Spack uses ~variant in directory names and in the canonical form of
specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import collections
import collections.abc
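For orientation, two strings this restored grammar accepts (editor's examples, not taken from the diff):

    mpileaks @1.2:1.4 +debug ~qt %intel@12.1
    hdf5 cflags="-O3 -g" ^zlib@1.2.8
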
@@ -70,6 +101,7 @@
|
||||
import spack.dependency as dp
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.parse
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.provider_index
|
||||
@@ -93,6 +125,8 @@
|
||||
__all__ = [
|
||||
"CompilerSpec",
|
||||
"Spec",
|
||||
"SpecParser",
|
||||
"parse",
|
||||
"SpecParseError",
|
||||
"ArchitecturePropagationError",
|
||||
"DuplicateDependencyError",
|
||||
@@ -550,9 +584,9 @@ def __init__(self, *args):
|
||||
# If there is one argument, it's either another CompilerSpec
|
||||
# to copy or a string to parse
|
||||
if isinstance(arg, str):
|
||||
spec = spack.parser.parse_one_or_raise(f"%{arg}")
|
||||
self.name = spec.compiler.name
|
||||
self.versions = spec.compiler.versions
|
||||
c = SpecParser().parse_compiler(arg)
|
||||
self.name = c.name
|
||||
self.versions = c.versions
|
||||
|
||||
elif isinstance(arg, CompilerSpec):
|
||||
self.name = arg.name
|
||||
@@ -568,8 +602,7 @@ def __init__(self, *args):
|
||||
name, version = args
|
||||
self.name = name
|
||||
self.versions = vn.VersionList()
|
||||
versions = vn.ver(version)
|
||||
self.versions.add(versions)
|
||||
self.versions.add(vn.ver(version))
|
||||
|
||||
else:
|
||||
raise TypeError("__init__ takes 1 or 2 arguments. (%d given)" % nargs)
|
||||
@@ -1252,7 +1285,6 @@ def __init__(
|
||||
self.external_path = external_path
|
||||
self.external_module = external_module
|
||||
"""
|
||||
import spack.parser
|
||||
|
||||
# Copy if spec_like is a Spec.
|
||||
if isinstance(spec_like, Spec):
|
||||
@@ -1289,7 +1321,7 @@ def __init__(
|
||||
# have package.py files for.
|
||||
self._normal = normal
|
||||
self._concrete = concrete
|
||||
self._external_path = external_path
|
||||
self.external_path = external_path
|
||||
self.external_modules = Spec._format_module_list(external_modules)
|
||||
|
||||
# This attribute is used to store custom information for
|
||||
@@ -1303,7 +1335,11 @@ def __init__(
|
||||
self._build_spec = None
|
||||
|
||||
if isinstance(spec_like, str):
|
||||
spack.parser.parse_one_or_raise(spec_like, self)
|
||||
spec_list = SpecParser(self).parse(spec_like)
|
||||
if len(spec_list) > 1:
|
||||
raise ValueError("More than one spec in string: " + spec_like)
|
||||
if len(spec_list) < 1:
|
||||
raise ValueError("String contains no specs: " + spec_like)
|
||||
|
||||
elif spec_like is not None:
|
||||
raise TypeError("Can't make spec out of %s" % type(spec_like))
|
||||
@@ -1326,14 +1362,6 @@ def _format_module_list(modules):
|
||||
modules = list(modules)
|
||||
return modules
|
||||
|
||||
@property
|
||||
def external_path(self):
|
||||
return pth.path_to_os_path(self._external_path)[0]
|
||||
|
||||
@external_path.setter
|
||||
def external_path(self, ext_path):
|
||||
self._external_path = ext_path
|
||||
|
||||
@property
|
||||
def external(self):
|
||||
return bool(self.external_path) or bool(self.external_modules)
|
||||
@@ -2933,10 +2961,9 @@ def _new_concretize(self, tests=False):
|
||||
providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
|
||||
name = providers[0]
|
||||
|
||||
key = (name, "0")
|
||||
assert key in answer
|
||||
assert name in answer
|
||||
|
||||
concretized = answer[key]
|
||||
concretized = answer[name]
|
||||
self._dup(concretized)
|
||||
|
||||
def concretize(self, tests=False):
|
||||
@@ -4947,6 +4974,421 @@ def __missing__(self, key):
 spec_id_re = r"\w[\w.-]*"


+class SpecLexer(spack.parse.Lexer):
+
+    """Parses tokens that make up spack specs."""
+
+    def __init__(self):
+        # Spec strings require posix-style paths on Windows
+        # because the result is later passed to shlex
+        filename_reg = (
+            r"[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
+            if not is_windows
+            else r"([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
+        )
+        super(SpecLexer, self).__init__(
+            [
+                (
+                    r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?",
+                    lambda scanner, val: self.token(VER, val),
+                ),
+                (r"\:", lambda scanner, val: self.token(COLON, val)),
+                (r"\,", lambda scanner, val: self.token(COMMA, val)),
+                (r"\^", lambda scanner, val: self.token(DEP, val)),
+                (r"\+\+", lambda scanner, val: self.token(D_ON, val)),
+                (r"\+", lambda scanner, val: self.token(ON, val)),
+                (r"\-\-", lambda scanner, val: self.token(D_OFF, val)),
+                (r"\-", lambda scanner, val: self.token(OFF, val)),
+                (r"\~\~", lambda scanner, val: self.token(D_OFF, val)),
+                (r"\~", lambda scanner, val: self.token(OFF, val)),
+                (r"\%", lambda scanner, val: self.token(PCT, val)),
+                (r"\=\=", lambda scanner, val: self.token(D_EQ, val)),
+                (r"\=", lambda scanner, val: self.token(EQ, val)),
+                # Filenames match before identifiers, so no initial filename
+                # component is parsed as a spec (e.g., in subdir/spec.yaml/json)
+                (filename_reg, lambda scanner, v: self.token(FILE, v)),
+                # Hash match after filename. No valid filename can be a hash
+                # (files end w/.yaml), but a hash can match a filename prefix.
+                (r"/", lambda scanner, val: self.token(HASH, val)),
+                # Identifiers match after filenames and hashes.
+                (spec_id_re, lambda scanner, val: self.token(ID, val)),
+                (r"\s+", lambda scanner, val: None),
+            ],
+            [D_EQ, EQ],
+            [
+                (r"[\S].*", lambda scanner, val: self.token(VAL, val)),
+                (r"\s+", lambda scanner, val: None),
+            ],
+            [VAL],
+        )
+
+
+# Lexer is always the same for every parser.
+_lexer = SpecLexer()
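As a rough illustration of the token stream the lexer above produces, here is a self-contained toy scanner. Token names mirror the real ones, but the patterns are simplified and nothing from Spack is imported:

    import re

    # Order matters, as in SpecLexer: '++' must be tried before '+', and so on.
    TOKEN_PATTERNS = [
        ("VER", r"@[\w.\-:,]*"),
        ("DEP", r"\^"),
        ("D_ON", r"\+\+"),
        ("ON", r"\+"),
        ("PCT", r"%"),
        ("HASH", r"/"),
        ("ID", r"\w[\w.\-]*"),
        ("WS", r"\s+"),
    ]

    def toy_lex(text):
        pos, tokens = 0, []
        while pos < len(text):
            for tag, pattern in TOKEN_PATTERNS:
                match = re.compile(pattern).match(text, pos)
                if match:
                    if tag != "WS":
                        tokens.append((tag, match.group()))
                    pos = match.end()
                    break
            else:
                raise ValueError("no token at: " + text[pos:])
        return tokens

    print(toy_lex("mpileaks@2.3+debug%gcc@4.5.0^callpath"))
    # [('ID', 'mpileaks'), ('VER', '@2.3'), ('ON', '+'), ('ID', 'debug'),
    #  ('PCT', '%'), ('ID', 'gcc'), ('VER', '@4.5.0'), ('DEP', '^'),
    #  ('ID', 'callpath')]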
+
+
+class SpecParser(spack.parse.Parser):
+    """Parses specs."""
+
+    __slots__ = "previous", "_initial"
+
+    def __init__(self, initial_spec=None):
+        """Construct a new SpecParser.
+
+        Args:
+            initial_spec (Spec, optional): provide a Spec that we'll parse
+                directly into. This is used to avoid construction of a
+                superfluous Spec object in the Spec constructor.
+        """
+        super(SpecParser, self).__init__(_lexer)
+        self.previous = None
+        self._initial = initial_spec
+
+    def do_parse(self):
+        specs = []
+
+        try:
+            while self.next:
+                # Try a file first, but if it doesn't succeed, keep parsing
+                # as from_file may backtrack and try an id.
+                if self.accept(FILE):
+                    spec = self.spec_from_file()
+                    if spec:
+                        specs.append(spec)
+                        continue
+
+                if self.accept(ID):
+                    self.previous = self.token
+                    if self.accept(EQ) or self.accept(D_EQ):
+                        # We're parsing an anonymous spec beginning with a
+                        # key-value pair.
+                        if not specs:
+                            self.push_tokens([self.previous, self.token])
+                            self.previous = None
+                            specs.append(self.spec(None))
+                        else:
+                            if specs[-1].concrete:
+                                # Trying to add k-v pair to spec from hash
+                                raise RedundantSpecError(specs[-1], "key-value pair")
+                            # We should never end up here: reaching this branch
+                            # requires starting a new spec with ID, EQ after
+                            # another spec that is not concrete. If the previous
+                            # spec is not concrete, this is handled in the spec
+                            # parsing loop; if it is concrete, see the if
+                            # statement above. If there is no previous spec, we
+                            # don't land in this else case.
+                            self.unexpected_token()
+                    else:
+                        # We're parsing a new spec by name
+                        self.previous = None
+                        specs.append(self.spec(self.token.value))
+                elif self.accept(HASH):
+                    # We're finding a spec by hash
+                    specs.append(self.spec_by_hash())
+
+                elif self.accept(DEP):
+                    if not specs:
+                        # We're parsing an anonymous spec beginning with a
+                        # dependency. Push the token to recover after creating
+                        # the anonymous spec.
+                        self.push_tokens([self.token])
+                        specs.append(self.spec(None))
+                    else:
+                        dep = None
+                        if self.accept(FILE):
+                            # this may return None, in which case we backtrack
+                            dep = self.spec_from_file()
+
+                        if not dep and self.accept(HASH):
+                            # We're finding a dependency by hash for an
+                            # anonymous spec
+                            dep = self.spec_by_hash()
+                            dep = dep.copy(deps=("link", "run"))
+
+                        if not dep:
+                            # We're adding a dependency to the last spec
+                            if self.accept(ID):
+                                self.previous = self.token
+                                if self.accept(EQ):
+                                    # This is an anonymous dep with a key=value
+                                    # pair; push tokens to be parsed as part of
+                                    # the dep spec
+                                    self.push_tokens([self.previous, self.token])
+                                    dep_name = None
+                                else:
+                                    # named dep (standard)
+                                    dep_name = self.token.value
+                                    self.previous = None
+                            else:
+                                # anonymous dep
+                                dep_name = None
+                            dep = self.spec(dep_name)
+
+                        # Raise an error if the previous spec is already
+                        # concrete (assigned by hash)
+                        if specs[-1].concrete:
+                            raise RedundantSpecError(specs[-1], "dependency")
+                        # command line deps get empty deptypes now.
+                        # Real deptypes are assigned later per packages.
+                        specs[-1]._add_dependency(dep, ())
+
+                else:
+                    # If the next token can be part of a valid anonymous spec,
+                    # create the anonymous spec
+                    if self.next.type in (VER, ON, D_ON, OFF, D_OFF, PCT):
+                        # Raise an error if the previous spec is already
+                        # concrete (assigned by hash)
+                        if specs and specs[-1]._hash:
+                            raise RedundantSpecError(specs[-1], "compiler, version, or variant")
+                        specs.append(self.spec(None))
+                    else:
+                        self.unexpected_token()
+
+        except spack.parse.ParseError as e:
+            raise SpecParseError(e) from e
+
+        # Generate lookups for git-commit-based versions
+        for spec in specs:
+            # Cannot do lookups for versions in anonymous specs.
+            # Only allow Version objects to use git for now.
+            # Note: VersionRange(x, x) is currently concrete, hence isinstance(...).
+            if spec.name and spec.versions.concrete and isinstance(spec.version, vn.GitVersion):
+                spec.version.generate_git_lookup(spec.fullname)
+
+        return specs
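A hedged usage sketch for do_parse(): one command line can carry several specs, and '^' attaches a dependency to the spec before it. Assumes a Spack tree on PYTHONPATH; package names are just examples:

    import spack.spec

    specs = spack.spec.parse("mpileaks@2.3 ^callpath libelf+debug")
    print([str(s) for s in specs])
    # Expect two top-level specs: 'mpileaks@2.3 ^callpath' and 'libelf+debug'.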
+
+    def spec_from_file(self):
+        """Read a spec from a filename parsed on the input stream.
+
+        There is some care taken here to ensure that filenames are a last
+        resort, and that any valid package name is parsed as a name
+        before we consider it as a file. Specs are used in lots of places;
+        we don't want the parser touching the filesystem unnecessarily.
+
+        The parse logic is as follows:
+
+        1. We require that filenames end in .yaml, which means that no valid
+           filename can be interpreted as a hash (hashes can't have '.')
+
+        2. We avoid treating paths like /path/to/spec.json as hashes, or paths
+           like subdir/spec.json as ids, by lexing filenames before hashes.
+
+        3. For spec names that match file and id regexes, like 'builtin.yaml',
+           we backtrack from spec_from_file() and treat them as spec names.
+        """
+        path = self.token.value
+
+        # Special case where someone omits a space after a filename. Consider:
+        #
+        #     libdwarf^/some/path/to/libelf.yamllibdwarf ^../../libelf.yaml
+        #
+        # The error is clearly an omitted space. To handle this, the FILE
+        # regex admits text *beyond* .yaml, and we raise a nice error for
+        # file names that don't end in .yaml.
+        if not (path.endswith(".yaml") or path.endswith(".json")):
+            raise SpecFilenameError("Spec filename must end in .yaml or .json: '{0}'".format(path))
+
+        if not os.path.exists(path):
+            raise NoSuchSpecFileError("No such spec file: '{0}'".format(path))
+
+        with open(path) as f:
+            if path.endswith(".json"):
+                return Spec.from_json(f)
+            return Spec.from_yaml(f)
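A short sketch of the file path above, with a hypothetical filename; any spec previously written with Spec.to_json() or to_yaml() can be read back this way:

    import spack.spec

    with open("/tmp/libelf.spec.json") as f:      # hypothetical path
        spec = spack.spec.Spec.from_json(f)
    print(spec.name, spec.versions)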
+
+    def parse_compiler(self, text):
+        self.setup(text)
+        return self.compiler()
+
+    def spec_by_hash(self):
+        # TODO: Remove parser dependency on active environment and database.
+        import spack.environment
+
+        self.expect(ID)
+        dag_hash = self.token.value
+        matches = []
+        if spack.environment.active_environment():
+            matches = spack.environment.active_environment().get_by_hash(dag_hash)
+        if not matches:
+            matches = spack.store.db.get_by_hash(dag_hash)
+        if not matches:
+            raise NoSuchHashError(dag_hash)
+
+        if len(matches) != 1:
+            raise AmbiguousHashError(
+                "Multiple packages specify hash beginning '%s'." % dag_hash, *matches
+            )
+
+        return matches[0]
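A sketch of the hash path, hedged: the leading '/' lexes as HASH, and the lookup only succeeds against a real install database, so the prefix below is made up and would normally raise NoSuchHashError:

    import spack.spec

    # spec_by_hash() consults the active environment first, then the local
    # install database, and demands exactly one match for the prefix.
    specs = spack.spec.parse("/abc123")
    print(specs[0].name)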
+
+    def spec(self, name):
+        """Parse a spec out of the input. If a spec is supplied, initialize
+        and return it instead of creating a new one."""
+        spec_namespace = None
+        spec_name = None
+        if name:
+            spec_namespace, dot, spec_name = name.rpartition(".")
+            if not spec_namespace:
+                spec_namespace = None
+            self.check_identifier(spec_name)
+
+        if self._initial is None:
+            spec = Spec()
+        else:
+            # this is used by Spec.__init__
+            spec = self._initial
+            self._initial = None
+
+        spec.namespace = spec_namespace
+        spec.name = spec_name
+
+        while self.next:
+            if self.accept(VER):
+                vlist = self.version_list()
+                spec._add_versions(vlist)
+
+            elif self.accept(D_ON):
+                name = self.variant()
+                spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=True)
+
+            elif self.accept(ON):
+                name = self.variant()
+                spec.variants[name] = vt.BoolValuedVariant(name, True, propagate=False)
+
+            elif self.accept(D_OFF):
+                name = self.variant()
+                spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=True)
+
+            elif self.accept(OFF):
+                name = self.variant()
+                spec.variants[name] = vt.BoolValuedVariant(name, False, propagate=False)
+
+            elif self.accept(PCT):
+                spec._set_compiler(self.compiler())
+
+            elif self.accept(ID):
+                self.previous = self.token
+                if self.accept(D_EQ):
+                    # We're adding a key-value pair to the spec
+                    self.expect(VAL)
+                    spec._add_flag(self.previous.value, self.token.value, propagate=True)
+                    self.previous = None
+                elif self.accept(EQ):
+                    # We're adding a key-value pair to the spec
+                    self.expect(VAL)
+                    spec._add_flag(self.previous.value, self.token.value, propagate=False)
+                    self.previous = None
+                else:
+                    # We've found the start of a new spec. Go back to do_parse
+                    # and read this token again.
+                    self.push_tokens([self.token])
+                    self.previous = None
+                    break
+
+            elif self.accept(HASH):
+                # Get spec by hash and confirm it matches any constraints we
+                # already read in
+                hash_spec = self.spec_by_hash()
+                if hash_spec.satisfies(spec):
+                    spec._dup(hash_spec)
+                    break
+                else:
+                    raise InvalidHashError(spec, hash_spec.dag_hash())
+
+            else:
+                break
+
+        return spec
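A note on the doubled sigils handled above: '++', '--', '~~', and '==' mark a variant or flag as propagating to dependencies, while the single forms apply to one spec only. A hedged sketch; the .propagate attribute is an assumption about how the variant objects built above record this:

    import spack.spec

    local = spack.spec.Spec("mpileaks +debug")   # applies to mpileaks only
    prop = spack.spec.Spec("mpileaks ++debug")   # also propagates to deps

    print(local.variants["debug"].propagate)     # False
    print(prop.variants["debug"].propagate)      # True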
+
+    def variant(self, name=None):
+        if name:
+            return name
+        else:
+            self.expect(ID)
+            self.check_identifier()
+            return self.token.value
+
+    def version(self):
+        start = None
+        end = None
+
+        def str_translate(value):
+            # return None for empty strings since we can end up with `'@'.strip('@')`
+            if not (value and value.strip()):
+                return None
+            else:
+                return value
+
+        if self.token.type is COMMA:
+            # need to increment commas, could be ID or COLON
+            self.accept(ID)
+
+        if self.token.type in (VER, ID):
+            version_spec = self.token.value.lstrip("@")
+            start = str_translate(version_spec)
+
+        if self.accept(COLON):
+            if self.accept(ID):
+                if self.next and self.next.type is EQ:
+                    # This is a start: range followed by a key=value pair
+                    self.push_tokens([self.token])
+                else:
+                    end = self.token.value
+        elif start:
+            # No colon, but there was a version
+            return vn.Version(start)
+        else:
+            # No colon and no id: invalid version
+            self.next_token_error("Invalid version specifier")
+
+        if start:
+            start = vn.Version(start)
+        if end:
+            end = vn.Version(end)
+        return vn.VersionRange(start, end)
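A hedged sketch of how '@' syntax maps onto the Version and VersionRange objects built above (printed forms are approximate):

    import spack.spec

    print(spack.spec.Spec("zlib@1.2.11").versions)       # exact Version
    print(spack.spec.Spec("zlib@1.2:1.4").versions)      # bounded VersionRange
    print(spack.spec.Spec("zlib@1.2:").versions)         # open-ended range
    print(spack.spec.Spec("zlib@1.2:1.4,1.8").versions)  # comma = version list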
+
+    def version_list(self):
+        vlist = []
+        vlist.append(self.version())
+        while self.accept(COMMA):
+            vlist.append(self.version())
+        return vlist
+
+    def compiler(self):
+        self.expect(ID)
+        self.check_identifier()
+
+        compiler = CompilerSpec.__new__(CompilerSpec)
+        compiler.name = self.token.value
+        compiler.versions = vn.VersionList()
+        if self.accept(VER):
+            vlist = self.version_list()
+            compiler._add_versions(vlist)
+        else:
+            compiler.versions = vn.VersionList(":")
+        return compiler
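A sketch of the '%' compiler syntax handled by compiler() above; with no version attached, the restored code deliberately falls back to the open VersionList(":"), meaning any version:

    import spack.spec

    print(spack.spec.Spec("mpileaks %gcc@4.5.0").compiler)       # gcc@4.5.0
    print(spack.spec.Spec("mpileaks %clang").compiler.versions)  # ':'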
+
+    def check_identifier(self, id=None):
+        """The only identifiers that can contain '.' are versions, but version
+        ids are context-sensitive so we have to check on a case-by-case
+        basis. Call this if we detect a version id where it shouldn't be.
+        """
+        if not id:
+            id = self.token.value
+        if "." in id:
+            self.last_token_error("{0}: Identifier cannot contain '.'".format(id))
+
+
+def parse(string):
+    """Returns a list of specs from an input string.
+    For creating one spec, see Spec() constructor.
+    """
+    return SpecParser().parse(string)
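A hedged round-trip sketch for the module-level parse() above: printing a spec and re-parsing it should reproduce the same spec:

    import spack.spec

    s = spack.spec.Spec("libelf@0.8.13 +debug %gcc")
    assert str(spack.spec.parse(str(s))[0]) == str(s)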


 def save_dependency_specfiles(
     root_spec_info, output_directory, dependencies=None, spec_format="json"
 ):
@@ -14,7 +14,7 @@
 import stat
 import sys
 import tempfile
-from typing import Dict
+from typing import Dict  # novm

 import llnl.util.lang
 import llnl.util.tty as tty

@@ -49,7 +49,9 @@
 stage_prefix = "spack-stage-"


-def create_stage_root(path: str) -> None:
+def create_stage_root(path):
+    # type: (str) -> None
     """Create the stage root directory and ensure appropriate access perms."""
     assert os.path.isabs(path) and len(path.strip()) > 1

@@ -233,7 +235,7 @@ class Stage(object):
     """

     """Shared dict of all stage locks."""
-    stage_locks: Dict[str, spack.util.lock.Lock] = {}
+    stage_locks = {}  # type: Dict[str, spack.util.lock.Lock]

     """Most staging is managed by Spack. DIYStage is one exception."""
     managed_by_spack = True
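The pattern restored throughout this file is comment-style type hints (usable on Python 2) in place of inline annotations. A minimal self-contained sketch of the two equivalent spellings:

    from typing import Dict

    # Annotation style (removed by this revert):
    #     def total(sizes: Dict[str, int]) -> int: ...

    # Comment style (restored), equivalent for type checkers:
    def total(sizes):
        # type: (Dict[str, int]) -> int
        return sum(sizes.values())

    print(total({"a": 1, "b": 2}))  # 3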
@@ -191,6 +191,18 @@ def _store():
     root, unpadded_root, projections = parse_install_tree(config_dict)
     hash_length = spack.config.get("config:install_hash_length")

+    # Check that the user is not trying to install software into the store
+    # reserved by Spack to bootstrap its own dependencies, since this would
+    # lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
+    # user installed software)
+    enable_bootstrap = spack.config.get("bootstrap:enable", True)
+    if enable_bootstrap and spack.bootstrap.store_path() == root:
+        msg = (
+            'please change the install tree root "{0}" in your '
+            "configuration [path reserved for Spack internal use]"
+        )
+        raise ValueError(msg.format(root))
+
     return Store(
         root=root, unpadded_root=unpadded_root, projections=projections, hash_length=hash_length
     )
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import itertools
 import textwrap
-from typing import List
+from typing import List  # novm

 import llnl.util.lang

@@ -20,7 +20,7 @@ class ContextMeta(type):

     #: Keeps track of the context properties that have been added
     #: by the class that is being defined
-    _new_context_properties: List[str] = []
+    _new_context_properties = []  # type: List[str]

     def __new__(cls, name, bases, attr_dict):
         # Merge all the context properties that are coming from base classes
@@ -5,6 +5,7 @@
 import glob
 import os
 import platform
+import shutil
 import sys

 import py

@@ -13,16 +14,13 @@
 from llnl.util.filesystem import join_path, visit_directory_tree

 import spack.binary_distribution as bindist
 import spack.caches
 import spack.config
 import spack.fetch_strategy
 import spack.hooks.sbang as sbang
 import spack.main
 import spack.mirror
 import spack.repo
 import spack.store
 import spack.util.gpg
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.binary_distribution import get_buildfile_manifest
 from spack.directory_layout import DirectoryLayout

@@ -61,12 +59,22 @@ def mirror_dir(tmpdir_factory):

 @pytest.fixture(scope="function")
 def test_mirror(mirror_dir):
-    mirror_url = url_util.path_to_file_url(mirror_dir)
+    mirror_url = "file://%s" % mirror_dir
     mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
     yield mirror_dir
     mirror_cmd("rm", "--scope=site", "test-mirror-func")
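On POSIX the two URL spellings swapped in this fixture agree; the difference is Windows handling. A small self-contained sketch (path made up):

    mirror_dir = "/tmp/mirror"
    print("file://%s" % mirror_dir)   # file:///tmp/mirror

    # url_util.path_to_file_url would produce the same URL here, but it also
    # normalizes Windows paths (e.g. C:\m -> file:///C:/m), which the plain
    # string interpolation above does not.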
@pytest.fixture(scope="function")
|
||||
def test_legacy_mirror(mutable_config, tmpdir):
|
||||
mirror_dir = tmpdir.join("legacy_yaml_mirror")
|
||||
shutil.copytree(legacy_mirror_dir, mirror_dir.strpath)
|
||||
mirror_url = "file://%s" % mirror_dir
|
||||
mirror_cmd("add", "--scope", "site", "test-legacy-yaml", mirror_url)
|
||||
yield mirror_dir
|
||||
mirror_cmd("rm", "--scope=site", "test-legacy-yaml")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def config_directory(tmpdir_factory):
|
||||
tmpdir = tmpdir_factory.mktemp("test_configs")
|
||||
@@ -203,7 +211,8 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
|
||||
buildcache_cmd("create", "-auf", "-d", mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
mirror_url = "file://{0}".format(mirror_dir)
|
||||
buildcache_cmd("update-index", "-d", mirror_url)
|
||||
# List the buildcaches in the mirror
|
||||
buildcache_cmd("list", "-alv")
|
||||
|
||||
@@ -268,7 +277,8 @@ def test_relative_rpaths_create_default_layout(mirror_dir):
|
||||
buildcache_cmd("create", "-aur", "-d", mirror_dir, cspec.name)
|
||||
|
||||
# Create mirror index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
mirror_url = "file://%s" % mirror_dir
|
||||
buildcache_cmd("update-index", "-d", mirror_url)
|
||||
|
||||
# Uninstall the package and deps
|
||||
uninstall_cmd("-y", "--dependents", gspec.name)
|
||||
@@ -324,9 +334,9 @@ def test_push_and_fetch_keys(mock_gnupghome):
|
||||
testpath = str(mock_gnupghome)
|
||||
|
||||
mirror = os.path.join(testpath, "mirror")
|
||||
mirrors = {"test-mirror": url_util.path_to_file_url(mirror)}
|
||||
mirrors = {"test-mirror": mirror}
|
||||
mirrors = spack.mirror.MirrorCollection(mirrors)
|
||||
mirror = spack.mirror.Mirror(url_util.path_to_file_url(mirror))
|
||||
mirror = spack.mirror.Mirror("file://" + mirror)
|
||||
|
||||
gpg_dir1 = os.path.join(testpath, "gpg1")
|
||||
gpg_dir2 = os.path.join(testpath, "gpg2")
|
||||
@@ -390,7 +400,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join("mirror_dir")
|
||||
mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
|
||||
mirror_url = "file://{0}".format(mirror_dir.strpath)
|
||||
|
||||
s = Spec("libdwarf").concretized()
|
||||
|
||||
@@ -422,7 +432,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join("mirror_dir")
|
||||
mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
|
||||
mirror_url = "file://{0}".format(mirror_dir.strpath)
|
||||
spack.config.set("mirrors", {"test": mirror_url})
|
||||
|
||||
s = Spec("libdwarf").concretized()
|
||||
@@ -515,6 +525,7 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
|
||||
# Need a fake mirror with *function* scope.
|
||||
mirror_dir = test_mirror
|
||||
mirror_url = "file://{0}".format(mirror_dir)
|
||||
|
||||
# Assume all commands will concretize old_spec the same way.
|
||||
install_cmd("--no-cache", old_spec.name)
|
||||
@@ -523,7 +534,7 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
buildcache_cmd("create", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
|
||||
|
||||
# Need to force an update of the buildcache index
|
||||
buildcache_cmd("update-index", "-d", mirror_dir)
|
||||
buildcache_cmd("update-index", "-d", mirror_url)
|
||||
|
||||
# Uninstall the original package.
|
||||
uninstall_cmd("-y", old_spec_hash_str)
|
||||
@@ -570,6 +581,19 @@ def test_update_sbang(tmpdir, test_mirror):
|
||||
uninstall_cmd("-y", "/%s" % new_spec.dag_hash())
|
||||
|
||||
|
||||
# Need one where the platform has been changed to the test platform.
|
||||
def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config, mock_packages):
|
||||
install_cmd(
|
||||
"--no-check-signature",
|
||||
"--cache-only",
|
||||
"-f",
|
||||
legacy_mirror_dir
|
||||
+ "/build_cache/test-debian6-core2-gcc-4.5.0-zlib-"
|
||||
+ "1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml",
|
||||
)
|
||||
uninstall_cmd("-y", "/t5mczux3tfqpxwmg7egp7axy2jvyulqk")
|
||||
|
||||
|
||||
def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
|
||||
"""Legacy buildcache layout involved a nested archive structure
|
||||
where the .spack file contained a repeated spec.json and another
|
||||
|
||||
@@ -7,8 +7,6 @@
 import pytest

 import spack.bootstrap
-import spack.bootstrap.config
-import spack.bootstrap.core
 import spack.compilers
 import spack.environment
 import spack.store

@@ -35,7 +33,7 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
     # Test that within the context manager we use the bootstrap store
     # and that outside we restore the correct location
     with spack.bootstrap.ensure_bootstrap_configuration():
-        assert spack.store.root == spack.bootstrap.config.store_path()
+        assert spack.store.root == spack.bootstrap.store_path()
     assert spack.store.root == user_path

@@ -53,7 +51,7 @@ def test_store_path_customization(config_value, expected, mutable_config):
     spack.config.set("bootstrap:root", config_value)

     # Check the store path
-    current = spack.bootstrap.config.store_path()
+    current = spack.bootstrap.store_path()
     assert current == spack.util.path.canonicalize_path(expected)

@@ -63,7 +61,7 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config):

     # Check the correct exception is raised
     with pytest.raises(RuntimeError, match="bootstrapping is currently disabled"):
-        spack.bootstrap.config.store_path()
+        spack.bootstrap.store_path()


 def test_raising_exception_module_importable():
@@ -71,7 +69,7 @@ def test_raising_exception_module_importable():
         ImportError,
         match='cannot bootstrap the "asdf" Python module',
     ):
-        spack.bootstrap.core.ensure_module_importable_or_raise("asdf")
+        spack.bootstrap.ensure_module_importable_or_raise("asdf")


 def test_raising_exception_executables_in_path():
@@ -79,7 +77,7 @@ def test_raising_exception_executables_in_path():
         RuntimeError,
         match="cannot bootstrap any of the asdf, fdsa executables",
     ):
-        spack.bootstrap.core.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")
+        spack.bootstrap.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")


 @pytest.mark.regression("25603")

@@ -177,15 +175,13 @@ def test_nested_use_of_context_manager(mutable_config):
 def test_status_function_find_files(
     mutable_config, mock_executable, tmpdir, monkeypatch, expected_missing
 ):
-    import spack.bootstrap.status
-
     if not expected_missing:
         mock_executable("foo", "echo Hello WWorld!")

     monkeypatch.setattr(
-        spack.bootstrap.status,
+        spack.bootstrap,
         "_optional_requirements",
-        lambda: [spack.bootstrap.status._required_system_executable("foo", "NOT FOUND")],
+        lambda: [spack.bootstrap._required_system_executable("foo", "NOT FOUND")],
     )
     monkeypatch.setenv("PATH", str(tmpdir.join("bin")))

@@ -196,15 +192,15 @@
 @pytest.mark.regression("31042")
 def test_source_is_disabled(mutable_config):
     # Get the configuration dictionary of the current bootstrapping source
-    conf = next(iter(spack.bootstrap.core.bootstrapping_sources()))
+    conf = next(iter(spack.bootstrap.bootstrapping_sources()))

     # The source is not explicitly enabled or disabled, so the following
     # call should raise to skip using it for bootstrapping
     with pytest.raises(ValueError):
-        spack.bootstrap.core.source_is_enabled_or_raise(conf)
+        spack.bootstrap.source_is_enabled_or_raise(conf)

     # Try to explicitly disable the source and verify that the behavior
     # is the same as above
     spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
     with pytest.raises(ValueError):
-        spack.bootstrap.core.source_is_enabled_or_raise(conf)
+        spack.bootstrap.source_is_enabled_or_raise(conf)
@@ -10,15 +10,22 @@
 import pytest

 import spack.binary_distribution
 import spack.main
 import spack.spec
 import spack.util.url

 install = spack.main.SpackCommand("install")

 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")


+def _validate_url(url):
+    return
+
+
+@pytest.fixture(autouse=True)
+def url_check(monkeypatch):
+    monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url)
+
+
 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):

     with tmpdir.as_cwd():
@@ -26,13 +33,12 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
         install(str(spec))

         # Runs fine the first time, throws the second time
-        out_url = spack.util.url.path_to_file_url(str(tmpdir))
-        spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
+        spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
         with pytest.raises(spack.binary_distribution.NoOverwriteException):
-            spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
+            spack.binary_distribution._build_tarball(spec, ".", unsigned=True)

         # Should work fine with force=True
-        spack.binary_distribution._build_tarball(spec, out_url, force=True, unsigned=True)
+        spack.binary_distribution._build_tarball(spec, ".", force=True, unsigned=True)

         # Remove the tarball and try again.
         # This must *also* throw, because of the existing .spec.json file
@@ -45,4 +51,4 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
     )

     with pytest.raises(spack.binary_distribution.NoOverwriteException):
-        spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
+        spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
@@ -10,7 +10,6 @@
 import spack.cmd.create
 import spack.stage
 import spack.util.executable
-import spack.util.url as url_util

 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")

@@ -51,7 +50,7 @@ def url_and_build_system(request, tmpdir):
     filename, system = request.param
     tmpdir.ensure("archive", filename)
     tar("czf", "archive.tar.gz", "archive")
-    url = url_util.path_to_file_url(str(tmpdir.join("archive.tar.gz")))
+    url = "file://" + str(tmpdir.join("archive.tar.gz"))
     yield url, system
     orig_dir.chdir()
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
-import sys

 import pytest

@@ -124,10 +123,6 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
     assert value == expected


-@pytest.mark.skipif(
-    sys.platform == "win32",
-    reason="log_ouput cannot currently be used outside of subprocess on Windows",
-)
 @pytest.mark.regression("33928")
 @pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
 @pytest.mark.disable_clean_stage_check
@@ -4,24 +4,26 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
+import sys

 import pytest

 from llnl.util.filesystem import mkdirp, touch

 import spack.config
-import spack.util.url as url_util
 from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
 from spack.stage import Stage

+is_windows = sys.platform == "win32"
+

 @pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
 def test_fetch_missing_cache(tmpdir, _fetch_method):
     """Ensure raise a missing cache file."""
     testpath = str(tmpdir)
-    non_existing = os.path.join(testpath, "non-existing")
     with spack.config.override("config:url_fetch_method", _fetch_method):
-        url = url_util.path_to_file_url(non_existing)
+        abs_pref = "" if is_windows else "/"
+        url = "file://" + abs_pref + "not-a-real-cache-file"
         fetcher = CacheURLFetchStrategy(url=url)
         with Stage(fetcher, path=testpath):
             with pytest.raises(NoCacheError, match=r"No cache"):
@@ -34,7 +36,11 @@ def test_fetch(tmpdir, _fetch_method):
     testpath = str(tmpdir)
     cache = os.path.join(testpath, "cache.tar.gz")
     touch(cache)
-    url = url_util.path_to_file_url(cache)
+    if is_windows:
+        url_stub = "{0}"
+    else:
+        url_stub = "/{0}"
+    url = "file://" + url_stub.format(cache)
     with spack.config.override("config:url_fetch_method", _fetch_method):
         fetcher = CacheURLFetchStrategy(url=url)
         with Stage(fetcher, path=testpath) as stage:
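A sketch of the restored URL construction in test_fetch_missing_cache: a bare filename needs a leading '/' to form an absolute file:// URL on POSIX, while on Windows the drive letter takes that place. Self-contained:

    import sys

    is_windows = sys.platform == "win32"
    abs_pref = "" if is_windows else "/"
    print("file://" + abs_pref + "not-a-real-cache-file")
    # POSIX:   file:///not-a-real-cache-file
    # Windows: file://not-a-real-cache-file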
@@ -149,7 +149,7 @@


 @pytest.fixture(scope="function")
-def wrapper_environment(working_env):
+def wrapper_environment():
     with set_env(
         SPACK_CC=real_cc,
         SPACK_CXX=real_cc,
@@ -322,7 +322,7 @@ def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):

     result = {
         "stage": "stage-rebuild-index",
-        "script": "spack buildcache update-index --mirror-url s3://mirror",
+        "script": "spack buildcache update-index -d s3://mirror",
         "tags": ["tag-0", "tag-1"],
         "image": {"name": "spack/centos7", "entrypoint": [""]},
         "after_script": ['rm -rf "./spack"'],
@@ -7,8 +7,6 @@

 import pytest

 import spack.bootstrap
-import spack.bootstrap.core
 import spack.config
 import spack.environment as ev
 import spack.main

@@ -159,17 +157,17 @@ def test_remove_failure_for_non_existing_names(mutable_config):

 def test_remove_and_add_a_source(mutable_config):
     # Check we start with a single bootstrapping source
-    sources = spack.bootstrap.core.bootstrapping_sources()
+    sources = spack.bootstrap.bootstrapping_sources()
     assert len(sources) == 1

     # Remove it and check the result
     _bootstrap("remove", "github-actions")
-    sources = spack.bootstrap.core.bootstrapping_sources()
+    sources = spack.bootstrap.bootstrapping_sources()
     assert not sources

     # Add it back and check we restored the initial state
     _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3")
-    sources = spack.bootstrap.core.bootstrapping_sources()
+    sources = spack.bootstrap.bootstrapping_sources()
     assert len(sources) == 1


@@ -208,4 +206,4 @@ def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
     _bootstrap("add", "--trust", "test-mirror", str(metadata_dir))

     assert _bootstrap.returncode == 0
-    assert any(m["name"] == "test-mirror" for m in spack.bootstrap.core.bootstrapping_sources())
+    assert any(m["name"] == "test-mirror" for m in spack.bootstrap.bootstrapping_sources())
@@ -231,7 +231,7 @@ def test_ci_generate_with_env(

     assert "rebuild-index" in yaml_contents
     rebuild_job = yaml_contents["rebuild-index"]
-    expected = "spack buildcache update-index --keys --mirror-url {0}".format(mirror_url)
+    expected = "spack buildcache update-index --keys -d {0}".format(mirror_url)
     assert rebuild_job["script"][0] == expected

     assert "variables" in yaml_contents

@@ -706,8 +706,6 @@ def test_ci_generate_for_pr_pipeline(
     """
     )

-    monkeypatch.setattr(spack.ci, "SHARED_PR_MIRROR_URL", "https://fake.shared.pr.mirror")
-
     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
         outputfile = str(tmpdir.join(".gitlab-ci.yml"))

@@ -810,10 +808,10 @@ def create_rebuild_env(tmpdir, pkg_name, broken_tests=False):
     env_dir = working_dir.join("concrete_env")

     mirror_dir = working_dir.join("mirror")
-    mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
+    mirror_url = "file://{0}".format(mirror_dir.strpath)

     broken_specs_path = os.path.join(working_dir.strpath, "naughty-list")
-    broken_specs_url = url_util.path_to_file_url(broken_specs_path)
+    broken_specs_url = url_util.join("file://", broken_specs_path)
     temp_storage_url = "file:///path/to/per/pipeline/storage"

     broken_tests_packages = [pkg_name] if broken_tests else []
@@ -16,7 +16,6 @@
 import llnl.util.link_tree

 import spack.cmd.env
 import spack.config
 import spack.environment as ev
 import spack.environment.shell
 import spack.error

@@ -30,6 +29,7 @@
 from spack.stage import stage_prefix
 from spack.util.executable import Executable
 from spack.util.path import substitute_path_variables
+from spack.util.web import FetchError
 from spack.version import Version

 # TODO-27021

@@ -707,9 +707,9 @@ def test_with_config_bad_include():
         e.concretize()

     err = str(exc)
-    assert "missing include" in err
-    assert "/no/such/directory" in err
-    assert os.path.join("no", "such", "file.yaml") in err
+    assert "not retrieve configuration" in err
+    assert os.path.join("no", "such", "directory") in err

     assert ev.active_environment() is None


@@ -827,7 +827,7 @@ def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config):
     f.write("spack:\n  include:\n    - {0}\n".format(missing_file.strpath))

     env = ev.Environment(tmpdir.strpath)
-    with pytest.raises(spack.config.ConfigError, match="missing include path"):
+    with pytest.raises(FetchError, match="No such file or directory"):
         ev.activate(env)
@@ -347,7 +347,7 @@ def _determine_variants(cls, exes, version_str):
     assert "externals" in packages_yaml["gcc"]
     externals = packages_yaml["gcc"]["externals"]
     assert len(externals) == 1
-    assert externals[0]["prefix"] == os.path.sep + os.path.join("opt", "gcc", "bin")
+    assert externals[0]["prefix"] == "/opt/gcc/bin"


 def test_new_entries_are_reported_correctly(
@@ -26,7 +26,6 @@
 import spack.util.executable
 from spack.error import SpackError
 from spack.main import SpackCommand
-from spack.parser import SpecSyntaxError
 from spack.spec import CompilerSpec, Spec

 install = SpackCommand("install")

@@ -363,7 +362,7 @@ def test_install_conflicts(conflict_spec):
 )
 def test_install_invalid_spec(invalid_spec):
     # Make sure that invalid specs raise a SpackError
-    with pytest.raises(SpecSyntaxError, match="unexpected tokens"):
+    with pytest.raises(SpackError, match="Unexpected token"):
         install(invalid_spec)
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
 from textwrap import dedent

 from spack.main import SpackCommand

@@ -19,24 +18,12 @@ def test_list():

 def test_list_cli_output_format(mock_tty_stdout):
     out = list("mpileaks")
-    # Currently logging on Windows detaches stdout
-    # from the terminal so we miss some output during tests
-    # TODO: (johnwparent): Once logging is amended on Windows,
-    # restore this test
-    if not sys.platform == "win32":
-        out_str = dedent(
-            """\
+    assert out == dedent(
+        """\
     mpileaks
     ==> 1 packages
     """
-        )
-    else:
-        out_str = dedent(
-            """\
-    mpileaks
-    """
-        )
-    assert out == out_str
+    )


 def test_list_filter(mock_packages):
@@ -11,8 +11,6 @@
 import spack.cmd.mirror
 import spack.config
 import spack.environment as ev
-import spack.spec
 import spack.util.url as url_util
 from spack.main import SpackCommand, SpackCommandError

 mirror = SpackCommand("mirror")

@@ -45,6 +43,15 @@ def tmp_scope():
     yield scope_name


+def _validate_url(url):
+    return
+
+
+@pytest.fixture(autouse=True)
+def url_check(monkeypatch):
+    monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url)
+
+
 @pytest.mark.disable_clean_stage_check
 @pytest.mark.regression("8083")
 def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):

@@ -82,7 +89,7 @@ def source_for_pkg_with_hash(mock_packages, tmpdir):
     local_path = os.path.join(str(tmpdir), local_url_basename)
     with open(local_path, "w") as f:
         f.write(s.package.hashed_content)
-    local_url = url_util.path_to_file_url(local_path)
+    local_url = "file://" + local_path
     s.package.versions[spack.version.Version("1.0")]["url"] = local_url
Some files were not shown because too many files have changed in this diff.