Compare commits

1 commit

cws/simmod ... features/r

| Author | SHA1 | Date |
|---|---|---|
|  | 719d31682f |  |
.github/workflows/bootstrap.yml (12 changes)

@@ -12,7 +12,6 @@ on:
       # built-in repository or documentation
       - 'var/spack/repos/builtin/**'
       - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
-      - '!var/spack/repos/builtin/packages/clingo/**'
       - '!var/spack/repos/builtin/packages/python/**'
       - '!var/spack/repos/builtin/packages/re2c/**'
       - 'lib/spack/docs/**'
@@ -20,10 +19,6 @@ on:
     # nightly at 2:16 AM
     - cron: '16 2 * * *'
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 jobs:
 
   fedora-clingo-sources:
@@ -180,11 +175,10 @@ jobs:
           tree ~/.spack/bootstrap/store/
 
   macos-clingo-binaries:
-    runs-on: ${{ matrix.macos-version }}
+    runs-on: macos-latest
     strategy:
       matrix:
         python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
-        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
     if: github.repository == 'spack/spack'
     steps:
     - name: Install dependencies
@@ -192,7 +186,7 @@ jobs:
        brew install tree
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: ${{ matrix.python-version }}
    - name: Bootstrap clingo
@@ -211,7 +205,7 @@ jobs:
    steps:
    - name: Checkout
      uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: ${{ matrix.python-version }}
    - name: Setup repo

.github/workflows/build-containers.yml (4 changes)

@@ -19,10 +19,6 @@ on:
   release:
     types: [published]
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 jobs:
   deploy-images:
     runs-on: ubuntu-latest

.github/workflows/macos_python.yml (10 changes)

@@ -16,10 +16,6 @@ on:
     - '.github/workflows/macos_python.yml'
   # TODO: run if we touch any of the recipes involved in this
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 # GitHub Action Limits
 # https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions
 
@@ -30,7 +26,7 @@ jobs:
    runs-on: macos-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: 3.9
    - name: spack install
@@ -46,7 +42,7 @@ jobs:
    timeout-minutes: 700
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: 3.9
    - name: spack install
@@ -60,7 +56,7 @@ jobs:
    runs-on: macos-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: 3.9
    - name: spack install

.github/workflows/unit_tests.yaml (21 changes)

@@ -9,11 +9,6 @@ on:
     branches:
       - develop
       - releases/**
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 jobs:
   # Validate that the code can be run on all the Python versions
   # supported by Spack
@@ -21,7 +16,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install Python Packages
@@ -39,7 +34,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install Python packages
@@ -114,7 +109,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -179,7 +174,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install System packages
@@ -245,7 +240,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install System packages
@@ -294,7 +289,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -337,7 +332,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08 # @v2
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
      with:
        python-version: '3.10'
    - name: Install Python packages
@@ -350,7 +345,7 @@ jobs:
        coverage run $(which spack) audit packages
        coverage combine
        coverage xml
-    - name: Package audits (without coverage)
+    - name: Package audits (wwithout coverage)
      if: ${{ needs.changes.outputs.with_coverage == 'false' }}
      run: |
        . share/spack/setup-env.sh

.github/workflows/windows_python.yml (19 changes)

@@ -9,11 +9,6 @@ on:
     branches:
       - develop
       - releases/**
 
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
-  cancel-in-progress: true
-
 defaults:
   run:
     shell:
@@ -23,7 +18,7 @@ jobs:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python Packages
@@ -41,7 +36,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -63,7 +58,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -83,7 +78,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -103,7 +98,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -128,7 +123,7 @@ jobs:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -159,7 +154,7 @@ jobs:
    run:
      shell: pwsh
    steps:
-    - uses: actions/setup-python@c4e89fac7e8767b327bbad6cb4d859eda999cf08
+    - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
      with:
        python-version: 3.9
    - name: Install Python packages

@@ -62,12 +62,11 @@ on these ideas for each distinct build system that Spack supports:
 
    build_systems/bundlepackage
    build_systems/cudapackage
-   build_systems/custompackage
    build_systems/inteloneapipackage
    build_systems/intelpackage
-   build_systems/multiplepackage
    build_systems/rocmpackage
-   build_systems/sourceforgepackage
+   build_systems/custompackage
+   build_systems/multiplepackage
 
 For reference, the :py:mod:`Build System API docs <spack.build_systems>`
 provide a list of build systems and methods/attributes that can be

@@ -84,8 +84,8 @@ build ``hdf5`` with Intel oneAPI MPI do::
 
    spack install hdf5 +mpi ^intel-oneapi-mpi
 
-Using Externally Installed oneAPI Tools
-=======================================
+Using an Externally Installed oneAPI
+====================================
 
 Spack can also use oneAPI tools that are manually installed with
 `Intel Installers`_. The procedures for configuring Spack to use
@@ -110,7 +110,7 @@ Another option is to manually add the configuration to
 Libraries
 ---------
 
-If you want Spack to use oneMKL that you have installed without Spack in
+If you want Spack to use MKL that you have installed without Spack in
 the default location, then add the following to
 ``~/.spack/packages.yaml``, adjusting the version as appropriate::
 
@@ -139,7 +139,7 @@ You can also use Spack-installed libraries. For example::
 
    spack load intel-oneapi-mkl
 
 Will update your environment CPATH, LIBRARY_PATH, and other
-environment variables for building an application with oneMKL.
+environment variables for building an application with MKL.
 
 More information
 ================

@@ -15,9 +15,6 @@ IntelPackage
 Intel packages in Spack
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
-This is an earlier version of Intel software development tools and has
-now been replaced by Intel oneAPI Toolkits.
-
 Spack can install and use several software development products offered by Intel.
 Some of these are available under no-cost terms, others require a paid license.
 All share the same basic steps for configuration, installation, and, where

|||||||
@@ -48,9 +48,8 @@ important to understand.
|
|||||||
**build backend**
|
**build backend**
|
||||||
Libraries used to define how to build a wheel. Examples
|
Libraries used to define how to build a wheel. Examples
|
||||||
include `setuptools <https://setuptools.pypa.io/>`__,
|
include `setuptools <https://setuptools.pypa.io/>`__,
|
||||||
`flit <https://flit.readthedocs.io/>`_,
|
`flit <https://flit.readthedocs.io/>`_, and
|
||||||
`poetry <https://python-poetry.org/>`_, and
|
`poetry <https://python-poetry.org/>`_.
|
||||||
`hatchling <https://hatch.pypa.io/latest/>`_.
|
|
||||||
|
|
||||||
^^^^^^^^^^^
|
^^^^^^^^^^^
|
||||||
Downloading
|
Downloading
|
||||||
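
As a companion to the backend list in the hunk above, here is a hedged sketch, not taken from the diff, of how a ``pyproject.toml`` build backend becomes a build-time dependency in a Spack recipe. The package name, version, and dependency pins are all hypothetical:

.. code-block:: python

   # Sketch only -- PyExample and its dependencies are illustrative.
   from spack.package import *


   class PyExample(PythonPackage):
       """Hypothetical project whose pyproject.toml declares the flit backend."""

       pypi = "example/example-1.0.tar.gz"

       version("1.0")  # checksum deliberately omitted in this sketch

       # [build-system] requires = ["flit_core >=3.2"]  ->  build-time dependency
       depends_on("py-flit-core@3.2:", type="build")

       # [project] dependencies = ["requests"]  ->  build- and run-time dependency
       depends_on("py-requests", type=("build", "run"))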
@@ -327,33 +326,6 @@ for specifying the version requirements. Note that ``~=`` works
 differently in poetry than in setuptools and flit for versions that
 start with a zero.
 
-"""""""""
-hatchling
-"""""""""
-
-If the ``pyproject.toml`` lists ``hatchling.build`` as the
-``build-backend``, it uses the hatchling build system. Look for
-dependencies under the following keys:
-
-* ``requires-python``
-
-  This specifies the version of Python that is required
-
-* ``project.dependencies``
-
-  These packages are required for building and installation. You can
-  add them with ``type=('build', 'run')``.
-
-* ``project.optional-dependencies``
-
-  This section includes keys with lists of optional dependencies
-  needed to enable those features. You should add a variant that
-  optionally adds these dependencies. This variant should be ``False``
-  by default.
-
-See https://hatch.pypa.io/latest/config/dependency/ for more
-information.
-
 """"""
 wheels
 """"""
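
The hatchling text removed above describes mapping ``project.optional-dependencies`` onto a default-off variant; a minimal hedged sketch of that pattern (hypothetical variant and package names):

.. code-block:: python

   class PyExample(PythonPackage):
       # One variant per optional-dependency group, False by default.
       variant("docs", default=False, description="Enable the 'docs' extra")

       # [project.optional-dependencies] docs = ["sphinx"]
       depends_on("py-sphinx", type=("build", "run"), when="+docs")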
@@ -694,4 +666,3 @@ For more information on build backend tools, see:
 * setuptools: https://setuptools.pypa.io/
 * flit: https://flit.readthedocs.io/
 * poetry: https://python-poetry.org/
-* hatchling: https://hatch.pypa.io/latest/

@@ -1,55 +0,0 @@
-.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-   Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _sourceforgepackage:
-
-------------------
-SourceforgePackage
-------------------
-
-``SourceforgePackage`` is a
-`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
-sets the URL based on a list of Sourceforge mirrors listed in
-`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
-Refer to the package source
-(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.
-
-
-^^^^^^^
-Methods
-^^^^^^^
-
-This package provides a method for populating mirror URLs.
-
-**urls**
-
-This method returns a list of possible URLs for package source.
-It is decorated with `property` so its results are treated as
-a package attribute.
-
-Refer to
-`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
-for information on how Spack uses the `urls` attribute during
-fetching.
-
-^^^^^
-Usage
-^^^^^
-
-This helper package can be added to your package by adding it as a base
-class of your package and defining the relative location of an archive
-file for one version of your software.
-
-.. code-block:: python
-   :emphasize-lines: 1,3
-
-   class MyPackage(AutotoolsPackage, SourceforgePackage):
-       ...
-       sourceforge_mirror_path = "my-package/mypackage.1.0.0.tar.gz"
-       ...
-
-Over 40 packages are using ``SourceforcePackage`` this mix-in as of
-July 2022 so there are multiple packages to choose from if you want
-to see a real example.

@@ -109,10 +109,9 @@ Spack Images on Docker Hub
 --------------------------
 
 Docker images with Spack preinstalled and ready to be used are
-built when a release is tagged, or nightly on ``develop``. The images
-are then pushed both to `Docker Hub <https://hub.docker.com/u/spack>`_
-and to `GitHub Container Registry <https://github.com/orgs/spack/packages?repo_name=spack>`_.
-The OS that are currently supported are summarized in the table below:
+built on `Docker Hub <https://hub.docker.com/u/spack>`_
+at every push to ``develop`` or to a release branch. The OS that
+are currently supported are summarized in the table below:
 
 .. _containers-supported-os:
 
@@ -122,31 +121,22 @@ The OS that are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
+   * - Ubuntu 16.04
+     - ``ubuntu:16.04``
+     - ``spack/ubuntu-xenial``
    * - Ubuntu 18.04
      - ``ubuntu:18.04``
      - ``spack/ubuntu-bionic``
-   * - Ubuntu 20.04
-     - ``ubuntu:20.04``
-     - ``spack/ubuntu-focal``
-   * - Ubuntu 22.04
-     - ``ubuntu:22.04``
-     - ``spack/ubuntu-jammy``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
-   * - CentOS Stream
-     - ``quay.io/centos/centos:stream``
-     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
-   * - Amazon Linux 2
-     - ``amazonlinux:2``
-     - ``spack/amazon-linux``
 
 All the images are tagged with the corresponding release of Spack:
 
-.. image:: images/ghcr_spack.png
+.. image:: dockerhub_spack.png
 
 with the exception of the ``latest`` tag that points to the HEAD
 of the ``develop`` branch. These images are available for anyone

@@ -107,6 +107,7 @@ with a high level view of Spack's directory structure:
         llnl/                  <- some general-use libraries
 
         spack/                 <- spack module; contains Python code
+          analyzers/           <- modules to run analysis on installed packages
           build_systems/       <- modules for different build systems
           cmd/                 <- each file in here is a spack subcommand
           compilers/           <- compiler description files
@@ -241,6 +242,22 @@ Unit tests
   Implements Spack's test suite. Add a module and put its name in
   the test suite in ``__init__.py`` to add more unit tests.
 
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Research and Monitoring Modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+:mod:`spack.monitor`
+  Contains :class:`~spack.monitor.SpackMonitorClient`. This is accessed from
+  the ``spack install`` and ``spack analyze`` commands to send build and
+  package metadata up to a `Spack Monitor
+  <https://github.com/spack/spack-monitor>`_ server.
+
+:mod:`spack.analyzers`
+  A module folder with a :class:`~spack.analyzers.analyzer_base.AnalyzerBase`
+  that provides base functions to run, save, and (optionally) upload analysis
+  results to a `Spack Monitor <https://github.com/spack/spack-monitor>`_ server.
+
+
 ^^^^^^^^^^^^^
 Other Modules
 ^^^^^^^^^^^^^
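
For orientation, the class paths quoted in the added docs above correspond to the following imports; this is only an illustration of where the two classes live, not code from the diff:

.. code-block:: python

   # Module locations as stated in the documentation hunk above.
   from spack.monitor import SpackMonitorClient
   from spack.analyzers.analyzer_base import AnalyzerBase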
@@ -284,6 +301,240 @@ Most spack commands look something like this:
 The information in Package files is used at all stages in this
 process.
 
+Conceptually, packages are overloaded. They contain:
+
+-------------
+Stage objects
+-------------
+
+
+.. _writing-analyzers:
+
+-----------------
+Writing analyzers
+-----------------
+
+To write an analyzer, you should add a new python file to the
+analyzers module directory at ``lib/spack/spack/analyzers`` .
+Your analyzer should be a subclass of the :class:`AnalyzerBase <spack.analyzers.analyzer_base.AnalyzerBase>`. For example, if you want
+to add an analyzer class ``Myanalyzer`` you would write to
+``spack/analyzers/myanalyzer.py`` and import and
+use the base as follows:
+
+.. code-block:: python
+
+    from .analyzer_base import AnalyzerBase
+
+    class Myanalyzer(AnalyzerBase):
+
+Note that the class name is your module file name, all lowercase
+except for the first capital letter. You can look at other analyzers in
+that analyzer directory for examples. The guide here will tell you about the basic functions needed.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Analyzer Output Directory
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+By default, when you run ``spack analyze run`` an analyzer output directory will
+be created in your spack user directory in your ``$HOME``. The reason we output here
+is because the install directory might not always be writable.
+
+.. code-block:: console
+
+    ~/.spack/
+      analyzers
+
+Result files will be written here, organized in subfolders in the same structure
+as the package, with each analyzer owning it's own subfolder. for example:
+
+
+.. code-block:: console
+
+    $ tree ~/.spack/analyzers/
+    /home/spackuser/.spack/analyzers/
+    └── linux-ubuntu20.04-skylake
+        └── gcc-9.3.0
+            └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
+                ├── environment_variables
+                │   └── spack-analyzer-environment-variables.json
+                ├── install_files
+                │   └── spack-analyzer-install-files.json
+                └── libabigail
+                    └── lib
+                        └── spack-analyzer-libabigail-libz.so.1.2.11.xml
+
+
+Notice that for the libabigail analyzer, since results are generated per object,
+we honor the object's folder in case there are equivalently named files in
+different folders. The result files are typically written as json so they can be easily read and uploaded in a future interaction with a monitor.
+
+
+^^^^^^^^^^^^^^^^^
+Analyzer Metadata
+^^^^^^^^^^^^^^^^^
+
+Your analyzer is required to have the class attributes ``name``, ``outfile``,
+and ``description``. These are printed to the user with they use the subcommand
+``spack analyze list-analyzers``. Here is an example.
+As we mentioned above, note that this analyzer would live in a module named
+``libabigail.py`` in the analyzers folder so that the class can be discovered.
+
+
+.. code-block:: python
+
+    class Libabigail(AnalyzerBase):
+
+        name = "libabigail"
+        outfile = "spack-analyzer-libabigail.json"
+        description = "Application Binary Interface (ABI) features for objects"
+
+
+This means that the name and output file should be unique for your analyzer.
+Note that "all" cannot be the name of an analyzer, as this key is used to indicate
+that the user wants to run all analyzers.
+
+.. _analyzer_run_function:
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+An analyzer run Function
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The core of an analyzer is its ``run()`` function, which should accept no
+arguments. You can assume your analyzer has the package spec of interest at ``self.spec``
+and it's up to the run function to generate whatever analysis data you need,
+and then return the object with a key as the analyzer name. The result data
+should be a list of objects, each with a name, ``analyzer_name``, ``install_file``,
+and one of ``value`` or ``binary_value``. The install file should be for a relative
+path, and not the absolute path. For example, let's say we extract a metric called
+``metric`` for ``bin/wget`` using our analyzer ``thebest-analyzer``.
+We might have data that looks like this:
+
+.. code-block:: python
+
+    result = {"name": "metric", "analyzer_name": "thebest-analyzer", "value": "1", "install_file": "bin/wget"}
+
+
+We'd then return it as follows - note that they key is the analyzer name at ``self.name``.
+
+.. code-block:: python
+
+    return {self.name: result}
+
+This will save the complete result to the analyzer metadata folder, as described
+previously. If you want support for adding a different kind of metadata (e.g.,
+not associated with an install file) then the monitor server would need to be updated
+to support this first.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^
+An analyzer init Function
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you don't need any extra dependencies or checks, you can skip defining an analyzer
+init function, as the base class will handle it. Typically, it will accept
+a spec, and an optional output directory (if the user does not want the default
+metadata folder for analyzer results). The analyzer init function should call
+it's parent init, and then do any extra checks or validation that are required to
+work. For example:
+
+.. code-block:: python
+
+    def __init__(self, spec, dirname=None):
+        super(Myanalyzer, self).__init__(spec, dirname)
+
+        # install extra dependencies, do extra preparation and checks here
+
+
+At the end of the init, you will have available to you:
+
+- **self.spec**: the spec object
+- **self.dirname**: an optional directory name the user as provided at init to save
+- **self.output_dir**: the analyzer metadata directory, where we save by default
+- **self.meta_dir**: the path to the package metadata directory (.spack) if you need it
+
+And can proceed to write your analyzer.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Saving Analyzer Results
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The analyzer will have ``save_result`` called, with the result object generated
+to save it to the filesystem, and if the user has added the ``--monitor`` flag
+to upload it to a monitor server. If your result follows an accepted result
+format and you don't need to parse it further, you don't need to add this
+function to your class. However, if your result data is large or otherwise
+needs additional parsing, you can define it. If you define the function, it
+is useful to know about the ``output_dir`` property, which you can join
+with your output file relative path of choice:
+
+.. code-block:: python
+
+    outfile = os.path.join(self.output_dir, "my-output-file.txt")
+
+
+The directory will be provided by the ``output_dir`` property but it won't exist,
+so you should create it:
+
+
+.. code::block:: python
+
+    # Create the output directory
+    if not os.path.exists(self._output_dir):
+        os.makedirs(self._output_dir)
+
+
+If you are generating results that match to specific files in the package
+install directory, you should try to maintain those paths in the case that
+there are equivalently named files in different directories that would
+overwrite one another. As an example of an analyzer with a custom save,
+the Libabigail analyzer saves ``*.xml`` files to the analyzer metadata
+folder in ``run()``, as they are either binaries, or as xml (text) would
+usually be too big to pass in one request. For this reason, the files
+are saved during ``run()`` and the filenames added to the result object,
+and then when the result object is passed back into ``save_result()``,
+we skip saving to the filesystem, and instead read the file and send
+each one (separately) to the monitor:
+
+
+.. code-block:: python
+
+    def save_result(self, result, monitor=None, overwrite=False):
+        """ABI results are saved to individual files, so each one needs to be
+        read and uploaded. Result here should be the lookup generated in run(),
+        the key is the analyzer name, and each value is the result file.
+        We currently upload the entire xml as text because libabigail can't
+        easily read gzipped xml, but this will be updated when it can.
+        """
+        if not monitor:
+            return
+
+        name = self.spec.package.name
+
+        for obj, filename in result.get(self.name, {}).items():
+
+            # Don't include the prefix
+            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")
+
+            # We've already saved the results to file during run
+            content = spack.monitor.read_file(filename)
+
+            # A result needs an analyzer, value or binary_value, and name
+            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
+            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
+            monitor.send_analyze_metadata(self.spec.package, {"libabigail": [data]})
+
+
+Notice that this function, if you define it, requires a result object (generated by
+``run()``, a monitor (if you want to send), and a boolean ``overwrite`` to be used
+to check if a result exists first, and not write to it if the result exists and
+overwrite is False. Also notice that since we already saved these files to the analyzer metadata folder, we return early if a monitor isn't defined, because this function serves to send results to the monitor. If you haven't saved anything to the analyzer metadata folder
+yet, you might want to do that here. You should also use ``tty.info`` to give
+the user a message of "Writing result to $DIRNAME."
+
+
 .. _writing-commands:
 
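Pulling the added instructions above together, here is a hedged end-to-end sketch of a minimal analyzer. The ``Filecount`` name and metric are invented for illustration; the sketch assumes only what the docs state — the ``lib/spack/spack/analyzers`` layout, ``self.spec``, the required class attributes, and the ``run()`` return shape:

.. code-block:: python

   import os

   from .analyzer_base import AnalyzerBase


   class Filecount(AnalyzerBase):

       name = "filecount"
       outfile = "spack-analyzer-filecount.json"
       description = "count the files installed by a package"

       def run(self):
           # Walk the install prefix and count regular files.
           count = 0
           for _root, _dirs, files in os.walk(self.spec.prefix):
               count += len(files)

           # Return the result keyed by the analyzer name, as run() requires.
           result = {"name": "filecount", "analyzer_name": self.name,
                     "install_file": ".", "value": str(count)}
           return {self.name: result}

Per the docs above, dropping this file into the analyzers module directory as ``filecount.py`` should make the class discoverable by ``spack analyze``.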
@@ -448,6 +699,23 @@ with a hook, and this is the purpose of this particular hook. Akin to
 ``on_phase_success`` we require the same variables - the package that failed,
 the name of the phase, and the log file where we might find errors.
 
+"""""""""""""""""""""""""""""""""
+``on_analyzer_save(pkg, result)``
+"""""""""""""""""""""""""""""""""
+
+After an analyzer has saved some result for a package, this hook is called,
+and it provides the package that we just ran the analysis for, along with
+the loaded result. Typically, a result is structured to have the name
+of the analyzer as key, and the result object that is defined in detail in
+:ref:`analyzer_run_function`.
+
+.. code-block:: python
+
+    def on_analyzer_save(pkg, result):
+        """given a package and a result...
+        """
+        print('Do something extra with a package analysis result here')
+
+
 ^^^^^^^^^^^^^^^^^^^^^^
 Adding a New Hook Type
 ^^^^^^^^^^^^^^^^^^^^^^
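
A slightly fuller hedged sketch of the hook just shown (``llnl.util.tty`` is the messaging helper already used in the ``save_result`` example; the loop follows the result structure described in :ref:`analyzer_run_function`):

.. code-block:: python

   import llnl.util.tty as tty


   def on_analyzer_save(pkg, result):
       # result maps each analyzer name to the data it saved.
       for analyzer_name in result:
           tty.info("Package %s saved a result for analyzer '%s'"
                    % (pkg.name, analyzer_name))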
lib/spack/docs/dockerhub_spack.png — new binary file (88 KiB), not shown.
@@ -545,8 +545,8 @@ environment and have a single view of it in the filesystem.
 
 The ``concretizer:unify`` config option was introduced in Spack 0.18 to
 replace the ``concretization`` property. For reference,
-``concretization: together`` is replaced by ``concretizer:unify:true``,
-and ``concretization: separately`` is replaced by ``concretizer:unify:false``.
+``concretization: separately`` is replaced by ``concretizer:unify:true``,
+and ``concretization: together`` is replaced by ``concretizer:unify:false``.
 
 .. admonition:: Re-concretization of user specs
 
@@ -799,7 +799,7 @@ directories.
       select: [^mpi]
       exclude: ['%pgi@18.5']
       projections:
-        all: '{name}/{version}-{compiler.name}'
+        all: {name}/{version}-{compiler.name}
       link: all
       link_type: symlink
 
@@ -1013,7 +1013,7 @@ The following advanced example shows how generated targets can be used in a
 
    SPACK ?= spack
 
-   .PHONY: all clean env
+   .PHONY: all clean fetch env
 
    all: env
 
@@ -1022,6 +1022,9 @@ The following advanced example shows how generated targets can be used in a
 
    env.mk: spack.lock
   	$(SPACK) -e . env depfile -o $@ --make-target-prefix spack
 
+   fetch: spack/fetch
+  	$(info Environment fetched!)
+
    env: spack/env
   	$(info Environment installed!)
@@ -1034,10 +1037,10 @@ The following advanced example shows how generated targets can be used in a
    endif
 
 When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
-from its rule, which triggers concretization. When done, the generated target
-``spack/env`` is available. In the above example, the ``env`` target uses this generated
-target as a prerequisite, meaning that it can make use of the installed packages in
-its commands.
+from its rule, which triggers concretization. When done, the generated targets
+``spack/fetch`` and ``spack/env`` are available. In the above
+example, the ``env`` target uses the latter as a prerequisite, meaning
+that it can make use of the installed packages in its commands.
 
 As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
 the include is conditional.
@@ -1045,6 +1048,7 @@ the include is conditional.
 .. note::
 
    When including generated ``Makefile``\s, it is important to use
-   the ``--make-target-prefix`` flag and use the non-phony target
-   ``<target-prefix>/env`` as prerequisite, instead of the phony target
-   ``<target-prefix>/all``.
+   the ``--make-target-prefix`` flag and use the non-phony targets
+   ``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
+   prerequisites, instead of the phony targets ``<target-prefix>/all``
+   and ``<target-prefix>/fetch-all`` respectively.
Deleted binary file (70 KiB), not shown.
@@ -308,7 +308,7 @@ the variable ``FOOBAR`` will be unset.
 spec constraints are instead evaluated top to bottom.
 
 """"""""""""""""""""""""""""""""""""""""""""
-Exclude or include specific module files
+Blacklist or whitelist specific module files
 """"""""""""""""""""""""""""""""""""""""""""
 
 You can use anonymous specs also to prevent module files from being written or
@@ -322,8 +322,8 @@ your system. If you write a configuration file like:
 
    modules:
     default:
      tcl:
-      include: ['gcc', 'llvm']  # include will have precedence over exclude
-      exclude: ['%gcc@4.4.7']   # Assuming gcc@4.4.7 is the system compiler
+      whitelist: ['gcc', 'llvm']  # Whitelist will have precedence over blacklist
+      blacklist: ['%gcc@4.4.7']   # Assuming gcc@4.4.7 is the system compiler
 
 you will prevent the generation of module files for any package that
 is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
@@ -490,7 +490,7 @@ satisfies a default, Spack will generate the module file in the
 appropriate path, and will generate a default symlink to the module
 file as well.
 
 .. warning::
    If Spack is configured to generate multiple default packages in the
    same directory, the last modulefile to be generated will be the
    default module.
@@ -589,7 +589,7 @@ Filter out environment modifications
 Modifications to certain environment variables in module files are there by
 default, for instance because they are generated by prefix inspections.
 If you want to prevent modifications to some environment variables, you can
-do so by using the ``exclude_env_vars``:
+do so by using the environment blacklist:
 
 .. code-block:: yaml
 
@@ -599,7 +599,7 @@ do so by using the ``exclude_env_vars``:
      all:
       filter:
        # Exclude changes to any of these variables
-       exclude_env_vars: ['CPATH', 'LIBRARY_PATH']
+       environment_blacklist: ['CPATH', 'LIBRARY_PATH']
 
 The configuration above will generate module files that will not contain
 modifications to either ``CPATH`` or ``LIBRARY_PATH``.

|||||||
@@ -1070,32 +1070,13 @@ Commits
|
|||||||
|
|
||||||
Submodules
|
Submodules
|
||||||
You can supply ``submodules=True`` to cause Spack to fetch submodules
|
You can supply ``submodules=True`` to cause Spack to fetch submodules
|
||||||
recursively along with the repository at fetch time.
|
recursively along with the repository at fetch time. For more information
|
||||||
|
about git submodules see the manpage of git: ``man git-submodule``.
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
version('1.0.1', tag='v1.0.1', submodules=True)
|
version('1.0.1', tag='v1.0.1', submodules=True)
|
||||||
|
|
||||||
If a package has needs more fine-grained control over submodules, define
|
|
||||||
``submodules`` to be a callable function that takes the package instance as
|
|
||||||
its only argument. The function should return a list of submodules to be fetched.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
def submodules(package):
|
|
||||||
submodules = []
|
|
||||||
if "+variant-1" in package.spec:
|
|
||||||
submodules.append("submodule_for_variant_1")
|
|
||||||
if "+variant-2" in package.spec:
|
|
||||||
submodules.append("submodule_for_variant_2")
|
|
||||||
return submodules
|
|
||||||
|
|
||||||
|
|
||||||
class MyPackage(Package):
|
|
||||||
version("0.1.0", submodules=submodules)
|
|
||||||
|
|
||||||
For more information about git submodules see the manpage of git: ``man
|
|
||||||
git-submodule``.
|
|
||||||
|
|
||||||
.. _github-fetch:
|
.. _github-fetch:
|
||||||
|
|
||||||
@@ -2794,256 +2775,6 @@ Suppose a user invokes ``spack install`` like this:
 Spack will fail with a constraint violation, because the version of
 MPICH requested is too low for the ``mpi`` requirement in ``foo``.
 
-.. _custom-attributes:
-
------------------
-Custom attributes
------------------
-
-Often a package will need to provide attributes for dependents to query
-various details about what it provides. While any number of custom defined
-attributes can be implemented by a package, the four specific attributes
-described below are always available on every package with default
-implementations and the ability to customize with alternate implementations
-in the case of virtual packages provided:
-
-=========== =========================================== =====================
-Attribute   Purpose                                     Default
-=========== =========================================== =====================
-``home``    The installation path for the package      ``spec.prefix``
-``command`` An executable command for the package      | ``spec.name`` found
-                                                       | in ``.home.bin``
-``headers`` A list of headers provided by the package  | All headers searched
-                                                       | recursively in
-                                                       | ``.home.include``
-``libs``    A list of libraries provided by the        | ``lib{spec.name}``
-            package                                    | searched recursively
-                                                       | in ``.home`` starting
-                                                       | with ``lib``, ``lib64``,
-                                                       | then the rest of
-                                                       | ``.home``
-=========== =========================================== =====================
-
-Each of these can be customized by implementing the relevant attribute
-as a ``@property`` in the package's class:
-
-.. code-block:: python
-   :linenos:
-
-   class Foo(Package):
-       ...
-       @property
-       def libs(self):
-           # The library provided by Foo is libMyFoo.so
-           return find_libraries('libMyFoo', root=self.home, recursive=True)
-
-A package may also provide a custom implementation of each attribute
-for the virtual packages it provides by implementing the
-``virtualpackagename_attributename`` property in the package's class.
-The implementation used is the first one found from:
-
-#. Specialized virtual: ``Package.virtualpackagename_attributename``
-#. Generic package: ``Package.attributename``
-#. Default
-
-The use of customized attributes is demonstrated in the next example.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Example: Customized attributes for virtual packages
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Consider a package ``foo`` that can optionally provide two virtual
-packages ``bar`` and ``baz``. When both are enabled the installation tree
-appears as follows:
-
-.. code-block:: console
-
-   include/foo.h
-   include/bar/bar.h
-   lib64/libFoo.so
-   lib64/libFooBar.so
-   baz/include/baz/baz.h
-   baz/lib/libFooBaz.so
-
-The install tree shows that ``foo`` is providing the header ``include/foo.h``
-and library ``lib64/libFoo.so`` in it's install prefix. The virtual
-package ``bar`` is providing ``include/bar/bar.h`` and library
-``lib64/libFooBar.so``, also in ``foo``'s install prefix. The ``baz``
-package, however, is provided in the ``baz`` subdirectory of ``foo``'s
-prefix with the ``include/baz/baz.h`` header and ``lib/libFooBaz.so``
-library. Such a package could implement the optional attributes as
-follows:
-
-.. code-block:: python
-   :linenos:
-
-   class Foo(Package):
-       ...
-       variant('bar', default=False, description='Enable the Foo implementation of bar')
-       variant('baz', default=False, description='Enable the Foo implementation of baz')
-       ...
-       provides('bar', when='+bar')
-       provides('baz', when='+baz')
-       ....
-
-       # Just the foo headers
-       @property
-       def headers(self):
-           return find_headers('foo', root=self.home.include, recursive=False)
-
-       # Just the foo libraries
-       @property
-       def libs(self):
-           return find_libraries('libFoo', root=self.home, recursive=True)
-
-       # The header provided by the bar virutal package
-       @property
-       def bar_headers(self):
-           return find_headers('bar/bar.h', root=self.home.include, recursive=False)
-
-       # The libary provided by the bar virtual package
-       @property
-       def bar_libs(self):
-           return find_libraries('libFooBar', root=sef.home, recursive=True)
-
-       # The baz virtual package home
-       @property
-       def baz_home(self):
-           return self.prefix.baz
-
-       # The header provided by the baz virtual package
-       @property
-       def baz_headers(self):
-           return find_headers('baz/baz', root=self.baz_home.include, recursive=False)
-
-       # The library provided by the baz virtual package
-       @property
-       def baz_libs(self):
-           return find_libraries('libFooBaz', root=self.baz_home, recursive=True)
-
-Now consider another package, ``foo-app``, depending on all three:
-
-.. code-block:: python
-   :linenos:
-
-   class FooApp(CMakePackage):
-       ...
-       depends_on('foo')
-       depends_on('bar')
-       depends_on('baz')
-
-The resulting spec objects for it's dependencies shows the result of
-the above attribute implementations:
-
-.. code-block:: python
-
-   # The core headers and libraries of the foo package
-
-   >>> spec['foo']
-   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
-   >>> spec['foo'].prefix
-   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
-
-   # home defaults to the package install prefix without an explicit implementation
-   >>> spec['foo'].home
-   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
-
-   # foo headers from the foo prefix
-   >>> spec['foo'].headers
-   HeaderList([
-       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/foo.h',
-   ])
-
-   # foo include directories from the foo prefix
-   >>> spec['foo'].headers.directories
-   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']
-
-   # foo libraries from the foo prefix
-   >>> spec['foo'].libs
-   LibraryList([
-       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFoo.so',
-   ])
-
-   # foo library directories from the foo prefix
-   >>> spec['foo'].libs.directories
-   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']
-
-.. code-block:: python
-
-   # The virtual bar package in the same prefix as foo
-
-   # bar resolves to the foo package
-   >>> spec['bar']
-   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
-   >>> spec['bar'].prefix
-   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
-
-   # home defaults to the foo prefix without either a Foo.bar_home
-   # or Foo.home implementation
-   >>> spec['bar'].home
-   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
-
-   # bar header in the foo prefix
-   >>> spec['bar'].headers
-   HeaderList([
-       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/bar/bar.h'
-   ])
-
-   # bar include dirs from the foo prefix
-   >>> spec['bar'].headers.directories
-   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']
-
-   # bar library from the foo prefix
-   >>> spec['bar'].libs
-   LibraryList([
-       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFooBar.so'
-   ])
-
-   # bar library directories from the foo prefix
-   >>> spec['bar'].libs.directories
-   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']
-
-.. code-block:: python
-
-   # The virtual baz package in a subdirectory of foo's prefix
-
-   # baz resolves to the foo package
-   >>> spec['baz']
-   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
-   >>> spec['baz'].prefix
-   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'
-
-   # baz_home implementation provides the subdirectory inside the foo prefix
-   >>> spec['baz'].home
-   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz'
-
-   # baz headers in the baz subdirectory of the foo prefix
|
||||||
>>> spec['baz'].headers
|
|
||||||
HeaderList([
|
|
||||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include/baz/baz.h'
|
|
||||||
])
|
|
||||||
|
|
||||||
# baz include directories in the baz subdirectory of the foo prefix
|
|
||||||
>>> spec['baz'].headers.directories
|
|
||||||
[
|
|
||||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include'
|
|
||||||
]
|
|
||||||
|
|
||||||
# baz libraries in the baz subdirectory of the foo prefix
|
|
||||||
>>> spec['baz'].libs
|
|
||||||
LibraryList([
|
|
||||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so'
|
|
||||||
])
|
|
||||||
|
|
||||||
# baz library directories in the baz subdirectory of the foo porefix
|
|
||||||
>>> spec['baz'].libs.directories
|
|
||||||
[
|
|
||||||
'/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib'
|
|
||||||
]
|
|
||||||
|
|
||||||
.. _abstract-and-concrete:
|
.. _abstract-and-concrete:
|
||||||
|
|
||||||
-------------------------
|
-------------------------
|
||||||
@@ -5745,24 +5476,6 @@ Version Lists
Spack packages should list supported versions with the newest first.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using ``home`` vs ``prefix``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

``home`` and ``prefix`` are both attributes that can be queried on a
package's dependencies, often when passing configure arguments pointing to
the location of a dependency. The difference is that while ``prefix`` is the
location on disk where a concrete package resides, ``home`` is the `logical`
location of a package, which may differ from ``prefix`` in the case of
virtual packages or other special circumstances. For most use cases inside
a package, its dependency locations can be accessed via either
``self.spec['foo'].home`` or ``self.spec['foo'].prefix``. Specific packages
that should be consumed by dependents via ``.home`` instead of ``.prefix``
should be noted in their respective documentation.

See :ref:`custom-attributes` for more details and an example implementing
a custom ``home`` attribute.
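For illustration, here is a minimal sketch of how a dependent package might
pass a dependency's ``home`` (rather than ``prefix``) to its build system.
The ``foo`` dependency and the configure flag are assumptions for the
example, not a real package:

.. code-block:: python

   class FooConsumer(AutotoolsPackage):
       depends_on('foo')

       def configure_args(self):
           # ``home`` is the logical location of the dependency; for a
           # virtual provided in a subdirectory it may differ from ``prefix``
           return ['--with-foo=%s' % self.spec['foo'].home]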
---------------------------
Packaging workflow commands
---------------------------
@@ -7,7 +7,7 @@ bash, , , Compiler wrappers
 tar, , , Extract/create archives
 gzip, , , Compress/Decompress archives
 unzip, , , Compress/Decompress archives
-bzip2, , , Compress/Decompress archives
+bzip, , , Compress/Decompress archives
 xz, , , Compress/Decompress archives
 zstd, , Optional, Compress/Decompress archives
 file, , , Create/Use Buildcaches
@@ -15,4 +15,4 @@ gnupg2, , , Sign/Verify Buildcaches
 git, , , Manage Software Repositories
 svn, , Optional, Manage Software Repositories
 hg, , Optional, Manage Software Repositories
 Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
2  lib/spack/external/__init__.py  vendored
@@ -18,7 +18,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)
+* Version: 0.1.4 (commit 53fc4ac91e9b4c5e4079f15772503a80bece72ad)

 argparse
 --------
@@ -85,21 +85,21 @@
       "intel": [
         {
           "versions": ":",
-          "name": "x86-64",
+          "name": "pentium4",
           "flags": "-march={name} -mtune=generic"
         }
       ],
       "oneapi": [
         {
           "versions": ":",
-          "name": "x86-64",
+          "name": "pentium4",
           "flags": "-march={name} -mtune=generic"
         }
       ],
       "dpcpp": [
         {
           "versions": ":",
-          "name": "x86-64",
+          "name": "pentium4",
           "flags": "-march={name} -mtune=generic"
         }
       ]
@@ -143,20 +143,6 @@
           "name": "x86-64",
           "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
         }
-      ],
-      "oneapi": [
-        {
-          "versions": "2021.2.0:",
-          "name": "x86-64-v2",
-          "flags": "-march={name} -mtune=generic"
-        }
-      ],
-      "dpcpp": [
-        {
-          "versions": "2021.2.0:",
-          "name": "x86-64-v2",
-          "flags": "-march={name} -mtune=generic"
-        }
       ]
     }
   },
@@ -214,20 +200,6 @@
           "name": "x86-64",
           "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
         }
-      ],
-      "oneapi": [
-        {
-          "versions": "2021.2.0:",
-          "name": "x86-64-v3",
-          "flags": "-march={name} -mtune=generic"
-        }
-      ],
-      "dpcpp": [
-        {
-          "versions": "2021.2.0:",
-          "name": "x86-64-v3",
-          "flags": "-march={name} -mtune=generic"
-        }
       ]
     }
   },
@@ -290,20 +262,6 @@
           "name": "x86-64",
           "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
         }
-      ],
-      "oneapi": [
-        {
-          "versions": "2021.2.0:",
-          "name": "x86-64-v4",
-          "flags": "-march={name} -mtune=generic"
-        }
-      ],
-      "dpcpp": [
-        {
-          "versions": "2021.2.0:",
-          "name": "x86-64-v4",
-          "flags": "-march={name} -mtune=generic"
-        }
       ]
     }
   },
@@ -344,19 +302,22 @@
       "intel": [
         {
           "versions": "16.0:",
-          "flags": "-march={name} -mtune={name}"
+          "name": "pentium4",
+          "flags": "-march={name} -mtune=generic"
         }
       ],
       "oneapi": [
         {
           "versions": ":",
-          "flags": "-march={name} -mtune={name}"
+          "name": "pentium4",
+          "flags": "-march={name} -mtune=generic"
         }
       ],
       "dpcpp": [
         {
           "versions": ":",
-          "flags": "-march={name} -mtune={name}"
+          "name": "pentium4",
+          "flags": "-march={name} -mtune=generic"
         }
       ]
     }
@@ -308,68 +308,6 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
     filter_file(double_quoted, '"%s"' % repl, f)


-@contextmanager
-def exploding_archive_catch(stage):
-    # Check for an exploding tarball, i.e. one that doesn't expand to
-    # a single directory. If the tarball *didn't* explode, move its
-    # contents to the staging source directory & remove the container
-    # directory. If the tarball did explode, just rename the tarball
-    # directory to the staging source directory.
-    #
-    # NOTE: The tar program on Mac OS X will encode HFS metadata in
-    # hidden files, which can end up *alongside* a single top-level
-    # directory. We initially ignore presence of hidden files to
-    # accommodate these "semi-exploding" tarballs but ensure the files
-    # are copied to the source directory.
-
-    # Expand all tarballs in their own directory to contain
-    # exploding tarballs.
-    tarball_container = os.path.join(stage.path,
-                                     "spack-expanded-archive")
-    mkdirp(tarball_container)
-    orig_dir = os.getcwd()
-    os.chdir(tarball_container)
-    try:
-        yield
-        # catch an exploding archive on successful extraction
-        os.chdir(orig_dir)
-        exploding_archive_handler(tarball_container, stage)
-    except Exception as e:
-        # return current directory context to previous on failure
-        os.chdir(orig_dir)
-        raise e
-
-
-@system_path_filter
-def exploding_archive_handler(tarball_container, stage):
-    """
-    Args:
-        tarball_container: where the archive was expanded to
-        stage: Stage object referencing filesystem location
-            where archive is being expanded
-    """
-    files = os.listdir(tarball_container)
-    non_hidden = [f for f in files if not f.startswith('.')]
-    if len(non_hidden) == 1:
-        src = os.path.join(tarball_container, non_hidden[0])
-        if os.path.isdir(src):
-            stage.srcdir = non_hidden[0]
-            shutil.move(src, stage.source_path)
-            if len(files) > 1:
-                files.remove(non_hidden[0])
-                for f in files:
-                    src = os.path.join(tarball_container, f)
-                    dest = os.path.join(stage.path, f)
-                    shutil.move(src, dest)
-            os.rmdir(tarball_container)
-        else:
-            # This is a non-directory entry (e.g., a patch file) so simply
-            # rename the tarball container to be the source path.
-            shutil.move(tarball_container, stage.source_path)
-    else:
-        shutil.move(tarball_container, stage.source_path)
-
-
 @system_path_filter(arg_slice=slice(1))
 def get_owner_uid(path, err_msg=None):
     if not os.path.exists(path):
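For context, a sketch of how the ``exploding_archive_catch`` context manager
above was typically driven during staging; the ``stage`` object and the tar
invocation are illustrative assumptions, not code from this diff:

.. code-block:: python

   from spack.util.executable import which

   # hypothetical staging flow
   with exploding_archive_catch(stage):
       # extraction runs inside stage.path/spack-expanded-archive; on
       # success the handler normalizes the contents into stage.source_path
       tar = which('tar')
       tar('-xf', stage.archive_file)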
@@ -1072,15 +1072,3 @@ def __exit__(self, exc_type, exc_value, tb):
 # Suppress any exception from being re-raised:
 # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
 return True


-class classproperty(object):
-    """Non-data descriptor to evaluate a class-level property. The function that performs
-    the evaluation is injected at creation time and takes an instance (could be None) and
-    an owner (i.e. the class that originated the instance).
-    """
-    def __init__(self, callback):
-        self.callback = callback
-
-    def __get__(self, instance, owner):
-        return self.callback(owner)
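A small sketch of how the ``classproperty`` descriptor above can be used;
the ``Example`` class is hypothetical:

.. code-block:: python

   class Example(object):
       _suffix = '.json'

       @classproperty
       def outfile(cls):
           # evaluated against the owning class, not an instance
           return 'result' + cls._suffix

   assert Example.outfile == 'result.json'    # works on the class itself
   assert Example().outfile == 'result.json'  # and on instances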
42  lib/spack/spack/analyzers/__init__.py  Normal file
@@ -0,0 +1,42 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This package contains code for creating analyzers to extract Application
Binary Interface (ABI) information, along with simple analyses that just load
existing metadata.
"""

from __future__ import absolute_import

import llnl.util.tty as tty

import spack.paths
import spack.util.classes

mod_path = spack.paths.analyzers_path
analyzers = spack.util.classes.list_classes("spack.analyzers", mod_path)

# The base analyzer does not have a name, and cannot do dict comprehension
analyzer_types = {}
for a in analyzers:
    if not hasattr(a, "name"):
        continue
    analyzer_types[a.name] = a


def list_all():
    """A helper function to list all analyzers and their descriptions
    """
    for name, analyzer in analyzer_types.items():
        print("%-25s: %-35s" % (name, analyzer.description))


def get_analyzer(name):
    """Courtesy function to retrieve an analyzer, and exit on error if it
    does not exist.
    """
    if name in analyzer_types:
        return analyzer_types[name]
    tty.die("Analyzer %s does not exist" % name)
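As a usage sketch, retrieving and running one of the registered analyzers
might look like the following; the ``zlib`` spec is an assumption (any
installed spec would do):

.. code-block:: python

   import spack.analyzers
   import spack.spec

   spack.analyzers.list_all()  # print each analyzer name and description

   analyzer_cls = spack.analyzers.get_analyzer("install_files")
   spec = spack.spec.Spec("zlib").concretized()  # assumes zlib is installed
   result = analyzer_cls(spec).run()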
116  lib/spack/spack/analyzers/analyzer_base.py  Normal file
@@ -0,0 +1,116 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An analyzer base provides basic functions to run the analysis, save results,
and (optionally) interact with a Spack Monitor
"""

import os

import llnl.util.tty as tty

import spack.config
import spack.hooks
import spack.monitor
import spack.util.path


def get_analyzer_dir(spec, analyzer_dir=None):
    """
    Given a spec, return the directory to save analyzer results.

    We create the directory if it does not exist. We also check that the
    spec has an associated package. An analyzer cannot be run if the spec isn't
    associated with a package. If the user provides a custom analyzer_dir,
    we use it over checking the config and the default at ~/.spack/analyzers
    """
    # An analyzer cannot be run if the spec isn't associated with a package
    if not hasattr(spec, "package") or not spec.package:
        tty.die("A spec can only be analyzed with an associated package.")

    # The top level directory is in the user home, or a custom location
    if not analyzer_dir:
        analyzer_dir = spack.util.path.canonicalize_path(
            spack.config.get('config:analyzers_dir', '~/.spack/analyzers'))

    # We follow the same convention as the spec install (this could be better)
    package_prefix = os.sep.join(spec.package.prefix.split('/')[-3:])
    meta_dir = os.path.join(analyzer_dir, package_prefix)
    return meta_dir


class AnalyzerBase(object):

    def __init__(self, spec, dirname=None):
        """
        Verify that the analyzer has correct metadata.

        An Analyzer is intended to run on one spec install, so the spec
        with its associated package is required on init. The child analyzer
        class should define an init function that calls the init here, and
        also check that the analyzer has all dependencies that it
        needs. If an analyzer subclass does not have dependencies, it does not
        need to define an init. An Analyzer should not be allowed to proceed
        if one or more dependencies are missing. The dirname, if defined,
        is an optional directory name to save to (instead of the default meta
        spack directory).
        """
        self.spec = spec
        self.dirname = dirname
        self.meta_dir = os.path.dirname(spec.package.install_log_path)

        for required in ["name", "outfile", "description"]:
            if not hasattr(self, required):
                tty.die("Please add a %s attribute on the analyzer." % required)

    def run(self):
        """
        Given a spec with an installed package, run the analyzer on it.
        """
        raise NotImplementedError

    @property
    def output_dir(self):
        """
        The full path to the output directory.

        This includes the nested analyzer directory structure. This function
        does not create anything.
        """
        if not hasattr(self, "_output_dir"):
            output_dir = get_analyzer_dir(self.spec, self.dirname)
            self._output_dir = os.path.join(output_dir, self.name)

        return self._output_dir

    def save_result(self, result, overwrite=False):
        """
        Save a result to the associated spack monitor, if defined.

        This function is on the level of the analyzer because it might be
        the case that the result is large (appropriate for a single request)
        or that the data is organized differently (e.g., more than one
        request per result). If an analyzer subclass needs to over-write
        this function with a custom save, that is appropriate to do (see abi).
        """
        # We maintain the structure in json with the analyzer as key so
        # that in the future, we could upload to a monitor server
        if result[self.name]:

            outfile = os.path.join(self.output_dir, self.outfile)

            # Only try to create the results directory if we have a result
            if not os.path.exists(self._output_dir):
                os.makedirs(self._output_dir)

            # Don't overwrite an existing result if overwrite is False
            if os.path.exists(outfile) and not overwrite:
                tty.info("%s exists and overwrite is False, skipping." % outfile)
            else:
                tty.info("Writing result to %s" % outfile)
                spack.monitor.write_json(result[self.name], outfile)

        # This hook runs after a save result
        spack.hooks.on_analyzer_save(self.spec.package, result)
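Given that base class, a minimal custom analyzer is just a subclass that
defines the three required attributes and a ``run`` implementation. This is
an illustrative sketch, not part of the diff:

.. code-block:: python

   from .analyzer_base import AnalyzerBase


   class HelloWorld(AnalyzerBase):

       name = "hello_world"
       outfile = "spack-analyzer-hello-world.json"
       description = "toy analyzer that records the package name"

       def run(self):
           # keyed by the analyzer name, as save_result() expects
           return {self.name: {"package": self.spec.name}}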
33  lib/spack/spack/analyzers/config_args.py  Normal file
@@ -0,0 +1,33 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""A configargs analyzer is a class of analyzer that typically just uploads
already existing metadata about config args from a package spec install
directory."""


import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class ConfigArgs(AnalyzerBase):

    name = "config_args"
    outfile = "spack-analyzer-config-args.json"
    description = "config args loaded from spack-configure-args.txt"

    def run(self):
        """
        Load the configure-args.txt and save in json.

        The run function will find the spack-configure-args.txt file in the
        package install directory, and read it into a json structure that has
        the name of the analyzer as the key.
        """
        config_file = os.path.join(self.meta_dir, "spack-configure-args.txt")
        return {self.name: spack.monitor.read_file(config_file)}
54  lib/spack/spack/analyzers/environment_variables.py  Normal file
@@ -0,0 +1,54 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""An environment analyzer will read and parse the environment variables
file in the installed package directory, generating a json file that has
an index of key, value pairs for environment variables."""


import os

import llnl.util.tty as tty

from spack.util.environment import EnvironmentModifications

from .analyzer_base import AnalyzerBase


class EnvironmentVariables(AnalyzerBase):

    name = "environment_variables"
    outfile = "spack-analyzer-environment-variables.json"
    description = "environment variables parsed from spack-build-env.txt"

    def run(self):
        """
        Load, parse, and save spack-build-env.txt to analyzers.

        Read in the spack-build-env.txt file from the package install
        directory and parse the environment variables into key value pairs.
        The result should have the key for the analyzer, the name.
        """
        env_file = os.path.join(self.meta_dir, "spack-build-env.txt")
        return {self.name: self._read_environment_file(env_file)}

    def _read_environment_file(self, filename):
        """
        Read and parse the environment file.

        Given an environment file, we want to read it, split by semicolons
        and new lines, and then parse down to the subset of SPACK_* variables.
        We assume that all spack prefix variables are not secrets, and unlike
        the install_manifest.json, we don't (at least to start) parse the values
        to remove path prefixes specific to user systems.
        """
        if not os.path.exists(filename):
            tty.warn("No environment file available")
            return

        mods = EnvironmentModifications.from_sourcing_file(filename)
        env = {}
        mods.apply_modifications(env)
        return env
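The parsing helper above leans on ``EnvironmentModifications.from_sourcing_file``;
used standalone, the same idea looks roughly like this (the file path is an
assumption for the example):

.. code-block:: python

   from spack.util.environment import EnvironmentModifications

   mods = EnvironmentModifications.from_sourcing_file("/tmp/spack-build-env.txt")
   env = {}
   mods.apply_modifications(env)
   # env now maps variable names to the values the build environment set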
31  lib/spack/spack/analyzers/install_files.py  Normal file
@@ -0,0 +1,31 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""The install files json file (install_manifest.json) already exists in
the package install folder, so this analyzer simply moves it to the user
analyzer folder for further processing."""


import os

import spack.monitor

from .analyzer_base import AnalyzerBase


class InstallFiles(AnalyzerBase):

    name = "install_files"
    outfile = "spack-analyzer-install-files.json"
    description = "install file listing read from install_manifest.json"

    def run(self):
        """
        Load in the install_manifest.json and save to analyzers.

        We write it out to the analyzers folder, with key as the analyzer name.
        """
        manifest_file = os.path.join(self.meta_dir, "install_manifest.json")
        return {self.name: spack.monitor.read_json(manifest_file)}
114  lib/spack/spack/analyzers/libabigail.py  Normal file
@@ -0,0 +1,114 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.tty as tty

import spack
import spack.binary_distribution
import spack.bootstrap
import spack.error
import spack.hooks
import spack.monitor
import spack.package_base
import spack.repo
import spack.util.executable

from .analyzer_base import AnalyzerBase


class Libabigail(AnalyzerBase):

    name = "libabigail"
    outfile = "spack-analyzer-libabigail.json"
    description = "Application Binary Interface (ABI) features for objects"

    def __init__(self, spec, dirname=None):
        """
        init for an analyzer ensures we have all needed dependencies.

        For the libabigail analyzer, this means Libabigail.
        Since the output for libabigail is one file per object, we communicate
        with the monitor multiple times.
        """
        super(Libabigail, self).__init__(spec, dirname)

        # This doesn't seem to work to import on the module level
        tty.debug("Preparing to use Libabigail, will install if missing.")

        with spack.bootstrap.ensure_bootstrap_configuration():
            # libabigail won't install lib/bin/share without docs
            spec = spack.spec.Spec("libabigail+docs")
            spack.bootstrap.ensure_executables_in_path_or_raise(
                ["abidw"], abstract_spec=spec
            )
            self.abidw = spack.util.executable.which('abidw')

    def run(self):
        """
        Run libabigail, and save results to filename.

        This run function differs in that we write as we generate and then
        return a dict with the analyzer name as the key, and the value of a
        dict of results, where the key is the object name, and the value is
        the output file written to.
        """
        manifest = spack.binary_distribution.get_buildfile_manifest(self.spec)

        # This result will store a path to each file
        result = {}

        # Generate an output file for each binary or object
        for obj in manifest.get("binary_to_relocate_fullpath", []):

            # We want to preserve the path in the install directory in case
            # a library has an equivalently named lib or executable, for example
            outdir = os.path.dirname(obj.replace(self.spec.package.prefix,
                                     '').strip(os.path.sep))
            outfile = "spack-analyzer-libabigail-%s.xml" % os.path.basename(obj)
            outfile = os.path.join(self.output_dir, outdir, outfile)
            outdir = os.path.dirname(outfile)

            # Create the output directory
            if not os.path.exists(outdir):
                os.makedirs(outdir)

            # Sometimes libabigail segfaults and dumps
            try:
                self.abidw(obj, "--out-file", outfile)
                result[obj] = outfile
                tty.info("Writing result to %s" % outfile)
            except spack.error.SpackError:
                tty.warn("Issue running abidw for %s" % obj)

        return {self.name: result}

    def save_result(self, result, overwrite=False):
        """
        Read saved ABI results and upload to monitor server.

        ABI results are saved to individual files, so each one needs to be
        read and uploaded. Result here should be the lookup generated in run(),
        the key is the analyzer name, and each value is the result file.
        We currently upload the entire xml as text because libabigail can't
        easily read gzipped xml, but this will be updated when it can.
        """
        if not spack.monitor.cli:
            return

        name = self.spec.package.name

        for obj, filename in result.get(self.name, {}).items():

            # Don't include the prefix
            rel_path = obj.replace(self.spec.prefix + os.path.sep, "")

            # We've already saved the results to file during run
            content = spack.monitor.read_file(filename)

            # A result needs an analyzer, value or binary_value, and name
            data = {"value": content, "install_file": rel_path, "name": "abidw-xml"}
            tty.info("Sending result for %s %s to monitor." % (name, rel_path))
            spack.hooks.on_analyzer_save(self.spec.package, {"libabigail": [data]})
@@ -281,15 +281,15 @@ def _check_build_test_callbacks(pkgs, error_cls):
     """Ensure stand-alone test method is not included in build-time callbacks"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        test_callbacks = pkg_cls.build_time_test_callbacks
+        pkg = spack.repo.get(pkg_name)
+        test_callbacks = pkg.build_time_test_callbacks

         if test_callbacks and 'test' in test_callbacks:
             msg = ('{0} package contains "test" method in '
                    'build_time_test_callbacks')
             instr = ('Remove "test" from: [{0}]'
                      .format(', '.join(test_callbacks)))
-            errors.append(error_cls(msg.format(pkg_name), [instr]))
+            errors.append(error_cls(msg.format(pkg.name), [instr]))

     return errors
@@ -298,14 +298,13 @@ def _check_build_test_callbacks(pkgs, error_cls):
 def _check_patch_urls(pkgs, error_cls):
     """Ensure that patches fetched from GitHub have stable sha256 hashes."""
     github_patch_url_re = (
-        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
-        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
+        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
     )

     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        for condition, patches in pkg_cls.patches.items():
+        pkg = spack.repo.get(pkg_name)
+        for condition, patches in pkg.patches.items():
             for patch in patches:
                 if not isinstance(patch, spack.patch.UrlPatch):
                     continue
@@ -317,7 +316,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(full_index_arg):
                     errors.append(error_cls(
                         "patch URL in package {0} must end with {1}".format(
-                            pkg_cls.name, full_index_arg,
+                            pkg.name, full_index_arg,
                         ),
                         [patch.url],
                     ))
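In Spack's audit code, ``full_index_arg`` is the ``?full_index=1`` query
string, which pins GitHub patch URLs to full-index (stable) diffs. A
package-side ``patch`` directive satisfying this check would look roughly
like the following; the URL, hash, and version range are placeholders:

.. code-block:: python

   patch('https://github.com/org/project/commit/abc123.patch?full_index=1',
         sha256='<sha256 of the full-index patch>',
         when='@1.0')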
@@ -331,21 +330,21 @@ def _linting_package_file(pkgs, error_cls):
     """
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg = spack.repo.get(pkg_name)

         # Does the homepage have http, and if so, does https work?
-        if pkg_cls.homepage.startswith('http://'):
-            https = re.sub("http", "https", pkg_cls.homepage, 1)
+        if pkg.homepage.startswith('http://'):
+            https = re.sub("http", "https", pkg.homepage, 1)
             try:
                 response = urlopen(https)
             except Exception as e:
                 msg = 'Error with attempting https for "{0}": '
-                errors.append(error_cls(msg.format(pkg_cls.name), [str(e)]))
+                errors.append(error_cls(msg.format(pkg.name), [str(e)]))
                 continue

             if response.getcode() == 200:
                 msg = 'Package "{0}" uses http but has a valid https endpoint.'
-                errors.append(msg.format(pkg_cls.name))
+                errors.append(msg.format(pkg.name))

     return llnl.util.lang.dedupe(errors)

@@ -355,10 +354,10 @@ def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg = spack.repo.get(pkg_name)

         # Check "conflicts" directive
-        for conflict, triggers in pkg_cls.conflicts.items():
+        for conflict, triggers in pkg.conflicts.items():
             for trigger, _ in triggers:
                 vrn = spack.spec.Spec(conflict)
                 try:
@@ -371,34 +370,34 @@ def _unknown_variants_in_directives(pkgs, error_cls):
                 # When os and target constraints can be created independently of
                 # the platform, TODO change this back to add an error.
                 errors.extend(_analyze_variants_in_directive(
-                    pkg_cls, spack.spec.Spec(trigger),
+                    pkg, spack.spec.Spec(trigger),
                     directive='conflicts', error_cls=error_cls
                 ))
                 errors.extend(_analyze_variants_in_directive(
-                    pkg_cls, vrn, directive='conflicts', error_cls=error_cls
+                    pkg, vrn, directive='conflicts', error_cls=error_cls
                 ))

         # Check "depends_on" directive
-        for _, triggers in pkg_cls.dependencies.items():
+        for _, triggers in pkg.dependencies.items():
             triggers = list(triggers)
             for trigger in list(triggers):
                 vrn = spack.spec.Spec(trigger)
                 errors.extend(_analyze_variants_in_directive(
-                    pkg_cls, vrn, directive='depends_on', error_cls=error_cls
+                    pkg, vrn, directive='depends_on', error_cls=error_cls
                 ))

         # Check "patch" directive
-        for _, triggers in pkg_cls.provided.items():
+        for _, triggers in pkg.provided.items():
             triggers = [spack.spec.Spec(x) for x in triggers]
             for vrn in triggers:
                 errors.extend(_analyze_variants_in_directive(
-                    pkg_cls, vrn, directive='patch', error_cls=error_cls
+                    pkg, vrn, directive='patch', error_cls=error_cls
                 ))

         # Check "resource" directive
-        for vrn in pkg_cls.resources:
+        for vrn in pkg.resources:
             errors.extend(_analyze_variants_in_directive(
-                pkg_cls, vrn, directive='resource', error_cls=error_cls
+                pkg, vrn, directive='resource', error_cls=error_cls
             ))

     return llnl.util.lang.dedupe(errors)
@@ -409,15 +408,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg = spack.repo.get(pkg_name)
         filename = spack.repo.path.filename_for_package_name(pkg_name)
-        for dependency_name, dependency_data in pkg_cls.dependencies.items():
+        for dependency_name, dependency_data in pkg.dependencies.items():
             # No need to analyze virtual packages
             if spack.repo.path.is_virtual(dependency_name):
                 continue

             try:
-                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
+                dependency_pkg = spack.repo.get(dependency_name)
             except spack.repo.UnknownPackageError:
                 # This dependency is completely missing, so report
                 # and continue the analysis
@@ -433,8 +432,8 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
             dependency_variants = dependency_edge.spec.variants
             for name, value in dependency_variants.items():
                 try:
-                    v, _ = dependency_pkg_cls.variants[name]
-                    v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
+                    v, _ = dependency_pkg.variants[name]
+                    v.validate_or_raise(value, pkg=dependency_pkg)
                 except Exception as e:
                     summary = (pkg_name + ": wrong variant used for a "
                                "dependency in a 'depends_on' directive")
@@ -456,10 +455,10 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
     """Report if version constraints used in directives are not satisfiable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg = spack.repo.get(pkg_name)
         filename = spack.repo.path.filename_for_package_name(pkg_name)
         dependencies_to_check = []
-        for dependency_name, dependency_data in pkg_cls.dependencies.items():
+        for dependency_name, dependency_data in pkg.dependencies.items():
             # Skip virtual dependencies for the time being, check on
             # their versions can be added later
             if spack.repo.path.is_virtual(dependency_name):
@@ -470,19 +469,19 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
         )

     for s in dependencies_to_check:
-        dependency_pkg_cls = None
+        dependency_pkg = None
         try:
-            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
+            dependency_pkg = spack.repo.get(s.name)
             assert any(
-                v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions)
+                v.satisfies(s.versions) for v in list(dependency_pkg.versions)
             )
         except Exception:
             summary = ("{0}: dependency on {1} cannot be satisfied "
                        "by known versions of {1.name}").format(pkg_name, s)
             details = ['happening in ' + filename]
-            if dependency_pkg_cls is not None:
+            if dependency_pkg is not None:
                 details.append('known versions of {0.name} are {1}'.format(
-                    s, ', '.join([str(x) for x in dependency_pkg_cls.versions])
+                    s, ', '.join([str(x) for x in dependency_pkg.versions])
                 ))
             errors.append(error_cls(summary=summary, details=details))

@@ -500,7 +499,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     for name, v in constraint.variants.items():
         try:
             variant, _ = pkg.variants[name]
-            variant.validate_or_raise(v, pkg_cls=pkg)
+            variant.validate_or_raise(v, pkg=pkg)
         except variant_exceptions as e:
             summary = pkg.name + ': wrong variant in "{0}" directive'
             summary = summary.format(directive)
@@ -618,20 +618,24 @@ def get_buildfile_manifest(spec):
     Return a data structure with information about a build, including
     text_to_relocate, binary_to_relocate, binary_to_relocate_fullpath
     link_to_relocate, and other, which means it doesn't fit any of previous
-    checks (and should not be relocated). We exclude docs (man) and
+    checks (and should not be relocated). We blacklist docs (man) and
     metadata (.spack). This can be used to find a particular kind of file
     in spack, or to generate the build metadata.
     """
     data = {"text_to_relocate": [], "binary_to_relocate": [],
             "link_to_relocate": [], "other": [],
-            "binary_to_relocate_fullpath": []}
+            "binary_to_relocate_fullpath": [], "offsets": {}}

-    exclude_list = (".spack", "man")
+    blacklist = (".spack", "man")
+
+    # Get all the paths we will want to relocate in binaries
+    paths_to_relocate = [s.prefix for s in spec.traverse(root=True)]
+    paths_to_relocate.append(spack.store.layout.root)

     # Do this during tarball creation to save time when tarball unpacked.
     # Used by make_package_relative to determine binaries to change.
     for root, dirs, files in os.walk(spec.prefix, topdown=True):
-        dirs[:] = [d for d in dirs if d not in exclude_list]
+        dirs[:] = [d for d in dirs if d not in blacklist]

         # Directories may need to be relocated too.
         for directory in dirs:
@@ -662,6 +666,11 @@ def get_buildfile_manifest(spec):
             (m_subtype in ('x-mach-binary')
                 and sys.platform == 'darwin') or
             (not filename.endswith('.o'))):
+
+            # Last path to relocate is the layout root, which is a substring
+            # of the others
+            indices = relocate.compute_indices(path_name, paths_to_relocate)
+            data['offsets'][rel_path_name] = indices
             data['binary_to_relocate'].append(rel_path_name)
             data['binary_to_relocate_fullpath'].append(path_name)
             added = True
@@ -700,6 +709,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
     buildinfo['relocate_binaries'] = manifest['binary_to_relocate']
     buildinfo['relocate_links'] = manifest['link_to_relocate']
     buildinfo['prefix_to_hash'] = prefix_to_hash
+    buildinfo['offsets'] = manifest['offsets']
     filename = buildinfo_file_name(workdir)
     with open(filename, 'w') as outfile:
         outfile.write(syaml.dump(buildinfo, default_flow_style=True))
@@ -1473,11 +1483,25 @@ def is_backup_file(file):
     # If we are not installing back to the same install tree do the relocation
     if old_prefix != new_prefix:
-        files_to_relocate = [os.path.join(workdir, filename)
-                             for filename in buildinfo.get('relocate_binaries')
-                             ]
+        # Relocate links to the new install prefix
+        links = [link for link in buildinfo.get('relocate_links', [])]
+        relocate.relocate_links(
+            links, old_layout_root, old_prefix, new_prefix
+        )
+
+        # For all buildcaches
+        # relocate the install prefixes in text files including dependencies
+        relocate.relocate_text(text_names, prefix_to_prefix_text)
+
         # If the buildcache was not created with relativized rpaths
-        # do the relocation of path in binaries
+        # do the relocation of rpaths in binaries
+        # TODO: Is this necessary? How are null-terminated strings handled
+        # in the rpath header?
+        files_to_relocate = [
+            os.path.join(workdir, filename)
+            for filename in buildinfo.get('relocate_binaries')
+        ]
+
         platform = spack.platforms.by_name(spec.platform)
         if 'macho' in platform.binary_formats:
             relocate.relocate_macho_binaries(files_to_relocate,
@@ -1493,25 +1517,11 @@ def is_backup_file(file):
                                              prefix_to_prefix_bin, rel,
                                              old_prefix,
                                              new_prefix)
-        # Relocate links to the new install prefix
-        links = [link for link in buildinfo.get('relocate_links', [])]
-        relocate.relocate_links(
-            links, old_layout_root, old_prefix, new_prefix
-        )
-
-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-        paths_to_relocate = [old_prefix, old_layout_root]
-        paths_to_relocate.extend(prefix_to_hash.keys())
-        files_to_relocate = list(filter(
-            lambda pathname: not relocate.file_is_relocatable(
-                pathname, paths_to_relocate=paths_to_relocate),
-            map(lambda filename: os.path.join(workdir, filename),
-                buildinfo['relocate_binaries'])))
-        # relocate the install prefixes in binary files including dependencies
-        relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+        # If offsets is None, we will recompute offsets when needed
+        offsets = buildinfo.get('offsets', None)
+        relocate.relocate_text_bin(
+            files_to_relocate, prefix_to_prefix_bin, offsets, workdir)

         # If we are installing back to the same location
         # relocate the sbang location if the spack directory changed
@@ -80,41 +80,32 @@ def _try_import_from_store(module, query_spec, query_info=None):
     for candidate_spec in installed_specs:
         pkg = candidate_spec['python'].package
-        module_paths = [
+        module_paths = {
             os.path.join(candidate_spec.prefix, pkg.purelib),
             os.path.join(candidate_spec.prefix, pkg.platlib),
-        ]  # type: list[str]
-        path_before = list(sys.path)
-        # NOTE: try module_paths first and last, last allows an existing version in path
-        # to be picked up and used, possibly depending on something in the store, first
-        # allows the bootstrap version to work when an incompatible version is in
-        # sys.path
-        orders = [
-            module_paths + sys.path,
-            sys.path + module_paths,
-        ]
-        for path in orders:
-            sys.path = path
-            try:
-                _fix_ext_suffix(candidate_spec)
-                if _python_import(module):
-                    msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
-                           'provides the "{0}" Python module').format(
-                        module, query_spec, candidate_spec.dag_hash()
-                    )
-                    tty.debug(msg)
-                    if query_info is not None:
-                        query_info['spec'] = candidate_spec
-                    return True
-            except Exception as e:
-                msg = ('unexpected error while trying to import module '
-                       '"{0}" from spec "{1}" [error="{2}"]')
-                tty.warn(msg.format(module, candidate_spec, str(e)))
-            else:
-                msg = "Spec {0} did not provide module {1}"
-                tty.warn(msg.format(candidate_spec, module))
-
-        sys.path = path_before
+        }
+        sys.path.extend(module_paths)
+
+        try:
+            _fix_ext_suffix(candidate_spec)
+            if _python_import(module):
+                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
+                       'provides the "{0}" Python module').format(
+                    module, query_spec, candidate_spec.dag_hash()
+                )
+                tty.debug(msg)
+                if query_info is not None:
+                    query_info['spec'] = candidate_spec
+                return True
+        except Exception as e:
+            msg = ('unexpected error while trying to import module '
+                   '"{0}" from spec "{1}" [error="{2}"]')
+            tty.warn(msg.format(module, candidate_spec, str(e)))
+        else:
+            msg = "Spec {0} did not provide module {1}"
+            tty.warn(msg.format(candidate_spec, module))
+
+        sys.path = sys.path[:-3]

     return False
@@ -465,10 +456,9 @@ def _make_bootstrapper(conf):
     return _bootstrap_methods[btype](conf)
 
 
-def source_is_enabled_or_raise(conf):
-    """Raise ValueError if the source is not enabled for bootstrapping"""
+def _validate_source_is_trusted(conf):
     trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
-    if not trusted.get(name, False):
+    if name not in trusted:
         raise ValueError('source is not trusted')
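Note the semantics change hiding in the rename: `name not in trusted` accepts a source that is present but explicitly marked untrusted, while `not trusted.get(name, False)` requires an explicit truthy entry. A small illustration with hypothetical config values:

    trusted = {'github-actions': True, 'spack-install': False}

    name = 'spack-install'
    print(name not in trusted)           # False: the older check lets it through
    print(not trusted.get(name, False))  # True: the newer check raises for it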
@@ -539,7 +529,7 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
 
     for current_config in bootstrapping_sources():
         with h.forward(current_config['name']):
-            source_is_enabled_or_raise(current_config)
+            _validate_source_is_trusted(current_config)
 
             b = _make_bootstrapper(current_config)
             if b.try_import(module, abstract_spec):
@@ -581,7 +571,7 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
 
     for current_config in bootstrapping_sources():
         with h.forward(current_config['name']):
-            source_is_enabled_or_raise(current_config)
+            _validate_source_is_trusted(current_config)
 
             b = _make_bootstrapper(current_config)
             if b.try_search_path(executables, abstract_spec):
@@ -652,10 +642,10 @@ def _add_compilers_if_missing():
 def _add_externals_if_missing():
     search_list = [
         # clingo
-        spack.repo.path.get_pkg_class('cmake'),
-        spack.repo.path.get_pkg_class('bison'),
+        spack.repo.path.get('cmake'),
+        spack.repo.path.get('bison'),
         # GnuPG
-        spack.repo.path.get_pkg_class('gawk')
+        spack.repo.path.get('gawk')
     ]
     detected_packages = spack.detection.by_executable(search_list)
     spack.detection.update_configuration(detected_packages, scope='bootstrap')
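The `get_pkg_class`/`get` swap recurs throughout this comparison: the newer API hands back the package class, the older one a package instance. A quick way to see the difference, as a sketch assuming a Spack checkout on `sys.path`:

    import inspect

    import spack.repo

    pkg_cls = spack.repo.path.get_pkg_class('cmake')  # newer API: a class
    assert inspect.isclass(pkg_cls)
    print(pkg_cls.name)  # class-level attributes need no spec or instance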
@@ -1259,14 +1259,6 @@ def install(self, spec, prefix):
         for f in glob.glob('%s/intel*log' % tmpdir):
             install(f, dst)
 
-    @run_after('install')
-    def validate_install(self):
-        # Sometimes the installer exits with an error but doesn't pass a
-        # non-zero exit code to spack. Check for the existence of a 'bin'
-        # directory to catch this error condition.
-        if not os.path.exists(self.prefix.bin):
-            raise InstallError('The installer has failed to install anything.')
-
     @run_after('install')
     def configure_rpath(self):
         if '+rpath' not in self.spec:
@@ -45,16 +45,18 @@ def component_dir(self):
         raise NotImplementedError
 
     @property
-    def component_prefix(self):
+    def component_path(self):
         """Path to component <prefix>/<component>/<version>."""
-        return self.prefix.join(join_path(self.component_dir, self.spec.version))
+        return join_path(self.prefix, self.component_dir, str(self.spec.version))
 
-    def install(self, spec, prefix):
-        self.install_component(basename(self.url_for_version(spec.version)))
-
-    def install_component(self, installer_path):
+    def install(self, spec, prefix, installer_path=None):
         """Shared install method for all oneapi packages."""
 
+        # intel-oneapi-compilers overrides the installer_path when
+        # installing fortran, which comes from a spack resource
+        if installer_path is None:
+            installer_path = basename(self.url_for_version(spec.version))
+
         if platform.system() == 'Linux':
             # Intel installer assumes and enforces that all components
             # are installed into a single prefix. Spack wants to
@@ -75,7 +77,7 @@ def install_component(self, installer_path):
             bash = Executable('bash')
 
             # Installer writes files in ~/intel set HOME so it goes to prefix
-            bash.add_default_env('HOME', self.prefix)
+            bash.add_default_env('HOME', prefix)
             # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
             bash.add_default_env('XDG_RUNTIME_DIR',
                                  join_path(self.stage.path, 'runtime'))
@@ -83,13 +85,13 @@ def install_component(self, installer_path):
             bash(installer_path,
                  '-s', '-a', '-s', '--action', 'install',
                  '--eula', 'accept',
-                 '--install-dir', self.prefix)
+                 '--install-dir', prefix)
 
             if getpass.getuser() == 'root':
                 shutil.rmtree('/var/intel/installercache', ignore_errors=True)
 
         # Some installers have a bug and do not return an error code when failing
-        if not isdir(join_path(self.prefix, self.component_dir)):
+        if not isdir(join_path(prefix, self.component_dir)):
             raise RuntimeError('install failed')
 
     def setup_run_environment(self, env):
@@ -102,7 +104,7 @@ def setup_run_environment(self, env):
         $ source {prefix}/{component}/{version}/env/vars.sh
         """
         env.extend(EnvironmentModifications.from_sourcing_file(
-            join_path(self.component_prefix, 'env', 'vars.sh')))
+            join_path(self.component_path, 'env', 'vars.sh')))
 
 
 class IntelOneApiLibraryPackage(IntelOneApiPackage):
@@ -116,12 +118,12 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
 
     @property
     def headers(self):
-        include_path = join_path(self.component_prefix, 'include')
+        include_path = join_path(self.component_path, 'include')
         return find_headers('*', include_path, recursive=True)
 
     @property
     def libs(self):
-        lib_path = join_path(self.component_prefix, 'lib', 'intel64')
+        lib_path = join_path(self.component_path, 'lib', 'intel64')
         lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
         return find_libraries('*', root=lib_path, shared=True, recursive=True)
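The renamed property resolves to `<prefix>/<component>/<version>` on both sides; they differ only in how the pieces are joined. A sketch with illustrative values:

    import os

    prefix = '/opt/spack/intel-oneapi-mkl-2022.0.2'  # hypothetical prefix
    component_dir = 'mkl'
    version = '2022.0.2'

    component_path = os.path.join(prefix, component_dir, version)
    print(component_path)  # /opt/spack/intel-oneapi-mkl-2022.0.2/mkl/2022.0.2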
@@ -12,17 +12,14 @@
 from llnl.util.filesystem import (
     filter_file,
     find,
-    find_all_headers,
-    find_libraries,
     is_nonsymlink_exe_with_shebang,
     path_contains_subdirectory,
     same_path,
     working_dir,
 )
-from llnl.util.lang import classproperty, match_predicate
+from llnl.util.lang import match_predicate
 
 from spack.directives import depends_on, extends
-from spack.error import NoHeadersError, NoLibrariesError
 from spack.package_base import PackageBase, run_after
@@ -77,21 +74,24 @@ def _std_args(cls):
             '--no-index',
         ]
 
-    @classproperty
-    def homepage(cls):
-        if cls.pypi:
-            name = cls.pypi.split('/')[0]
+    @property
+    def homepage(self):
+        if self.pypi:
+            name = self.pypi.split('/')[0]
             return 'https://pypi.org/project/' + name + '/'
 
-    @classproperty
-    def url(cls):
-        if cls.pypi:
-            return 'https://files.pythonhosted.org/packages/source/' + cls.pypi[0] + '/' + cls.pypi
+    @property
+    def url(self):
+        if self.pypi:
+            return (
+                'https://files.pythonhosted.org/packages/source/'
+                + self.pypi[0] + '/' + self.pypi
+            )
 
-    @classproperty
-    def list_url(cls):
-        if cls.pypi:
-            name = cls.pypi.split('/')[0]
+    @property
+    def list_url(self):
+        if self.pypi:
+            name = self.pypi.split('/')[0]
             return 'https://pypi.org/simple/' + name + '/'
 
     @property
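`classproperty` (from `llnl.util.lang` on the left-hand side) is what lets `homepage`, `url` and `list_url` be computed on the class itself, before any instance exists. A minimal sketch of such a descriptor, not the llnl implementation:

    class classproperty(object):
        # Descriptor that calls its wrapped function with the owning class,
        # so the attribute works on the class as well as on instances.
        def __init__(self, callback):
            self.callback = callback

        def __get__(self, instance, owner):
            return self.callback(owner)


    class PyExample(object):
        pypi = 'example/example-1.0.tar.gz'  # hypothetical package

        @classproperty
        def homepage(cls):
            return 'https://pypi.org/project/' + cls.pypi.split('/')[0] + '/'


    print(PyExample.homepage)  # resolves without instantiating PyExample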
@@ -178,37 +178,6 @@ def install(self, spec, prefix):
         with working_dir(self.build_directory):
             pip(*args)
 
-    @property
-    def headers(self):
-        """Discover header files in platlib."""
-
-        # Headers may be in either location
-        include = inspect.getmodule(self).include
-        platlib = inspect.getmodule(self).platlib
-        headers = find_all_headers(include) + find_all_headers(platlib)
-
-        if headers:
-            return headers
-
-        msg = 'Unable to locate {} headers in {} or {}'
-        raise NoHeadersError(msg.format(self.spec.name, include, platlib))
-
-    @property
-    def libs(self):
-        """Discover libraries in platlib."""
-
-        # Remove py- prefix in package name
-        library = 'lib' + self.spec.name[3:].replace('-', '?')
-        root = inspect.getmodule(self).platlib
-
-        for shared in [True, False]:
-            libs = find_libraries(library, root, shared=shared, recursive=True)
-            if libs:
-                return libs
-
-        msg = 'Unable to recursively locate {} libraries in {}'
-        raise NoLibrariesError(msg.format(self.spec.name, root))
-
     # Testing
 
     def test(self):
@@ -2,11 +2,11 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 
 import inspect
 from typing import Optional
 
-import llnl.util.lang as lang
-
 from spack.directives import extends
 from spack.package_base import PackageBase, run_after
 
@@ -42,27 +42,27 @@ class RPackage(PackageBase):
 
     extends('r')
 
-    @lang.classproperty
-    def homepage(cls):
-        if cls.cran:
-            return 'https://cloud.r-project.org/package=' + cls.cran
-        elif cls.bioc:
-            return 'https://bioconductor.org/packages/' + cls.bioc
+    @property
+    def homepage(self):
+        if self.cran:
+            return 'https://cloud.r-project.org/package=' + self.cran
+        elif self.bioc:
+            return 'https://bioconductor.org/packages/' + self.bioc
 
-    @lang.classproperty
-    def url(cls):
-        if cls.cran:
+    @property
+    def url(self):
+        if self.cran:
             return (
                 'https://cloud.r-project.org/src/contrib/'
-                + cls.cran + '_' + str(list(cls.versions)[0]) + '.tar.gz'
+                + self.cran + '_' + str(list(self.versions)[0]) + '.tar.gz'
             )
 
-    @lang.classproperty
-    def list_url(cls):
-        if cls.cran:
+    @property
+    def list_url(self):
+        if self.cran:
             return (
                 'https://cloud.r-project.org/src/contrib/Archive/'
-                + cls.cran + '/'
+                + self.cran + '/'
             )
 
     @property
@@ -5,7 +5,6 @@
 import os
 from typing import Optional
 
-import llnl.util.lang as lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir
 
@@ -42,10 +41,10 @@ class RacketPackage(PackageBase):
     name = None  # type: Optional[str]
     parallel = True
 
-    @lang.classproperty
-    def homepage(cls):
-        if cls.pkgs:
-            return 'https://pkgs.racket-lang.org/package/{0}'.format(cls.name)
+    @property
+    def homepage(self):
+        if self.pkgs:
+            return 'https://pkgs.racket-lang.org/package/{0}'.format(self.name)
 
     @property
     def build_directory(self):
@@ -90,10 +90,9 @@ class ROCmPackage(PackageBase):
     # https://llvm.org/docs/AMDGPUUsage.html
     # Possible architectures
     amdgpu_targets = (
-        'gfx701', 'gfx801', 'gfx802', 'gfx803', 'gfx900', 'gfx900:xnack-',
-        'gfx906', 'gfx908', 'gfx90a',
-        'gfx906:xnack-', 'gfx908:xnack-', 'gfx90a:xnack-', 'gfx90a:xnack+',
-        'gfx1010', 'gfx1011', 'gfx1012', 'gfx1030', 'gfx1031',
+        'gfx701', 'gfx801', 'gfx802', 'gfx803',
+        'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
+        'gfx1011', 'gfx1012'
     )
 
     variant('rocm', default=False, description='Enable ROCm support')
@@ -771,13 +771,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             mirrors_to_check = {
                 'override': remote_mirror_override
             }
-
-        # If we have a remote override and we want generate pipeline using
-        # --check-index-only, then the override mirror needs to be added to
-        # the configured mirrors when bindist.update() is run, or else we
-        # won't fetch its index and include in our local cache.
-        spack.mirror.add(
-            'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
+        else:
+            spack.mirror.add(
+                'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
 
     pipeline_artifacts_dir = artifacts_root
     if not pipeline_artifacts_dir:
@@ -823,7 +819,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
                                     user_artifacts_dir, ci_project_dir)
 
     # Speed up staging by first fetching binary indices from all mirrors
-    # (including the override mirror we may have just added above).
+    # (including the per-PR mirror we may have just added above).
     try:
         bindist.binary_index.update()
     except bindist.FetchCacheError as e:
@@ -857,7 +853,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
     finally:
         # Clean up remote mirror override if enabled
         if remote_mirror_override:
-            spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
+            if spack_pipeline_type != 'spack_protected_branch':
+                spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
 
     all_job_names = []
     output_object = {}
@@ -1628,9 +1625,8 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
         job_log_dir (str): Path into which build log should be copied
     """
     try:
-        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
-        job_pkg = pkg_cls(job_spec)
-        tty.debug('job package: {0.fullname}'.format(job_pkg))
+        job_pkg = spack.repo.get(job_spec)
+        tty.debug('job package: {0}'.format(job_pkg))
         stage_dir = job_pkg.stage.path
         tty.debug('stage dir: {0}'.format(stage_dir))
         build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
@@ -8,10 +8,7 @@
 import argparse
 import os
 import re
-import shlex
 import sys
-from textwrap import dedent
-from typing import List, Tuple
 
 import ruamel.yaml as yaml
 import six
@@ -150,58 +147,6 @@ def get_command(cmd_name):
     return getattr(get_module(cmd_name), pname)
 
 
-class _UnquotedFlags(object):
-    """Use a heuristic in `.extract()` to detect whether the user is trying to set
-    multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').
-
-    If the heuristic finds a match (which can be checked with `__bool__()`), a warning
-    message explaining how to quote multiple flags correctly can be generated with
-    `.report()`.
-    """
-
-    flags_arg_pattern = re.compile(
-        r'^({0})=([^\'"].*)$'.format(
-            '|'.join(spack.spec.FlagMap.valid_compiler_flags()),
-        ))
-
-    def __init__(self, all_unquoted_flag_pairs):
-        # type: (List[Tuple[re.Match, str]]) -> None
-        self._flag_pairs = all_unquoted_flag_pairs
-
-    def __bool__(self):
-        # type: () -> bool
-        return bool(self._flag_pairs)
-
-    @classmethod
-    def extract(cls, sargs):
-        # type: (str) -> _UnquotedFlags
-        all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
-        prev_flags_arg = None
-        for arg in shlex.split(sargs):
-            if prev_flags_arg is not None:
-                all_unquoted_flag_pairs.append((prev_flags_arg, arg))
-            prev_flags_arg = cls.flags_arg_pattern.match(arg)
-        return cls(all_unquoted_flag_pairs)
-
-    def report(self):
-        # type: () -> str
-        single_errors = [
-            '({0}) {1} {2} => {3}'.format(
-                i + 1, match.group(0), next_arg,
-                '{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
-            )
-            for i, (match, next_arg) in enumerate(self._flag_pairs)
-        ]
-        return dedent("""\
-            Some compiler or linker flags were provided without quoting their arguments,
-            which now causes spack to try to parse the *next* argument as a spec component
-            such as a variant instead of an additional compiler or linker flag. If the
-            intent was to set multiple flags, try quoting them together as described below.
-
-            Possible flag quotation errors (with the correctly-quoted version after the =>):
-            {0}""").format('\n'.join(single_errors))
-
-
 def parse_specs(args, **kwargs):
     """Convenience function for parsing arguments from specs. Handles common
     exceptions and dies if there are errors.
@@ -212,28 +157,15 @@ def parse_specs(args, **kwargs):
 
     sargs = args
     if not isinstance(args, six.string_types):
-        sargs = ' '.join(args)
-    unquoted_flags = _UnquotedFlags.extract(sargs)
+        sargs = ' '.join(spack.util.string.quote(args))
+    specs = spack.spec.parse(sargs)
+    for spec in specs:
+        if concretize:
+            spec.concretize(tests=tests)  # implies normalize
+        elif normalize:
+            spec.normalize(tests=tests)
 
-    try:
-        specs = spack.spec.parse(sargs)
-        for spec in specs:
-            if concretize:
-                spec.concretize(tests=tests)  # implies normalize
-            elif normalize:
-                spec.normalize(tests=tests)
-        return specs
-
-    except spack.error.SpecError as e:
-
-        msg = e.message
-        if e.long_message:
-            msg += e.long_message
-        if unquoted_flags:
-            msg += '\n\n'
-            msg += unquoted_flags.report()
-
-        raise spack.error.SpackError(msg)
+    return specs
 
 
 def matching_spec_from_env(spec):
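The deleted `_UnquotedFlags` heuristic pairs a flag assignment that begins unquoted with the stray argument that follows it, so the error message can suggest the quoted form. A condensed, self-contained sketch of the same idea (the flag list is trimmed for illustration):

    import re
    import shlex

    # Subset of spack.spec.FlagMap.valid_compiler_flags(), for illustration.
    flags_arg_pattern = re.compile(r'^(cflags|cxxflags|ldflags)=([^\'"].*)$')

    sargs = 'hdf5 cflags=-O2 -g'
    pairs, prev = [], None
    for arg in shlex.split(sargs):
        if prev is not None:
            pairs.append((prev.group(0), arg))
        prev = flags_arg_pattern.match(arg)

    print(pairs)  # [('cflags=-O2', '-g')] -> suggest: cflags="-O2 -g"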
116 lib/spack/spack/cmd/analyze.py Normal file
@@ -0,0 +1,116 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import sys
+
+import llnl.util.tty as tty
+
+import spack.analyzers
+import spack.build_environment
+import spack.cmd
+import spack.cmd.common.arguments as arguments
+import spack.environment as ev
+import spack.fetch_strategy
+import spack.monitor
+import spack.paths
+import spack.report
+
+description = "run analyzers on installed packages"
+section = "analysis"
+level = "long"
+
+
+def setup_parser(subparser):
+    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='analyze_command')
+
+    sp.add_parser('list-analyzers',
+                  description="list available analyzers",
+                  help="show list of analyzers that are available to run.")
+
+    # This adds the monitor group to the subparser
+    spack.monitor.get_monitor_group(subparser)
+
+    # Run Parser
+    run_parser = sp.add_parser('run', description="run an analyzer",
+                               help="provide the name of the analyzer to run.")
+
+    run_parser.add_argument(
+        '--overwrite', action='store_true',
+        help="re-analyze even if the output file already exists.")
+    run_parser.add_argument(
+        '-p', '--path', default=None,
+        dest='path',
+        help="write output to a different directory than ~/.spack/analyzers")
+    run_parser.add_argument(
+        '-a', '--analyzers', default=None,
+        dest="analyzers", action="append",
+        help="add an analyzer (defaults to all available)")
+    arguments.add_common_arguments(run_parser, ['spec'])
+
+
+def analyze_spec(spec, analyzers=None, outdir=None, monitor=None, overwrite=False):
+    """
+    Do an analysis for a spec, optionally adding monitoring.
+
+    We also allow the user to specify a custom output directory.
+    analyze_spec(spec, args.analyzers, args.outdir, monitor)
+
+    Args:
+        spec (spack.spec.Spec): spec object of installed package
+        analyzers (list): list of analyzer (keys) to run
+        monitor (spack.monitor.SpackMonitorClient): a monitor client
+        overwrite (bool): overwrite result if already exists
+    """
+    analyzers = analyzers or list(spack.analyzers.analyzer_types.keys())
+
+    # Load the build environment from the spec install directory, and send
+    # the spec to the monitor if it's not known
+    if monitor:
+        monitor.load_build_environment(spec)
+        monitor.new_configuration([spec])
+
+    for name in analyzers:
+
+        # Instantiate the analyzer with the spec and outdir
+        analyzer = spack.analyzers.get_analyzer(name)(spec, outdir)
+
+        # Run the analyzer to get a json result - results are returned as
+        # a dictionary with a key corresponding to the analyzer type, so
+        # we can just update the data
+        result = analyzer.run()
+
+        # Send the result. We do them separately because:
+        # 1. each analyzer might have differently organized output
+        # 2. the size of a result can be large
+        analyzer.save_result(result, overwrite)
+
+
+def analyze(parser, args, **kwargs):
+
+    # If the user wants to list analyzers, do so and exit
+    if args.analyze_command == "list-analyzers":
+        spack.analyzers.list_all()
+        sys.exit(0)
+
+    # handle active environment, if any
+    env = ev.active_environment()
+
+    # Get an disambiguate spec (we should only have one)
+    specs = spack.cmd.parse_specs(args.spec)
+    if not specs:
+        tty.die("You must provide one or more specs to analyze.")
+    spec = spack.cmd.disambiguate_spec(specs[0], env)
+
+    # The user wants to monitor builds using github.com/spack/spack-monitor
+    # It is instantianted once here, and then available at spack.monitor.cli
+    monitor = None
+    if args.use_monitor:
+        monitor = spack.monitor.get_client(
+            host=args.monitor_host,
+            prefix=args.monitor_prefix,
+        )
+
+    # Run the analysis
+    analyze_spec(spec, args.analyzers, args.path, monitor, args.overwrite)
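Per the parser defined above, the new command exposes a `list-analyzers` subcommand and a `run` subcommand taking `--analyzers`, `--path` and `--overwrite`. Plausible invocations (the analyzer name here is illustrative, not a confirmed analyzer key):

    $ spack analyze list-analyzers
    $ spack analyze run -a install_files hdf5
    $ spack analyze run --path /tmp/results --overwrite hdf5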
@@ -99,8 +99,8 @@ def blame(parser, args):
         blame_file = path
 
     if not blame_file:
-        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
-        blame_file = pkg_cls.module.__file__.rstrip('c')  # .pyc -> .py
+        pkg = spack.repo.get(args.package_or_file)
+        blame_file = pkg.module.__file__.rstrip('c')  # .pyc -> .py
 
     # get git blame for the package
     with working_dir(spack.paths.prefix):
@@ -379,9 +379,7 @@ def _remove(args):
 
 
 def _mirror(args):
-    mirror_dir = spack.util.path.canonicalize_path(
-        os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
-    )
+    mirror_dir = os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
 
     # TODO: Here we are adding gnuconfig manually, but this can be fixed
     # TODO: as soon as we have an option to add to a mirror all the possible
@@ -12,12 +12,11 @@
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.repo
-import spack.spec
 import spack.stage
 import spack.util.crypto
 from spack.package_base import preferred_version
 from spack.util.naming import valid_fully_qualified_module_name
-from spack.version import VersionBase, ver
+from spack.version import Version, ver
 
 description = "checksum available versions of a package"
 section = "packaging"
@@ -55,8 +54,7 @@ def checksum(parser, args):
         tty.die("`spack checksum` accepts package names, not URLs.")
 
     # Get the package we're going to generate checksums for
-    pkg_cls = spack.repo.path.get_pkg_class(args.package)
-    pkg = pkg_cls(spack.spec.Spec(args.package))
+    pkg = spack.repo.get(args.package)
 
     url_dict = {}
     versions = args.versions
@@ -67,7 +65,7 @@ def checksum(parser, args):
     remote_versions = None
     for version in versions:
         version = ver(version)
-        if not isinstance(version, VersionBase):
+        if not isinstance(version, Version):
             tty.die("Cannot generate checksums for version lists or "
                     "version ranges. Use unambiguous versions.")
         url = pkg.find_valid_url_for_version(version)
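The two-line replacement on the left shows the general migration pattern seen across these commands: fetch the package class, then bind it to a concrete `Spec`. A sketch, assuming a Spack checkout on `sys.path`:

    import spack.repo
    import spack.spec

    pkg_cls = spack.repo.path.get_pkg_class('zlib')  # the class...
    pkg = pkg_cls(spack.spec.Spec('zlib'))           # ...bound to a spec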
@@ -58,21 +58,6 @@ def setup_parser(subparser):
     arguments.add_common_arguments(subparser, ['specs'])
 
 
-def remove_python_cache():
-    for directory in [lib_path, var_path]:
-        for root, dirs, files in os.walk(directory):
-            for f in files:
-                if f.endswith('.pyc') or f.endswith('.pyo'):
-                    fname = os.path.join(root, f)
-                    tty.debug('Removing {0}'.format(fname))
-                    os.remove(fname)
-            for d in dirs:
-                if d == '__pycache__':
-                    dname = os.path.join(root, d)
-                    tty.debug('Removing {0}'.format(dname))
-                    shutil.rmtree(dname)
-
-
 def clean(parser, args):
     # If nothing was set, activate the default
     if not any([args.specs, args.stage, args.downloads, args.failures,
@@ -85,7 +70,8 @@ def clean(parser, args):
         for spec in specs:
             msg = 'Cleaning build stage [{0}]'
             tty.msg(msg.format(spec.short_spec))
-            spec.package.do_clean()
+            package = spack.repo.get(spec)
+            package.do_clean()
 
     if args.stage:
         tty.msg('Removing all temporary build stages')
@@ -109,7 +95,18 @@ def clean(parser, args):
 
     if args.python_cache:
         tty.msg('Removing python cache files')
-        remove_python_cache()
+        for directory in [lib_path, var_path]:
+            for root, dirs, files in os.walk(directory):
+                for f in files:
+                    if f.endswith('.pyc') or f.endswith('.pyo'):
+                        fname = os.path.join(root, f)
+                        tty.debug('Removing {0}'.format(fname))
+                        os.remove(fname)
+                for d in dirs:
+                    if d == '__pycache__':
+                        dname = os.path.join(root, d)
+                        tty.debug('Removing {0}'.format(dname))
+                        shutil.rmtree(dname)
 
     if args.bootstrap:
         bootstrap_prefix = spack.util.path.canonicalize_path(
@@ -403,4 +403,4 @@ def add_s3_connection_args(subparser, add_help):
                            default=None)
     subparser.add_argument(
         '--s3-endpoint-url',
-        help="Endpoint URL to use to connect to this S3 mirror")
+        help="Access Token to use to connect to this S3 mirror")
@@ -9,6 +9,7 @@
 
 import spack.container
 import spack.container.images
+import spack.monitor
 
 description = ("creates recipes to build images for different"
                " container runtimes")
@@ -17,6 +18,7 @@
 
 
 def setup_parser(subparser):
+    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
     subparser.add_argument(
         '--list-os', action='store_true', default=False,
         help='list all the OS that can be used in the bootstrap phase and exit'
@@ -44,5 +46,14 @@ def containerize(parser, args):
         raise ValueError(msg.format(config_file))
 
     config = spack.container.validate(config_file)
 
+    # If we have a monitor request, add monitor metadata to config
+    if args.use_monitor:
+        config['spack']['monitor'] = {
+            "host": args.monitor_host,
+            "keep_going": args.monitor_keep_going,
+            "prefix": args.monitor_prefix,
+            "tags": args.monitor_tags
+        }
+
     recipe = spack.container.recipe(config, last_phase=args.last_stage)
     print(recipe)
@@ -826,7 +826,7 @@ def get_versions(args, name):
         spack.util.url.require_url_format(args.url)
         if args.url.startswith('file://'):
             valid_url = False  # No point in spidering these
-    except (ValueError, TypeError):
+    except ValueError:
         valid_url = False
 
     if args.url is not None and args.template != 'bundle' and valid_url:
@@ -39,9 +39,9 @@ def inverted_dependencies():
     actual dependents.
     """
     dag = {}
-    for pkg_cls in spack.repo.path.all_package_classes():
-        dag.setdefault(pkg_cls.name, set())
-        for dep in pkg_cls.dependencies:
+    for pkg in spack.repo.path.all_packages():
+        dag.setdefault(pkg.name, set())
+        for dep in pkg.dependencies:
             deps = [dep]
 
             # expand virtuals if necessary
@@ -49,7 +49,7 @@ def inverted_dependencies():
                 deps += [s.name for s in spack.repo.path.providers_for(dep)]
 
             for d in deps:
-                dag.setdefault(d, set()).add(pkg_cls.name)
+                dag.setdefault(d, set()).add(pkg.name)
     return dag
|||||||
@@ -87,7 +87,9 @@ def dev_build(self, args):
|
|||||||
|
|
||||||
# Forces the build to run out of the source directory.
|
# Forces the build to run out of the source directory.
|
||||||
spec.constrain('dev_path=%s' % source_path)
|
spec.constrain('dev_path=%s' % source_path)
|
||||||
|
|
||||||
spec.concretize()
|
spec.concretize()
|
||||||
|
package = spack.repo.get(spec)
|
||||||
|
|
||||||
if spec.installed:
|
if spec.installed:
|
||||||
tty.error("Already installed in %s" % spec.prefix)
|
tty.error("Already installed in %s" % spec.prefix)
|
||||||
@@ -107,7 +109,7 @@ def dev_build(self, args):
|
|||||||
elif args.test == 'root':
|
elif args.test == 'root':
|
||||||
tests = [spec.name for spec in specs]
|
tests = [spec.name for spec in specs]
|
||||||
|
|
||||||
spec.package.do_install(
|
package.do_install(
|
||||||
tests=tests,
|
tests=tests,
|
||||||
make_jobs=args.jobs,
|
make_jobs=args.jobs,
|
||||||
keep_prefix=args.keep_prefix,
|
keep_prefix=args.keep_prefix,
|
||||||
@@ -120,5 +122,5 @@ def dev_build(self, args):
|
|||||||
|
|
||||||
# drop into the build environment of the package?
|
# drop into the build environment of the package?
|
||||||
if args.shell is not None:
|
if args.shell is not None:
|
||||||
spack.build_environment.setup_package(spec.package, dirty=False)
|
spack.build_environment.setup_package(package, dirty=False)
|
||||||
os.execvp(args.shell, [args.shell])
|
os.execvp(args.shell, [args.shell])
|
||||||
|
|||||||
@@ -54,9 +54,8 @@ def develop(parser, args):
             tty.msg(msg)
             continue
 
-        spec = spack.spec.Spec(entry['spec'])
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-        pkg_cls(spec).stage.steal_source(abspath)
+        stage = spack.spec.Spec(entry['spec']).package.stage
+        stage.steal_source(abspath)
 
     if not env.dev_specs:
         tty.warn("No develop specs to download")
@@ -104,9 +104,9 @@ def edit(parser, args):
         path = os.path.join(path, name)
         if not os.path.exists(path):
             files = glob.glob(path + '*')
-            exclude_list = ['.pyc', '~']  # exclude binaries and backups
+            blacklist = ['.pyc', '~']  # blacklist binaries and backups
             files = list(filter(
-                lambda x: all(s not in x for s in exclude_list), files))
+                lambda x: all(s not in x for s in blacklist), files))
             if len(files) > 1:
                 m = 'Multiple files exist with the name {0}.'.format(name)
                 m += ' Please specify a suffix. Files are:\n\n'
|||||||
@@ -559,11 +559,11 @@ def env_depfile(args):
|
|||||||
target_prefix = args.make_target_prefix
|
target_prefix = args.make_target_prefix
|
||||||
|
|
||||||
def get_target(name):
|
def get_target(name):
|
||||||
# The `all` and `clean` targets are phony. It doesn't make sense to
|
# The `all`, `fetch` and `clean` targets are phony. It doesn't make sense to
|
||||||
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
|
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
|
||||||
# sense to have a prefix like `env/all`, `env/clean` when they are
|
# sense to have a prefix like `env/all`, `env/fetch`, `env/clean` when they are
|
||||||
# supposed to be included
|
# supposed to be included
|
||||||
if name in ('all', 'clean') and os.path.isabs(target_prefix):
|
if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
|
||||||
return name
|
return name
|
||||||
else:
|
else:
|
||||||
return os.path.join(target_prefix, name)
|
return os.path.join(target_prefix, name)
|
||||||
@@ -571,6 +571,9 @@ def get_target(name):
|
|||||||
def get_install_target(name):
|
def get_install_target(name):
|
||||||
return os.path.join(target_prefix, '.install', name)
|
return os.path.join(target_prefix, '.install', name)
|
||||||
|
|
||||||
|
def get_fetch_target(name):
|
||||||
|
return os.path.join(target_prefix, '.fetch', name)
|
||||||
|
|
||||||
for _, spec in env.concretized_specs():
|
for _, spec in env.concretized_specs():
|
||||||
for s in spec.traverse(root=True):
|
for s in spec.traverse(root=True):
|
||||||
hash_to_spec[s.dag_hash()] = s
|
hash_to_spec[s.dag_hash()] = s
|
||||||
@@ -585,30 +588,46 @@ def get_install_target(name):
|
|||||||
# All package install targets, not just roots.
|
# All package install targets, not just roots.
|
||||||
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]
|
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]
|
||||||
|
|
||||||
|
# Fetch targets for all packages in the environment, not just roots.
|
||||||
|
all_fetch_targets = [get_fetch_target(h) for h in hash_to_spec.keys()]
|
||||||
|
|
||||||
buf = six.StringIO()
|
buf = six.StringIO()
|
||||||
|
|
||||||
buf.write("""SPACK ?= spack
|
buf.write("""SPACK ?= spack
|
||||||
|
|
||||||
.PHONY: {} {}
|
.PHONY: {} {} {}
|
||||||
|
|
||||||
|
{}: {}
|
||||||
|
|
||||||
{}: {}
|
{}: {}
|
||||||
|
|
||||||
{}: {}
|
{}: {}
|
||||||
\t@touch $@
|
\t@touch $@
|
||||||
|
|
||||||
|
{}: {}
|
||||||
|
\t@touch $@
|
||||||
|
|
||||||
{}:
|
{}:
|
||||||
\t@mkdir -p {}
|
\t@mkdir -p {} {}
|
||||||
|
|
||||||
{}: | {}
|
{}: | {}
|
||||||
|
\t$(info Fetching $(SPEC))
|
||||||
|
\t$(SPACK) -e '{}' fetch $(SPACK_FETCH_FLAGS) /$(notdir $@) && touch $@
|
||||||
|
|
||||||
|
{}: {}
|
||||||
\t$(info Installing $(SPEC))
|
\t$(info Installing $(SPEC))
|
||||||
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
|
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
|
||||||
--no-add /$(notdir $@) && touch $@
|
--no-add /$(notdir $@) && touch $@
|
||||||
|
|
||||||
""".format(get_target('all'), get_target('clean'),
|
""".format(get_target('all'), get_target('fetch-all'), get_target('clean'),
|
||||||
get_target('all'), get_target('env'),
|
get_target('all'), get_target('env'),
|
||||||
|
get_target('fetch-all'), get_target('fetch'),
|
||||||
get_target('env'), ' '.join(root_install_targets),
|
get_target('env'), ' '.join(root_install_targets),
|
||||||
get_target('dirs'), get_target('.install'),
|
get_target('fetch'), ' '.join(all_fetch_targets),
|
||||||
get_target('.install/%'), get_target('dirs'),
|
get_target('dirs'), get_target('.fetch'), get_target('.install'),
|
||||||
|
get_target('.fetch/%'), get_target('dirs'),
|
||||||
|
env.path,
|
||||||
|
get_target('.install/%'), get_target('.fetch/%'),
|
||||||
'+' if args.jobserver else '', env.path))
|
'+' if args.jobserver else '', env.path))
|
||||||
|
|
||||||
# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
|
# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
|
||||||
@@ -638,9 +657,11 @@ def get_install_target(name):
|
|||||||
# --make-target-prefix can be any existing directory we do not control,
|
# --make-target-prefix can be any existing directory we do not control,
|
||||||
# including empty string (which means deleting the containing folder
|
# including empty string (which means deleting the containing folder
|
||||||
# would delete the folder with the Makefile)
|
# would delete the folder with the Makefile)
|
||||||
buf.write("{}:\n\trm -f -- {} {}\n".format(
|
buf.write("{}:\n\trm -f -- {} {} {} {}\n".format(
|
||||||
get_target('clean'),
|
get_target('clean'),
|
||||||
get_target('env'),
|
get_target('env'),
|
||||||
|
get_target('fetch'),
|
||||||
|
' '.join(all_fetch_targets),
|
||||||
' '.join(all_install_targets)))
|
' '.join(all_install_targets)))
|
||||||
|
|
||||||
makefile = buf.getvalue()
|
makefile = buf.getvalue()
|
||||||
|
|||||||
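With the right-hand side's `.fetch` targets in place, the generated Makefile can pre-download every source before building. A plausible workflow (target prefix and job count are illustrative):

    $ spack -e myenv env depfile --make-target-prefix env > Makefile
    $ make env/fetch-all    # run all .fetch targets, downloading sources
    $ make -j8 env/all      # then build the environment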
@@ -52,8 +52,8 @@ def extensions(parser, args):
 
         extendable_pkgs = []
         for name in spack.repo.all_package_names():
-            pkg_cls = spack.repo.path.get_pkg_class(name)
-            if pkg_cls.extendable:
+            pkg = spack.repo.get(name)
+            if pkg.extendable:
                 extendable_pkgs.append(name)
 
         colify(extendable_pkgs, indent=4)
@@ -64,12 +64,12 @@ def extensions(parser, args):
     if len(spec) > 1:
         tty.die("Can only list extensions for one package.")
 
+    if not spec[0].package.extendable:
+        tty.die("%s is not an extendable package." % spec[0].name)
+
     env = ev.active_environment()
     spec = cmd.disambiguate_spec(spec[0], env)
 
-    if not spec.package.extendable:
-        tty.die("%s is not an extendable package." % spec[0].name)
-
     if not spec.package.extendable:
         tty.die("%s does not have extensions." % spec.short_spec)
@@ -5,7 +5,6 @@
 from __future__ import print_function
 
 import argparse
-import errno
 import os
 import sys
 
@@ -94,21 +93,6 @@ def external_find(args):
         # It's fine to not find any manifest file if we are doing the
         # search implicitly (i.e. as part of 'spack external find')
         pass
-    except Exception as e:
-        # For most exceptions, just print a warning and continue.
-        # Note that KeyboardInterrupt does not subclass Exception
-        # (so CTRL-C will terminate the program as expected).
-        skip_msg = ("Skipping manifest and continuing with other external "
-                    "checks")
-        if ((isinstance(e, IOError) or isinstance(e, OSError)) and
-                e.errno in [errno.EPERM, errno.EACCES]):
-            # The manifest file does not have sufficient permissions enabled:
-            # print a warning and keep going
-            tty.warn("Unable to read manifest due to insufficient "
-                     "permissions.", skip_msg)
-        else:
-            tty.warn("Unable to read manifest, unexpected error: {0}"
-                     .format(str(e)), skip_msg)
 
     # If the user didn't specify anything, search for build tools by default
     if not args.tags and not args.all and not args.packages:
@@ -119,37 +103,34 @@ def external_find(args):
         args.tags = []
 
     # Construct the list of possible packages to be detected
-    pkg_cls_to_check = []
+    packages_to_check = []
 
     # Add the packages that have been required explicitly
     if args.packages:
-        pkg_cls_to_check = [
-            spack.repo.path.get_pkg_class(pkg) for pkg in args.packages
-        ]
+        packages_to_check = list(spack.repo.get(pkg) for pkg in args.packages)
         if args.tags:
             allowed = set(spack.repo.path.packages_with_tags(*args.tags))
-            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
+            packages_to_check = [x for x in packages_to_check if x in allowed]
 
-    if args.tags and not pkg_cls_to_check:
+    if args.tags and not packages_to_check:
         # If we arrived here we didn't have any explicit package passed
         # as argument, which means to search all packages.
         # Since tags are cached it's much faster to construct what we need
         # to search directly, rather than filtering after the fact
-        pkg_cls_to_check = [
-            spack.repo.path.get_pkg_class(pkg_name)
-            for tag in args.tags
-            for pkg_name in spack.repo.path.packages_with_tags(tag)
-        ]
-        pkg_cls_to_check = list(set(pkg_cls_to_check))
+        packages_to_check = [
+            spack.repo.get(pkg) for tag in args.tags for pkg in
+            spack.repo.path.packages_with_tags(tag)
+        ]
+        packages_to_check = list(set(packages_to_check))
 
     # If the list of packages is empty, search for every possible package
-    if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.path.all_package_classes())
+    if not args.tags and not packages_to_check:
+        packages_to_check = spack.repo.path.all_packages()
 
     detected_packages = spack.detection.by_executable(
-        pkg_cls_to_check, path_hints=args.path)
+        packages_to_check, path_hints=args.path)
     detected_packages.update(spack.detection.by_library(
-        pkg_cls_to_check, path_hints=args.path))
+        packages_to_check, path_hints=args.path))
 
     new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -196,10 +177,7 @@ def _collect_and_consume_cray_manifest_files(
 
     for directory in manifest_dirs:
         for fname in os.listdir(directory):
-            if fname.endswith('.json'):
-                fpath = os.path.join(directory, fname)
-                tty.debug("Adding manifest file: {0}".format(fpath))
-                manifest_files.append(os.path.join(directory, fpath))
+            manifest_files.append(os.path.join(directory, fname))
 
     if not manifest_files:
         raise NoManifestFileError(
@@ -207,7 +185,6 @@ def _collect_and_consume_cray_manifest_files(
         .format(cray_manifest.default_path))
 
     for path in manifest_files:
-        tty.debug("Reading manifest file: " + path)
         try:
             cray_manifest.read(path, not dry_run)
         except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
@@ -220,7 +197,7 @@ def _collect_and_consume_cray_manifest_files(
 
 def external_list(args):
     # Trigger a read of all packages, might take a long time.
-    list(spack.repo.path.all_package_classes())
+    list(spack.repo.path.all_packages())
     # Print all the detectable packages
     tty.msg("Detectable packages per repository")
     for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
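Both sides drive the same two-stage detection pipeline: probe the system for executables or libraries, then persist whatever was found into configuration. A sketch of that flow, assuming a Spack checkout (the path hint and scope values are illustrative):

    import spack.detection
    import spack.repo

    to_check = [spack.repo.path.get_pkg_class('cmake')]
    detected = spack.detection.by_executable(to_check, path_hints=['/usr/bin'])
    spack.detection.update_configuration(detected, scope='user', buildable=True)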
@@ -292,9 +292,10 @@ def print_tests(pkg):
     v_specs = [spack.spec.Spec(v_name) for v_name in v_names]
     for v_spec in v_specs:
         try:
-            pkg_cls = spack.repo.path.get_pkg_class(v_spec.name)
+            pkg = v_spec.package
+            pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
             if has_test_method(pkg_cls):
-                names.append('{0}.test'.format(pkg_cls.name.lower()))
+                names.append('{0}.test'.format(pkg.name.lower()))
         except spack.repo.UnknownPackageError:
             pass
 
@@ -385,9 +386,7 @@ def print_virtuals(pkg):
 
 
 def info(parser, args):
-    spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-    pkg = pkg_cls(spec)
+    pkg = spack.repo.get(args.package)
 
     # Output core package information
     header = section_title(
@@ -17,6 +17,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev
 import spack.fetch_strategy
+import spack.monitor
 import spack.paths
 import spack.report
 from spack.error import SpackError
@@ -104,6 +105,8 @@ def setup_parser(subparser):
         '--cache-only', action='store_true', dest='cache_only', default=False,
         help="only install package from binary mirrors")
 
+    monitor_group = spack.monitor.get_monitor_group(subparser)  # noqa
+
     subparser.add_argument(
         '--include-build-deps', action='store_true', dest='include_build_deps',
         default=False, help="""include build deps when installing from cache,
@@ -289,6 +292,15 @@ def install(parser, args, **kwargs):
         parser.print_help()
         return
 
+    # The user wants to monitor builds using github.com/spack/spack-monitor
+    if args.use_monitor:
+        monitor = spack.monitor.get_client(
+            host=args.monitor_host,
+            prefix=args.monitor_prefix,
+            tags=args.monitor_tags,
+            save_local=args.monitor_save_local,
+        )
+
     reporter = spack.report.collect_info(
         spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
     if args.log_file:
@@ -329,6 +341,10 @@ def get_tests(specs):
             reporter.filename = default_log_file(specs[0])
         reporter.specs = specs
 
+        # Tell the monitor about the specs
+        if args.use_monitor and specs:
+            monitor.new_configuration(specs)
+
         tty.msg("Installing environment {0}".format(env.name))
         with reporter('build'):
             env.install_all(**kwargs)
@@ -374,6 +390,10 @@ def get_tests(specs):
         except SpackError as e:
             tty.debug(e)
             reporter.concretization_report(e.message)
+
+            # Tell spack monitor about it
+            if args.use_monitor and abstract_specs:
+                monitor.failed_concretization(abstract_specs)
             raise
 
     # 2. Concrete specs from yaml files
@@ -434,4 +454,17 @@ def get_tests(specs):
 
     # overwrite all concrete explicit specs from this build
     kwargs['overwrite'] = [spec.dag_hash() for spec in specs]
 
+    # Update install_args with the monitor args, needed for build task
+    kwargs.update({
+        "monitor_keep_going": args.monitor_keep_going,
+        "monitor_host": args.monitor_host,
+        "use_monitor": args.use_monitor,
+        "monitor_prefix": args.monitor_prefix,
+    })
+
+    # If we are using the monitor, we send configs. and create build
+    # The dag_hash is the main package id
+    if args.use_monitor and specs:
+        monitor.new_configuration(specs)
     install_specs(args, kwargs, zip(abstract_specs, specs))
|||||||
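Taken together, the install-command hunks above gate every monitor interaction behind `args.use_monitor`. A condensed sketch of the resulting control flow, assuming the `spack.monitor` module from the monitored side of this diff (names mirror the hunks; this is not the verbatim implementation):

    import spack.monitor

    def install_with_monitor(args, specs):
        monitor = None
        if args.use_monitor:
            monitor = spack.monitor.get_client(
                host=args.monitor_host, prefix=args.monitor_prefix)
        if monitor and specs:
            # register the configurations before any build starts
            monitor.new_configuration(specs)
        # ... then run the installs; on concretization failure the code
        # above calls monitor.failed_concretization(abstract_specs)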
@@ -84,9 +84,9 @@ def match(p, f):
             if f.match(p):
                 return True
 
-            pkg_cls = spack.repo.path.get_pkg_class(p)
-            if pkg_cls.__doc__:
-                return f.match(pkg_cls.__doc__)
+            pkg = spack.repo.get(p)
+            if pkg.__doc__:
+                return f.match(pkg.__doc__)
             return False
     else:
         def match(p, f):
@@ -133,7 +133,7 @@ def get_dependencies(pkg):
 @formatter
 def version_json(pkg_names, out):
     """Print all packages with their latest versions."""
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkgs = [spack.repo.get(name) for name in pkg_names]
 
     out.write('[\n')
 
@@ -147,14 +147,14 @@ def version_json(pkg_names, out):
        '  "maintainers": {5},\n'
        '  "dependencies": {6}'
        '}}'.format(
-            pkg_cls.name,
-            VersionList(pkg_cls.versions).preferred(),
-            json.dumps([str(v) for v in reversed(sorted(pkg_cls.versions))]),
-            pkg_cls.homepage,
-            github_url(pkg_cls),
-            json.dumps(pkg_cls.maintainers),
-            json.dumps(get_dependencies(pkg_cls))
-        ) for pkg_cls in pkg_classes
+            pkg.name,
+            VersionList(pkg.versions).preferred(),
+            json.dumps([str(v) for v in reversed(sorted(pkg.versions))]),
+            pkg.homepage,
+            github_url(pkg),
+            json.dumps(pkg.maintainers),
+            json.dumps(get_dependencies(pkg))
+        ) for pkg in pkgs
     ])
     out.write(pkg_latest)
     # important: no trailing comma in JSON arrays
@@ -172,7 +172,7 @@ def html(pkg_names, out):
     """
 
     # Read in all packages
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkgs = [spack.repo.get(name) for name in pkg_names]
 
     # Start at 2 because the title of the page from Sphinx is id1.
     span_id = 2
@@ -189,7 +189,7 @@ def head(n, span_id, title, anchor=None):
     # Start with the number of packages, skipping the title and intro
     # blurb, which we maintain in the RST file.
     out.write('<p>\n')
-    out.write('Spack currently has %d mainline packages:\n' % len(pkg_classes))
+    out.write('Spack currently has %d mainline packages:\n' % len(pkgs))
     out.write('</p>\n')
 
     # Table of links to all packages
@@ -209,9 +209,9 @@ def head(n, span_id, title, anchor=None):
     out.write('<hr class="docutils"/>\n')
 
     # Output some text for each package.
-    for pkg_cls in pkg_classes:
-        out.write('<div class="section" id="%s">\n' % pkg_cls.name)
-        head(2, span_id, pkg_cls.name)
+    for pkg in pkgs:
+        out.write('<div class="section" id="%s">\n' % pkg.name)
+        head(2, span_id, pkg.name)
         span_id += 1
 
         out.write('<dl class="docutils">\n')
@@ -219,10 +219,10 @@ def head(n, span_id, title, anchor=None):
         out.write('<dt>Homepage:</dt>\n')
         out.write('<dd><ul class="first last simple">\n')
 
-        if pkg_cls.homepage:
+        if pkg.homepage:
             out.write(('<li>'
                        '<a class="reference external" href="%s">%s</a>'
-                       '</li>\n') % (pkg_cls.homepage, escape(pkg_cls.homepage, True)))
+                       '</li>\n') % (pkg.homepage, escape(pkg.homepage, True)))
         else:
             out.write('No homepage\n')
         out.write('</ul></dd>\n')
@@ -231,19 +231,19 @@ def head(n, span_id, title, anchor=None):
         out.write('<dd><ul class="first last simple">\n')
         out.write(('<li>'
                    '<a class="reference external" href="%s">%s/package.py</a>'
-                   '</li>\n') % (github_url(pkg_cls), pkg_cls.name))
+                   '</li>\n') % (github_url(pkg), pkg.name))
         out.write('</ul></dd>\n')
 
-        if pkg_cls.versions:
+        if pkg.versions:
             out.write('<dt>Versions:</dt>\n')
             out.write('<dd>\n')
             out.write(', '.join(
-                str(v) for v in reversed(sorted(pkg_cls.versions))))
+                str(v) for v in reversed(sorted(pkg.versions))))
             out.write('\n')
             out.write('</dd>\n')
 
         for deptype in spack.dependency.all_deptypes:
-            deps = pkg_cls.dependencies_of_type(deptype)
+            deps = pkg.dependencies_of_type(deptype)
             if deps:
                 out.write('<dt>%s Dependencies:</dt>\n' % deptype.capitalize())
                 out.write('<dd>\n')
@@ -256,7 +256,7 @@ def head(n, span_id, title, anchor=None):
 
         out.write('<dt>Description:</dt>\n')
         out.write('<dd>\n')
-        out.write(escape(pkg_cls.format_doc(indent=2), True))
+        out.write(escape(pkg.format_doc(indent=2), True))
         out.write('\n')
         out.write('</dd>\n')
         out.write('</dl>\n')
@@ -221,7 +221,7 @@ def _read_specs_from_file(filename):
     for i, string in enumerate(stream):
         try:
             s = Spec(string)
-            spack.repo.path.get_pkg_class(s.name)
+            s.package
             specs.append(s)
         except SpackError as e:
             tty.debug(e)
@@ -131,7 +131,7 @@ def check_module_set_name(name):
 
 _missing_modules_warning = (
     "Modules have been omitted for one or more specs, either"
-    " because they were excluded or because the spec is"
+    " because they were blacklisted or because the spec is"
     " associated with a package that is installed upstream and"
     " that installation has not generated a module file. Rerun"
     " this command with debug output enabled for more details.")
@@ -180,7 +180,7 @@ def loads(module_type, specs, args, out=None):
     for spec, mod in modules:
         if not mod:
             module_output_for_spec = (
-                '## excluded or missing from upstream: {0}'.format(
+                '## blacklisted or missing from upstream: {0}'.format(
                     spec.format()))
         else:
             d['exclude'] = '## ' if spec.name in exclude_set else ''
@@ -293,8 +293,8 @@ def refresh(module_type, specs, args):
         cls(spec, args.module_set_name) for spec in specs
         if spack.repo.path.exists(spec.name)]
 
-    # Filter excluded packages early
-    writers = [x for x in writers if not x.conf.excluded]
+    # Filter blacklisted packages early
+    writers = [x for x in writers if not x.conf.blacklisted]
 
     # Detect name clashes in module files
     file2writer = collections.defaultdict(list)
33 lib/spack/spack/cmd/monitor.py Normal file
@@ -0,0 +1,33 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import spack.monitor
+
+description = "interact with a monitor server"
+section = "analysis"
+level = "long"
+
+
+def setup_parser(subparser):
+    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='monitor_command')
+
+    # This adds the monitor group to the subparser
+    spack.monitor.get_monitor_group(subparser)
+
+    # Spack Monitor Uploads
+    monitor_parser = sp.add_parser('upload', description="upload to spack monitor")
+    monitor_parser.add_argument("upload_dir", help="directory root to upload")
+
+
+def monitor(parser, args, **kwargs):
+
+    if args.monitor_command == "upload":
+        monitor = spack.monitor.get_client(
+            host=args.monitor_host,
+            prefix=args.monitor_prefix,
+        )
+
+        # Upload the directory
+        monitor.upload_local_save(args.upload_dir)
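The new command would be driven from the CLI; a rough sketch of the equivalent programmatic call, assuming the `spack.monitor.get_client` helper used in the file above (host, prefix, and the upload directory are illustrative placeholders, not values from this diff):

    import spack.monitor

    client = spack.monitor.get_client(host="http://127.0.0.1",
                                      prefix="ms1")
    client.upload_local_save("/path/to/saved/monitor/results")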
@@ -31,4 +31,5 @@ def patch(parser, args):
 
     specs = spack.cmd.parse_specs(args.specs, concretize=True)
     for spec in specs:
-        spec.package.do_patch()
+        package = spack.repo.get(spec)
+        package.do_patch()
@@ -50,7 +50,7 @@ def _show_patch(sha256):
         owner = rec['owner']
 
         if 'relative_path' in rec:
-            pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
+            pkg_dir = spack.repo.get(owner).package_dir
             path = os.path.join(pkg_dir, rec['relative_path'])
             print("    path:    %s" % path)
         else:
@@ -24,4 +24,5 @@ def restage(parser, args):
 
     specs = spack.cmd.parse_specs(args.specs, concretize=True)
     for spec in specs:
-        spec.package.do_restage()
+        package = spack.repo.get(spec)
+        package.do_restage()
@@ -80,8 +80,7 @@ def spec(parser, args):
     # Use command line specified specs, otherwise try to use environment specs.
     if args.specs:
         input_specs = spack.cmd.parse_specs(args.specs)
-        concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
-        specs = list(zip(input_specs, concretized_specs))
+        specs = [(s, s.concretized()) for s in input_specs]
     else:
         env = ev.active_environment()
         if env:
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     subparser.add_argument(
         '-p', '--path', dest='path',
         help="path to stage package, does not add to spack tree")
-    arguments.add_concretizer_args(subparser)
 
 
 def stage(parser, args):
@@ -59,7 +58,8 @@ def stage(parser, args):
 
     for spec in specs:
         spec = spack.cmd.matching_spec_from_env(spec)
+        package = spack.repo.get(spec)
         if custom_path:
-            spec.package.path = custom_path
-        spec.package.do_stage()
-        tty.msg("Staged {0} in {1}".format(spec.package.name, spec.package.stage.path))
+            package.path = custom_path
+        package.do_stage()
+        tty.msg("Staged {0} in {1}".format(package.name, package.stage.path))
@@ -94,16 +94,16 @@ def changed_files(base="develop", untracked=True, all_files=False, root=None):
     git = which("git", required=True)
 
     # ensure base is in the repo
-    base_sha = git("rev-parse", "--quiet", "--verify", "--revs-only", base,
-                   fail_on_error=False, output=str)
+    git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
+        fail_on_error=False)
     if git.returncode != 0:
         tty.die(
-            "This repository does not have a '%s' revision." % base,
+            "This repository does not have a '%s' branch." % base,
             "spack style needs this branch to determine which files changed.",
             "Ensure that '%s' exists, or specify files to check explicitly." % base
         )
 
-    range = "{0}...".format(base_sha.strip())
+    range = "{0}...".format(base)
 
     git_args = [
         # Add changed files committed since branching off of develop
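The two sides differ in what `base` may be: `git rev-parse --verify --revs-only` accepts any revision (branch, tag, or SHA) and prints the resolved SHA, while `git show-ref refs/heads/<base>` matches only local branches. A stand-alone illustration of the rev-parse behavior:

    import subprocess

    def resolve_revision(base):
        # Returns the SHA for any valid revision, or None if it doesn't exist
        result = subprocess.run(
            ["git", "rev-parse", "--quiet", "--verify", "--revs-only", base],
            capture_output=True, text=True)
        if result.returncode != 0:
            return None
        return result.stdout.strip()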
@@ -14,7 +14,6 @@
 
 import spack.fetch_strategy as fs
 import spack.repo
-import spack.spec
 import spack.util.crypto as crypto
 from spack.url import (
     UndetectableNameError,
@@ -148,13 +147,13 @@ def url_list(args):
     urls = set()
 
     # Gather set of URLs from all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
-        url = getattr(pkg_cls, 'url', None)
-        urls = url_list_parsing(args, urls, url, pkg_cls)
+    for pkg in spack.repo.path.all_packages():
+        url = getattr(pkg, 'url', None)
+        urls = url_list_parsing(args, urls, url, pkg)
 
-        for params in pkg_cls.versions.values():
+        for params in pkg.versions.values():
             url = params.get('url', None)
-            urls = url_list_parsing(args, urls, url, pkg_cls)
+            urls = url_list_parsing(args, urls, url, pkg)
 
     # Print URLs
     for url in sorted(urls):
@@ -185,9 +184,8 @@ def url_summary(args):
     tty.msg('Generating a summary of URL parsing in Spack...')
 
     # Loop through all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg in spack.repo.path.all_packages():
         urls = set()
-        pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
 
         url = getattr(pkg, 'url', None)
         if url:
@@ -320,20 +318,19 @@ def add(self, pkg_name, fetcher):
     version_stats = UrlStats()
     resource_stats = UrlStats()
 
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg in spack.repo.path.all_packages():
         npkgs += 1
 
-        for v in pkg_cls.versions:
+        for v in pkg.versions:
             try:
-                pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
                 fetcher = fs.for_package_version(pkg, v)
             except (fs.InvalidArgsError, fs.FetcherConflict):
                 continue
-            version_stats.add(pkg_cls.name, fetcher)
+            version_stats.add(pkg.name, fetcher)
 
-        for _, resources in pkg_cls.resources.items():
+        for _, resources in pkg.resources.items():
             for resource in resources:
-                resource_stats.add(pkg_cls.name, resource.fetcher)
+                resource_stats.add(pkg.name, resource.fetcher)
 
     # print a nice summary table
     tty.msg("URL stats for %d packages:" % npkgs)
@@ -393,8 +390,8 @@ def print_stat(indent, name, stat_name=None):
     tty.msg("Found %d issues." % total_issues)
     for issue_type, pkgs in issues.items():
         tty.msg("Package URLs with %s" % issue_type)
-        for pkg_cls, pkg_issues in pkgs.items():
-            color.cprint("    @*C{%s}" % pkg_cls)
+        for pkg, pkg_issues in pkgs.items():
+            color.cprint("    @*C{%s}" % pkg)
             for issue in pkg_issues:
                 print("      %s" % issue)
 
@@ -12,7 +12,6 @@
 
 import spack.cmd.common.arguments as arguments
 import spack.repo
-import spack.spec
 from spack.version import infinity_versions, ver
 
 description = "list available versions of a package"
@@ -40,9 +39,7 @@ def setup_parser(subparser):
 
 
 def versions(parser, args):
-    spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-    pkg = pkg_cls(spec)
+    pkg = spack.repo.get(args.package)
 
     safe_versions = pkg.versions
 
@@ -252,13 +252,6 @@ def find_new_compilers(path_hints=None, scope=None):
     merged configuration.
     """
     compilers = find_compilers(path_hints)
-    return select_new_compilers(compilers, scope)
-
-
-def select_new_compilers(compilers, scope=None):
-    """Given a list of compilers, remove those that are already defined in
-    the configuration.
-    """
     compilers_not_in_config = []
     for c in compilers:
         arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
@@ -81,14 +81,6 @@ def cxx11_flag(self):
     def cxx14_flag(self):
         return "-std=c++14"
 
-    @property
-    def cxx17_flag(self):
-        return "-std=c++17"
-
-    @property
-    def cxx20_flag(self):
-        return "-std=c++20"
-
     @property
     def c99_flag(self):
         return "-std=c99"
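For context, Spack compiler classes expose each language standard as a flag-returning property; the deleted lines drop C++17/C++20 support from this particular compiler. The shape of the protocol, as a stand-alone sketch (the class name is illustrative):

    class ExampleCompiler:
        # Each property returns the flag enabling that standard; compilers
        # lacking a standard simply do not define the property (or raise).
        @property
        def cxx14_flag(self):
            return "-std=c++14"

        @property
        def cxx17_flag(self):
            return "-std=c++17"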
@@ -110,7 +110,7 @@
 
 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = '{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT'
+scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
 
 #: Base name for the (internal) overrides scope.
 overrides_base_name = 'overrides-'
@@ -171,15 +171,34 @@ def strip(self):
     def paths(self):
         """Important paths in the image"""
         Paths = collections.namedtuple('Paths', [
-            'environment', 'store', 'hidden_view', 'view'
+            'environment', 'store', 'view'
         ])
         return Paths(
             environment='/opt/spack-environment',
             store='/opt/software',
-            hidden_view='/opt/._view',
             view='/opt/view'
         )
 
+    @tengine.context_property
+    def monitor(self):
+        """Enable using spack monitor during build."""
+        Monitor = collections.namedtuple('Monitor', [
+            'enabled', 'host', 'prefix', 'keep_going', 'tags'
+        ])
+        monitor = self.config.get("monitor")
+
+        # If we don't have a monitor group, cut out early.
+        if not monitor:
+            return Monitor(False, None, None, None, None)
+
+        return Monitor(
+            enabled=True,
+            host=monitor.get('host'),
+            prefix=monitor.get('prefix'),
+            keep_going=monitor.get("keep_going"),
+            tags=monitor.get('tags')
+        )
+
     @tengine.context_property
     def manifest(self):
         """The spack.yaml file that should be used in the image"""
@@ -188,6 +207,8 @@ def manifest(self):
         # Copy in the part of spack.yaml prescribed in the configuration file
         manifest = copy.deepcopy(self.config)
         manifest.pop('container')
+        if "monitor" in manifest:
+            manifest.pop("monitor")
 
         # Ensure that a few paths are where they need to be
         manifest.setdefault('config', syaml.syaml_dict())
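The new `monitor` context property follows the same namedtuple pattern as `paths`, so container templates can test `monitor.enabled` without key errors. Its behavior can be exercised in isolation (a sketch, with a plain dict standing in for `self.config`):

    import collections

    Monitor = collections.namedtuple(
        'Monitor', ['enabled', 'host', 'prefix', 'keep_going', 'tags'])

    def monitor_context(config):
        monitor = config.get("monitor")
        if not monitor:                      # no monitor section configured
            return Monitor(False, None, None, None, None)
        return Monitor(True, monitor.get('host'), monitor.get('prefix'),
                       monitor.get('keep_going'), monitor.get('tags'))

    assert monitor_context({}).enabled is False
    assert monitor_context({"monitor": {"host": "h"}}).host == "h"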
@@ -20,7 +20,6 @@
 
 compiler_name_translation = {
     'nvidia': 'nvhpc',
-    'rocm': 'rocmcc',
 }
 
 
@@ -39,6 +38,10 @@ def translated_compiler_name(manifest_compiler_name):
     elif manifest_compiler_name in spack.compilers.supported_compilers():
         return manifest_compiler_name
     else:
+        # Try to fail quickly. This can occur in two cases: (1) the compiler
+        # definition (2) a spec can specify a compiler that doesn't exist; the
+        # first will be caught when creating compiler definition. The second
+        # will result in Specs with associated undefined compilers.
         raise spack.compilers.UnknownCompilerError(
             "Manifest parsing - unknown compiler: {0}"
             .format(manifest_compiler_name))
@@ -86,13 +89,13 @@ def spec_from_entry(entry):
         arch=arch_str
     )
 
-    pkg_cls = spack.repo.path.get_pkg_class(entry['name'])
+    package = spack.repo.get(entry['name'])
 
     if 'parameters' in entry:
         variant_strs = list()
         for name, value in entry['parameters'].items():
             # TODO: also ensure that the variant value is valid?
-            if not (name in pkg_cls.variants):
+            if not (name in package.variants):
                 tty.debug("Omitting variant {0} for entry {1}/{2}"
                           .format(name, entry['name'], entry['hash'][:7]))
                 continue
@@ -182,8 +185,6 @@ def read(path, apply_updates):
     tty.debug("{0}: {1} compilers read from manifest".format(
         path,
         str(len(compilers))))
-    # Filter out the compilers that already appear in the configuration
-    compilers = spack.compilers.select_new_compilers(compilers)
     if apply_updates and compilers:
         spack.compilers.add_compilers_to_config(
             compilers, init_config=False)
@@ -220,7 +220,7 @@ def by_executable(packages_to_check, path_hints=None):
         searching by path.
 
     Args:
-        packages_to_check (list): list of package classes to be detected
+        packages_to_check (list): list of packages to be detected
         path_hints (list): list of paths to be searched. If None the list will be
             constructed based on the PATH environment variable.
     """
@@ -228,7 +228,7 @@ def by_executable(packages_to_check, path_hints=None):
     exe_pattern_to_pkgs = collections.defaultdict(list)
     for pkg in packages_to_check:
         if hasattr(pkg, 'executables'):
-            for exe in pkg.platform_executables():
+            for exe in pkg.platform_executables:
                 exe_pattern_to_pkgs[exe].append(pkg)
         # Add Windows specific, package related paths to the search paths
         path_hints.extend(compute_windows_program_path_for_package(pkg))
@@ -46,7 +46,7 @@ class OpenMpi(Package):
 from spack.dependency import Dependency, canonical_deptype, default_deptype
 from spack.fetch_strategy import from_kwargs
 from spack.resource import Resource
-from spack.version import GitVersion, Version, VersionChecksumError, VersionLookupError
+from spack.version import Version, VersionChecksumError
 
 __all__ = ['DirectiveError', 'DirectiveMeta', 'version', 'conflicts', 'depends_on',
            'extends', 'provides', 'patch', 'variant', 'resource']
@@ -330,17 +330,7 @@ def _execute_version(pkg):
             kwargs['checksum'] = checksum
 
         # Store kwargs for the package to later with a fetch_strategy.
-        version = Version(ver)
-        if isinstance(version, GitVersion):
-            if not hasattr(pkg, 'git') and 'git' not in kwargs:
-                msg = "Spack version directives cannot include git hashes fetched from"
-                msg += " URLs. Error in package '%s'\n" % pkg.name
-                msg += "    version('%s', " % version.string
-                msg += ', '.join("%s='%s'" % (argname, value)
-                                 for argname, value in kwargs.items())
-                msg += ")"
-                raise VersionLookupError(msg)
-        pkg.versions[version] = kwargs
+        pkg.versions[Version(ver)] = kwargs
     return _execute_version
 
 
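The deleted block validates that a version which parses as a git commit is only accepted when a `git` location is known. A self-contained sketch of that guard (the 40-hex-digit test stands in for the real `GitVersion` detection; names are illustrative):

    import re

    def validate_version_directive(pkg_name, has_git_attr, ver, kwargs):
        looks_like_commit = bool(re.fullmatch(r'[0-9a-f]{40}', ver))
        if looks_like_commit and not (has_git_attr or 'git' in kwargs):
            raise ValueError(
                "version('%s', ...) in package '%s' needs a 'git' attribute "
                "or keyword argument" % (ver, pkg_name))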
@@ -1113,13 +1113,8 @@ def develop(self, spec, path, clone=False):
             # "steal" the source code via staging API
             abspath = os.path.normpath(os.path.join(self.path, path))
 
-            # Stage, at the moment, requires a concrete Spec, since it needs the
-            # dag_hash for the stage dir name. Below though we ask for a stage
-            # to be created, to copy it afterwards somewhere else. It would be
-            # better if we can create the `source_path` directly into its final
-            # destination.
-            pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-            pkg_cls(spec).stage.steal_source(abspath)
+            stage = spec.package.stage
+            stage.steal_source(abspath)
 
         # If it wasn't already in the list, append it
         self.dev_specs[spec.name] = {'path': path, 'spec': str(spec)}
@@ -1617,10 +1612,9 @@ def install_specs(self, specs=None, **install_args):
         # ensure specs already installed are marked explicit
         all_specs = specs or [cs for _, cs in self.concretized_specs()]
         specs_installed = [s for s in all_specs if s.installed]
-        if specs_installed:
-            with spack.store.db.write_transaction():  # do all in one transaction
-                for spec in specs_installed:
-                    spack.store.db.update_explicit(spec, True)
+        with spack.store.db.write_transaction():  # do all in one transaction
+            for spec in specs_installed:
+                spack.store.db.update_explicit(spec, True)
 
         if not specs_to_install:
             tty.msg('All of the packages are already installed')
@@ -35,7 +35,6 @@
 import six.moves.urllib.parse as urllib_parse
 
 import llnl.util
-import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.filesystem import (
     get_single_file,
@@ -120,11 +119,6 @@ def __init__(self, **kwargs):
         # 'no_cache' option from version directive.
         self.cache_enabled = not kwargs.pop('no_cache', False)
 
-        self.package = None
-
-    def set_package(self, package):
-        self.package = package
-
     # Subclasses need to implement these methods
     def fetch(self):
         """Fetch source code archive or repo.
@@ -248,10 +242,6 @@ def source_id(self):
         if all(component_ids):
             return component_ids
 
-    def set_package(self, package):
-        for item in self:
-            item.package = package
-
 
 @fetcher
 class URLFetchStrategy(FetchStrategy):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self._fetch_from_url(url)
|
partial_file, save_file = self._fetch_from_url(url)
|
||||||
|
if save_file and (partial_file is not None):
|
||||||
|
llnl.util.filesystem.rename(partial_file, save_file)
|
||||||
break
|
break
|
||||||
except FailedDownloadError as e:
|
except FailedDownloadError as e:
|
||||||
errors.append(str(e))
|
errors.append(str(e))
|
||||||
@@ -387,7 +379,9 @@ def _check_headers(self, headers):
|
|||||||
|
|
||||||
@_needs_stage
|
@_needs_stage
|
||||||
def _fetch_urllib(self, url):
|
def _fetch_urllib(self, url):
|
||||||
save_file = self.stage.save_filename
|
save_file = None
|
||||||
|
if self.stage.save_filename:
|
||||||
|
save_file = self.stage.save_filename
|
||||||
tty.msg('Fetching {0}'.format(url))
|
tty.msg('Fetching {0}'.format(url))
|
||||||
|
|
||||||
# Run urllib but grab the mime type from the http headers
|
# Run urllib but grab the mime type from the http headers
|
||||||
@@ -397,18 +391,16 @@ def _fetch_urllib(self, url):
|
|||||||
# clean up archive on failure.
|
# clean up archive on failure.
|
||||||
if self.archive_file:
|
if self.archive_file:
|
||||||
os.remove(self.archive_file)
|
os.remove(self.archive_file)
|
||||||
if os.path.lexists(save_file):
|
if save_file and os.path.exists(save_file):
|
||||||
os.remove(save_file)
|
os.remove(save_file)
|
||||||
msg = 'urllib failed to fetch with error {0}'.format(e)
|
msg = 'urllib failed to fetch with error {0}'.format(e)
|
||||||
raise FailedDownloadError(url, msg)
|
raise FailedDownloadError(url, msg)
|
||||||
|
|
||||||
if os.path.lexists(save_file):
|
|
||||||
os.remove(save_file)
|
|
||||||
|
|
||||||
with open(save_file, 'wb') as _open_file:
|
with open(save_file, 'wb') as _open_file:
|
||||||
shutil.copyfileobj(response, _open_file)
|
shutil.copyfileobj(response, _open_file)
|
||||||
|
|
||||||
self._check_headers(str(headers))
|
self._check_headers(str(headers))
|
||||||
|
return None, save_file
|
||||||
|
|
||||||
@_needs_stage
|
@_needs_stage
|
||||||
def _fetch_curl(self, url):
|
def _fetch_curl(self, url):
|
||||||
@@ -469,7 +461,7 @@ def _fetch_curl(self, url):
|
|||||||
if self.archive_file:
|
if self.archive_file:
|
||||||
os.remove(self.archive_file)
|
os.remove(self.archive_file)
|
||||||
|
|
||||||
if partial_file and os.path.lexists(partial_file):
|
if partial_file and os.path.exists(partial_file):
|
||||||
os.remove(partial_file)
|
os.remove(partial_file)
|
||||||
|
|
||||||
if curl.returncode == 22:
|
if curl.returncode == 22:
|
||||||
@@ -496,9 +488,7 @@ def _fetch_curl(self, url):
|
|||||||
"Curl failed with error %d" % curl.returncode)
|
"Curl failed with error %d" % curl.returncode)
|
||||||
|
|
||||||
self._check_headers(headers)
|
self._check_headers(headers)
|
||||||
|
return partial_file, save_file
|
||||||
if save_file and (partial_file is not None):
|
|
||||||
fs.rename(partial_file, save_file)
|
|
||||||
|
|
||||||
@property # type: ignore # decorated properties unsupported in mypy
|
@property # type: ignore # decorated properties unsupported in mypy
|
||||||
@_needs_stage
|
@_needs_stage
|
||||||
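The right-hand side threads a `(partial_file, save_file)` pair out of both fetch helpers so the caller can commit the download with a single rename. The idiom in isolation:

    import os

    def commit_download(partial_file, save_file):
        # Rename only after a fully successful fetch; os.rename is atomic
        # when source and destination live on the same filesystem.
        if save_file and partial_file is not None:
            os.rename(partial_file, save_file)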
@@ -530,7 +520,7 @@ def expand(self):
                 "Failed on expand() for URL %s" % self.url)
 
         if not self.extension:
-            self.extension = extension(self.url)
+            self.extension = extension(self.archive_file)
 
         if self.stage.expanded:
             tty.debug('Source already staged to %s' % self.stage.source_path)
@@ -538,11 +528,50 @@ def expand(self):
 
         decompress = decompressor_for(self.archive_file, self.extension)
 
+        # Expand all tarballs in their own directory to contain
+        # exploding tarballs.
+        tarball_container = os.path.join(self.stage.path,
+                                         "spack-expanded-archive")
+
         # Below we assume that the command to decompress expand the
         # archive in the current working directory
-        with fs.exploding_archive_catch(self.stage):
+        mkdirp(tarball_container)
+        with working_dir(tarball_container):
             decompress(self.archive_file)
 
+        # Check for an exploding tarball, i.e. one that doesn't expand to
+        # a single directory. If the tarball *didn't* explode, move its
+        # contents to the staging source directory & remove the container
+        # directory. If the tarball did explode, just rename the tarball
+        # directory to the staging source directory.
+        #
+        # NOTE: The tar program on Mac OS X will encode HFS metadata in
+        # hidden files, which can end up *alongside* a single top-level
+        # directory. We initially ignore presence of hidden files to
+        # accomodate these "semi-exploding" tarballs but ensure the files
+        # are copied to the source directory.
+        files = os.listdir(tarball_container)
+        non_hidden = [f for f in files if not f.startswith('.')]
+        if len(non_hidden) == 1:
+            src = os.path.join(tarball_container, non_hidden[0])
+            if os.path.isdir(src):
+                self.stage.srcdir = non_hidden[0]
+                shutil.move(src, self.stage.source_path)
+                if len(files) > 1:
+                    files.remove(non_hidden[0])
+                    for f in files:
+                        src = os.path.join(tarball_container, f)
+                        dest = os.path.join(self.stage.path, f)
+                        shutil.move(src, dest)
+                os.rmdir(tarball_container)
+            else:
+                # This is a non-directory entry (e.g., a patch file) so simply
+                # rename the tarball container to be the source path.
+                shutil.move(tarball_container, self.stage.source_path)
+
+        else:
+            shutil.move(tarball_container, self.stage.source_path)
+
     def archive(self, destination):
         """Just moves this archive to the destination."""
         if not self.archive_file:
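The inline expansion on the right-hand side protects against "exploding" tarballs by always unpacking into a container directory first. The decision that follows reduces to the sketch below (condensed, ignoring the hidden-file bookkeeping):

    import os
    import shutil

    def hoist_expanded_archive(container, source_path):
        entries = [e for e in os.listdir(container) if not e.startswith('.')]
        single_dir = (len(entries) == 1 and
                      os.path.isdir(os.path.join(container, entries[0])))
        if single_dir:
            # Well-behaved tarball: hoist its one top-level directory
            shutil.move(os.path.join(container, entries[0]), source_path)
        else:
            # Exploding tarball (or a bare file): the container directory
            # itself becomes the source directory
            shutil.move(container, source_path)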
@@ -613,7 +642,7 @@ def fetch(self):
 
         # remove old symlink if one is there.
         filename = self.stage.save_filename
-        if os.path.lexists(filename):
+        if os.path.exists(filename):
             os.remove(filename)
 
         # Symlink to local cached archive.
@@ -985,20 +1014,9 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
             git(*args)
 
         # Init submodules if the user asked for them.
-        git_commands = []
-        submodules = self.submodules
-        if callable(submodules):
-            submodules = list(submodules(self.package))
-            git_commands.append(["submodule", "init", "--"] + submodules)
-            git_commands.append(['submodule', 'update', '--recursive'])
-        elif submodules:
-            git_commands.append(["submodule", "update", "--init", "--recursive"])
-
-        if not git_commands:
-            return
-
-        with working_dir(dest):
-            for args in git_commands:
+        if self.submodules:
+            with working_dir(dest):
+                args = ['submodule', 'update', '--init', '--recursive']
                 if not spack.config.get('config:debug'):
                     args.insert(1, '--quiet')
                 git(*args)
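The deleted (left-hand) clone logic also supports `submodules` as a callable that selects a subset of submodules per package. The dispatch, as a stand-alone sketch:

    def submodule_commands(submodules, package):
        # Returns the git argument lists needed to initialize submodules
        if callable(submodules):
            paths = list(submodules(package))
            return [["submodule", "init", "--"] + paths,
                    ["submodule", "update", "--recursive"]]
        elif submodules:
            return [["submodule", "update", "--init", "--recursive"]]
        return []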
|||||||
|
|
||||||
check_pkg_attributes(pkg)
|
check_pkg_attributes(pkg)
|
||||||
|
|
||||||
if not isinstance(version, spack.version.VersionBase):
|
if not isinstance(version, spack.version.Version):
|
||||||
version = spack.version.Version(version)
|
version = spack.version.Version(version)
|
||||||
|
|
||||||
# if it's a commit, we must use a GitFetchStrategy
|
# if it's a commit, we must use a GitFetchStrategy
|
||||||
if isinstance(version, spack.version.GitVersion):
|
if version.is_commit and hasattr(pkg, "git"):
|
||||||
if not hasattr(pkg, "git"):
|
|
||||||
raise FetchError(
|
|
||||||
"Cannot fetch git version for %s. Package has no 'git' attribute" %
|
|
||||||
pkg.name
|
|
||||||
)
|
|
||||||
# Populate the version with comparisons to other commits
|
# Populate the version with comparisons to other commits
|
||||||
version.generate_git_lookup(pkg.name)
|
version.generate_commit_lookup(pkg.name)
|
||||||
|
|
||||||
# For GitVersion, we have no way to determine whether a ref is a branch or tag
|
|
||||||
# Fortunately, we handle branches and tags identically, except tags are
|
|
||||||
# handled slightly more conservatively for older versions of git.
|
|
||||||
# We call all non-commit refs tags in this context, at the cost of a slight
|
|
||||||
# performance hit for branches on older versions of git.
|
|
||||||
# Branches cannot be cached, so we tell the fetcher not to cache tags/branches
|
|
||||||
ref_type = 'commit' if version.is_commit else 'tag'
|
|
||||||
kwargs = {
|
kwargs = {
|
||||||
'git': pkg.git,
|
'git': pkg.git,
|
||||||
ref_type: version.ref,
|
'commit': str(version)
|
||||||
'no_cache': True,
|
|
||||||
}
|
}
|
||||||
kwargs['submodules'] = getattr(pkg, 'submodules', False)
|
kwargs['submodules'] = getattr(pkg, 'submodules', False)
|
||||||
fetcher = GitFetchStrategy(**kwargs)
|
fetcher = GitFetchStrategy(**kwargs)
|
||||||
|
|||||||
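On the left-hand side, any non-commit git ref is treated as a tag and caching is disabled, since branches and tags can move while commits are immutable. The kwargs construction in isolation (function and parameter names are illustrative):

    def git_fetch_kwargs(git_url, ref, is_commit, submodules=False):
        ref_type = 'commit' if is_commit else 'tag'
        return {
            'git': git_url,
            ref_type: ref,
            'no_cache': True,   # movable refs must not be cached
            'submodules': submodules,
        }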
@@ -95,10 +95,7 @@ def view_copy(src, dst, view, spec=None):
                     view.get_projection_for_spec(dep)
 
         if spack.relocate.is_binary(dst):
-            spack.relocate.relocate_text_bin(
-                binaries=[dst],
-                prefixes=prefix_to_projection
-            )
+            spack.relocate.relocate_text_bin([dst], prefix_to_projection)
         else:
             prefix_to_projection[spack.store.layout.root] = view._root
             prefix_to_projection[orig_sbang] = new_sbang
@@ -535,10 +535,9 @@ def graph_dot(specs, deptype='all', static=False, out=None):
     deptype = spack.dependency.canonical_deptype(deptype)
 
     def static_graph(spec, deptype):
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-        possible = pkg_cls.possible_dependencies(
-            expand_virtuals=True, deptype=deptype
-        )
+        pkg = spec.package
+        possible = pkg.possible_dependencies(
+            expand_virtuals=True, deptype=deptype)
 
         nodes = set()  # elements are (node name, node label)
         edges = set()  # elements are (src key, dest key)
@@ -2,10 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 """Definitions that control how Spack creates Spec hashes."""
 
 import spack.dependency as dp
-import spack.repo
 
 hashes = []
 
@@ -51,16 +51,10 @@ def __call__(self, spec):
     )
 
 
-def _content_hash_override(spec):
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-    pkg = pkg_cls(spec)
-    return pkg.content_hash()
-
-
 #: Package hash used as part of dag hash
 package_hash = SpecHashDescriptor(
     deptype=(), package_hash=True, name='package_hash',
-    override=_content_hash_override)
+    override=lambda s: s.package.content_hash())
 
 
 # Deprecated hash types, no longer used, but needed to understand old serialized
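Both override forms compute the same value from the spec's package content hash. One plausible motivation for the named-function form worth noting: a module-level function pickles by qualified name, a lambda does not, which can matter if hash descriptors ever cross process boundaries. Illustration:

    import pickle

    def named_override(spec):
        return spec

    pickle.dumps(named_override)         # fine: pickled by module-level name
    try:
        pickle.dumps(lambda spec: spec)  # fails: lambdas are not picklable
    except (pickle.PicklingError, AttributeError):
        pass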
@@ -21,6 +21,7 @@
     * on_phase_success(pkg, phase_name, log_file)
     * on_phase_error(pkg, phase_name, log_file)
     * on_phase_error(pkg, phase_name, log_file)
+    * on_analyzer_save(pkg, result)
     * post_env_write(env)
 
 This can be used to implement support for things like module
@@ -91,5 +92,8 @@ def __call__(self, *args, **kwargs):
 on_install_failure = _HookRunner('on_install_failure')
 on_install_cancel = _HookRunner('on_install_cancel')
 
+# Analyzer hooks
+on_analyzer_save = _HookRunner('on_analyzer_save')
+
 # Environment hooks
 post_env_write = _HookRunner('post_env_write')
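`_HookRunner` fans a call out to every hook module that defines the named function, which is how the new `hooks/monitor.py` below gets invoked without explicit registration. The pattern, as a minimal stand-alone sketch (not the verbatim Spack class):

    class HookRunner:
        def __init__(self, hook_name, modules):
            self.hook_name = hook_name
            self.modules = modules

        def __call__(self, *args, **kwargs):
            # Call the hook in every module that implements it
            for module in self.modules:
                hook = getattr(module, self.hook_name, None)
                if hook:
                    hook(*args, **kwargs)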
85 lib/spack/spack/hooks/monitor.py Normal file
@@ -0,0 +1,85 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import llnl.util.tty as tty
+
+import spack.monitor
+
+
+def on_install_start(spec):
+    """On start of an install, we want to ping the server if it exists
+    """
+    if not spack.monitor.cli:
+        return
+
+    tty.debug("Running on_install_start for %s" % spec)
+    build_id = spack.monitor.cli.new_build(spec)
+    tty.verbose("Build created with id %s" % build_id)
+
+
+def on_install_success(spec):
+    """On the success of an install (after everything is complete)
+    """
+    if not spack.monitor.cli:
+        return
+
+    tty.debug("Running on_install_success for %s" % spec)
+    result = spack.monitor.cli.update_build(spec, status="SUCCESS")
+    tty.verbose(result.get('message'))
+
+
+def on_install_failure(spec):
+    """Triggered on failure of an install
+    """
+    if not spack.monitor.cli:
+        return
+
+    tty.debug("Running on_install_failure for %s" % spec)
+    result = spack.monitor.cli.fail_task(spec)
+    tty.verbose(result.get('message'))
+
+
+def on_install_cancel(spec):
+    """Triggered on cancel of an install
+    """
+    if not spack.monitor.cli:
+        return
+
+    tty.debug("Running on_install_cancel for %s" % spec)
+    result = spack.monitor.cli.cancel_task(spec)
+    tty.verbose(result.get('message'))
+
+
+def on_phase_success(pkg, phase_name, log_file):
+    """Triggered on a phase success
+    """
+    if not spack.monitor.cli:
+        return
+
+    tty.debug("Running on_phase_success %s, phase %s" % (pkg.name, phase_name))
+    result = spack.monitor.cli.send_phase(pkg, phase_name, log_file, "SUCCESS")
+    tty.verbose(result.get('message'))
+
+
+def on_phase_error(pkg, phase_name, log_file):
+    """Triggered on a phase error
+    """
+    if not spack.monitor.cli:
+        return
+
+    tty.debug("Running on_phase_error %s, phase %s" % (pkg.name, phase_name))
+    result = spack.monitor.cli.send_phase(pkg, phase_name, log_file, "ERROR")
+    tty.verbose(result.get('message'))
+
+
+def on_analyzer_save(pkg, result):
+    """given a package and a result, if we have a spack monitor, upload
+    the result to it.
+    """
+    if not spack.monitor.cli:
+        return
+
+    # This hook runs after a save result
+    spack.monitor.cli.send_analyze_metadata(pkg, result)
@@ -49,6 +49,7 @@
 import spack.compilers
 import spack.error
 import spack.hooks
+import spack.monitor
 import spack.package_base
 import spack.package_prefs as prefs
 import spack.repo
@@ -232,7 +233,6 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
     )
     packages = [(s.package, False) for
                 s in dep.traverse(order='post', root=False)]
-
     packages.append((dep.package, True))
     return packages
 
@@ -2212,8 +2212,7 @@ def flag_installed(self, installed):
     @property
     def explicit(self):
         """The package was explicitly requested by the user."""
-        return self.pkg == self.request.pkg and \
-            self.request.install_args.get('explicit', True)
+        return self.pkg == self.request.pkg
 
     @property
     def key(self):
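The left-hand `explicit` property adds a second condition: the install request itself can mark a root as implicit through its `install_args`. Reduced to plain functions, the two semantics compare as follows (names are illustrative):

    def explicit_left(task_pkg, request_pkg, install_args):
        # explicit only if this is the requested package AND the request
        # did not override explicitness
        return task_pkg == request_pkg and install_args.get('explicit', True)

    def explicit_right(task_pkg, request_pkg):
        return task_pkg == request_pkg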
@@ -391,8 +391,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
     storage path of the resource associated with the specified ``fetcher``."""
     ext = None
     if spec:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-        versions = pkg_cls.versions.get(spec.version, {})
+        versions = spec.package.versions.get(spec.package.version, {})
         ext = versions.get('extension', None)
     # If the spec does not explicitly specify an extension (the default case),
     # then try to determine it automatically. An extension can only be
@@ -54,34 +54,6 @@
|
|||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
|
|
||||||
|
|
||||||
def get_deprecated(dictionary, name, old_name, default):
|
|
||||||
"""Get a deprecated property from a ``dict``.
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
dictionary (dict): dictionary to get a value from.
|
|
||||||
name (str): New name for the property. If present, supersedes ``old_name``.
|
|
||||||
old_name (str): Deprecated name for the property. If present, a warning
|
|
||||||
is printed.
|
|
||||||
default (object): value to return if neither name is found.
|
|
||||||
"""
|
|
||||||
value = default
|
|
||||||
|
|
||||||
# always warn if old name is present
|
|
||||||
if old_name in dictionary:
|
|
||||||
value = dictionary.get(old_name, value)
|
|
||||||
main_msg = "`{}:` is deprecated in module config and will be removed in v0.20."
|
|
||||||
details = (
|
|
||||||
"Use `{}:` instead. You can run `spack config update` to translate your "
|
|
||||||
"configuration files automatically."
|
|
||||||
)
|
|
||||||
tty.warn(main_msg.format(old_name), details.format(name))
|
|
||||||
|
|
||||||
# name overrides old name if present
|
|
||||||
value = dictionary.get(name, value)
|
|
||||||
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
#: config section for this file
|
#: config section for this file
|
||||||
def configuration(module_set_name):
|
def configuration(module_set_name):
|
||||||
config_path = 'modules:%s' % module_set_name
|
config_path = 'modules:%s' % module_set_name
|
||||||
@@ -379,14 +351,14 @@ def get_module(

    Retrieve the module file for the given spec if it is available. If the
    module is not available, this will raise an exception unless the module
-    is excluded or if the spec is installed upstream.
+    is blacklisted or if the spec is installed upstream.

    Args:
        module_type: the type of module we want to retrieve (e.g. lmod)
        spec: refers to the installed package that we want to retrieve a module
            for
-        required: if the module is required but excluded, this function will
-            print a debug message. If a module is missing but not excluded,
+        required: if the module is required but blacklisted, this function will
+            print a debug message. If a module is missing but not blacklisted,
            then an exception is raised (regardless of whether it is required)
        get_full_path: if ``True``, this returns the full path to the module.
            Otherwise, this returns the module name.
@@ -414,13 +386,13 @@ def get_module(
    else:
        writer = spack.modules.module_types[module_type](spec, module_set_name)
        if not os.path.isfile(writer.layout.filename):
-            if not writer.conf.excluded:
+            if not writer.conf.blacklisted:
                err_msg = "No module available for package {0} at {1}".format(
                    spec, writer.layout.filename
                )
                raise ModuleNotFoundError(err_msg)
            elif required:
-                tty.debug("The module configuration has excluded {0}: "
+                tty.debug("The module configuration has blacklisted {0}: "
                          "omitting it".format(spec))
            else:
                return None
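Note: a minimal usage sketch of `get_module` as documented above. The `spec` value is assumed to be a concrete, installed Spec; only `module_type` and `spec` are confirmed positional by the docstring, so the remaining arguments are passed by keyword here:

    # Returns the module name (or full path), or None if the module is
    # blacklisted/excluded and not required.
    path = get_module('lmod', spec, get_full_path=True, required=False)
    if path is None:
        pass  # module intentionally skipped by configuration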
@@ -511,30 +483,26 @@ def hash(self):
        return None

    @property
-    def excluded(self):
-        """Returns True if the module has been excluded, False otherwise."""
+    def blacklisted(self):
+        """Returns True if the module has been blacklisted,
+        False otherwise.
+        """
        # A few variables for convenience of writing the method
        spec = self.spec
        conf = self.module.configuration(self.name)

-        # Compute the list of include rules that match
-        # DEPRECATED: remove 'whitelist' in v0.20
-        include_rules = get_deprecated(conf, "include", "whitelist", [])
-        include_matches = [x for x in include_rules if spec.satisfies(x)]
+        # Compute the list of whitelist rules that match
+        wlrules = conf.get('whitelist', [])
+        whitelist_matches = [x for x in wlrules if spec.satisfies(x)]

-        # Compute the list of exclude rules that match
-        # DEPRECATED: remove 'blacklist' in v0.20
-        exclude_rules = get_deprecated(conf, "exclude", "blacklist", [])
-        exclude_matches = [x for x in exclude_rules if spec.satisfies(x)]
+        # Compute the list of blacklist rules that match
+        blrules = conf.get('blacklist', [])
+        blacklist_matches = [x for x in blrules if spec.satisfies(x)]

-        # Should I exclude the module because it's implicit?
-        # DEPRECATED: remove 'blacklist_implicits' in v0.20
-        exclude_implicits = get_deprecated(
-            conf, "exclude_implicits", "blacklist_implicits", None
-        )
+        # Should I blacklist the module because it's implicit?
+        blacklist_implicits = conf.get('blacklist_implicits')
        installed_implicitly = not spec._installed_explicitly()
-        excluded_as_implicit = exclude_implicits and installed_implicitly
+        blacklisted_as_implicit = blacklist_implicits and installed_implicitly

        def debug_info(line_header, match_list):
            if match_list:
@@ -543,15 +511,15 @@ def debug_info(line_header, match_list):
                for rule in match_list:
                    tty.debug('\t\tmatches rule: {0}'.format(rule))

-        debug_info('INCLUDE', include_matches)
-        debug_info('EXCLUDE', exclude_matches)
+        debug_info('WHITELIST', whitelist_matches)
+        debug_info('BLACKLIST', blacklist_matches)

-        if excluded_as_implicit:
-            msg = '\tEXCLUDED_AS_IMPLICIT : {0}'.format(spec.cshort_spec)
+        if blacklisted_as_implicit:
+            msg = '\tBLACKLISTED_AS_IMPLICIT : {0}'.format(spec.cshort_spec)
            tty.debug(msg)

-        is_excluded = exclude_matches or excluded_as_implicit
-        if not include_matches and is_excluded:
+        is_blacklisted = blacklist_matches or blacklisted_as_implicit
+        if not whitelist_matches and is_blacklisted:
            return True

        return False
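Note: both sides implement the same rule under different names: a spec's module is skipped only when an exclude (blacklist) rule or the implicit-install flag matches and no include (whitelist) rule rescues it. A condensed restatement of the predicate above, assuming the match lists computed in the property:

    def is_skipped(include_matches, exclude_matches, excluded_as_implicit):
        # Exclusion applies only if no include rule rescued the spec.
        return bool((exclude_matches or excluded_as_implicit)
                    and not include_matches)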
@@ -576,22 +544,17 @@ def specs_to_prereq(self):
        return self._create_list_for('prerequisites')

    @property
-    def exclude_env_vars(self):
+    def environment_blacklist(self):
        """List of variables that should be left unmodified."""
-        filter = self.conf.get('filter', {})
-
-        # DEPRECATED: remove in v0.20
-        return get_deprecated(
-            filter, "exclude_env_vars", "environment_blacklist", {}
-        )
+        return self.conf.get('filter', {}).get('environment_blacklist', {})

    def _create_list_for(self, what):
-        include = []
+        whitelist = []
        for item in self.conf[what]:
            conf = type(self)(item, self.name)
-            if not conf.excluded:
-                include.append(item)
-        return include
+            if not conf.blacklisted:
+                whitelist.append(item)
+        return whitelist

    @property
    def verbose(self):
@@ -770,8 +733,8 @@ def environment_modifications(self):
        # Modifications required from modules.yaml
        env.extend(self.conf.env)

-        # List of variables that are excluded in modules.yaml
-        exclude = self.conf.exclude_env_vars
+        # List of variables that are blacklisted in modules.yaml
+        blacklist = self.conf.environment_blacklist

        # We may have tokens to substitute in environment commands

@@ -795,7 +758,7 @@ def environment_modifications(self):
                pass
            x.name = str(x.name).replace('-', '_')

-        return [(type(x).__name__, x) for x in env if x.name not in exclude]
+        return [(type(x).__name__, x) for x in env if x.name not in blacklist]

    @tengine.context_property
    def autoload(self):
@@ -868,9 +831,9 @@ def write(self, overwrite=False):
                existing file. If False the operation is skipped an we print
                a warning to the user.
        """
-        # Return immediately if the module is excluded
-        if self.conf.excluded:
-            msg = '\tNOT WRITING: {0} [EXCLUDED]'
+        # Return immediately if the module is blacklisted
+        if self.conf.blacklisted:
+            msg = '\tNOT WRITING: {0} [BLACKLISTED]'
            tty.debug(msg.format(self.spec.cshort_spec))
            return

738  lib/spack/spack/monitor.py  Normal file
@@ -0,0 +1,738 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Interact with a Spack Monitor Service. Derived from
+https://github.com/spack/spack-monitor/blob/main/script/spackmoncli.py
+"""
+
+import base64
+import hashlib
+import os
+import re
+from datetime import datetime
+
+try:
+    from urllib.error import URLError
+    from urllib.request import Request, urlopen
+except ImportError:
+    from urllib2 import urlopen, Request, URLError  # type: ignore  # novm
+
+from copy import deepcopy
+from glob import glob
+
+import llnl.util.tty as tty
+
+import spack
+import spack.config
+import spack.hash_types as ht
+import spack.main
+import spack.paths
+import spack.store
+import spack.util.path
+import spack.util.spack_json as sjson
+import spack.util.spack_yaml as syaml
+
+# A global client to instantiate once
+cli = None
+
+
+def get_client(host, prefix="ms1", allow_fail=False, tags=None, save_local=False):
+    """
+    Get a monitor client for a particular host and prefix.
+
+    If the client is not running, we exit early, unless allow_fail is set
+    to true, indicating that we should continue the build even if the
+    server is not present. Note that this client is defined globally as "cli"
+    so we can istantiate it once (checking for credentials, etc.) and then
+    always have access to it via spack.monitor.cli. Also note that
+    typically, we call the monitor by way of hooks in spack.hooks.monitor.
+    So if you want the monitor to have a new interaction with some part of
+    the codebase, it's recommended to write a hook first, and then have
+    the monitor use it.
+    """
+    global cli
+    cli = SpackMonitorClient(host=host, prefix=prefix, allow_fail=allow_fail,
+                             tags=tags, save_local=save_local)
+
+    # Auth is always required unless we are saving locally
+    if not save_local:
+        cli.require_auth()
+
+    # We will exit early if the monitoring service is not running, but
+    # only if we aren't doing a local save
+    if not save_local:
+        info = cli.service_info()
+
+        # If we allow failure, the response will be done
+        if info:
+            tty.debug("%s v.%s has status %s" % (
+                info['id'],
+                info['version'],
+                info['status'])
+            )
+    return cli
+
+
+def get_monitor_group(subparser):
+    """
+    Retrieve the monitor group for the argument parser.
+
+    Since the monitor group is shared between commands, we provide a common
+    function to generate the group for it. The user can pass the subparser, and
+    the group is added, and returned.
+    """
+    # Monitoring via https://github.com/spack/spack-monitor
+    monitor_group = subparser.add_argument_group()
+    monitor_group.add_argument(
+        '--monitor', action='store_true', dest='use_monitor', default=False,
+        help="interact with a monitor server during builds.")
+    monitor_group.add_argument(
+        '--monitor-save-local', action='store_true', dest='monitor_save_local',
+        default=False, help="save monitor results to .spack instead of server.")
+    monitor_group.add_argument(
+        '--monitor-tags', dest='monitor_tags', default=None,
+        help="One or more (comma separated) tags for a build.")
+    monitor_group.add_argument(
+        '--monitor-keep-going', action='store_true', dest='monitor_keep_going',
+        default=False, help="continue the build if a request to monitor fails.")
+    monitor_group.add_argument(
+        '--monitor-host', dest='monitor_host', default="http://127.0.0.1",
+        help="If using a monitor, customize the host.")
+    monitor_group.add_argument(
+        '--monitor-prefix', dest='monitor_prefix', default="ms1",
+        help="The API prefix for the monitor service.")
+    return monitor_group
+
+
+class SpackMonitorClient:
+    """Client to interact with a spack monitor server.
+
+    We require the host url, along with the prefix to discover the
+    service_info endpoint. If allow_fail is set to True, we will not exit
+    on error with tty.die given that a request is not successful. The spack
+    version is one of the fields to uniquely identify a spec, so we add it
+    to the client on init.
+    """
+
+    def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
+                 save_local=False):
+        # We can control setting an arbitrary version if needed
+        sv = spack.main.get_version()
+        self.spack_version = os.environ.get("SPACKMON_SPACK_VERSION") or sv
+
+        self.host = host or "http://127.0.0.1"
+        self.baseurl = "%s/%s" % (self.host, prefix.strip("/"))
+        self.token = os.environ.get("SPACKMON_TOKEN")
+        self.username = os.environ.get("SPACKMON_USER")
+        self.headers = {}
+        self.allow_fail = allow_fail
+        self.capture_build_environment()
+        self.tags = tags
+        self.save_local = save_local
+
+        # We key lookup of build_id by dag_hash
+        self.build_ids = {}
+        self.setup_save()
+
+    def setup_save(self):
+        """Given a local save "save_local" ensure the output directory exists.
+        """
+        if not self.save_local:
+            return
+
+        save_dir = spack.util.path.canonicalize_path(
+            spack.config.get('config:monitor_dir', spack.paths.default_monitor_path)
+        )
+
+        # Name based on timestamp
+        now = datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%s')
+        self.save_dir = os.path.join(save_dir, now)
+        if not os.path.exists(self.save_dir):
+            os.makedirs(self.save_dir)
+
+    def save(self, obj, filename):
+        """
+        Save a monitor json result to the save directory.
+        """
+        filename = os.path.join(self.save_dir, filename)
+        write_json(obj, filename)
+        return {"message": "Build saved locally to %s" % filename}
+
+    def load_build_environment(self, spec):
+        """
+        Load a build environment from install_environment.json.
+
+        If we are running an analyze command, we will need to load previously
+        used build environment metadata from install_environment.json to capture
+        what was done during the build.
+        """
+        if not hasattr(spec, "package") or not spec.package:
+            tty.die("A spec must have a package to load the environment.")
+
+        pkg_dir = os.path.dirname(spec.package.install_log_path)
+        env_file = os.path.join(pkg_dir, "install_environment.json")
+        build_environment = read_json(env_file)
+        if not build_environment:
+            tty.warn(
+                "install_environment.json not found in package folder. "
+                " This means that the current environment metadata will be used."
+            )
+        else:
+            self.build_environment = build_environment
+
+    def capture_build_environment(self):
+        """
+        Capture the environment for the build.
+
+        This uses spack.util.environment.get_host_environment_metadata to do so.
+        This is important because it's a unique identifier, along with the spec,
+        for a Build. It should look something like this:
+
+        {'host_os': 'ubuntu20.04',
+         'platform': 'linux',
+         'host_target': 'skylake',
+         'hostname': 'vanessa-ThinkPad-T490s',
+         'spack_version': '0.16.1-1455-52d5b55b65',
+         'kernel_version': '#73-Ubuntu SMP Mon Jan 18 17:25:17 UTC 2021'}
+
+        This is saved to a package install's metadata folder as
+        install_environment.json, and can be loaded by the monitor for uploading
+        data relevant to a later analysis.
+        """
+        from spack.util.environment import get_host_environment_metadata
+        self.build_environment = get_host_environment_metadata()
+        keys = list(self.build_environment.keys())
+
+        # Allow to customize any of these values via the environment
+        for key in keys:
+            envar_name = "SPACKMON_%s" % key.upper()
+            envar = os.environ.get(envar_name)
+            if envar:
+                self.build_environment[key] = envar
+
+    def require_auth(self):
+        """
+        Require authentication.
+
+        The token and username must not be unset
+        """
+        if not self.save_local and (not self.token or not self.username):
+            tty.die("You are required to export SPACKMON_TOKEN and SPACKMON_USER")
+
+    def set_header(self, name, value):
+        self.headers.update({name: value})
+
+    def set_basic_auth(self, username, password):
+        """
+        A wrapper to adding basic authentication to the Request
+        """
+        auth_str = "%s:%s" % (username, password)
+        auth_header = base64.b64encode(auth_str.encode("utf-8"))
+        self.set_header("Authorization", "Basic %s" % auth_header.decode("utf-8"))
+
+    def reset(self):
+        """
+        Reset and prepare for a new request.
+        """
+        if "Authorization" in self.headers:
+            self.headers = {"Authorization": self.headers['Authorization']}
+        else:
+            self.headers = {}
+
+    def prepare_request(self, endpoint, data, headers):
+        """
+        Prepare a request given an endpoint, data, and headers.
+
+        If data is provided, urllib makes the request a POST
+        """
+        # Always reset headers for new request.
+        self.reset()
+
+        # Preserve previously used auth token
+        headers = headers or self.headers
+
+        # The calling function can provide a full or partial url
+        if not endpoint.startswith("http"):
+            endpoint = "%s/%s" % (self.baseurl, endpoint)
+
+        # If we have data, the request will be POST
+        if data:
+            if not isinstance(data, str):
+                data = sjson.dump(data)
+            data = data.encode('ascii')
+
+        return Request(endpoint, data=data, headers=headers)
+
+    def issue_request(self, request, retry=True):
+        """
+        Given a prepared request, issue it.
+
+        If we get an error, die. If
+        there are times when we don't want to exit on error (but instead
+        disable using the monitoring service) we could add that here.
+        """
+        try:
+            response = urlopen(request)
+        except URLError as e:
+
+            # If we have an authorization request, retry once with auth
+            if hasattr(e, "code") and e.code == 401 and retry:
+                if self.authenticate_request(e):
+                    request = self.prepare_request(
+                        e.url,
+                        sjson.load(request.data.decode('utf-8')),
+                        self.headers
+                    )
+                    return self.issue_request(request, False)
+
+            # Handle permanent re-directs!
+            elif hasattr(e, "code") and e.code == 308:
+                location = e.headers.get('Location')
+
+                request_data = None
+                if request.data:
+                    request_data = sjson.load(request.data.decode('utf-8'))[0]
+
+                if location:
+                    request = self.prepare_request(
+                        location,
+                        request_data,
+                        self.headers
+                    )
+                    return self.issue_request(request, True)
+
+            # Otherwise, relay the message and exit on error
+            msg = ""
+            if hasattr(e, 'reason'):
+                msg = e.reason
+            elif hasattr(e, 'code'):
+                msg = e.code
+
+            # If we can parse the message, try it
+            try:
+                msg += "\n%s" % e.read().decode("utf8", 'ignore')
+            except Exception:
+                pass
+
+            if self.allow_fail:
+                tty.warning("Request to %s was not successful, but continuing." % e.url)
+                return
+
+            tty.die(msg)
+
+        return response
+
+    def do_request(self, endpoint, data=None, headers=None, url=None):
+        """
+        Do the actual request.
+
+        If data is provided, it is POST, otherwise GET.
+        If an entire URL is provided, don't use the endpoint
+        """
+        request = self.prepare_request(endpoint, data, headers)
+
+        # If we have an authorization error, we retry with
+        response = self.issue_request(request)
+
+        # A 200/201 response incidates success
+        if response.code in [200, 201]:
+            return sjson.load(response.read().decode('utf-8'))
+
+        return response
+
+    def authenticate_request(self, originalResponse):
+        """
+        Authenticate the request.
+
+        Given a response (an HTTPError 401), look for a Www-Authenticate
+        header to parse. We return True/False to indicate if the request
+        should be retried.
+        """
+        authHeaderRaw = originalResponse.headers.get("Www-Authenticate")
+        if not authHeaderRaw:
+            return False
+
+        # If we have a username and password, set basic auth automatically
+        if self.token and self.username:
+            self.set_basic_auth(self.username, self.token)
+
+        headers = deepcopy(self.headers)
+        if "Authorization" not in headers:
+            tty.error(
+                "This endpoint requires a token. Please set "
+                "client.set_basic_auth(username, password) first "
+                "or export them to the environment."
+            )
+            return False
+
+        # Prepare request to retry
+        h = parse_auth_header(authHeaderRaw)
+        headers.update({
+            "service": h.Service,
+            "Accept": "application/json",
+            "User-Agent": "spackmoncli"}
+        )
+
+        # Currently we don't set a scope (it defaults to build)
+        authResponse = self.do_request(h.Realm, headers=headers)
+
+        # Request the token
+        token = authResponse.get("token")
+        if not token:
+            return False
+
+        # Set the token to the original request and retry
+        self.headers.update({"Authorization": "Bearer %s" % token})
+        return True
+
+    # Functions correspond to endpoints
+    def service_info(self):
+        """
+        Get the service information endpoint
+        """
+        # Base endpoint provides service info
+        return self.do_request("")
+
+    def new_configuration(self, specs):
+        """
+        Given a list of specs, generate a new configuration for each.
+
+        We return a lookup of specs with their package names. This assumes
+        that we are only installing one version of each package. We aren't
+        starting or creating any builds, so we don't need a build environment.
+        """
+        configs = {}
+
+        # There should only be one spec generally (what cases would have >1?)
+        for spec in specs:
+            # Not sure if this is needed here, but I see it elsewhere
+            if spec.name in spack.repo.path or spec.virtual:
+                spec.concretize()
+
+            # Remove extra level of nesting
+            # This is the only place in Spack we still use full_hash, as `spack monitor`
+            # requires specs with full_hash-keyed dependencies.
+            as_dict = {"spec": spec.to_dict(hash=ht.full_hash)['spec'],
+                       "spack_version": self.spack_version}
+
+            if self.save_local:
+                filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
+                self.save(as_dict, filename)
+            else:
+                response = self.do_request("specs/new/", data=sjson.dump(as_dict))
+                configs[spec.package.name] = response.get('data', {})
+
+        return configs
+
+    def failed_concretization(self, specs):
+        """
+        Given a list of abstract specs, tell spack monitor concretization failed.
+        """
+        configs = {}
+
+        # There should only be one spec generally (what cases would have >1?)
+        for spec in specs:
+
+            # update the spec to have build hash indicating that cannot be built
+            meta = spec.to_dict()['spec']
+            nodes = []
+            for node in meta.get("nodes", []):
+                node["full_hash"] = "FAILED_CONCRETIZATION"
+                nodes.append(node)
+            meta['nodes'] = nodes
+
+            # We can't concretize / hash
+            as_dict = {"spec": meta,
+                       "spack_version": self.spack_version}
+
+            if self.save_local:
+                filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
+                self.save(as_dict, filename)
+            else:
+                response = self.do_request("specs/new/", data=sjson.dump(as_dict))
+                configs[spec.package.name] = response.get('data', {})
+
+        return configs
+
+    def new_build(self, spec):
+        """
+        Create a new build.
+
+        This means sending the hash of the spec to be built,
+        along with the build environment. These two sets of data uniquely can
+        identify the build, and we will add objects (the binaries produced) to
+        it. We return the build id to the calling client.
+        """
+        return self.get_build_id(spec, return_response=True)
+
+    def get_build_id(self, spec, return_response=False, spec_exists=True):
+        """
+        Retrieve a build id, either in the local cache, or query the server.
+        """
+        dag_hash = spec.dag_hash()
+        if dag_hash in self.build_ids:
+            return self.build_ids[dag_hash]
+
+        # Prepare build environment data (including spack version)
+        data = self.build_environment.copy()
+        data['full_hash'] = dag_hash
+
+        # If the build should be tagged, add it
+        if self.tags:
+            data['tags'] = self.tags
+
+        # If we allow the spec to not exist (meaning we create it) we need to
+        # include the full specfile here
+        if not spec_exists:
+            meta_dir = os.path.dirname(spec.package.install_log_path)
+            spec_file = os.path.join(meta_dir, "spec.json")
+            if os.path.exists(spec_file):
+                data['spec'] = sjson.load(read_file(spec_file))
+            else:
+                spec_file = os.path.join(meta_dir, "spec.yaml")
+                data['spec'] = syaml.load(read_file(spec_file))
+
+        if self.save_local:
+            return self.get_local_build_id(data, dag_hash, return_response)
+        return self.get_server_build_id(data, dag_hash, return_response)
+
+    def get_local_build_id(self, data, dag_hash, return_response):
+        """
+        Generate a local build id based on hashing the expected data
+        """
+        hasher = hashlib.md5()
+        hasher.update(str(data).encode('utf-8'))
+        bid = hasher.hexdigest()
+        filename = "build-metadata-%s.json" % bid
+        response = self.save(data, filename)
+        if return_response:
+            return response
+        return bid
+
+    def get_server_build_id(self, data, dag_hash, return_response=False):
+        """
+        Retrieve a build id from the spack monitor server
+        """
+        response = self.do_request("builds/new/", data=sjson.dump(data))
+
+        # Add the build id to the lookup
+        bid = self.build_ids[dag_hash] = response['data']['build']['build_id']
+        self.build_ids[dag_hash] = bid
+
+        # If the function is called directly, the user might want output
+        if return_response:
+            return response
+        return bid
+
+    def update_build(self, spec, status="SUCCESS"):
+        """
+        Update a build with a new status.
+
+        This typically updates the relevant package to indicate a
+        successful install. This endpoint can take a general status to update.
+        """
+        data = {"build_id": self.get_build_id(spec), "status": status}
+        if self.save_local:
+            filename = "build-%s-status.json" % data['build_id']
+            return self.save(data, filename)
+
+        return self.do_request("builds/update/", data=sjson.dump(data))
+
+    def fail_task(self, spec):
+        """Given a spec, mark it as failed. This means that Spack Monitor
+        marks all dependencies as cancelled, unless they are already successful
+        """
+        return self.update_build(spec, status="FAILED")
+
+    def cancel_task(self, spec):
+        """Given a spec, mark it as cancelled.
+        """
+        return self.update_build(spec, status="CANCELLED")
+
+    def send_analyze_metadata(self, pkg, metadata):
+        """
+        Send spack analyzer metadata to the spack monitor server.
+
+        Given a dictionary of analyzers (with key as analyzer type, and
+        value as the data) upload the analyzer output to Spack Monitor.
+        Spack Monitor should either have a known understanding of the analyzer,
+        or if not (the key is not recognized), it's assumed to be a dictionary
+        of objects/files, each with attributes to be updated. E.g.,
+
+        {"analyzer-name": {"object-file-path": {"feature1": "value1"}}}
+        """
+        # Prepare build environment data (including spack version)
+        # Since the build might not have been generated, we include the spec
+        data = {"build_id": self.get_build_id(pkg.spec, spec_exists=False),
+                "metadata": metadata}
+        return self.do_request("analyze/builds/", data=sjson.dump(data))
+
+    def send_phase(self, pkg, phase_name, phase_output_file, status):
+        """
+        Send the result of a phase during install.
+
+        Given a package, phase name, and status, update the monitor endpoint
+        to alert of the status of the stage. This includes parsing the package
+        metadata folder for phase output and error files
+        """
+        data = {"build_id": self.get_build_id(pkg.spec)}
+
+        # Send output specific to the phase (does this include error?)
+        data.update({"status": status,
+                     "output": read_file(phase_output_file),
+                     "phase_name": phase_name})
+
+        if self.save_local:
+            filename = "build-%s-phase-%s.json" % (data['build_id'], phase_name)
+            return self.save(data, filename)
+
+        return self.do_request("builds/phases/update/", data=sjson.dump(data))
+
+    def upload_specfile(self, filename):
+        """
+        Upload a spec file to the spack monitor server.
+
+        Given a spec file (must be json) upload to the UploadSpec endpoint.
+        This function is not used in the spack to server workflow, but could
+        be useful is Spack Monitor is intended to send an already generated
+        file in some kind of separate analysis. For the environment file, we
+        parse out SPACK_* variables to include.
+        """
+        # We load as json just to validate it
+        spec = read_json(filename)
+        data = {"spec": spec, "spack_verison": self.spack_version}
+
+        if self.save_local:
+            filename = "spec-%s-%s.json" % (spec.name, spec.version)
+            return self.save(data, filename)
+
+        return self.do_request("specs/new/", data=sjson.dump(data))
+
+    def iter_read(self, pattern):
+        """
+        A helper to read json from a directory glob and return it loaded.
+        """
+        for filename in glob(pattern):
+            basename = os.path.basename(filename)
+            tty.info("Reading %s" % basename)
+            yield read_json(filename)
+
+    def upload_local_save(self, dirname):
+        """
+        Upload results from a locally saved directory to spack monitor.
+
+        The general workflow will first include an install with save local:
+        spack install --monitor --monitor-save-local
+        And then a request to upload the root or specific directory.
+        spack upload monitor ~/.spack/reports/monitor/<date>/
+        """
+        dirname = os.path.abspath(dirname)
+        if not os.path.exists(dirname):
+            tty.die("%s does not exist." % dirname)
+
+        # We can't be sure the level of nesting the user has provided
+        # So we walk recursively through and look for build metadata
+        for subdir, dirs, files in os.walk(dirname):
+            root = os.path.join(dirname, subdir)
+
+            # A metadata file signals a monitor export
+            metadata = glob("%s%sbuild-metadata*" % (root, os.sep))
+            if not metadata or not files or not root or not subdir:
+                continue
+            self._upload_local_save(root)
+        tty.info("Upload complete")
+
+    def _upload_local_save(self, dirname):
+        """
+        Given a found metadata file, upload results to spack monitor.
+        """
+        # First find all the specs
+        for spec in self.iter_read("%s%sspec*" % (dirname, os.sep)):
+            self.do_request("specs/new/", data=sjson.dump(spec))
+
+        # Load build metadata to generate an id
+        metadata = glob("%s%sbuild-metadata*" % (dirname, os.sep))
+        if not metadata:
+            tty.die("Build metadata file(s) missing in %s" % dirname)
+
+        # Create a build_id lookup based on hash
+        hashes = {}
+        for metafile in metadata:
+            data = read_json(metafile)
+            build = self.do_request("builds/new/", data=sjson.dump(data))
+            localhash = os.path.basename(metafile).replace(".json", "")
+            hashes[localhash.replace('build-metadata-', "")] = build
+
+        # Next upload build phases
+        for phase in self.iter_read("%s%sbuild*phase*" % (dirname, os.sep)):
+            build_id = hashes[phase['build_id']]['data']['build']['build_id']
+            phase['build_id'] = build_id
+            self.do_request("builds/phases/update/", data=sjson.dump(phase))
+
+        # Next find the status objects
+        for status in self.iter_read("%s%sbuild*status*" % (dirname, os.sep)):
+            build_id = hashes[status['build_id']]['data']['build']['build_id']
+            status['build_id'] = build_id
+            self.do_request("builds/update/", data=sjson.dump(status))
+
+
+# Helper functions
+
+def parse_auth_header(authHeaderRaw):
+    """
+    Parse an authentication header into relevant pieces
+    """
+    regex = re.compile('([a-zA-z]+)="(.+?)"')
+    matches = regex.findall(authHeaderRaw)
+    lookup = dict()
+    for match in matches:
+        lookup[match[0]] = match[1]
+    return authHeader(lookup)
+
+
+class authHeader:
+    def __init__(self, lookup):
+        """Given a dictionary of values, match them to class attributes"""
+        for key in lookup:
+            if key in ["realm", "service", "scope"]:
+                setattr(self, key.capitalize(), lookup[key])
+
+
+def read_file(filename):
+    """
+    Read a file, if it exists. Otherwise return None
+    """
+    if not os.path.exists(filename):
+        return
+    with open(filename, 'r') as fd:
+        content = fd.read()
+    return content
+
+
+def write_file(content, filename):
+    """
+    Write content to file
+    """
+    with open(filename, 'w') as fd:
+        fd.writelines(content)
+    return content
+
+
+def write_json(obj, filename):
+    """
+    Write a json file, if the output directory exists.
+    """
+    if not os.path.exists(os.path.dirname(filename)):
+        return
+    return write_file(sjson.dump(obj), filename)
+
+
+def read_json(filename):
+    """
+    Read a file and load into json, if it exists. Otherwise return None.
+    """
+    if not os.path.exists(filename):
+        return
+    return sjson.load(read_file(filename))
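Note: a sketch of how the new module is driven, following its own docstrings. The host, credentials, and `spec` value are placeholder assumptions; in real builds the client is invoked through hooks in spack.hooks.monitor rather than called directly:

    import spack.monitor

    # Requires SPACKMON_TOKEN and SPACKMON_USER in the environment unless
    # save_local=True, which writes JSON records under monitor_dir instead.
    client = spack.monitor.get_client(host="http://127.0.0.1", prefix="ms1",
                                      allow_fail=True)

    # spec is assumed to be a concrete Spec for a package being installed.
    client.new_configuration([spec])
    client.new_build(spec)
    client.update_build(spec, status="SUCCESS")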
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform as py_platform
 import re
-from subprocess import check_output

 from spack.version import Version

@@ -52,17 +51,6 @@ def __init__(self):

            if 'ubuntu' in distname:
                version = '.'.join(version[0:2])
-            # openSUSE Tumbleweed is a rolling release which can change
-            # more than once in a week, so set version to tumbleweed$GLIBVERS
-            elif 'opensuse-tumbleweed' in distname or 'opensusetumbleweed' in distname:
-                distname = 'opensuse'
-                output = check_output(["ldd", "--version"]).decode()
-                libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
-                if len(libcvers) == 1:
-                    version = 'tumbleweed' + libcvers[0]
-                else:
-                    version = 'tumbleweed' + version[0]
-
            else:
                version = version[0]

@@ -82,4 +82,4 @@
     conditional,
     disjoint_sets,
 )
 from spack.version import Version, ver
@@ -33,7 +33,7 @@

 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
-from llnl.util.lang import classproperty, match_predicate, memoized, nullcontext
+from llnl.util.lang import memoized, nullcontext
 from llnl.util.link_tree import LinkTree

 import spack.compilers
@@ -50,7 +50,6 @@
 import spack.multimethod
 import spack.paths
 import spack.repo
-import spack.spec
 import spack.store
 import spack.url
 import spack.util.environment
@@ -63,7 +62,7 @@
 from spack.util.executable import ProcessError, which
 from spack.util.package_hash import package_hash
 from spack.util.prefix import Prefix
-from spack.version import GitVersion, Version, VersionBase
+from spack.version import Version

 if sys.version_info[0] >= 3:
     FLAG_HANDLER_RETURN_TYPE = Tuple[
@@ -208,8 +207,8 @@ def __init__(cls, name, bases, attr_dict):
        # If a package has the executables or libraries attribute then it's
        # assumed to be detectable
        if hasattr(cls, 'executables') or hasattr(cls, 'libraries'):
-            @classmethod
-            def platform_executables(cls):
+            @property
+            def platform_executables(self):
                def to_windows_exe(exe):
                    if exe.endswith('$'):
                        exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
@@ -217,8 +216,8 @@ def to_windows_exe(exe):
                        exe += spack.util.path.win_exe_ext()
                    return exe
                plat_exe = []
-                if hasattr(cls, 'executables'):
-                    for exe in cls.executables:
+                if hasattr(self, 'executables'):
+                    for exe in self.executables:
                        if sys.platform == 'win32':
                            exe = to_windows_exe(exe)
                        plat_exe.append(exe)
@@ -398,6 +397,63 @@ def _decorator(func):
            return func
        return _decorator

+    @property
+    def package_dir(self):
+        """Directory where the package.py file lives."""
+        return os.path.abspath(os.path.dirname(self.module.__file__))
+
+    @property
+    def module(self):
+        """Module object (not just the name) that this package is defined in.
+
+        We use this to add variables to package modules. This makes
+        install() methods easier to write (e.g., can call configure())
+        """
+        return __import__(self.__module__, fromlist=[self.__name__])
+
+    @property
+    def namespace(self):
+        """Spack namespace for the package, which identifies its repo."""
+        return spack.repo.namespace_from_fullname(self.__module__)
+
+    @property
+    def fullname(self):
+        """Name of this package, including the namespace"""
+        return '%s.%s' % (self.namespace, self.name)
+
+    @property
+    def fullnames(self):
+        """
+        Fullnames for this package and any packages from which it inherits.
+        """
+        fullnames = []
+        for cls in inspect.getmro(self):
+            namespace = getattr(cls, 'namespace', None)
+            if namespace:
+                fullnames.append('%s.%s' % (namespace, self.name))
+            if namespace == 'builtin':
+                # builtin packages cannot inherit from other repos
+                break
+        return fullnames
+
+    @property
+    def name(self):
+        """The name of this package.
+
+        The name of a package is the name of its Python module, without
+        the containing module names.
+        """
+        if self._name is None:
+            self._name = self.module.__name__
+            if '.' in self._name:
+                self._name = self._name[self._name.rindex('.') + 1:]
+        return self._name
+
+    @property
+    def global_license_dir(self):
+        """Returns the directory where license files for all packages are stored."""
+        return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
+
+
 def run_before(*phases):
     """Registers a method of a package to be run before a given phase"""
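Note: on the other branch these plus-side definitions are collapsed into class-level attributes via `classproperty` from llnl.util.lang (see the `@@ -847,60` hunk below). A minimal illustration of that descriptor pattern, not the actual llnl implementation:

    class classproperty(object):
        """Non-data descriptor that evaluates the getter against the class."""
        def __init__(self, fget):
            self.fget = fget

        def __get__(self, instance, owner):
            return self.fget(owner)


    class Demo(object):
        @classproperty
        def fullname(cls):
            return 'builtin.%s' % cls.__name__.lower()


    # Readable on the class and on instances alike:
    assert Demo.fullname == Demo().fullname == 'builtin.demo'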
@@ -750,8 +806,7 @@ def __init__(self, spec):
        self._fetch_time = 0.0

        if self.is_extension:
-            pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
-            pkg_cls(self.extendee_spec)._check_extendable()
+            spack.repo.get(self.extendee_spec)._check_extendable()

        super(PackageBase, self).__init__()

@@ -847,60 +902,60 @@ def possible_dependencies(

        return visited

-    @classproperty
-    def package_dir(cls):
+    def enum_constraints(self, visited=None):
+        """Return transitive dependency constraints on this package."""
+        if visited is None:
+            visited = set()
+        visited.add(self.name)
+
+        names = []
+        clauses = []
+
+        for name in self.dependencies:
+            if name not in visited and not spack.spec.Spec(name).virtual:
+                pkg = spack.repo.get(name)
+                dvis, dnames, dclauses = pkg.enum_constraints(visited)
+                visited |= dvis
+                names.extend(dnames)
+                clauses.extend(dclauses)
+
+        return visited
+
+    # package_dir and module are *class* properties (see PackageMeta),
+    # but to make them work on instances we need these defs as well.
+    @property
+    def package_dir(self):
        """Directory where the package.py file lives."""
-        return os.path.abspath(os.path.dirname(cls.module.__file__))
+        return type(self).package_dir

-    @classproperty
-    def module(cls):
-        """Module object (not just the name) that this package is defined in.
-
-        We use this to add variables to package modules. This makes
-        install() methods easier to write (e.g., can call configure())
-        """
-        return __import__(cls.__module__, fromlist=[cls.__name__])
-
-    @classproperty
-    def namespace(cls):
+    @property
+    def module(self):
+        """Module object that this package is defined in."""
+        return type(self).module
+
+    @property
+    def namespace(self):
        """Spack namespace for the package, which identifies its repo."""
-        return spack.repo.namespace_from_fullname(cls.__module__)
+        return type(self).namespace

-    @classproperty
-    def fullname(cls):
-        """Name of this package, including the namespace"""
-        return '%s.%s' % (cls.namespace, cls.name)
+    @property
+    def fullname(self):
+        """Name of this package, including namespace: namespace.name."""
+        return type(self).fullname

-    @classproperty
-    def fullnames(cls):
-        """Fullnames for this package and any packages from which it inherits."""
-        fullnames = []
-        for cls in inspect.getmro(cls):
-            namespace = getattr(cls, 'namespace', None)
-            if namespace:
-                fullnames.append('%s.%s' % (namespace, cls.name))
-            if namespace == 'builtin':
-                # builtin packages cannot inherit from other repos
-                break
-        return fullnames
+    @property
+    def fullnames(self):
+        return type(self).fullnames

-    @classproperty
-    def name(cls):
-        """The name of this package.
-
-        The name of a package is the name of its Python module, without
-        the containing module names.
-        """
-        if cls._name is None:
-            cls._name = cls.module.__name__
-            if '.' in cls._name:
-                cls._name = cls._name[cls._name.rindex('.') + 1:]
-        return cls._name
-
-    @classproperty
-    def global_license_dir(cls):
-        """Returns the directory where license files for all packages are stored."""
-        return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
+    @property
+    def name(self):
+        """Name of this package (the module without parent modules)."""
+        return type(self).name
+
+    @property
+    def global_license_dir(self):
+        """Returns the directory where global license files are stored."""
+        return type(self).global_license_dir

    @property
    def global_license_file(self):
@@ -918,9 +973,8 @@ def version(self):
                             " does not have a concrete version.")
        return self.spec.versions[0]

-    @classmethod
    @memoized
-    def version_urls(cls):
+    def version_urls(self):
        """OrderedDict of explicitly defined URLs for versions of this package.

        Return:
@@ -932,7 +986,7 @@ def version_urls(cls):
           if a package only defines ``url`` at the top level.
        """
        version_urls = collections.OrderedDict()
-        for v, args in sorted(cls.versions.items()):
+        for v, args in sorted(self.versions.items()):
            if 'url' in args:
                version_urls[v] = args['url']
        return version_urls
@@ -972,12 +1026,14 @@ def url_for_version(self, version):
        """
        return self._implement_all_urls_for_version(version)[0]

-    def all_urls_for_version(self, version):
-        """Return all URLs derived from version_urls(), url, urls, and
+    def all_urls_for_version(self, version, custom_url_for_version=None):
+        """Returns all URLs derived from version_urls(), url, urls, and
        list_url (if it contains a version) in a package in that order.

-        Args:
-            version (spack.version.Version): the version for which a URL is sought
+        version: class Version
+            The version for which a URL is sought.
+
+            See Class Version (version.py)
        """
        uf = None
        if type(self).url_for_version != Package.url_for_version:
@@ -985,7 +1041,7 @@ def all_urls_for_version(self, version):
            return self._implement_all_urls_for_version(version, uf)

    def _implement_all_urls_for_version(self, version, custom_url_for_version=None):
-        if not isinstance(version, VersionBase):
+        if not isinstance(version, Version):
            version = Version(version)

        urls = []
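Note: `version_urls` is just an ordered filter over the data collected by `version()` directives, and `all_urls_for_version` falls back to extrapolating from `url`/`urls` when no per-version URL is defined. A self-contained sketch of the lookup with hypothetical data:

    import collections

    # Hypothetical contents of a package's `versions` attribute.
    versions = {'1.2': {'url': 'https://example.org/pkg-1.2.tar.gz'},
                '1.0': {'sha256': 'abc123'}}

    version_urls = collections.OrderedDict(
        (v, args['url']) for v, args in sorted(versions.items())
        if 'url' in args)
    assert list(version_urls) == ['1.2']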
@@ -1256,7 +1312,6 @@ def _make_fetcher(self):
        resources = self._get_needed_resources()
        for resource in resources:
            fetcher.append(resource.fetcher)
-        fetcher.set_package(self)
        return fetcher

    @property
@@ -1271,10 +1326,8 @@ def fetcher(self):
    @fetcher.setter
    def fetcher(self, f):
        self._fetcher = f
-        self._fetcher.set_package(self)

-    @classmethod
-    def dependencies_of_type(cls, *deptypes):
+    def dependencies_of_type(self, *deptypes):
        """Get dependencies that can possibly have these deptypes.

        This analyzes the package and determines which dependencies *can*
@@ -1284,8 +1337,8 @@ def dependencies_of_type(cls, *deptypes):
        run dependency in another.
        """
        return dict(
-            (name, conds) for name, conds in cls.dependencies.items()
-            if any(dt in cls.dependencies[name][cond].type
+            (name, conds) for name, conds in self.dependencies.items()
+            if any(dt in self.dependencies[name][cond].type
                   for cond in conds for dt in deptypes))

    @property
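Note: `dependencies_of_type` filters the directive-built dependency map by deptype; only the receiver differs between branches (`cls` on the minus side, `self` on the plus side). A self-contained sketch of the same filter over a hypothetical mapping:

    class Dep(object):  # hypothetical stand-in for spack.dependency.Dependency
        def __init__(self, type):
            self.type = type

    dependencies = {'cmake': {'@:': Dep({'build'})},
                    'zlib': {'@:': Dep({'build', 'link'})}}

    def dependencies_of_type(dependencies, *deptypes):
        return dict((name, conds) for name, conds in dependencies.items()
                    if any(dt in dependencies[name][cond].type
                           for cond in conds for dt in deptypes))

    assert set(dependencies_of_type(dependencies, 'link')) == {'zlib'}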
@@ -1316,8 +1369,8 @@ def extendee_spec(self):
        # TODO: do something sane here with more than one extendee
        # If it's not concrete, then return the spec from the
        # extends() directive since that is all we know so far.
-        spec_str, kwargs = next(iter(self.extendees.items()))
-        return spack.spec.Spec(spec_str)
+        spec, kwargs = next(iter(self.extendees.items()))
+        return spec

    @property
    def extendee_args(self):
@@ -1392,10 +1445,6 @@ def prefix(self):
        """Get the prefix into which this package should be installed."""
        return self.spec.prefix

-    @property
-    def home(self):
-        return self.prefix
-
    @property  # type: ignore[misc]
    @memoized
    def compiler(self):
@@ -1450,7 +1499,7 @@ def do_fetch(self, mirror_only=False):
        checksum = spack.config.get('config:checksum')
        fetch = self.stage.managed_by_spack
        if checksum and fetch and (self.version not in self.versions) \
-                and (not isinstance(self.version, GitVersion)):
+                and (not self.version.is_commit):
            tty.warn("There is no checksum on file to fetch %s safely." %
                     self.spec.cformat('{name}{@version}'))

@@ -1672,7 +1721,7 @@ def content_hash(self, content=None):
        # referenced by branch name rather than tag or commit ID.
        env = spack.environment.active_environment()
        from_local_sources = env and env.is_develop(self.spec)
-        if self.has_code and not self.spec.external and not from_local_sources:
+        if not self.spec.external and not from_local_sources:
            message = 'Missing a source id for {s.name}@{s.version}'
            tty.warn(message.format(s=self))
            hash_content.append(''.encode('utf-8'))
@@ -2129,8 +2178,10 @@ def check_paths(path_list, filetype, predicate):
         check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
         check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)

-        ignore_file = match_predicate(spack.store.layout.hidden_file_regexes)
-        if all(map(ignore_file, os.listdir(self.prefix))):
+        installed = set(os.listdir(self.prefix))
+        installed.difference_update(
+            spack.store.layout.hidden_file_regexes)
+        if not installed:
             raise InstallError(
                 "Install failed for %s. Nothing was installed!" % self.name)

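Aside on this hunk: the removed check treats `hidden_file_regexes` as patterns (via `match_predicate`), while the restored check removes them from the directory listing as literal names. A standalone sketch of the two emptiness tests, with a hypothetical `hidden` list standing in for `spack.store.layout.hidden_file_regexes`:

    import os
    import re

    hidden = [r'^\.spack']  # hypothetical stand-in for layout.hidden_file_regexes

    def empty_by_pattern(prefix):
        # pattern-based: every entry matching any regex counts as hidden
        return all(any(re.search(p, f) for p in hidden) for f in os.listdir(prefix))

    def empty_by_literal_name(prefix):
        # name-based: only entries literally equal to a pattern string are ignored
        return not (set(os.listdir(prefix)) - set(hidden))

The two differ whenever an installed file merely matches a pattern without being exactly equal to it.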
@@ -2652,15 +2703,14 @@ def do_clean(self):

         self.stage.destroy()

-    @classmethod
-    def format_doc(cls, **kwargs):
+    def format_doc(self, **kwargs):
         """Wrap doc string at 72 characters and format nicely"""
         indent = kwargs.get('indent', 0)

-        if not cls.__doc__:
+        if not self.__doc__:
             return ""

-        doc = re.sub(r'\s+', ' ', cls.__doc__)
+        doc = re.sub(r'\s+', ' ', self.__doc__)
         lines = textwrap.wrap(doc, 72)
         results = six.StringIO()
         for line in lines:

@@ -138,8 +138,8 @@ def has_preferred_targets(cls, pkg_name):
     @classmethod
     def preferred_variants(cls, pkg_name):
         """Return a VariantMap of preferred variants/values for a spec."""
-        for pkg_cls in (pkg_name, 'all'):
-            variants = spack.config.get('packages').get(pkg_cls, {}).get(
+        for pkg in (pkg_name, 'all'):
+            variants = spack.config.get('packages').get(pkg, {}).get(
                 'variants', '')
             if variants:
                 break
@@ -149,26 +149,21 @@ def preferred_variants(cls, pkg_name):
             variants = " ".join(variants)

         # Only return variants that are actually supported by the package
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg = spack.repo.get(pkg_name)
         spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
         return dict((name, variant) for name, variant in spec.variants.items()
-                    if name in pkg_cls.variants)
+                    if name in pkg.variants)


 def spec_externals(spec):
     """Return a list of external specs (w/external directory path filled in),
-    one for each known external installation.
-    """
+    one for each known external installation."""
     # break circular import.
     from spack.util.module_cmd import path_from_modules  # NOQA: ignore=F401

-    def _package(maybe_abstract_spec):
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
-        return pkg_cls(maybe_abstract_spec)
-
     allpkgs = spack.config.get('packages')
     names = set([spec.name])
-    names |= set(vspec.name for vspec in _package(spec).virtuals_provided)
+    names |= set(vspec.name for vspec in spec.package.virtuals_provided)

     external_specs = []
     for name in names:
@@ -195,21 +190,17 @@ def _package(maybe_abstract_spec):


 def is_spec_buildable(spec):
-    """Return true if the spec is configured as buildable"""
+    """Return true if the spec pkgspec is configured as buildable"""

     allpkgs = spack.config.get('packages')
     all_buildable = allpkgs.get('all', {}).get('buildable', True)

-    def _package(s):
-        pkg_cls = spack.repo.path.get_pkg_class(s.name)
-        return pkg_cls(s)
-
     # Get the list of names for which all_buildable is overridden
     reverse = [name for name, entry in allpkgs.items()
                if entry.get('buildable', all_buildable) != all_buildable]
     # Does this spec override all_buildable
     spec_reversed = (spec.name in reverse or
-                     any(_package(spec).provides(name) for name in reverse))
+                     any(spec.package.provides(name) for name in reverse))
     return not all_buildable if spec_reversed else all_buildable

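Both hunks above replace a local `_package` helper (explicit class lookup plus instantiation) with the `spec.package` accessor. A rough sketch of the equivalence being assumed, runnable only from a Spack checkout of the era of this diff (`zlib` is just an example name; newer Spack versions may require a concrete spec for `spec.package`):

    import spack.repo
    import spack.spec

    spec = spack.spec.Spec('zlib')

    # explicit two-step lookup, as in the removed helpers
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg_a = pkg_cls(spec)

    # accessor style, as in the restored code
    pkg_b = spec.package

    assert type(pkg_a) is type(pkg_b)  # same package class either way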
@@ -284,11 +284,11 @@ def from_dict(dictionary):
         owner = dictionary.get('owner')
         if 'owner' not in dictionary:
             raise ValueError('Invalid patch dictionary: %s' % dictionary)
-        pkg_cls = spack.repo.path.get_pkg_class(owner)
+        pkg = spack.repo.get(owner)

         if 'url' in dictionary:
             return UrlPatch(
-                pkg_cls,
+                pkg,
                 dictionary['url'],
                 dictionary['level'],
                 dictionary['working_dir'],
@@ -297,7 +297,7 @@ def from_dict(dictionary):

         elif 'relative_path' in dictionary:
             patch = FilePatch(
-                pkg_cls,
+                pkg,
                 dictionary['relative_path'],
                 dictionary['level'],
                 dictionary['working_dir'])
@@ -404,8 +404,8 @@ def update_package(self, pkg_fullname):
             del self.index[sha256]

         # update the index with per-package patch indexes
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_fullname)
-        partial_index = self._index_patches(pkg_cls)
+        pkg = spack.repo.get(pkg_fullname)
+        partial_index = self._index_patches(pkg)
         for sha256, package_to_patch in partial_index.items():
             p2p = self.index.setdefault(sha256, {})
             p2p.update(package_to_patch)
@@ -432,10 +432,10 @@ def _index_patches(pkg_class):
         for cond, dependency in conditions.items():
             for pcond, patch_list in dependency.patches.items():
                 for patch in patch_list:
-                    dspec_cls = spack.repo.path.get_pkg_class(dependency.spec.name)
+                    dspec = spack.repo.get(dependency.spec.name)
                     patch_dict = patch.to_dict()
                     patch_dict.pop('sha256')  # save some space
-                    index[patch.sha256] = {dspec_cls.fullname: patch_dict}
+                    index[patch.sha256] = {dspec.fullname: patch_dict}

     return index

@@ -433,9 +433,8 @@ def needs_binary_relocation(m_type, m_subtype):
         m_type (str): MIME type of the file
         m_subtype (str): MIME subtype of the file
     """
-    subtypes = ('x-executable', 'x-sharedlib', 'x-mach-binary', 'x-pie-executable')
     if m_type == 'application':
-        if m_subtype in subtypes:
+        if m_subtype in ('x-executable', 'x-sharedlib', 'x-mach-binary'):
             return True
     return False
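One practical effect of the narrowed tuple above: position-independent executables (`x-pie-executable`) stop qualifying for relocation on this side of the compare. A quick self-contained check using the restored version of the function:

    def needs_binary_relocation(m_type, m_subtype):
        # restored version from the hunk above
        if m_type == 'application':
            if m_subtype in ('x-executable', 'x-sharedlib', 'x-mach-binary'):
                return True
        return False

    print(needs_binary_relocation('application', 'x-sharedlib'))       # True
    print(needs_binary_relocation('application', 'x-pie-executable'))  # False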
@@ -470,47 +469,6 @@ def _replace_prefix_text(filename, compiled_prefixes):
             f.truncate()


-def _replace_prefix_bin(filename, byte_prefixes):
-    """Replace all the occurrences of the old install prefix with a
-    new install prefix in binary files.
-
-    The new install prefix is prefixed with ``os.sep`` until the
-    lengths of the prefixes are the same.
-
-    Args:
-        filename (str): target binary file
-        byte_prefixes (OrderedDict): OrderedDictionary where the keys are
-            precompiled regex of the old prefixes and the values are the new
-            prefixes (uft-8 encoded)
-    """
-
-    with open(filename, 'rb+') as f:
-        data = f.read()
-        f.seek(0)
-        for orig_bytes, new_bytes in byte_prefixes.items():
-            original_data_len = len(data)
-            # Skip this hassle if not found
-            if orig_bytes not in data:
-                continue
-            # We only care about this problem if we are about to replace
-            length_compatible = len(new_bytes) <= len(orig_bytes)
-            if not length_compatible:
-                tty.debug('Binary failing to relocate is %s' % filename)
-                raise BinaryTextReplaceError(orig_bytes, new_bytes)
-            pad_length = len(orig_bytes) - len(new_bytes)
-            padding = os.sep * pad_length
-            padding = padding.encode('utf-8')
-            data = data.replace(orig_bytes, new_bytes + padding)
-            # Really needs to be the same length
-            if not len(data) == original_data_len:
-                print('Length of pad:', pad_length, 'should be', len(padding))
-                print(new_bytes, 'was to replace', orig_bytes)
-                raise BinaryStringReplacementError(
-                    filename, original_data_len, len(data))
-            f.write(data)
-            f.truncate()
-
-
 def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
                             prefix_to_prefix, rel, old_prefix, new_prefix):
     """
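The deleted `_replace_prefix_bin` relies on one invariant: string tables in a compiled binary cannot change size, so the shorter new prefix is padded with `os.sep` bytes until the replacement is exactly as long as the original. A standalone sketch of that padding trick on a bytes buffer (the byte strings here are hypothetical):

    import os

    def replace_keep_length(data, old, new):
        """Replace old with new, padding new with os.sep so len(data) is preserved."""
        if len(new) > len(old):
            raise ValueError('new prefix must not be longer than the old one')
        padding = (os.sep * (len(old) - len(new))).encode('utf-8')
        out = data.replace(old, new + padding)
        assert len(out) == len(data)  # the binary's layout is unchanged
        return out

    blob = b'/old/prefix/lib\x00more-bytes'
    print(replace_keep_length(blob, b'/old/prefix', b'/new'))

Because the padding lands in the middle of the stored path, the padded result only stays a valid path as long as runs of extra separators are harmless, which is why the null-terminator-aware rewrite later in this diff scans to the end of each string instead.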
@@ -818,49 +776,6 @@ def relocate_text(files, prefixes, concurrency=32):
         tp.join()


-def relocate_text_bin(binaries, prefixes, concurrency=32):
-    """Replace null terminated path strings hard coded into binaries.
-
-    The new install prefix must be shorter than the original one.
-
-    Args:
-        binaries (list): binaries to be relocated
-        prefixes (OrderedDict): String prefixes which need to be changed.
-        concurrency (int): Desired degree of parallelism.
-
-    Raises:
-        BinaryTextReplaceError: when the new path is longer than the old path
-    """
-    byte_prefixes = collections.OrderedDict({})
-
-    for orig_prefix, new_prefix in prefixes.items():
-        if orig_prefix != new_prefix:
-            if isinstance(orig_prefix, bytes):
-                orig_bytes = orig_prefix
-            else:
-                orig_bytes = orig_prefix.encode('utf-8')
-            if isinstance(new_prefix, bytes):
-                new_bytes = new_prefix
-            else:
-                new_bytes = new_prefix.encode('utf-8')
-            byte_prefixes[orig_bytes] = new_bytes
-
-    # Do relocations on text in binaries that refers to the install tree
-    # multiprocesing.ThreadPool.map requires single argument
-    args = []
-
-    for binary in binaries:
-        args.append((binary, byte_prefixes))
-
-    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-
-    try:
-        tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
-    finally:
-        tp.terminate()
-        tp.join()
-
-
 def is_relocatable(spec):
     """Returns True if an installed spec is relocatable.

@@ -1127,3 +1042,120 @@ def fixup_macos_rpaths(spec):
         ))
     else:
         tty.debug('No rpath fixup needed for ' + specname)
+
+
+def compute_indices(filename, paths_to_relocate):
+    """
+    Compute the indices in filename at which each of paths_to_relocate occurs.
+
+    Arguments:
+        filename (str): file to compute indices for
+        paths_to_relocate (List[str]): paths to find indices of
+    Returns:
+        Dict
+    """
+    with open(filename, 'rb') as f:
+        contents = f.read()
+
+    substring_prefix = os.path.commonprefix(paths_to_relocate).encode('utf-8')
+
+    indices = {}
+    index = 0
+    max_length = max(len(path) for path in paths_to_relocate)
+    while True:
+        try:
+            # We search for the smallest substring of all paths we relocate
+            # In practice, this is the spack install root, and we relocate
+            # prefixes in the root and the root itself
+            index = contents.index(substring_prefix, index)
+        except ValueError:
+            # The string isn't found in the rest of the binary
+            break
+        else:
+            # only copy the smallest portion of the binary for comparisons
+            substring_to_check = contents[index:index + max_length]
+            for path in paths_to_relocate:
+                # We guarantee any substring in the list comes after any superstring
+                p = path.encode('utf-8')
+                if substring_to_check.startswith(p):
+                    indices[index] = str(path)
+                    index += len(path)
+                    break
+            else:
+                index += 1
+    return indices
+
+
+def _relocate_binary_text(filename, offsets, prefix_to_prefix):
+    """
+    Relocate the text of a single binary file, given the offsets at which the
+    replacements need to be made
+
+    Arguments:
+        filename (str): file to modify
+        offsets (Dict[int, str]): locations of the strings to replace
+        prefix_to_prefix (Dict[str, str]): strings to replace and their replacements
+    """
+    with open(filename, 'rb+') as f:
+        for index, prefix in offsets.items():
+            replacement = prefix_to_prefix[prefix].encode('utf-8')
+            if len(replacement) > len(prefix):
+                raise BinaryTextReplaceError(prefix, replacement)
+
+            # read forward until we find the end of the string including
+            # the prefix and compute the replacement as we go
+            f.seek(index + len(prefix))
+            c = f.read(1)
+            while c not in (None, b'\x00'):
+                replacement += c
+                c = f.read(1)
+
+            # seek back to the index position and write the replacement in
+            # and add null-terminator
+            f.seek(index)
+            f.write(replacement)
+            f.write(b'\x00')
+
+
+def relocate_text_bin(
+    files_to_relocate, prefix_to_prefix, offsets=None,
+    relative_root=None, concurrency=32
+):
+    """
+    For each file given, replace all keys in the given translation dict with
+    the associated values. Optionally executes using precomputed memoized offsets
+    for the substitutions.
+
+    Arguments:
+        files_to_relocate (List[str]): The files to modify
+        prefix_to_prefix (Dict[str, str]): keys are strings to replace, values are
+            replacements
+        offsets (Dict[str, Dict[int, str]]): (optional) Mapping from relative filenames to
+            a mapping from indices to strings to replace found at each index
+        relative_root (str): (optional) prefix for relative paths in offsets
+    """
+    # defaults to the common prefix of all input files
+    rel_root = relative_root or os.path.commonprefix(files_to_relocate)
+
+    if offsets is None:
+        offsets = {}
+        for filename in files_to_relocate:
+            indices = compute_indices(
+                filename,
+                list(prefix_to_prefix.keys()),
+            )
+            relpath = os.path.relpath(filename, rel_root)
+            offsets[relpath] = indices
+
+    args = [
+        (filename, offsets[os.path.relpath(filename, rel_root)], prefix_to_prefix)
+        for filename in files_to_relocate
+    ]
+
+    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
+
+    try:
+        tp.map(llnl.util.lang.star(_relocate_binary_text), args)
+    finally:
+        tp.terminate()
+        tp.join()

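The new API above splits scanning (`compute_indices`) from patching (`_relocate_binary_text`), so offsets can be computed once and reused, for example when the same buildcache is installed to many roots. A hypothetical call, assuming these functions are importable from `spack.relocate` (all paths here are placeholders):

    import os
    from spack.relocate import compute_indices, relocate_text_bin

    prefix_to_prefix = {'/old/spack/root/pkg': '/new/root/pkg'}  # hypothetical
    files = ['/new/root/pkg/bin/tool']
    root = '/new/root/pkg'

    # One-shot: offsets are computed internally
    relocate_text_bin(files, prefix_to_prefix)

    # Or precompute offsets keyed by path relative to root, then reuse them
    offsets = {
        os.path.relpath(f, root): compute_indices(f, list(prefix_to_prefix))
        for f in files
    }
    relocate_text_bin(files, prefix_to_prefix, offsets=offsets, relative_root=root)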
@@ -862,6 +862,10 @@ def packages_with_tags(self, *tags):
             r |= set(repo.packages_with_tags(*tags))
         return sorted(r)

+    def all_packages(self):
+        for name in self.all_package_names():
+            yield self.get(name)
+
     def all_package_classes(self):
         for name in self.all_package_names():
             yield self.get_pkg_class(name)
@@ -905,9 +909,7 @@ def providers_for(self, vpkg_spec):

     @autospec
     def extensions_for(self, extendee_spec):
-        return [pkg_cls(spack.spec.Spec(pkg_cls.name))
-                for pkg_cls in self.all_package_classes()
-                if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)]
+        return [p for p in self.all_packages() if p.extends(extendee_spec)]

     def last_mtime(self):
         """Time a package file in this repo was last updated."""
@@ -943,10 +945,9 @@ def repo_for_pkg(self, spec):
         # that can operate on packages that don't exist yet.
         return self.first_repo()

+    @autospec
     def get(self, spec):
         """Returns the package associated with the supplied spec."""
-        msg = "RepoPath.get can only be called on concrete specs"
-        assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
         return self.repo_for_pkg(spec).get(spec)

     def get_pkg_class(self, pkg_name):
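The re-added `@autospec` decorator is what lets `get()` accept a package name or spec string again; the deleted assertion had required an already-concrete Spec. Roughly, the decorator coerces its argument like this (a sketch of the idea, not necessarily Spack's exact implementation):

    import functools

    def autospec(function):
        """Coerce the first argument to a Spec before calling the function."""
        @functools.wraps(function)
        def converter(self, spec_like, *args, **kwargs):
            import spack.spec
            return function(self, spack.spec.Spec(spec_like), *args, **kwargs)
        return converter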
@@ -1106,10 +1107,9 @@ def _read_config(self):
             tty.die("Error reading %s when opening %s"
                     % (self.config_file, self.root))

+    @autospec
     def get(self, spec):
         """Returns the package associated with the supplied spec."""
-        msg = "Repo.get can only be called on concrete specs"
-        assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
         # NOTE: we only check whether the package is None here, not whether it
         # actually exists, because we have to load it anyway, and that ends up
         # checking for existence. We avoid constructing FastPackageChecker,
@@ -1199,9 +1199,7 @@ def providers_for(self, vpkg_spec):

     @autospec
     def extensions_for(self, extendee_spec):
-        return [pkg_cls(spack.spec.Spec(pkg_cls.name))
-                for pkg_cls in self.all_package_classes()
-                if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)]
+        return [p for p in self.all_packages() if p.extends(extendee_spec)]

     def dirname_for_package_name(self, pkg_name):
         """Get the directory name for a particular package. This is the
@@ -1243,6 +1241,15 @@ def packages_with_tags(self, *tags):

         return sorted(v)

+    def all_packages(self):
+        """Iterator over all packages in the repository.
+
+        Use this with care, because loading packages is slow.
+        """
+        for name in self.all_package_names():
+            yield self.get(name)
+
     def all_package_classes(self):
         """Iterator over all package *classes* in the repository.
@@ -1391,6 +1398,11 @@ def _path(repo_dirs=None):
 sys.meta_path.append(ReposFinder())


+def get(spec):
+    """Convenience wrapper around ``spack.repo.get()``."""
+    return path.get(spec)
+
+
 def all_package_names(include_virtuals=False):
     """Convenience wrapper around ``spack.repo.all_package_names()``."""
     return path.all_package_names(include_virtuals)

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 """Tools to produce reports of spec installations"""
 import codecs
 import collections
@@ -280,9 +281,9 @@ def __init__(self, cls, function, format_name, args):
                         .format(self.format_name))
         self.report_writer = report_writers[self.format_name](args)

-    def __call__(self, type, dir=None):
+    def __call__(self, type, dir=os.getcwd()):
         self.type = type
-        self.dir = dir or os.getcwd()
+        self.dir = dir
         return self

     def concretization_report(self, msg):

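Note the behavioral subtlety in this hunk: a default of `dir=os.getcwd()` is evaluated once, when the function object is created, whereas `dir or os.getcwd()` resolves the working directory on every call. A quick demonstration of the difference (POSIX paths assumed; on macOS the resolved path may be `/private/tmp`):

    import os

    def at_def_time(d=os.getcwd()):
        # default captured once, at definition time
        return d

    def at_call_time(d=None):
        # default resolved on every call
        return d or os.getcwd()

    os.chdir('/tmp')
    print(at_def_time())   # whatever the cwd was when the module loaded
    print(at_call_time())  # the cwd right now, i.e. '/tmp'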
@@ -93,8 +93,10 @@ def rewire_node(spec, explicit):
         False,
         spec.build_spec.prefix,
         spec.prefix)
-    relocate.relocate_text_bin(binaries=bins_to_relocate,
-                               prefixes=prefix_to_prefix)
+    # Relocate text strings of prefixes embedded in binaries
+    relocate.relocate_text_bin(bins_to_relocate, prefix_to_prefix)

     # Copy package into place, except for spec.json (because spec.json
     # describes the old spec and not the new spliced spec).
     shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,
@@ -3,7 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-from io import BufferedReader, IOBase
+from io import BufferedReader

 import six
 import six.moves.urllib.error as urllib_error

@@ -23,15 +23,11 @@
 # https://github.com/python/cpython/pull/3249
 class WrapStream(BufferedReader):
     def __init__(self, raw):
-        # In botocore >=1.23.47, StreamingBody inherits from IOBase, so we
-        # only add missing attributes in older versions.
-        # https://github.com/boto/botocore/commit/a624815eabac50442ed7404f3c4f2664cd0aa784
-        if not isinstance(raw, IOBase):
-            raw.readable = lambda: True
-            raw.writable = lambda: False
-            raw.seekable = lambda: False
-            raw.closed = False
-            raw.flush = lambda: None
+        raw.readable = lambda: True
+        raw.writable = lambda: False
+        raw.seekable = lambda: False
+        raw.closed = False
+        raw.flush = lambda: None
         super(WrapStream, self).__init__(raw)

     def detach(self):
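Both versions graft file-like attributes onto botocore's StreamingBody so that `BufferedReader` will accept it; the removed guard merely skips the grafting on botocore versions whose body already subclasses `IOBase`. A self-contained sketch of the wrapping idea, with a dummy object standing in for StreamingBody:

    from io import BufferedReader, IOBase

    class DummyBody:
        """Stand-in for botocore's StreamingBody in this sketch."""
        def __init__(self, payload=b'hello'):
            self._payload = payload

        def read(self, size=-1):
            out, self._payload = self._payload, b''
            return out

        def readinto(self, b):
            data = self.read(len(b))
            b[:len(data)] = data
            return len(data)

    raw = DummyBody()
    if not isinstance(raw, IOBase):
        # graft on the attributes BufferedReader probes for
        raw.readable = lambda: True
        raw.writable = lambda: False
        raw.seekable = lambda: False
        raw.closed = False
        raw.flush = lambda: None

    stream = BufferedReader(raw)
    print(stream.read())  # b'hello'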
@@ -26,9 +26,6 @@
 "cpe-version": {"type": "string", "minLength": 1},
 "system-type": {"type": "string", "minLength": 1},
 "schema-version": {"type": "string", "minLength": 1},
-# Older schemas use did not have "cpe-version", just the
-# schema version; in that case it was just called "version"
-"version": {"type": "string", "minLength": 1},
 }
 },
 "compilers": {
@@ -198,6 +198,9 @@ def update(data):
              " [files={0}]")
         warnings.warn(msg.format(', '.join(data['include'])))

+    if 'packages' in data:
+        updated |= spack.schema.packages.update(data['packages'])
+
     # Spack 0.19 drops support for `spack:concretization` in favor of
     # `spack:concretizer:unify`. Here we provide an upgrade path that changes the former
     # into the latter, or warns when there's an ambiguity. Note that Spack 0.17 is not
@@ -18,13 +18,9 @@
 #:
 #: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
 #: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
-spec_regex = (
-    r'(?!hierarchy|core_specs|verbose|hash_length|defaults|'
-    r'whitelist|blacklist|'  # DEPRECATED: remove in 0.20.
-    r'include|exclude|'  # use these more inclusive/consistent options
-    r'projections|naming_scheme|core_compilers|all)(^\w[\w-]*)'
-)
+spec_regex = r'(?!hierarchy|core_specs|verbose|hash_length|whitelist|' \
+    r'blacklist|projections|naming_scheme|core_compilers|all|' \
+    r'defaults)(^\w[\w-]*)'

 #: Matches a valid name for a module set
 valid_module_set_name = r'^(?!arch_folder$|lmod$|roots$|enable$|prefix_inspections$|'\
@@ -54,21 +50,12 @@
 'default': {},
 'additionalProperties': False,
 'properties': {
-    # DEPRECATED: remove in 0.20.
     'environment_blacklist': {
         'type': 'array',
         'default': [],
         'items': {
             'type': 'string'
         }
-    },
-    # use exclude_env_vars instead
-    'exclude_env_vars': {
-        'type': 'array',
-        'default': [],
-        'items': {
-            'type': 'string'
-        }
     }
 }
 },
@@ -108,20 +95,12 @@
 'minimum': 0,
 'default': 7
 },
-# DEPRECATED: remove in 0.20.
 'whitelist': array_of_strings,
 'blacklist': array_of_strings,
 'blacklist_implicits': {
     'type': 'boolean',
     'default': False
 },
-# whitelist/blacklist have been replaced with include/exclude
-'include': array_of_strings,
-'exclude': array_of_strings,
-'exclude_implicits': {
-    'type': 'boolean',
-    'default': False
-},
 'defaults': array_of_strings,
 'naming_scheme': {
     'type': 'string'  # Can we be more specific here?
@@ -245,51 +224,14 @@ def deprecation_msg_default_module_set(instance, props):
 }


-# deprecated keys and their replacements
-exclude_include_translations = {
-    "whitelist": "include",
-    "blacklist": "exclude",
-    "blacklist_implicits": "exclude_implicits",
-    "environment_blacklist": "exclude_env_vars",
-}
-
-
-def update_keys(data, key_translations):
-    """Change blacklist/whitelist to exclude/include.
-
-    Arguments:
-        data (dict): data from a valid modules configuration.
-        key_translations (dict): A dictionary of keys to translate to
-            their respective values.
-
-    Return:
-        (bool) whether anything was changed in data
-    """
-    changed = False
-
-    if isinstance(data, dict):
-        keys = list(data.keys())
-        for key in keys:
-            value = data[key]
-
-            translation = key_translations.get(key)
-            if translation:
-                data[translation] = data.pop(key)
-                changed = True
-
-            changed |= update_keys(value, key_translations)
-
-    elif isinstance(data, list):
-        for elt in data:
-            changed |= update_keys(elt, key_translations)
-
-    return changed
-
-
-def update_default_module_set(data):
-    """Update module configuration to move top-level keys inside default module set.
-
-    This change was introduced in v0.18 (see 99083f1706 or #28659).
+def update(data):
+    """Update the data in place to remove deprecated properties.
+
+    Args:
+        data (dict): dictionary to be updated
+
+    Returns:
+        True if data was changed, False otherwise
     """
     changed = False

@@ -316,21 +258,3 @@ def update_default_module_set(data):
         data['default'] = default

     return changed
-
-
-def update(data):
-    """Update the data in place to remove deprecated properties.
-
-    Args:
-        data (dict): dictionary to be updated
-
-    Returns:
-        True if data was changed, False otherwise
-    """
-    # deprecated top-level module config (everything in default module set)
-    changed = update_default_module_set(data)
-
-    # translate blacklist/whitelist to exclude/include
-    changed |= update_keys(data, exclude_include_translations)
-
-    return changed
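Since the minus side above drops `update_keys` wholesale, it is worth noting what it did: a recursive rename over the whole modules config, so deprecated keys are translated at any nesting depth. A runnable copy of that logic with a tiny example config:

    exclude_include_translations = {
        "whitelist": "include",
        "blacklist": "exclude",
        "blacklist_implicits": "exclude_implicits",
        "environment_blacklist": "exclude_env_vars",
    }

    def update_keys(data, key_translations):
        # recursive rename, as in the deleted helper above
        changed = False
        if isinstance(data, dict):
            for key in list(data.keys()):
                value = data[key]
                translation = key_translations.get(key)
                if translation:
                    data[translation] = data.pop(key)
                    changed = True
                changed |= update_keys(value, key_translations)
        elif isinstance(data, list):
            for elt in data:
                changed |= update_keys(elt, key_translations)
        return changed

    cfg = {'default': {'tcl': {'whitelist': ['gcc'],
                               'all': {'environment_blacklist': ['CPATH']}}}}
    print(update_keys(cfg, exclude_include_translations))  # True
    print(cfg)  # whitelist -> include, environment_blacklist -> exclude_env_vars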
@@ -9,6 +9,54 @@
 """


+def deprecate_paths_and_modules(instance, deprecated_properties):
+    """Function to produce warning/error messages if "paths" and "modules" are
+    found in "packages.yaml"
+
+    Args:
+        instance: instance of the configuration file
+        deprecated_properties: deprecated properties in instance
+
+    Returns:
+        Warning/Error message to be printed
+    """
+    import copy
+    import os.path
+
+    import llnl.util.tty
+
+    import spack.util.spack_yaml as syaml
+
+    # Copy the instance to remove default attributes that are not related
+    # to the part that needs to be reported
+    instance_copy = copy.copy(instance)
+
+    # Check if this configuration comes from an environment or not
+    absolute_path = instance_copy._end_mark.name
+    command_to_suggest = '$ spack config update packages'
+    if os.path.basename(absolute_path) == 'spack.yaml':
+        command_to_suggest = '$ spack env update <environment>'
+
+    # Retrieve the relevant part of the configuration as YAML
+    keys_to_be_removed = [
+        x for x in instance_copy if x not in deprecated_properties
+    ]
+    for key in keys_to_be_removed:
+        instance_copy.pop(key)
+    yaml_as_str = syaml.dump_config(instance_copy, blame=True)
+
+    if llnl.util.tty.is_debug():
+        msg = 'OUTDATED CONFIGURATION FILE [file={0}]\n{1}'
+        llnl.util.tty.debug(msg.format(absolute_path, yaml_as_str))
+
+    msg = ('detected deprecated properties in {0}\nActivate the debug '
+           'flag to have more information on the deprecated parts or '
+           'run:\n\n\t{2}\n\nto update the file to the new format\n')
+    return msg.format(
+        absolute_path, yaml_as_str, command_to_suggest
+    )
+
+
 #: Properties for inclusion in other schemas
 properties = {
     'packages': {
@@ -88,7 +136,16 @@
                 'required': ['spec']
             }
         },
+        # Deprecated properties, will trigger an error with a
+        # message telling how to update.
+        'paths': {'type': 'object'},
+        'modules': {'type': 'object'},
     },
+    'deprecatedProperties': {
+        'properties': ['modules', 'paths'],
+        'message': deprecate_paths_and_modules,
+        'error': False
+    }
 },
 },
 },
@@ -103,3 +160,41 @@
     'additionalProperties': False,
     'properties': properties,
 }
+
+
+def update(data):
+    """Update the data in place to remove deprecated properties.
+
+    Args:
+        data (dict): dictionary to be updated
+
+    Returns:
+        True if data was changed, False otherwise
+    """
+    changed = False
+    for cfg_object in data.values():
+        externals = []
+
+        # If we don't have these deprecated attributes, continue
+        if not any(x in cfg_object for x in ('paths', 'modules')):
+            continue
+
+        # If we arrive here we need to make some changes i.e.
+        # we need to remove and eventually convert some attributes
+        changed = True
+        paths = cfg_object.pop('paths', {})
+        for spec, prefix in paths.items():
+            externals.append({
+                'spec': str(spec),
+                'prefix': str(prefix)
+            })
+        modules = cfg_object.pop('modules', {})
+        for spec, module in modules.items():
+            externals.append({
+                'spec': str(spec),
+                'modules': [str(module)]
+            })
+        if externals:
+            cfg_object['externals'] = externals
+
+    return changed
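To make the migration concrete, here is what the added `update()` does to a deprecated packages entry, shown with plain dicts (the `openmpi` values are hypothetical examples, not taken from this diff):

    data = {
        'openmpi': {
            'paths': {'openmpi@4.1.1': '/opt/openmpi'},
            'modules': {'openmpi@4.0.5': 'openmpi/4.0.5'},
        }
    }

    # after update(data), the deprecated keys become an 'externals' list:
    expected = {
        'openmpi': {
            'externals': [
                {'spec': 'openmpi@4.1.1', 'prefix': '/opt/openmpi'},
                {'spec': 'openmpi@4.0.5', 'modules': ['openmpi/4.0.5']},
            ]
        }
    }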
Some files were not shown because too many files have changed in this diff.