Merge branch 'develop' into f/env-location

commit bce2d38bfc
.flake8 (1 change)
@@ -29,6 +29,7 @@ max-line-length = 99
 #
 per-file-ignores =
     var/spack/repos/*/package.py:F403,F405,F821
+    *-ci-package.py:F403,F405,F821

 # exclude things we usually do not want linting for.
 # These still get linted when passed explicitly, as when spack flake8 passes
.github/ISSUE_TEMPLATE/test_error.yml (new file, 62 lines)
@@ -0,0 +1,62 @@
+name: "\U0001F4A5 Tests error"
+description: Some package in Spack had stand-alone tests that didn't pass
+title: "Testing issue: "
+labels: [test-error]
+body:
+  - type: textarea
+    id: reproduce
+    attributes:
+      label: Steps to reproduce the failure(s) or link(s) to test output(s)
+      description: |
+        Fill in the test output from the exact spec that is having stand-alone test failures. Links to test outputs (e.g., CDash) can also be provided.
+      value: |
+        ```console
+        $ spack spec -I <spec>
+        ...
+        ```
+  - type: textarea
+    id: error
+    attributes:
+      label: Error message
+      description: |
+        Please post the error message from spack inside the `<details>` tag below:
+      value: |
+        <details><summary>Error message</summary><pre>
+        ...
+        </pre></details>
+    validations:
+      required: true
+  - type: textarea
+    id: information
+    attributes:
+      label: Information on your system or the test runner
+      description: Please include the output of `spack debug report` for your system.
+    validations:
+      required: true
+  - type: markdown
+    attributes:
+      value: |
+        If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well.
+  - type: textarea
+    id: additional_information
+    attributes:
+      label: Additional information
+      description: |
+        Please upload test logs or any additional information about the problem.
+  - type: markdown
+    attributes:
+      value: |
+        Some packages have maintainers who have volunteered to debug build failures. Run `spack maintainers <name-of-the-package>` and **@mention** them here if they exist.
+  - type: checkboxes
+    id: checks
+    attributes:
+      label: General information
+      options:
+        - label: I have reported the version of Spack/Python/Platform/Runner
+          required: true
+        - label: I have run `spack maintainers <name-of-the-package>` and **@mentioned** any maintainers
+          required: true
+        - label: I have uploaded any available logs
+          required: true
+        - label: I have searched the issues of this repo and believe this is not a duplicate
+          required: true
.github/workflows/audit.yaml (new file, 44 lines)
@@ -0,0 +1,44 @@
+name: audit
+
+on:
+  workflow_call:
+    inputs:
+      with_coverage:
+        required: true
+        type: string
+      python_version:
+        required: true
+        type: string
+
+concurrency:
+  group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
+  cancel-in-progress: true
+
+jobs:
+  # Run audits on all the packages in the built-in repository
+  package-audits:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
+    - uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
+      with:
+        python-version: ${{inputs.python_version}}
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip six setuptools pytest codecov 'coverage[toml]<=6.2'
+    - name: Package audits (with coverage)
+      if: ${{ inputs.with_coverage == 'true' }}
+      run: |
+        . share/spack/setup-env.sh
+        coverage run $(which spack) audit packages
+        coverage combine
+        coverage xml
+    - name: Package audits (without coverage)
+      if: ${{ inputs.with_coverage == 'false' }}
+      run: |
+        . share/spack/setup-env.sh
+        $(which spack) audit packages
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
+      if: ${{ inputs.with_coverage == 'true' }}
+      with:
+        flags: unittests,linux,audits
.github/workflows/bootstrap.yml (2 changes)
@@ -9,7 +9,7 @@ on:
     - cron: '16 2 * * *'

 concurrency:
-  group: bootstrap-${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
   cancel-in-progress: true

 jobs:
.github/workflows/build-containers.yml (2 changes)
@@ -20,7 +20,7 @@ on:
     types: [published]

 concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
   cancel-in-progress: true

 jobs:
.github/workflows/ci.yaml (47 changes)
@@ -11,7 +11,7 @@ on:
     - releases/**

 concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
   cancel-in-progress: true

 jobs:
@@ -19,7 +19,19 @@ jobs:
     needs: [ changes ]
     uses: ./.github/workflows/valid-style.yml
     with:
-      with_coverage: ${{ needs.changes.outputs.with_coverage }}
+      with_coverage: ${{ needs.changes.outputs.core }}
+  audit-ancient-python:
+    uses: ./.github/workflows/audit.yaml
+    needs: [ changes ]
+    with:
+      with_coverage: ${{ needs.changes.outputs.core }}
+      python_version: 2.7
+  all-prechecks:
+    needs: [ prechecks ]
+    runs-on: ubuntu-latest
+    steps:
+    - name: Success
+      run: "true"
   # Check which files have been updated by the PR
   changes:
     runs-on: ubuntu-latest
@@ -28,7 +40,6 @@ jobs:
       bootstrap: ${{ steps.filter.outputs.bootstrap }}
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
-      with_coverage: ${{ steps.coverage.outputs.with_coverage }}
     steps:
    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
      if: ${{ github.event_name == 'push' }}
@@ -43,15 +54,14 @@ jobs:
         # built-in repository or documentation
         filters: |
           bootstrap:
-            - '!var/spack/repos/builtin/**'
             - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
             - 'var/spack/repos/builtin/packages/clingo/**'
             - 'var/spack/repos/builtin/packages/python/**'
             - 'var/spack/repos/builtin/packages/re2c/**'
-            - '!lib/spack/docs/**'
             - 'lib/spack/**'
             - 'share/spack/**'
             - '.github/workflows/bootstrap.yml'
+            - '.github/workflows/ci.yaml'
           core:
             - './!(var/**)/**'
           packages:
@@ -62,32 +72,21 @@ jobs:
       # job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
       # setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
       #
-    - id: coverage
-      # Run the subsequent jobs with coverage if core has been modified,
-      # regardless of whether this is a pull request or a push to a branch
-      run: |
-        echo Core changes: ${{ steps.filter.outputs.core }}
-        echo Event name: ${{ github.event_name }}
-        if [ "${{ steps.filter.outputs.core }}" == "true" ]
-        then
-          echo "::set-output name=with_coverage::true"
-        else
-          echo "::set-output name=with_coverage::false"
-        fi
   bootstrap:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/bootstrap.yml
   unit-tests:
-    if: ${{ github.repository == 'spack/spack' }}
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
-    with:
-      core: ${{ needs.changes.outputs.core }}
-      packages: ${{ needs.changes.outputs.packages }}
-      with_coverage: ${{ needs.changes.outputs.with_coverage }}
   windows:
-    if: ${{ github.repository == 'spack/spack' }}
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks ]
     uses: ./.github/workflows/windows_python.yml
+  all:
+    needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
+    runs-on: ubuntu-latest
+    steps:
+    - name: Success
+      run: "true"
.github/workflows/unit_tests.yaml (99 changes)
@@ -1,20 +1,11 @@
 name: unit tests

 on:
+  workflow_dispatch:
   workflow_call:
-    inputs:
-      core:
-        required: true
-        type: string
-      packages:
-        required: true
-        type: string
-      with_coverage:
-        required: true
-        type: string

 concurrency:
-  group: unit_tests-${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
   cancel-in-progress: true

 jobs:
@@ -25,11 +16,26 @@ jobs:
       matrix:
         python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10']
         concretizer: ['clingo']
+        on_develop:
+        - ${{ github.ref == 'refs/heads/develop' }}
         include:
         - python-version: 2.7
           concretizer: original
-        - python-version: 3.9
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
+        - python-version: '3.10'
           concretizer: original
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
+        exclude:
+        - python-version: '3.7'
+          concretizer: 'clingo'
+          on_develop: false
+        - python-version: '3.8'
+          concretizer: 'clingo'
+          on_develop: false
+        - python-version: '3.9'
+          concretizer: 'clingo'
+          on_develop: false
+
     steps:
     - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
       with:
@@ -46,7 +52,7 @@ jobs:
           patchelf cmake bison libbison-dev kcov
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools pytest codecov "coverage[toml]<=6.2"
+        pip install --upgrade pip six setuptools pytest codecov[toml] pytest-cov pytest-xdist
         # ensure style checks are not skipped in unit tests for python >= 3.6
         # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
         if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
@@ -69,26 +75,18 @@ jobs:
         . share/spack/setup-env.sh
         spack bootstrap untrust spack-install
         spack -v solve zlib
-    - name: Run unit tests (full suite with coverage)
-      if: ${{ inputs.with_coverage == 'true' }}
+    - name: Run unit tests
       env:
         SPACK_PYTHON: python
+        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
+        SPACK_TEST_PARALLEL: 2
         COVERAGE: true
-        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
+        UNIT_TEST_COVERAGE: ${{ (matrix.concretizer == 'original' && matrix.python-version == '2.7') || (matrix.python-version == '3.10') }}
       run: |
         share/spack/qa/run-unit-tests
-        coverage combine
+        coverage combine -a
         coverage xml
-    - name: Run unit tests (reduced suite without coverage)
-      if: ${{ inputs.with_coverage == 'false' }}
-      env:
-        SPACK_PYTHON: python
-        ONLY_PACKAGES: true
-        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
-      run: |
-        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
-      if: ${{ inputs.with_coverage == 'true' }}
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
       with:
         flags: unittests,linux,${{ matrix.concretizer }}
   # Test shell integration
@@ -108,24 +106,18 @@ jobs:
         sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 pytest-xdist
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
         . .github/workflows/setup_git.sh
-    - name: Run shell tests (without coverage)
-      if: ${{ inputs.with_coverage == 'false' }}
-      run: |
-        share/spack/qa/run-shell-tests
-    - name: Run shell tests (with coverage)
-      if: ${{ inputs.with_coverage == 'true' }}
+    - name: Run shell tests
       env:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
-      if: ${{ inputs.with_coverage == 'true' }}
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: shelltests,linux

@@ -133,7 +125,6 @@ jobs:
   # only on PRs modifying core Spack
   rhel8-platform-python:
     runs-on: ubuntu-latest
-    if: ${{ inputs.with_coverage == 'true' }}
     container: registry.access.redhat.com/ubi8/ubi
     steps:
     - name: Install dependencies
@@ -174,30 +165,21 @@ jobs:
           patchelf kcov
     - name: Install Python packages
       run: |
-        pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 clingo
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
         . .github/workflows/setup_git.sh
     - name: Run unit tests (full suite with coverage)
-      if: ${{ inputs.with_coverage == 'true' }}
       env:
         COVERAGE: true
         SPACK_TEST_SOLVER: clingo
       run: |
         share/spack/qa/run-unit-tests
-        coverage combine
+        coverage combine -a
         coverage xml
-    - name: Run unit tests (reduced suite without coverage)
-      if: ${{ inputs.with_coverage == 'false' }}
-      env:
-        ONLY_PACKAGES: true
-        SPACK_TEST_SOLVER: clingo
-      run: |
-        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
-      if: ${{ inputs.with_coverage == 'true' }}
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
       with:
         flags: unittests,linux,clingo
   # Run unit tests on MacOS
@@ -216,34 +198,27 @@ jobs:
     - name: Install Python packages
       run: |
         pip install --upgrade pip six setuptools
-        pip install --upgrade pytest codecov coverage[toml]==6.2
+        pip install --upgrade pytest codecov coverage[toml] pytest-xdist pytest-cov
     - name: Setup Homebrew packages
       run: |
         brew install dash fish gcc gnupg2 kcov
     - name: Run unit tests
       env:
         SPACK_TEST_SOLVER: clingo
+        SPACK_TEST_PARALLEL: 4
       run: |
         git --version
         . .github/workflows/setup_git.sh
         . share/spack/setup-env.sh
         $(which spack) bootstrap untrust spack-install
         $(which spack) solve zlib
-        if [ "${{ inputs.with_coverage }}" == "true" ]
-        then
-          coverage run $(which spack) unit-test -x
-          coverage combine
+        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
+        $(which spack) unit-test --cov --cov-config=pyproject.toml "${common_args[@]}"
+        coverage combine -a
         coverage xml
         # Delete the symlink going from ./lib/spack/docs/_spack_root back to
         # the initial directory, since it causes ELOOP errors with codecov/actions@2
         rm lib/spack/docs/_spack_root
-        else
-          echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
-          $(which spack) unit-test -x -m "not maybeslow" -k "test_all_virtual_packages_have_default_providers"
-        fi
-    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
-      if: ${{ inputs.with_coverage == 'true' }}
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
       with:
-        files: ./coverage.xml
         flags: unittests,macos

.github/workflows/valid-style.yml (32 changes)
@@ -8,7 +8,7 @@ on:
       type: string

 concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
   cancel-in-progress: true


@@ -53,30 +53,8 @@ jobs:
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
-  # Run audits on all the packages in the built-in repository
-  package-audits:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-    - uses: actions/setup-python@b55428b1882923874294fa556849718a1d7f2ca5 # @v2
-      with:
-        python-version: '3.10'
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2
-    - name: Package audits (with coverage)
-      if: ${{ inputs.with_coverage == 'true' }}
-      run: |
-        . share/spack/setup-env.sh
-        coverage run $(which spack) audit packages
-        coverage combine
-        coverage xml
-    - name: Package audits (without coverage)
-      if: ${{ inputs.with_coverage == 'false' }}
-      run: |
-        . share/spack/setup-env.sh
-        $(which spack) audit packages
-    - uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
-      if: ${{ inputs.with_coverage == 'true' }}
-      with:
-        flags: unittests,linux,audits
+  audit:
+    uses: ./.github/workflows/audit.yaml
+    with:
+      with_coverage: ${{ inputs.with_coverage }}
+      python_version: '3.10'
.github/workflows/windows_python.yml (29 changes)
@@ -4,7 +4,7 @@ on:
   workflow_call:

 concurrency:
-  group: windows-${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
   cancel-in-progress: true

 defaults:
@@ -23,14 +23,22 @@ jobs:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
+        python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov
     - name: Create local develop
       run: |
         .\spack\.github\workflows\setup_git.ps1
     - name: Unit Test
       run: |
         echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-        spack unit-test --verbose --ignore=lib/spack/spack/test/cmd
+        cd spack
+        dir
+        (Get-Item '.\lib\spack\docs\_spack_root').Delete()
+        spack unit-test --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      with:
+        flags: unittests,windows
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
@@ -42,14 +50,21 @@ jobs:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
+        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov
     - name: Create local develop
       run: |
         .\spack\.github\workflows\setup_git.ps1
     - name: Command Unit Test
       run: |
         echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
-        spack unit-test lib/spack/spack/test/cmd --verbose
+        cd spack
+        (Get-Item '.\lib\spack\docs\_spack_root').Delete()
+        spack unit-test --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
+      with:
+        flags: unittests,windows
   build-abseil:
     runs-on: windows-latest
     steps:
@@ -85,7 +100,7 @@ jobs:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
+        python -m pip install --upgrade pip six pywin32 setuptools
     - name: Add Light and Candle to Path
       run: |
         $env:WIX >> $GITHUB_PATH
@@ -116,7 +131,7 @@ jobs:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
+        python -m pip install --upgrade pip six pywin32 setuptools
     - name: Setup installer directory
       run: |
         mkdir -p spack_installer
@@ -7,7 +7,6 @@ export TMPDIR="${XDG_RUNTIME_DIR}"
 export TMP_DIR="$(mktemp -d -t spack-test-XXXXX)"
 clean_up() {
   [[ -n "$TMPCONFIG_DEBUG" ]] && printf "cleaning up: $TMP_DIR\n"
-  [[ -n "$TMPCONFIG_DEBUG" ]] && tree "$TMP_DIR"
   rm -rf "$TMP_DIR"
 }
 trap clean_up EXIT
@@ -582,6 +582,19 @@ libraries. Make sure not to add modules/packages containing the word
 "test", as these likely won't end up in the installation directory,
 or may require test dependencies like pytest to be installed.

+Instead of defining the ``import_modules`` explicitly, only the subset
+of module names to be skipped can be defined by using ``skip_modules``.
+If a defined module has submodules, they are skipped as well, e.g.,
+in case the ``plotting`` modules should be excluded from the
+automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
+'nilearn.plotting', 'nilearn.plotting.data']`` set:
+
+.. code-block:: python
+
+   skip_modules = ['nilearn.plotting']
+
+This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``
+
 Import tests can be run during the installation using ``spack install
 --test=root`` or at any time after the installation using
 ``spack test run``.
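To make the new option concrete, a hedged sketch of a full package recipe using it; only the ``skip_modules`` line comes from the documentation above, while the class name, homepage, and pypi values are illustrative:

    # Hypothetical package recipe; details other than skip_modules are made up.
    class PyNilearn(PythonPackage):
        """Statistical machine learning for neuroimaging (illustrative)."""

        homepage = "https://nilearn.github.io/"   # illustrative
        pypi = "nilearn/nilearn-0.9.1.tar.gz"     # illustrative

        # Exclude 'nilearn.plotting' and all of its submodules from the
        # automatically detected import tests.
        skip_modules = ["nilearn.plotting"]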
@@ -77,7 +77,7 @@ installation of a package.

 Spack only generates modulefiles when a package is installed. If
 you attempt to install a package and it is already installed, Spack
-will not regenerate modulefiles for the package. This may to
+will not regenerate modulefiles for the package. This may lead to
 inconsistent modulefiles if the Spack module configuration has
 changed since the package was installed, either by editing a file
 or changing scopes or environments.
@@ -4561,6 +4561,9 @@ other checks.
    * - :ref:`AutotoolsPackage <autotoolspackage>`
      - ``check`` (``make test``, ``make check``)
      - ``installcheck`` (``make installcheck``)
+   * - :ref:`CachedCMakePackage <cachedcmakepackage>`
+     - ``check`` (``make check``, ``make test``)
+     - Not applicable
    * - :ref:`CMakePackage <cmakepackage>`
      - ``check`` (``make check``, ``make test``)
      - Not applicable
@@ -4585,6 +4588,9 @@ other checks.
    * - :ref:`SIPPackage <sippackage>`
      - Not applicable
      - ``test`` (module imports)
+   * - :ref:`WafPackage <wafpackage>`
+     - ``build_test`` (must be overridden)
+     - ``install_test`` (must be overridden)

 For example, the ``Libelf`` package inherits from ``AutotoolsPackage``
 and its ``Makefile`` has a standard ``check`` target. So Spack will
@@ -5,9 +5,9 @@

 .. _pipelines:

-=========
-Pipelines
-=========
+============
+CI Pipelines
+============

 Spack provides commands that support generating and running automated build
 pipelines designed for Gitlab CI. At the highest level it works like this:
@@ -168,7 +168,7 @@ which specs are up to date and which need to be rebuilt (it's a good idea for other
 reasons as well, but those are out of scope for this discussion). In this case we
 have disabled it (using ``rebuild-index: False``) because the index would only be
 generated in the artifacts mirror anyway, and consequently would not be available
-during subesequent pipeline runs.
+during subsequent pipeline runs.

 .. note::
    With the addition of reproducible builds (#22887) a previously working
@@ -267,24 +267,64 @@ generated by jobs in the pipeline.
 ``spack ci rebuild``
 ^^^^^^^^^^^^^^^^^^^^^

-The purpose of the ``spack ci rebuild`` is straightforward: take its assigned
-spec job, check whether the target mirror already has a binary for that spec,
-and if not, build the spec from source and push the binary to the mirror. To
-accomplish this in a reproducible way, the sub-command prepares a ``spack install``
-command line to build a single spec in the DAG, saves that command in a
-shell script, ``install.sh``, in the current working directory, and then runs
-it to install the spec. The shell script is also exported as an artifact to
-aid in reproducing the build outside of the CI environment.
+The purpose of ``spack ci rebuild`` is straightforward: take its assigned
+spec and ensure a binary of a successful build exists on the target mirror.
+If the binary does not already exist, it is built from source and pushed
+to the mirror. The associated stand-alone tests are optionally run against
+the new build. Additionally, files for reproducing the build outside of the
+CI environment are created to facilitate debugging.

-If it was necessary to install the spec from source, ``spack ci rebuild`` will
-also subsequently create a binary package for the spec and try to push it to the
-mirror.
+If a binary for the spec does not exist on the target mirror, an install
+shell script, ``install.sh``, is created and saved in the current working
+directory. The script is run in a job to install the spec from source. The
+resulting binary package is pushed to the mirror. If ``cdash`` is configured
+for the environment, then the build results will be uploaded to the site.

-The ``spack ci rebuild`` sub-command mainly expects its "input" to come either
-from environment variables or from the ``gitlab-ci`` section of the ``spack.yaml``
-environment file. There are two main sources of the environment variables, some
-are written into ``.gitlab-ci.yml`` by ``spack ci generate``, and some are
-provided by the GitLab CI runtime.
+Environment variables and values in the ``gitlab-ci`` section of the
+``spack.yaml`` environment file provide inputs to this process. The
+two main sources of environment variables are variables written into
+``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime.
+Several key CI pipeline variables are described in
+:ref:`ci_environment_variables`.
+
+If the ``--tests`` option is provided, stand-alone tests are performed but
+only if the build was successful *and* the package does not appear in the
+list of ``broken-tests-packages``. A shell script, ``test.sh``, is created
+and run to perform the tests. On completion, test logs are exported as job
+artifacts for review and to facilitate debugging. If `cdash` is configured,
+test results are also uploaded to the site.
+
+A snippet from an example ``spack.yaml`` file illustrating use of this
+option *and* specification of a package with broken tests is given below.
+The inclusion of a spec for building ``gptune`` is not shown here. Note
+that ``--tests`` is passed to ``spack ci rebuild`` as part of the
+``gitlab-ci`` script.
+
+.. code-block:: yaml
+
+   gitlab-ci:
+     script:
+       - . "./share/spack/setup-env.sh"
+       - spack --version
+       - cd ${SPACK_CONCRETE_ENV_DIR}
+       - spack env activate --without-view .
+       - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
+       - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
+       - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
+       - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
+       - spack -d ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
+
+   broken-tests-packages:
+     - gptune
+
+In this case, even if ``gptune`` is successfully built from source, the
+pipeline will *not* run its stand-alone tests since the package is listed
+under ``broken-tests-packages``.
+
+Spack's cloud pipelines provide actual, up-to-date examples of the CI/CD
+configuration and environment files used by Spack. You can find them
+under Spack's `stacks
+<https://github.com/spack/spack/tree/develop/share/spack/gitlab/cloud_pipelines/stacks>`_ repository directory.

 .. _cmd-spack-ci-rebuild-index:

@@ -447,7 +487,7 @@ Note about "no-op" jobs
 ^^^^^^^^^^^^^^^^^^^^^^^

 If no specs in an environment need to be rebuilt during a given pipeline run
-(meaning all are already up to date on the mirror), a single succesful job
+(meaning all are already up to date on the mirror), a single successful job
 (a NO-OP) is still generated to avoid an empty pipeline (which GitLab
 considers to be an error). An optional ``service-job-attributes`` section
 can be added to your ``spack.yaml`` where you can provide ``tags`` and
@@ -725,7 +765,7 @@ above with ``git checkout ${SPACK_CHECKOUT_VERSION}``.
 On the other hand, if you're pointing to a spack repository and branch under your
 control, there may be no benefit in using the captured ``SPACK_CHECKOUT_VERSION``,
 and you can instead just clone using the variables you define (``SPACK_REPO``
-and ``SPACK_REF`` in the example aboves).
+and ``SPACK_REF`` in the example above).

 .. _custom_workflow:

@@ -22,9 +22,9 @@
 from llnl.util import tty
 from llnl.util.compat import Sequence
 from llnl.util.lang import dedupe, memoized
-from llnl.util.symlink import symlink
+from llnl.util.symlink import islink, symlink

-from spack.util.executable import Executable
+from spack.util.executable import CommandNotFoundError, Executable, which
 from spack.util.path import path_to_os_path, system_path_filter

 is_windows = _platform == "win32"
@@ -113,6 +113,69 @@ def path_contains_subdirectory(path, root):
     return norm_path.startswith(norm_root)


+@memoized
+def file_command(*args):
+    """Creates entry point to `file` system command with provided arguments"""
+    try:
+        file_cmd = which("file", required=True)
+    except CommandNotFoundError as e:
+        if is_windows:
+            raise CommandNotFoundError("`file` utility is not available on Windows")
+        else:
+            raise e
+    for arg in args:
+        file_cmd.add_default_arg(arg)
+    return file_cmd
+
+
+@memoized
+def _get_mime_type():
+    """Generate method to call `file` system command to acquire mime type
+    for a specified path
+    """
+    return file_command("-b", "-h", "--mime-type")
+
+
+@memoized
+def _get_mime_type_compressed():
+    """Same as _get_mime_type but attempts to check for
+    compression first
+    """
+    mime_uncompressed = _get_mime_type()
+    mime_uncompressed.add_default_arg("-Z")
+    return mime_uncompressed
+
+
+def mime_type(filename):
+    """Returns the mime type and subtype of a file.
+
+    Args:
+        filename: file to be analyzed
+
+    Returns:
+        Tuple containing the MIME type and subtype
+    """
+    output = _get_mime_type()(filename, output=str, error=str).strip()
+    tty.debug("==> " + output)
+    type, _, subtype = output.partition("/")
+    return type, subtype
+
+
+def compressed_mime_type(filename):
+    """Same as mime_type but checks for type that has been compressed
+
+    Args:
+        filename (str): file to be analyzed
+
+    Returns:
+        Tuple containing the MIME type and subtype
+    """
+    output = _get_mime_type_compressed()(filename, output=str, error=str).strip()
+    tty.debug("==> " + output)
+    type, _, subtype = output.partition("/")
+    return type, subtype
+
+
 #: This generates the library filenames that may appear on any OS.
 library_extensions = ["a", "la", "so", "tbd", "dylib"]
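A hedged sketch of how calling code can use the new helpers; the path is illustrative, and this assumes Spack's ``lib`` directories are on ``sys.path`` and the ``file`` utility is on ``PATH`` (so not on Windows):

    # Illustrative use of the mime_type helper added above.
    import llnl.util.filesystem as fsys

    m_type, m_subtype = fsys.mime_type("/usr/bin/env")  # hypothetical path
    if m_type == "text":
        print("plain text file: text/%s" % m_subtype)
    elif m_type == "application":
        # e.g. ("application", "x-executable") for an ELF binary
        print("binary payload: application/%s" % m_subtype)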
@@ -637,7 +700,11 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
         if symlinks:
             target = os.readlink(s)
             if os.path.isabs(target):
-                new_target = re.sub(abs_src, abs_dest, target)
+
+                def escaped_path(path):
+                    return path.replace("\\", r"\\")
+
+                new_target = re.sub(escaped_path(abs_src), escaped_path(abs_dest), target)
                 if new_target != target:
                     tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                     target = new_target
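The reason for ``escaped_path`` is that on Windows ``abs_src`` contains backslashes, which ``re.sub`` would otherwise interpret as regex and replacement escapes. A standalone sketch with made-up paths:

    # Demonstrates the escaping; the paths are made up.
    import re

    def escaped_path(path):
        return path.replace("\\", r"\\")

    abs_src = "C:\\stage\\pkg"      # raw '\s' would be a regex escape
    abs_dest = "C:\\install\\pkg"
    target = "C:\\stage\\pkg\\lib\\foo.dll"

    print(re.sub(escaped_path(abs_src), escaped_path(abs_dest), target))
    # -> C:\install\pkg\lib\foo.dll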
@@ -1903,7 +1970,11 @@ def names(self):
             name = x[3:]

             # Valid extensions include: ['.dylib', '.so', '.a']
-            for ext in [".dylib", ".so", ".a"]:
+            # on non Windows platform
+            # Windows valid library extensions are:
+            # ['.dll', '.lib']
+            valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
+            for ext in valid_exts:
                 i = name.rfind(ext)
                 if i != -1:
                     names.append(name[:i])
@@ -2046,15 +2117,23 @@ def find_libraries(libraries, root, shared=True, recursive=False):
         message = message.format(find_libraries.__name__, type(libraries))
         raise TypeError(message)

+    if is_windows:
+        static_ext = "lib"
+        shared_ext = "dll"
+    else:
+        # Used on both Linux and macOS
+        static_ext = "a"
+        shared_ext = "so"
+
     # Construct the right suffix for the library
     if shared:
         # Used on both Linux and macOS
-        suffixes = ["so"]
+        suffixes = [shared_ext]
         if sys.platform == "darwin":
             # Only used on macOS
             suffixes.append("dylib")
     else:
-        suffixes = ["a"]
+        suffixes = [static_ext]

     # List of libraries we are searching with suffixes
     libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]
@@ -2067,7 +2146,11 @@ def find_libraries(libraries, root, shared=True, recursive=False):
     # perform first non-recursive search in root/lib then in root/lib64 and
     # finally search all of root recursively. The search stops when the first
     # match is found.
-    for subdir in ("lib", "lib64"):
+    common_lib_dirs = ["lib", "lib64"]
+    if is_windows:
+        common_lib_dirs.extend(["bin", "Lib"])
+
+    for subdir in common_lib_dirs:
         dirname = join_path(root, subdir)
         if not os.path.isdir(dirname):
             continue
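A hedged sketch of calling the updated API; the library names and root directory are illustrative:

    # Illustrative call; the root path and names are made up.
    from llnl.util.filesystem import find_libraries

    libs = find_libraries(
        ["libfoo", "libfoo_utils"],  # names without a suffix
        root="/opt/spack/foo-1.0",   # searched under lib/, lib64/ (plus bin/, Lib/ on Windows)
        shared=True,                 # picks .so/.dylib, or .dll on Windows
        recursive=True,              # falls back to a full recursive search of root
    )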
@@ -2080,6 +2163,155 @@ def find_libraries(libraries, root, shared=True, recursive=False):
     return LibraryList(found_libs)


+def find_all_shared_libraries(root, recursive=False):
+    """Convenience function that returns the list of all shared libraries found
+    in the directory passed as argument.
+
+    See documentation for `llnl.util.filesystem.find_libraries` for more information
+    """
+    return find_libraries("*", root=root, shared=True, recursive=recursive)
+
+
+def find_all_static_libraries(root, recursive=False):
+    """Convenience function that returns the list of all static libraries found
+    in the directory passed as argument.
+
+    See documentation for `llnl.util.filesystem.find_libraries` for more information
+    """
+    return find_libraries("*", root=root, shared=False, recursive=recursive)
+
+
+def find_all_libraries(root, recursive=False):
+    """Convenience function that returns the list of all libraries found
+    in the directory passed as argument.
+
+    See documentation for `llnl.util.filesystem.find_libraries` for more information
+    """
+    return find_all_shared_libraries(root, recursive=recursive) + find_all_static_libraries(
+        root, recursive=recursive
+    )
+
+
+class WindowsSimulatedRPath(object):
+    """Class representing Windows filesystem rpath analog
+
+    One instance of this class is associated with a package (only on Windows).
+    For each lib/binary directory in an associated package, this class introduces
+    a symlink to any/all dependent libraries/binaries. This includes the package's
+    own bin/lib directories, meaning the libraries are linked to the binary directory
+    and vice versa.
+    """
+
+    def __init__(self, package, link_install_prefix=True):
+        """
+        Args:
+            package (spack.package_base.PackageBase): Package requiring links
+            link_install_prefix (bool): Link against package's own install or stage root.
+                Packages that run their own executables during build and require rpaths to
+                the build directory during build time require this option.
+                Default: install root
+        """
+        self.pkg = package
+        self._addl_rpaths = set()
+        self.link_install_prefix = link_install_prefix
+        self._internal_links = set()
+
+    @property
+    def link_dest(self):
+        """
+        Set of directories where package binaries/libraries are located.
+        """
+        if hasattr(self.pkg, "libs") and self.pkg.libs:
+            pkg_libs = set(self.pkg.libs.directories)
+        else:
+            pkg_libs = set((self.pkg.prefix.lib, self.pkg.prefix.lib64))
+
+        return pkg_libs | set([self.pkg.prefix.bin]) | self.internal_links
+
+    @property
+    def internal_links(self):
+        """
+        Linking that would need to be established within the package itself. Useful for links
+        against extension modules/build time executables/internal linkage
+        """
+        return self._internal_links
+
+    def add_internal_links(self, *dest):
+        """
+        Incorporate additional paths into the rpath (sym)linking scheme.
+
+        Paths provided to this method are linked against by a package's libraries
+        and libraries found at these paths are linked against a package's binaries.
+        (i.e. /site-packages -> /bin and /bin -> /site-packages)
+
+        Specified paths should be outside of a package's lib, lib64, and bin
+        directories.
+        """
+        self._internal_links = self._internal_links | set(*dest)
+
+    @property
+    def link_targets(self):
+        """
+        Set of libraries this package needs to link against during runtime.
+        These packages will each be symlinked into the package's lib and binary dir
+        """
+        dependent_libs = []
+        for path in self.pkg.rpath:
+            dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
+        for extra_path in self._addl_rpaths:
+            dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
+        return set(dependent_libs)
+
+    def include_additional_link_paths(self, *paths):
+        """
+        Add libraries found at the root of provided paths to runtime linking.
+
+        These are libraries found outside of the typical scope of rpath linking
+        that require manual inclusion in a runtime linking scheme.
+
+        Args:
+            *paths (str): arbitrary number of paths to be added to runtime linking
+        """
+        self._addl_rpaths = self._addl_rpaths | set(paths)
+
+    def establish_link(self):
+        """
+        (sym)link packages to runtime dependencies based on RPath configuration for
+        Windows heuristics
+        """
+        # from build_environment.py:463
+        # The top-level package is always RPATHed. It hasn't been installed yet
+        # so the RPATHs are added unconditionally

+        # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
+        # install a symlink to each dependent library
+        for library, lib_dir in itertools.product(self.link_targets, self.link_dest):
+            if not path_contains_subdirectory(library, lib_dir):
+                file_name = os.path.basename(library)
+                dest_file = os.path.join(lib_dir, file_name)
+                if os.path.exists(lib_dir):
+                    try:
+                        symlink(library, dest_file)
+                    # For py2 compatibility, we have to catch the specific Windows error code
+                    # associated with trying to create a file that already exists (winerror 183)
+                    except OSError as e:
+                        if e.winerror == 183:
+                            # We have either already symlinked or we are encountering a naming clash
+                            # either way, we don't want to overwrite existing libraries
+                            already_linked = islink(dest_file)
+                            tty.debug(
+                                "Linking library %s to %s failed, " % (library, dest_file)
+                                + "already linked."
+                                if already_linked
+                                else "library with name %s already exists." % file_name
+                            )
+                            pass
+                        else:
+                            raise e
+
+
 @system_path_filter
 @memoized
 def can_access_dir(path):
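A hedged sketch of driving the new class during a Windows install; ``pkg`` is assumed to be a package instance mid-install, and the extra ``plugins`` prefix is made up:

    # Hypothetical usage on Windows; pkg is a spack.package_base.PackageBase.
    from llnl.util.filesystem import WindowsSimulatedRPath

    simulated_rpath = WindowsSimulatedRPath(pkg)
    # Pull in DLLs living outside the dependencies' usual bin/lib directories
    # (the 'plugins' subdirectory is illustrative).
    simulated_rpath.include_additional_link_paths(pkg.prefix.plugins)
    # Symlink every dependent DLL next to the package's binaries/libraries.
    simulated_rpath.establish_link()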
@@ -386,8 +386,12 @@ def _ensure_parent_directory(self):
         try:
             os.makedirs(parent)
         except OSError as e:
-            # makedirs can fail when diretory already exists.
-            if not (e.errno == errno.EEXIST and os.path.isdir(parent) or e.errno == errno.EISDIR):
+            # os.makedirs can fail in a number of ways when the directory already exists.
+            # With EISDIR, we know it exists, and others like EEXIST, EACCES, and EROFS
+            # are fine if we ensure that the directory exists.
+            # Python 3 allows an exist_ok parameter and ignores any OSError as long as
+            # the directory exists.
+            if not (e.errno == errno.EISDIR or os.path.isdir(parent)):
                 raise
         return parent

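The same tolerant-creation pattern, distilled into a standalone runnable sketch:

    # Minimal standalone version of the pattern above: treat any OSError from
    # makedirs as benign as long as the directory ends up existing.
    import errno
    import os

    def ensure_dir(parent):
        try:
            os.makedirs(parent)
        except OSError as e:
            # EISDIR means it exists; otherwise trust an isdir() check, which
            # also covers EEXIST/EACCES/EROFS races where it is already there.
            if not (e.errno == errno.EISDIR or os.path.isdir(parent)):
                raise
        return parent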
@ -228,8 +228,8 @@ def __init__(self, controller_function, minion_function):
|
|||||||
self.minion_function = minion_function
|
self.minion_function = minion_function
|
||||||
|
|
||||||
# these can be optionally set to change defaults
|
# these can be optionally set to change defaults
|
||||||
self.controller_timeout = 1
|
self.controller_timeout = 3
|
||||||
self.sleep_time = 0
|
self.sleep_time = 0.1
|
||||||
|
|
||||||
def start(self, **kwargs):
|
def start(self, **kwargs):
|
||||||
"""Start the controller and minion processes.
|
"""Start the controller and minion processes.
|
||||||
@@ -19,6 +19,7 @@
 import ruamel.yaml as yaml
 from six.moves.urllib.error import HTTPError, URLError

+import llnl.util.filesystem as fsys
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp
@@ -653,7 +654,7 @@ def get_buildfile_manifest(spec):

     for filename in files:
         path_name = os.path.join(root, filename)
-        m_type, m_subtype = relocate.mime_type(path_name)
+        m_type, m_subtype = fsys.mime_type(path_name)
         rel_path_name = os.path.relpath(path_name, spec.prefix)
         added = False
@@ -1030,8 +1030,11 @@ def get_cmake_prefix_path(pkg):
         spack_built.insert(0, dspec)

     ordered_build_link_deps = spack_built + externals
-    build_link_prefixes = filter_system_paths(x.prefix for x in ordered_build_link_deps)
-    return build_link_prefixes
+    cmake_prefix_path_entries = []
+    for spec in ordered_build_link_deps:
+        cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)
+
+    return filter_system_paths(cmake_prefix_path_entries)


 def _setup_pkg_and_run(
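
Gathering CMAKE_PREFIX_PATH entries from a per-package property (instead of from bare prefixes) lets a package redirect where dependents look for its CMake config files. A toy run of the aggregation, with stand-ins for Spack's objects and for filter_system_paths:

    class Dep(object):
        def __init__(self, prefixes):
            self.cmake_prefix_paths = prefixes

    def filter_system_paths(paths):
        # stand-in: drop prefixes the toolchain searches anyway
        return [p for p in paths if p not in ("/usr", "/usr/local")]

    deps = [Dep(["/opt/spack/zlib-1.2.12"]), Dep(["/usr"])]
    entries = []
    for dep in deps:
        entries.extend(dep.cmake_prefix_paths)
    print(filter_system_paths(entries))  # ['/opt/spack/zlib-1.2.12']
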
@@ -138,12 +138,28 @@ def import_modules(self):
                     path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".")
                 )

-        modules = [mod for mod in modules if re.match("[a-zA-Z0-9._]+$", mod)]
+        modules = [
+            mod
+            for mod in modules
+            if re.match("[a-zA-Z0-9._]+$", mod) and not any(map(mod.startswith, self.skip_modules))
+        ]

         tty.debug("Detected the following modules: {0}".format(modules))

         return modules

+    @property
+    def skip_modules(self):
+        """Names of modules that should be skipped when running tests.
+
+        These are a subset of import_modules. If a module has submodules,
+        they are skipped as well (meaning a.b is skipped if a is contained).
+
+        Returns:
+            list: list of strings of module names
+        """
+        return []
+
     @property
     def build_directory(self):
         """The root directory of the Python package.
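
A package can now exclude modules that cannot be imported in a test environment by overriding the new property. A hypothetical recipe (the package and module names are made up):

    from spack.package import *

    class PyExample(PythonPackage):
        """Hypothetical package whose optional GUI submodule needs a display."""

        @property
        def skip_modules(self):
            # skips example.gui and, per the docstring above, example.gui.*
            return ["example.gui"]
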
@@ -291,18 +291,23 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, fir
     elif first:
         return matching_specs[0]

-    elif len(matching_specs) > 1:
+    ensure_single_spec_or_die(spec, matching_specs)
+
+    return matching_specs[0]
+
+
+def ensure_single_spec_or_die(spec, matching_specs):
+    if len(matching_specs) <= 1:
+        return
+
     format_string = "{name}{@version}{%compiler}{arch=architecture}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
-        colorize("  @K{%s} " % s.dag_hash(7)) + s.cformat(format_string)
-        for s in matching_specs
+        colorize("  @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
     ]
-    args += ["Use a more specific spec."]
+    args += ["Use a more specific spec (e.g., prepend '/' to the hash)."]
     tty.die(*args)

-    return matching_specs[0]
-

 def gray_hash(spec, length):
     if not length:
@@ -219,7 +219,7 @@ def _collect_and_consume_cray_manifest_files(
         tty.debug("Reading manifest file: " + path)
         try:
             cray_manifest.read(path, not dry_run)
-        except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
+        except spack.error.SpackError as e:
             if fail_on_error:
                 raise
             else:
@@ -30,6 +30,12 @@ def setup_parser(subparser):
         help="print the Python version number and exit",
     )
     subparser.add_argument("-c", dest="python_command", help="command to execute")
+    subparser.add_argument(
+        "-u",
+        dest="unbuffered",
+        action="store_true",
+        help="for compatibility with xdist, do not use without adding -u to the interpreter",
+    )
     subparser.add_argument(
         "-i",
         dest="python_interpreter",
@@ -35,7 +35,6 @@
 """
 import llnl.util.tty as tty
 from llnl.util.link_tree import MergeConflictError
-from llnl.util.tty.color import colorize

 import spack.cmd
 import spack.environment as ev
@@ -66,16 +65,7 @@ def squash(matching_specs):
         tty.die("Spec matches no installed packages.")

     matching_in_view = [ms for ms in matching_specs if ms in view_specs]
-
-    if len(matching_in_view) > 1:
-        spec_format = "{name}{@version}{%compiler}{arch=architecture}"
-        args = ["Spec matches multiple packages.", "Matching packages:"]
-        args += [
-            colorize("  @K{%s} " % s.dag_hash(7)) + s.cformat(spec_format)
-            for s in matching_in_view
-        ]
-        args += ["Use a more specific spec."]
-        tty.die(*args)
+    spack.cmd.ensure_single_spec_or_die("Spec", matching_in_view)

     return matching_in_view[0] if matching_in_view else matching_specs[0]
@@ -4,8 +4,10 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import json
+import sys

 import jsonschema
+import jsonschema.exceptions
 import six

 import llnl.util.tty as tty
@@ -161,10 +163,21 @@ def entries_to_specs(entries):


 def read(path, apply_updates):
+    if sys.version_info >= (3, 0):
+        decode_exception_type = json.decoder.JSONDecodeError
+    else:
+        decode_exception_type = ValueError
+
+    try:
         with open(path, "r") as json_file:
             json_data = json.load(json_file)

         jsonschema.validate(json_data, manifest_schema)
+    except (jsonschema.exceptions.ValidationError, decode_exception_type) as e:
+        raise six.raise_from(
+            ManifestValidationError("error parsing manifest JSON:", str(e)),
+            e,
+        )
+
     specs = entries_to_specs(json_data["specs"])
     tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
@@ -179,3 +192,8 @@ def read(path, apply_updates):
     if apply_updates:
         for spec in specs.values():
             spack.store.db.add(spec, directory_layout=None)
+
+
+class ManifestValidationError(spack.error.SpackError):
+    def __init__(self, msg, long_msg=None):
+        super(ManifestValidationError, self).__init__(msg, long_msg)
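
The version shim in read() is the usual pattern for straddling interpreters: Python 2's json raises a bare ValueError on malformed input, while Python 3 has json.decoder.JSONDecodeError (a ValueError subclass). A self-contained sketch of the same conversion, with a plain ValueError standing in for ManifestValidationError:

    import json
    import sys

    if sys.version_info >= (3, 0):
        decode_exception_type = json.decoder.JSONDecodeError
    else:
        decode_exception_type = ValueError

    def parse_manifest_text(text):
        try:
            return json.loads(text)
        except decode_exception_type as e:
            # six.raise_from(...) chains the cause on both interpreters
            raise ValueError("error parsing manifest JSON: %s" % e)

    parse_manifest_text('{"specs": []}')  # ok
    # parse_manifest_text('{')            # raises the wrapped error
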
@@ -65,8 +65,8 @@ def deactivate_header(shell):
     if shell == "csh":
         cmds += "unsetenv SPACK_ENV;\n"
         cmds += "if ( $?SPACK_OLD_PROMPT ) "
-        cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
-        cmds += "unsetenv SPACK_OLD_PROMPT;\n"
+        cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&'
+        cmds += " unsetenv SPACK_OLD_PROMPT';\n"
         cmds += "unalias despacktivate;\n"
     elif shell == "fish":
         cmds += "set -e SPACK_ENV;\n"
@@ -54,7 +54,7 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 import spack.version
-from spack.util.compression import decompressor_for, extension
+from spack.util.compression import decompressor_for, extension_from_path
 from spack.util.executable import CommandNotFoundError, which
 from spack.util.string import comma_and, quote
@@ -338,6 +338,7 @@ def fetch(self):
         errors = []
         for url in self.candidate_urls:
             if not web_util.url_exists(url, self.curl):
+                tty.debug("URL does not exist: " + url)
                 continue

             try:
@@ -612,7 +613,7 @@ def expand(self):

     @_needs_stage
     def archive(self, destination, **kwargs):
-        assert extension(destination) == "tar.gz"
+        assert extension_from_path(destination) == "tar.gz"
         assert self.stage.source_path.startswith(self.stage.path)

         tar = which("tar", required=True)
@@ -1543,7 +1544,19 @@ def for_package_version(pkg, version):
             ref_type: version.ref,
             "no_cache": True,
         }
+
         kwargs["submodules"] = getattr(pkg, "submodules", False)
+
+        # if we have a ref_version already, and it is a version from the package,
+        # we can use that version's submodule specifications
+        if pkg.version.ref_version:
+            ref_version = spack.version.Version(pkg.version.ref_version[0])
+            ref_version_attributes = pkg.versions.get(ref_version)
+            if ref_version_attributes:
+                kwargs["submodules"] = ref_version_attributes.get(
+                    "submodules", kwargs["submodules"]
+                )
+
         fetcher = GitFetchStrategy(**kwargs)
         return fetcher
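
This means a git ref that is pinned to a known package version inherits that version's submodule setting. A hypothetical recipe showing the attributes consulted; installing it as example@<40-char sha>=1.2.0 would pick up submodules=True from the 1.2.0 entry:

    from spack.package import *

    class Example(Package):
        """Hypothetical package with a submodule-bearing release."""

        git = "https://example.com/example.git"

        version("1.2.0", tag="v1.2.0", submodules=True)
        version("main", branch="main")
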
@@ -44,7 +44,7 @@ def __call__(self, spec):

 #: Hash descriptor used only to transfer a DAG, as is, across processes
 process_hash = SpecHashDescriptor(
-    deptype=("build", "link", "run", "test"), package_hash=False, name="process_hash"
+    deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
 )
@@ -84,6 +84,9 @@
 #: queue invariants).
 STATUS_REMOVED = "removed"

+is_windows = sys.platform == "win32"
+is_osx = sys.platform == "darwin"
+

 class InstallAction(object):
     #: Don't perform an install
@@ -165,7 +168,9 @@ def _do_fake_install(pkg):
     if not pkg.name.startswith("lib"):
         library = "lib" + library

-    dso_suffix = ".dylib" if sys.platform == "darwin" else ".so"
+    plat_shared = ".dll" if is_windows else ".so"
+    plat_static = ".lib" if is_windows else ".a"
+    dso_suffix = ".dylib" if is_osx else plat_shared

     # Install fake command
     fs.mkdirp(pkg.prefix.bin)
@@ -180,7 +185,7 @@ def _do_fake_install(pkg):

     # Install fake shared and static libraries
     fs.mkdirp(pkg.prefix.lib)
-    for suffix in [dso_suffix, ".a"]:
+    for suffix in [dso_suffix, plat_static]:
         fs.touch(os.path.join(pkg.prefix.lib, library + suffix))

     # Install fake man page
@@ -1214,7 +1219,10 @@ def _install_task(self, task):
             spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
                 pkg, build_process, install_args
            )
+            # Currently this is how RPATH-like behavior is achieved on Windows: after
+            # install, establish runtime linkage via the Windows runtime link object.
+            # Note: this is a no-op on non-Windows platforms.
+            pkg.windows_establish_runtime_linkage()
             # Note: PARENT of the build process adds the new package to
             # the database, so that we don't need to re-read from file.
             spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
@@ -18,6 +18,7 @@
 import pstats
 import re
 import signal
+import subprocess as sp
 import sys
 import traceback
 import warnings
@@ -623,15 +624,19 @@ class SpackCommand(object):
     their output.
     """

-    def __init__(self, command_name):
+    def __init__(self, command_name, subprocess=False):
         """Create a new SpackCommand that invokes ``command_name`` when called.

         Args:
             command_name (str): name of the command to invoke
+            subprocess (bool): whether to fork a subprocess or not. Currently not supported
+                on Windows, where it is always False.
         """
         self.parser = make_argument_parser()
         self.command = self.parser.add_command(command_name)
         self.command_name = command_name
+        # TODO: figure out how to support this on windows
+        self.subprocess = subprocess if sys.platform != "win32" else False

     def __call__(self, *argv, **kwargs):
         """Invoke this SpackCommand.
@@ -656,11 +661,21 @@ def __call__(self, *argv, **kwargs):
         self.error = None

         prepend = kwargs["global_args"] if "global_args" in kwargs else []

-        args, unknown = self.parser.parse_known_args(prepend + [self.command_name] + list(argv))
-
         fail_on_error = kwargs.get("fail_on_error", True)

+        if self.subprocess:
+            p = sp.Popen(
+                [spack.paths.spack_script, self.command_name] + prepend + list(argv),
+                stdout=sp.PIPE,
+                stderr=sp.STDOUT,
+            )
+            out, self.returncode = p.communicate()
+            out = out.decode()
+        else:
+            args, unknown = self.parser.parse_known_args(
+                prepend + [self.command_name] + list(argv)
+            )
+
             out = StringIO()
             try:
                 with log_output(out):
@@ -675,6 +690,7 @@ def __call__(self, *argv, **kwargs):
                 if fail_on_error:
                     self._log_command_output(out)
                     raise
+            out = out.getvalue()

         if fail_on_error and self.returncode not in (None, 0):
             self._log_command_output(out)
@@ -683,7 +699,7 @@ def __call__(self, *argv, **kwargs):
                 % (self.returncode, self.command_name, ", ".join("'%s'" % a for a in argv))
             )

-        return out.getvalue()
+        return out

     def _log_command_output(self, out):
         if tty.is_verbose():
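
Subprocess mode trades speed for isolation: every call gets a fresh interpreter, which sidesteps leftover global argparse state between invocations (the known failure mode exercised by the help-command tests below). Usage is unchanged apart from the constructor flag; a short sketch:

    from spack.main import SpackCommand

    ci = SpackCommand("ci")                            # in-process: fast, shared state
    ci_isolated = SpackCommand("ci", subprocess=True)  # forks bin/spack: isolated

    out = ci_isolated("--help")
    assert "usage:" in out
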
@@ -97,6 +97,9 @@
 _spack_configure_argsfile = "spack-configure-args.txt"


+is_windows = sys.platform == "win32"
+
+
 def preferred_version(pkg):
     """
     Returns a sorted list of the preferred versions of the package.
@@ -182,6 +185,30 @@ def copy(self):
         return other


+class WindowsRPathMeta(object):
+    """Collection of functionality surrounding Windows RPATH specific features.
+
+    This is essentially meaningless for all other platforms
+    due to their use of RPATH. All methods within this class are no-ops on
+    non-Windows platforms. Packages can customize and manipulate this class as
+    they would a genuine RPATH, i.e. adding directories that contain
+    runtime library dependencies."""
+
+    def add_search_paths(self, *path):
+        """Add additional rpaths that are not implicitly included in the search
+        scheme.
+        """
+        self.win_rpath.include_additional_link_paths(*path)
+
+    def windows_establish_runtime_linkage(self):
+        """Establish RPATH on Windows.
+
+        Performs symlinking to incorporate rpath dependencies to Windows runtime search paths.
+        """
+        if is_windows:
+            self.win_rpath.establish_link()
+
+
 #: Registers which are the detectable packages, by repo and package name
 #: Need a pass of package repositories to be filled.
 detectable_packages = collections.defaultdict(list)
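
A package that installs runtime DLLs somewhere the default scheme would miss can feed extra directories into the simulated RPATH. A hypothetical recipe (the tools directory is made up):

    from spack.package import *

    class ExampleLib(Package):
        """Hypothetical package whose runtime DLLs land in <prefix>/tools."""

        def install(self, spec, prefix):
            # ... build and install ...
            # make <prefix>/tools part of this package's simulated RPATH
            self.add_search_paths(self.prefix.tools)
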
@@ -221,7 +248,7 @@ def to_windows_exe(exe):
         plat_exe = []
         if hasattr(cls, "executables"):
             for exe in cls.executables:
-                if sys.platform == "win32":
+                if is_windows:
                     exe = to_windows_exe(exe)
                 plat_exe.append(exe)
         return plat_exe
@@ -513,7 +540,7 @@ def test_log_pathname(test_stage, spec):
     return os.path.join(test_stage, "test-{0}-out.txt".format(TestSuite.test_pkg_id(spec)))


-class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
+class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewMixin, object)):
     """This is the superclass for all spack packages.

     ***The Package class***
@@ -753,6 +780,8 @@ def __init__(self, spec):
         # Set up timing variables
         self._fetch_time = 0.0

+        self.win_rpath = fsys.WindowsSimulatedRPath(self)
+
         if self.is_extension:
             pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
             pkg_cls(self.extendee_spec)._check_extendable()
@@ -1740,6 +1769,10 @@ def content_hash(self, content=None):

         return b32_hash

+    @property
+    def cmake_prefix_paths(self):
+        return [self.prefix]
+
     def _has_make_target(self, target):
         """Checks to see if 'target' is a valid target in a Makefile.
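
Combined with the get_cmake_prefix_path() change earlier, this default can be overridden by packages whose CMake package files do not sit directly under the install prefix. A hypothetical override:

    import os

    from spack.package import *

    class ExampleCmake(Package):
        """Hypothetical package keeping CMake config files under <prefix>/cmake."""

        @property
        def cmake_prefix_paths(self):
            # dependents see these on CMAKE_PREFIX_PATH instead of just self.prefix
            return [self.prefix, os.path.join(self.prefix, "cmake")]
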
@@ -2750,6 +2783,8 @@ def rpath(self):
         deps = self.spec.dependencies(deptype="link")
         rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
         rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
+        if is_windows:
+            rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
         return rpaths

     @property
@@ -78,7 +78,9 @@ def lex_word(self, word):
                 break

         if remainder and not remainder_used:
-            raise LexError("Invalid character", word, word.index(remainder))
+            msg = "Invalid character, '{0}',".format(remainder[0])
+            msg += " in '{0}' at index {1}".format(word, word.index(remainder))
+            raise LexError(msg, word, word.index(remainder))

         return tokens
@@ -11,6 +11,7 @@
 import macholib.mach_o
 import macholib.MachO

+import llnl.util.filesystem as fs
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
@@ -887,7 +888,7 @@ def file_is_relocatable(filename, paths_to_relocate=None):
     # Remove the RPATHS from the strings in the executable
     set_of_strings = set(strings(filename, output=str).split())

-    m_type, m_subtype = mime_type(filename)
+    m_type, m_subtype = fs.mime_type(filename)
     if m_type == "application":
         tty.debug("{0},{1}".format(m_type, m_subtype), level=2)
@@ -923,7 +924,7 @@ def is_binary(filename):
     Returns:
         True or False
     """
-    m_type, _ = mime_type(filename)
+    m_type, _ = fs.mime_type(filename)

     msg = "[{0}] -> ".format(filename)
     if m_type == "application":
@@ -934,30 +935,6 @@ def is_binary(filename):
     return False


-@llnl.util.lang.memoized
-def _get_mime_type():
-    file_cmd = executable.which("file")
-    for arg in ["-b", "-h", "--mime-type"]:
-        file_cmd.add_default_arg(arg)
-    return file_cmd
-
-
-@llnl.util.lang.memoized
-def mime_type(filename):
-    """Returns the mime type and subtype of a file.
-
-    Args:
-        filename: file to be analyzed
-
-    Returns:
-        Tuple containing the MIME type and subtype
-    """
-    output = _get_mime_type()(filename, output=str, error=str).strip()
-    tty.debug("==> " + output, level=2)
-    type, _, subtype = output.partition("/")
-    return type, subtype
-
-
 # Memoize this due to repeated calls to libraries in the same directory.
 @llnl.util.lang.memoized
 def _exists_dir(dirname):
@@ -975,7 +952,7 @@ def fixup_macos_rpath(root, filename):
         True if fixups were applied, else False
     """
     abspath = os.path.join(root, filename)
-    if mime_type(abspath) != ("application", "x-mach-binary"):
+    if fs.mime_type(abspath) != ("application", "x-mach-binary"):
         return False

     # Get Mach-O header commands
@@ -1445,6 +1445,9 @@ class Body(object):

         # dependencies
         if spec.concrete:
+            # older specs do not have package hashes, so we have to do this carefully
+            if getattr(spec, "_package_hash", None):
+                clauses.append(fn.package_hash(spec.name, spec._package_hash))
             clauses.append(fn.hash(spec.name, spec.dag_hash()))

         # add all clauses from dependencies
@@ -1516,8 +1519,10 @@ def key_fn(item):
         # specs will be computed later
         version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
         for idx, v in enumerate(version_preferences):
+            # v can be a string, so force it into an actual version for comparisons
+            ver = spack.version.Version(v)
             self.declared_versions[pkg_name].append(
-                DeclaredVersion(version=v, idx=idx, origin=version_provenance.packages_yaml)
+                DeclaredVersion(version=ver, idx=idx, origin=version_provenance.packages_yaml)
             )

         for spec in specs:
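
The coercion matters because preferences from packages.yaml arrive as strings, and string order disagrees with version order. A quick demonstration (runnable inside a Spack checkout):

    import spack.version as vn

    assert "1.10" < "1.9"                          # lexicographic: wrong way around
    assert vn.Version("1.10") > vn.Version("1.9")  # version-aware: correct
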
@@ -4056,6 +4056,9 @@ def _cmp_node(self):
         yield self.compiler_flags
         yield self.architecture

+        # this is not present on older specs
+        yield getattr(self, "_package_hash", None)
+
     def eq_node(self, other):
         """Equality with another spec, not including dependencies."""
         return (other is not None) and lang.lazy_eq(self._cmp_node, other._cmp_node)
@@ -4065,6 +4068,16 @@ def _cmp_iter(self):
         for item in self._cmp_node():
             yield item

+        # This needs to be in _cmp_iter so that no specs with different process hashes
+        # are considered the same by `__hash__` or `__eq__`.
+        #
+        # TODO: We should eventually unify the `_cmp_*` methods with `to_node_dict` so
+        # TODO: there aren't two sources of truth, but this needs some thought, since
+        # TODO: they exist for speed. We should benchmark whether it's really worth
+        # TODO: having two types of hashing now that we use `json` instead of `yaml` for
+        # TODO: spec hashing.
+        yield self.process_hash() if self.concrete else None
+
         def deps():
             for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
                 yield dep.spec.name
@@ -4981,7 +4994,7 @@ def __missing__(self, key):


 #: These are possible token types in the spec grammar.
-HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
+HASH, DEP, VER, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)

 #: Regex for fully qualified spec names. (e.g., builtin.hdf5)
 spec_id_re = r"\w[\w.-]*"
@@ -5001,10 +5014,13 @@ def __init__(self):
         )
         super(SpecLexer, self).__init__(
             [
-                (r"\^", lambda scanner, val: self.token(DEP, val)),
-                (r"\@", lambda scanner, val: self.token(AT, val)),
+                (
+                    r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?",
+                    lambda scanner, val: self.token(VER, val),
+                ),
                 (r"\:", lambda scanner, val: self.token(COLON, val)),
                 (r"\,", lambda scanner, val: self.token(COMMA, val)),
+                (r"\^", lambda scanner, val: self.token(DEP, val)),
                 (r"\+", lambda scanner, val: self.token(ON, val)),
                 (r"\-", lambda scanner, val: self.token(OFF, val)),
                 (r"\~", lambda scanner, val: self.token(OFF, val)),
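
Order matters in this scanner-style lexer: the first pattern that matches wins, so the greedy VER pattern is tried before shorter overlapping ones, and the '^' rule simply moves later in the list. A standalone demonstration of the new token using re.Scanner and the regex from the hunk above:

    import re

    ver = (r"\@([\w.\-]*\s*)*(\s*\=\s*\w[\w.\-]*)?", lambda s, v: ("VER", v))
    dep = (r"\^", lambda s, v: ("DEP", v))

    scanner = re.Scanner([ver, dep])
    tokens, remainder = scanner.scan("@git.develop=1.2.3")
    print(tokens)  # [('VER', '@git.develop=1.2.3')] -- ref and pin in one token
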
@@ -5142,7 +5158,7 @@ def do_parse(self):
                 else:
                     # If the next token can be part of a valid anonymous spec,
                     # create the anonymous spec
-                    if self.next.type in (AT, ON, OFF, PCT):
+                    if self.next.type in (VER, ON, OFF, PCT):
                         # Raise an error if the previous spec is already concrete
                         if specs and specs[-1].concrete:
                             raise RedundantSpecError(specs[-1], "compiler, version, or variant")
@@ -5250,7 +5266,7 @@ def spec(self, name):
         spec.name = spec_name

         while self.next:
-            if self.accept(AT):
+            if self.accept(VER):
                 vlist = self.version_list()
                 spec._add_versions(vlist)
@@ -5268,7 +5284,6 @@ def spec(self, name):
             elif self.accept(ID):
                 self.previous = self.token
                 if self.accept(EQ):
-                    # We're adding a key-value pair to the spec
                     self.expect(VAL)
                     spec._add_flag(self.previous.value, self.token.value)
                     self.previous = None
@@ -5304,16 +5319,24 @@ def variant(self, name=None):
         return self.token.value

     def version(self):
+
         start = None
         end = None
-        if self.accept(ID):
-            start = self.token.value
-            if self.accept(EQ):
-                # This is for versions that are associated with a hash
-                # i.e. @[40 char hash]=version
-                start += self.token.value
-                self.expect(VAL)
-                start += self.token.value
+
+        def str_translate(value):
+            # return None for empty strings since we can end up with `'@'.strip('@')`
+            if not (value and value.strip()):
+                return None
+            else:
+                return value
+
+        if self.token.type is COMMA:
+            # need to increment commas, could be ID or COLON
+            self.accept(ID)
+
+        if self.token.type in (VER, ID):
+            version_spec = self.token.value.lstrip("@")
+            start = str_translate(version_spec)

         if self.accept(COLON):
             if self.accept(ID):
@@ -5323,10 +5346,10 @@ def version(self):
             else:
                 end = self.token.value
         elif start:
-            # No colon, but there was a version.
+            # No colon, but there was a version
             return vn.Version(start)
         else:
-            # No colon and no id: invalid version.
+            # No colon and no id: invalid version
             self.next_token_error("Invalid version specifier")

         if start:
@@ -5349,7 +5372,7 @@ def compiler(self):
         compiler = CompilerSpec.__new__(CompilerSpec)
         compiler.name = self.token.value
         compiler.versions = vn.VersionList()
-        if self.accept(AT):
+        if self.accept(VER):
             vlist = self.version_list()
             compiler._add_versions(vlist)
         else:
@@ -484,6 +484,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
     assert affected_pkg_names == expected_affected_pkg_names


+@pytest.mark.maybeslow
 @pytest.mark.regression("29947")
 def test_affected_specs_on_first_concretization(mutable_mock_env_path, config):
     e = ev.create("first_concretization")
@@ -50,12 +50,14 @@ def test_checksum(arguments, expected, mock_packages, mock_stage):

 @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
 def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch):
+    # TODO: mock_fetch doesn't actually work with stage, working around by ignoring
+    # fail_on_error for now
     def _get_number(*args, **kwargs):
         return 1

     monkeypatch.setattr(tty, "get_number", _get_number)

-    output = spack_checksum("preferred-test")
+    output = spack_checksum("preferred-test", fail_on_error=False)
     assert "version of preferred-test" in output
     assert "version(" in output
@@ -2192,10 +2192,8 @@ def fake_download_and_extract_artifacts(url, work_dir):
 )
 def test_ci_help(subcmd, capsys):
     """Make sure `spack ci` --help describes the (sub)command help."""
-    with pytest.raises(SystemExit):
-        ci_cmd(subcmd, "--help")
-
-    out = str(capsys.readouterr())
+    out = spack.main.SpackCommand("ci", subprocess=True)(subcmd, "--help")
     usage = "usage: spack ci {0}{1}[".format(subcmd, " " if subcmd else "")
     assert usage in out
@@ -16,7 +16,7 @@
 import spack.paths
 from spack.cmd.commands import _positional_to_subroutine

-commands = spack.main.SpackCommand("commands")
+commands = spack.main.SpackCommand("commands", subprocess=True)

 parser = spack.main.make_argument_parser()
 spack.main.add_all_commands(parser)
@@ -104,17 +104,18 @@ def test_rst_with_input_files(tmpdir):


 def test_rst_with_header(tmpdir):
+    local_commands = spack.main.SpackCommand("commands")
     fake_header = "this is a header!\n\n"

     filename = tmpdir.join("header.txt")
     with filename.open("w") as f:
         f.write(fake_header)

-    out = commands("--format=rst", "--header", str(filename))
+    out = local_commands("--format=rst", "--header", str(filename))
     assert out.startswith(fake_header)

     with pytest.raises(spack.main.SpackCommandError):
-        commands("--format=rst", "--header", "asdfjhkf")
+        local_commands("--format=rst", "--header", "asdfjhkf")


 def test_rst_update(tmpdir):
@@ -207,13 +208,14 @@ def test_update_completion_arg(tmpdir, monkeypatch):

     monkeypatch.setattr(spack.cmd.commands, "update_completion_args", mock_args)

+    local_commands = spack.main.SpackCommand("commands")
     # ensure things fail if --update-completion isn't specified alone
     with pytest.raises(spack.main.SpackCommandError):
-        commands("--update-completion", "-a")
+        local_commands("--update-completion", "-a")

     # ensure arg is restored
     assert "--update-completion" not in mock_bashfile.read()
-    commands("--update-completion")
+    local_commands("--update-completion")
     assert "--update-completion" in mock_bashfile.read()
@@ -27,7 +27,9 @@ def test_create_db_tarball(tmpdir, database):

     # get the first non-dotfile to avoid coverage files in the directory
     files = os.listdir(os.getcwd())
-    tarball_name = next(f for f in files if not f.startswith("."))
+    tarball_name = next(
+        f for f in files if not f.startswith(".") and not f.startswith("tests")
+    )

     # debug command made an archive
     assert os.path.exists(tarball_name)
@@ -86,6 +86,7 @@ def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
             "node_compiler",
             "node_compiler_version",
             "node",
+            "package_hash",
             "hash",
         )
     )
@@ -869,7 +869,7 @@ def test_env_with_included_config_var_path(packages_file):

     config_real_path = substitute_path_variables(config_var_path)
     fs.mkdirp(os.path.dirname(config_real_path))
-    fs.rename(packages_file.strpath, config_real_path)
+    shutil.move(packages_file.strpath, config_real_path)
     assert os.path.exists(config_real_path)

     with e:
@@ -11,7 +11,7 @@
 @pytest.mark.xfail
 def test_reuse_after_help():
     """Test `spack help` can be called twice with the same SpackCommand."""
-    help_cmd = SpackCommand("help")
+    help_cmd = SpackCommand("help", subprocess=True)
     help_cmd()

     # This second invocation will somehow fail because the parser no
@@ -30,14 +30,14 @@ def test_reuse_after_help():

 def test_help():
     """Sanity check the help command to make sure it works."""
-    help_cmd = SpackCommand("help")
+    help_cmd = SpackCommand("help", subprocess=True)
     out = help_cmd()
     assert "These are common spack commands:" in out


 def test_help_all():
     """Test the spack help --all flag"""
-    help_cmd = SpackCommand("help")
+    help_cmd = SpackCommand("help", subprocess=True)
     out = help_cmd("--all")
     assert "Complete list of spack commands:" in out
@@ -10,7 +10,7 @@

 import pytest

-from llnl.util.filesystem import mkdirp
+from llnl.util.filesystem import mkdirp, working_dir

 import spack
 from spack.util.executable import which
@@ -40,10 +40,11 @@ def check_git_version():


 @pytest.fixture(scope="function")
-def git_tmp_worktree(tmpdir):
+def git_tmp_worktree(tmpdir, mock_git_version_info):
     """Create new worktree in a temporary folder and monkeypatch
     spack.paths.prefix to point to it.
     """
+    with working_dir(mock_git_version_info[0]):
         # TODO: This is fragile and should be high priority for
         # follow up fixes. 27021
         # Path length is occasionally too long on Windows
@@ -10,7 +10,7 @@

 import spack.spec
 import spack.user_environment as uenv
-from spack.main import SpackCommand, SpackCommandError
+from spack.main import SpackCommand

 load = SpackCommand("load")
 unload = SpackCommand("unload")
@@ -115,10 +115,12 @@ def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
     """Test with and without the --first option"""
     install("libelf@0.8.12")
     install("libelf@0.8.13")
-    # Now there are two versions of libelf
-    with pytest.raises(SpackCommandError):
-        # This should cause an error due to multiple versions
-        load("--sh", "libelf")
+
+    # Now there are two versions of libelf, which should cause an error
+    out = load("--sh", "libelf", fail_on_error=False)
+    assert "matches multiple packages" in out
+    assert "Use a more specific spec" in out

     # Using --first should avoid the error condition
     load("--sh", "--first", "libelf")
@@ -210,7 +210,7 @@ def test_setdefault_command(mutable_database, mutable_config):
     for k in preferred, other_spec:
         assert os.path.exists(writers[k].layout.filename)
     assert os.path.exists(link_name) and os.path.islink(link_name)
-    assert os.path.realpath(link_name) == writers[other_spec].layout.filename
+    assert os.path.realpath(link_name) == os.path.realpath(writers[other_spec].layout.filename)

     # Reset the default to be the preferred spec
     module("lmod", "setdefault", preferred)
@@ -219,4 +219,4 @@ def test_setdefault_command(mutable_database, mutable_config):
     for k in preferred, other_spec:
         assert os.path.exists(writers[k].layout.filename)
     assert os.path.exists(link_name) and os.path.islink(link_name)
-    assert os.path.realpath(link_name) == writers[preferred].layout.filename
+    assert os.path.realpath(link_name) == os.path.realpath(writers[preferred].layout.filename)
@@ -46,22 +46,22 @@ def has_develop_branch():


 @pytest.fixture(scope="function")
-def flake8_package():
+def flake8_package(tmpdir):
     """Style only checks files that have been modified. This fixture makes a small
     change to the ``flake8`` mock package, yields the filename, then undoes the
     change on cleanup.
     """
     repo = spack.repo.Repo(spack.paths.mock_packages_path)
     filename = repo.filename_for_package_name("flake8")
-    tmp = filename + ".tmp"
+    rel_path = os.path.dirname(os.path.relpath(filename, spack.paths.prefix))
+    tmp = tmpdir / rel_path / "flake8-ci-package.py"
+    tmp.ensure()
+    tmp = str(tmp)

-    try:
-        shutil.copy(filename, tmp)
-        package = FileFilter(filename)
-        package.filter("state = 'unmodified'", "state = 'modified'", string=True)
-        yield filename
-    finally:
-        shutil.move(tmp, filename)
+    shutil.copy(filename, tmp)
+    package = FileFilter(tmp)
+    package.filter("state = 'unmodified'", "state = 'modified'", string=True)
+    yield tmp


 @pytest.fixture
@@ -71,9 +71,8 @@ def flake8_package_with_errors(scope="function"):
     filename = repo.filename_for_package_name("flake8")
     tmp = filename + ".tmp"

-    try:
-        shutil.copy(filename, tmp)
-        package = FileFilter(filename)
+    shutil.copy(filename, tmp)
+    package = FileFilter(tmp)

     # this is a black error (quote style and spacing before/after operator)
     package.filter('state = "unmodified"', "state = 'modified'", string=True)
@@ -82,9 +81,7 @@ def flake8_package_with_errors(scope="function"):
     package.filter(
         "from spack.package import *", "from spack.package import *\nimport os", string=True
     )
-    yield filename
-    finally:
-        shutil.move(tmp, filename)
+    yield tmp


 def test_changed_files_from_git_rev_base(tmpdir, capfd):
@@ -125,7 +122,7 @@ def test_changed_no_base(tmpdir, capfd):
     assert "This repository does not have a 'foobar'" in err


-def test_changed_files_all_files(flake8_package):
+def test_changed_files_all_files():
     # it's hard to guarantee "all files", so do some sanity checks.
     files = set(
         [
@@ -139,13 +136,18 @@ def test_changed_files_all_files(flake8_package):

     # a builtin package
     zlib = spack.repo.path.get_pkg_class("zlib")
-    assert zlib.module.__file__ in files
+    zlib_file = zlib.module.__file__
+    if zlib_file.endswith("pyc"):
+        zlib_file = zlib_file[:-1]
+    assert zlib_file in files

     # a core spack file
     assert os.path.join(spack.paths.module_path, "spec.py") in files

     # a mock package
-    assert flake8_package in files
+    repo = spack.repo.Repo(spack.paths.mock_packages_path)
+    filename = repo.filename_for_package_name("flake8")
+    assert filename in files

     # this test
     assert __file__ in files
@@ -228,11 +228,14 @@ def test_missing_command():
     ],
     ids=["no_stem", "vacuous", "leading_hyphen", "basic_good", "trailing_slash", "hyphenated"],
 )
-def test_extension_naming(extension_path, expected_exception, config):
+def test_extension_naming(tmpdir, extension_path, expected_exception, config):
     """Ensure that we are correctly validating configured extension paths
     for conformity with the rules: the basename should match
     ``spack-<name>``; <name> may have embedded hyphens but not begin with one.
     """
+    # NOTE: if the directory is a valid extension directory name the "vacuous" test will
+    # fail because it resolves to current working directory
+    with tmpdir.as_cwd():
         with spack.config.override("config:extensions", [extension_path]):
             with pytest.raises(expected_exception):
                 spack.cmd.get_module("no-such-command")
@@ -1188,6 +1188,9 @@ def mock_fn(*args, **kwargs):
         second_spec.concretize()
         assert first_spec.dag_hash() != second_spec.dag_hash()

+    @pytest.mark.skipif(
+        sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
+    )
     @pytest.mark.regression("20292")
     @pytest.mark.parametrize(
         "context",
@@ -1510,6 +1513,9 @@ def test_add_microarchitectures_on_explicit_request(self):
         s = Spec("python target=k10").concretized()
         assert s.satisfies("target=k10")

+    @pytest.mark.skipif(
+        sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
+    )
     @pytest.mark.regression("29201")
     def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_recipe):
         """Test that we can reuse installed specs with versions not
@@ -1528,6 +1534,9 @@ def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_rec
         assert root.dag_hash() == new_root.dag_hash()

     @pytest.mark.regression("29201")
+    @pytest.mark.skipif(
+        sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
+    )
     def test_installed_version_is_selected_only_for_reuse(
         self, mutable_database, repo_with_changing_recipe
     ):
@@ -1769,6 +1778,9 @@ def test_git_ref_version_errors_if_unknown_version(self, git_ref):
             s.concretized()

     @pytest.mark.regression("31484")
+    @pytest.mark.skipif(
+        sys.version_info[:2] == (2, 7), reason="Fixture fails intermittently with Python 2.7"
+    )
     def test_installed_externals_are_reused(self, mutable_database, repo_with_changing_recipe):
         """Test that external specs that are in the DB can be reused."""
         if spack.config.get("config:concretizer") == "original":
@@ -27,7 +27,7 @@

 import llnl.util.lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp, remove_linked_tree, working_dir
+from llnl.util.filesystem import copy_tree, mkdirp, remove_linked_tree, working_dir

 import spack.binary_distribution
 import spack.caches
@@ -803,7 +803,7 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes, _store
     with spack.store.use_store(str(store_path)) as store:
         with spack.repo.use_repositories(mock_repo_path):
             _populate(store.db)
-    store_path.copy(store_cache, mode=True, stat=True)
+    copy_tree(str(store_path), str(store_cache))

     # Make the DB filesystem read-only to ensure we can't modify entries
     store_path.join(".spack-db").chmod(mode=0o555, rec=1)
@@ -844,7 +844,7 @@ def mutable_database(database_mutable_config, _store_dir_and_cache):
     # Restore the initial state by copying the content of the cache back into
     # the store and making the database read-only
     store_path.remove(rec=1)
-    store_cache.copy(store_path, mode=True, stat=True)
+    copy_tree(str(store_cache), str(store_path))
     store_path.join(".spack-db").chmod(mode=0o555, rec=1)
|
@ -365,3 +365,38 @@ def test_read_old_manifest_v1_2(tmpdir, mutable_config, mock_packages, mutable_d
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
cray_manifest.read(manifest_file_path, True)
|
cray_manifest.read(manifest_file_path, True)
|
||||||
|
|
||||||
|
|
||||||
|
def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable_database):
|
||||||
|
manifest_dir = str(tmpdir.mkdir("manifest_dir"))
|
||||||
|
# Does not parse as valid JSON
|
||||||
|
invalid_json_path = os.path.join(manifest_dir, "invalid-json.json")
|
||||||
|
with open(invalid_json_path, "w") as f:
|
||||||
|
f.write(
|
||||||
|
"""\
|
||||||
|
{
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
with pytest.raises(cray_manifest.ManifestValidationError) as e:
|
||||||
|
cray_manifest.read(invalid_json_path, True)
|
||||||
|
str(e)
|
||||||
|
|
||||||
|
# Valid JSON, but does not conform to schema (schema-version is not a string
|
||||||
|
# of length > 0)
|
||||||
|
invalid_schema_path = os.path.join(manifest_dir, "invalid-schema.json")
|
||||||
|
with open(invalid_schema_path, "w") as f:
|
||||||
|
f.write(
|
||||||
|
"""\
|
||||||
|
{
|
||||||
|
"_meta": {
|
||||||
|
"file-type": "cray-pe-json",
|
||||||
|
"system-type": "EX",
|
||||||
|
"schema-version": ""
|
||||||
|
},
|
||||||
|
"specs": []
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
with pytest.raises(cray_manifest.ManifestValidationError) as e:
|
||||||
|
cray_manifest.read(invalid_schema_path, True)
|
||||||
|
str(e)
|
||||||
|
0
lib/spack/spack/test/data/compression/Foo.cxx
Normal file
0
lib/spack/spack/test/data/compression/Foo.cxx
Normal file
@@ -5,6 +5,7 @@

 import os
 import shutil
+import sys

 import pytest

@@ -85,12 +86,11 @@ def test_pkg_attributes(install_mockery, mock_fetch, monkeypatch):
     # assert baz_headers.basenames == ['baz.h']
     assert baz_headers.directories == [spec["baz"].home.include]

-    if "platform=windows" in spec:
-        lib_suffix = ".lib"
-    elif "platform=darwin" in spec:
-        lib_suffix = ".dylib"
-    else:
-        lib_suffix = ".so"
+    lib_suffix = ".so"
+    if sys.platform == "win32":
+        lib_suffix = ".dll"
+    elif sys.platform == "darwin":
+        lib_suffix = ".dylib"

     foo_libs = spec[foo].libs
     assert foo_libs.basenames == ["libFoo" + lib_suffix]
@@ -5,6 +5,7 @@

 import fnmatch
 import os.path
+import sys

 import pytest
 import six
@@ -19,18 +20,30 @@

 import spack.paths

+is_windows = sys.platform == "win32"
+

 @pytest.fixture()
 def library_list():
     """Returns an instance of LibraryList."""
     # Test all valid extensions: ['.a', '.dylib', '.so']
-    libs = [
+    libs = (
+        [
             "/dir1/liblapack.a",
             "/dir2/libpython3.6.dylib",  # name may contain periods
             "/dir1/libblas.a",
             "/dir3/libz.so",
             "libmpi.so.20.10.1",  # shared object libraries may be versioned
         ]
+        if not is_windows
+        else [
+            "/dir1/liblapack.lib",
+            "/dir2/libpython3.6.dll",
+            "/dir1/libblas.lib",
+            "/dir3/libz.dll",
+            "libmpi.dll.20.10.1",
+        ]
+    )

     return LibraryList(libs)

@@ -52,6 +65,16 @@ def header_list():
     return h


+# TODO: Remove below when llnl.util.filesystem.find_libraries becomes spec aware
+plat_static_ext = "lib" if is_windows else "a"
+
+plat_shared_ext = "dll" if is_windows else "so"
+
+plat_apple_shared_ext = "dylib"
+
+
 class TestLibraryList(object):
     def test_repr(self, library_list):
         x = eval(repr(library_list))
@@ -62,11 +85,11 @@ def test_joined_and_str(self, library_list):
         s1 = library_list.joined()
         expected = " ".join(
             [
-                "/dir1/liblapack.a",
-                "/dir2/libpython3.6.dylib",
-                "/dir1/libblas.a",
-                "/dir3/libz.so",
-                "libmpi.so.20.10.1",
+                "/dir1/liblapack.%s" % plat_static_ext,
+                "/dir2/libpython3.6.%s" % (plat_apple_shared_ext if not is_windows else "dll"),
+                "/dir1/libblas.%s" % plat_static_ext,
+                "/dir3/libz.%s" % plat_shared_ext,
+                "libmpi.%s.20.10.1" % plat_shared_ext,
             ]
         )
         assert s1 == expected
@@ -77,11 +100,11 @@ def test_joined_and_str(self, library_list):
         s3 = library_list.joined(";")
         expected = ";".join(
             [
-                "/dir1/liblapack.a",
-                "/dir2/libpython3.6.dylib",
-                "/dir1/libblas.a",
-                "/dir3/libz.so",
-                "libmpi.so.20.10.1",
+                "/dir1/liblapack.%s" % plat_static_ext,
+                "/dir2/libpython3.6.%s" % (plat_apple_shared_ext if not is_windows else "dll"),
+                "/dir1/libblas.%s" % plat_static_ext,
+                "/dir3/libz.%s" % plat_shared_ext,
+                "libmpi.%s.20.10.1" % plat_shared_ext,
             ]
         )
         assert s3 == expected
@@ -117,7 +140,7 @@ def test_paths_manipulation(self, library_list):

     def test_get_item(self, library_list):
         a = library_list[0]
-        assert a == "/dir1/liblapack.a"
+        assert a == "/dir1/liblapack.%s" % plat_static_ext

         b = library_list[:]
         assert type(b) == type(library_list)
@@ -126,9 +149,9 @@ def test_get_item(self, library_list):

     def test_add(self, library_list):
         pylist = [
-            "/dir1/liblapack.a",  # removed from the final list
-            "/dir2/libmpi.so",
-            "/dir4/libnew.a",
+            "/dir1/liblapack.%s" % plat_static_ext,  # removed from the final list
+            "/dir2/libmpi.%s" % plat_shared_ext,
+            "/dir4/libnew.%s" % plat_static_ext,
         ]
         another = LibraryList(pylist)
         both = library_list + another
@@ -231,6 +254,29 @@ def test_add(self, header_list):
 search_dir = os.path.join(spack.paths.test_path, "data", "directory_search")


+@pytest.mark.parametrize(
+    "lib_list,kwargs",
+    [
+        (["liba"], {"shared": True, "recursive": True}),
+        (["liba"], {"shared": False, "recursive": True}),
+        (["libc", "liba"], {"shared": True, "recursive": True}),
+        (["liba", "libc"], {"shared": False, "recursive": True}),
+        (["libc", "libb", "liba"], {"shared": True, "recursive": True}),
+        (["liba", "libb", "libc"], {"shared": False, "recursive": True}),
+    ],
+)
+def test_library_type_search(lib_list, kwargs):
+    results = find_libraries(lib_list, search_dir, **kwargs)
+    assert len(results) != 0
+    for result in results:
+        lib_type_ext = plat_shared_ext
+        if not kwargs["shared"]:
+            lib_type_ext = plat_static_ext
+        assert result.endswith(lib_type_ext) or (
+            kwargs["shared"] and result.endswith(plat_apple_shared_ext)
+        )
+
+
 @pytest.mark.parametrize(
     "search_fn,search_list,root,kwargs",
     [
@@ -341,6 +341,7 @@ def no_termios():
         (mock_shell_tstp_tstp_cont_cont, no_termios),
     ],
 )
+@pytest.mark.xfail(reason="Fails almost consistently when run with coverage and xdist")
 def test_foreground_background(test_fn, termios_on_or_off, tmpdir):
     """Functional tests for foregrounding and backgrounding a logged process.

@@ -460,6 +461,7 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
         (mock_shell_v_v_no_termios, no_termios),
     ],
 )
+@pytest.mark.xfail(reason="Fails almost consistently when run with coverage and xdist")
 def test_foreground_background_output(test_fn, capfd, termios_on_or_off, tmpdir):
     """Tests hitting 'v' toggles output, and that force_echo works."""
     if sys.version_info >= (3, 8) and sys.platform == "darwin" and termios_on_or_off == no_termios:
@@ -133,6 +133,14 @@ def test_url_for_version_with_no_urls(mock_packages, config):
         pkg_cls(spec).url_for_version("1.1")


+def test_custom_cmake_prefix_path(mock_packages, config):
+    spec = Spec("depends-on-define-cmake-prefix-paths").concretized()
+
+    assert spack.build_environment.get_cmake_prefix_path(spec.package) == [
+        spec["define-cmake-prefix-paths"].prefix.test
+    ]
+
+
 def test_url_for_version_with_only_overrides(mock_packages, config):
     s = Spec("url-only-override").concretized()

@@ -1257,3 +1257,25 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
 def test_unsupported_compiler():
     with pytest.raises(UnsupportedCompilerError):
         Spec("gcc%fake-compiler").validate_or_raise()


+def test_package_hash_affects_dunder_and_dag_hash(mock_packages, config):
+    a1 = Spec("a").concretized()
+    a2 = Spec("a").concretized()
+
+    assert hash(a1) == hash(a2)
+    assert a1.dag_hash() == a2.dag_hash()
+    assert a1.process_hash() == a2.process_hash()
+
+    a1.clear_cached_hashes()
+    a2.clear_cached_hashes()
+
+    # tweak the dag hash of one of these specs
+    new_hash = "00000000000000000000000000000000"
+    if new_hash == a1._package_hash:
+        new_hash = "11111111111111111111111111111111"
+    a1._package_hash = new_hash
+
+    assert hash(a1) != hash(a2)
+    assert a1.dag_hash() != a2.dag_hash()
+    assert a1.process_hash() != a2.process_hash()
@@ -31,63 +31,97 @@
 )
 from spack.variant import DuplicateVariantError

-# Sample output for a complex lexing.
-complex_lex = [
+# Building blocks for complex lexing.
+complex_root = [
     Token(sp.ID, "mvapich_foo"),
-    Token(sp.DEP),
-    Token(sp.ID, "_openmpi"),
-    Token(sp.AT),
-    Token(sp.ID, "1.2"),
-    Token(sp.COLON),
-    Token(sp.ID, "1.4"),
-    Token(sp.COMMA),
-    Token(sp.ID, "1.6"),
-    Token(sp.PCT),
-    Token(sp.ID, "intel"),
-    Token(sp.AT),
-    Token(sp.ID, "12.1"),
-    Token(sp.COLON),
-    Token(sp.ID, "12.6"),
-    Token(sp.ON),
-    Token(sp.ID, "debug"),
-    Token(sp.OFF),
-    Token(sp.ID, "qt_4"),
-    Token(sp.DEP),
-    Token(sp.ID, "stackwalker"),
-    Token(sp.AT),
-    Token(sp.ID, "8.1_1e"),
 ]

-# Another sample lexer output with a kv pair.
-kv_lex = [
+kv_root = [
     Token(sp.ID, "mvapich_foo"),
     Token(sp.ID, "debug"),
     Token(sp.EQ),
     Token(sp.VAL, "4"),
+]
+
+complex_compiler = [
+    Token(sp.PCT),
+    Token(sp.ID, "intel"),
+]
+
+complex_compiler_v = [
+    Token(sp.VER, "@12.1"),
+    Token(sp.COLON),
+    Token(sp.ID, "12.6"),
+]
+
+complex_compiler_v_space = [
+    Token(sp.VER, "@"),
+    Token(sp.ID, "12.1"),
+    Token(sp.COLON),
+    Token(sp.ID, "12.6"),
+]
+
+complex_dep1 = [
     Token(sp.DEP),
     Token(sp.ID, "_openmpi"),
-    Token(sp.AT),
+    Token(sp.VER, "@1.2"),
+    Token(sp.COLON),
+    Token(sp.ID, "1.4"),
+    Token(sp.COMMA),
+    Token(sp.ID, "1.6"),
+]
+
+complex_dep1_space = [
+    Token(sp.DEP),
+    Token(sp.ID, "_openmpi"),
+    Token(sp.VER, "@"),
     Token(sp.ID, "1.2"),
     Token(sp.COLON),
     Token(sp.ID, "1.4"),
     Token(sp.COMMA),
     Token(sp.ID, "1.6"),
-    Token(sp.PCT),
-    Token(sp.ID, "intel"),
-    Token(sp.AT),
-    Token(sp.ID, "12.1"),
-    Token(sp.COLON),
-    Token(sp.ID, "12.6"),
+]
+
+complex_dep1_var = [
     Token(sp.ON),
     Token(sp.ID, "debug"),
     Token(sp.OFF),
     Token(sp.ID, "qt_4"),
+]
+
+complex_dep2 = [
     Token(sp.DEP),
     Token(sp.ID, "stackwalker"),
-    Token(sp.AT),
+    Token(sp.VER, "@8.1_1e"),
+]
+
+complex_dep2_space = [
+    Token(sp.DEP),
+    Token(sp.ID, "stackwalker"),
+    Token(sp.VER, "@"),
     Token(sp.ID, "8.1_1e"),
 ]
+
+# Sample output from complex lexing
+complex_lex = (
+    complex_root
+    + complex_dep1
+    + complex_compiler
+    + complex_compiler_v
+    + complex_dep1_var
+    + complex_dep2
+)
+
+# Another sample lexer output with a kv pair.
+kv_lex = (
+    kv_root
+    + complex_dep1
+    + complex_compiler
+    + complex_compiler_v_space
+    + complex_dep1_var
+    + complex_dep2_space
+)


 class TestSpecSyntax(object):
     # ========================================================================
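
The hunk above replaces two monolithic expected-token lists with building blocks that each test concatenates as needed. A minimal self-contained sketch of the pattern (with a hypothetical stand-in `Token` type; the real one lives in Spack's spec parser):

```python
from collections import namedtuple

# Stand-in token type; Spack's real Token carries a type and a value.
Token = namedtuple("Token", ["type", "value"])

# Building blocks composed per test, mirroring the diff above.
root = [Token("ID", "mvapich_foo")]
dep = [Token("DEP", None), Token("ID", "_openmpi"), Token("VER", "@1.2")]
compiler = [Token("PCT", None), Token("ID", "intel")]

# Each test picks the blocks matching the spec string it lexes, so a
# spacing variant only swaps one block instead of a whole duplicated list.
expected = root + dep + compiler
assert expected[1].type == "DEP"
```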
@@ -120,7 +154,7 @@ def check_lex(self, tokens, spec):
         lex_output = sp.SpecLexer().lex(spec)
         assert len(tokens) == len(lex_output), "unexpected number of tokens"
         for tok, spec_tok in zip(tokens, lex_output):
-            if tok.type == sp.ID or tok.type == sp.VAL:
+            if tok.type in (sp.ID, sp.VAL, sp.VER):
                 assert tok == spec_tok
             else:
                 # Only check the type for non-identifiers.
@@ -716,14 +750,22 @@ def test_minimal_spaces(self):
         )

     def test_spaces_between_dependences(self):
+        lex_key = (
+            complex_root
+            + complex_dep1
+            + complex_compiler
+            + complex_compiler_v
+            + complex_dep1_var
+            + complex_dep2_space
+        )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 "
             "^stackwalker @ 8.1_1e",
         )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 "
             "^stackwalker @ 8.1_1e",
@@ -738,14 +780,30 @@ def test_spaces_between_options(self):
         )

     def test_way_too_many_spaces(self):
+        lex_key = (
+            complex_root
+            + complex_dep1
+            + complex_compiler
+            + complex_compiler_v_space
+            + complex_dep1_var
+            + complex_dep2_space
+        )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
             "^ stackwalker @ 8.1_1e",
         )
+        lex_key = (
+            complex_root
+            + complex_dep1
+            + complex_compiler
+            + complex_compiler_v_space
+            + complex_dep1_var
+            + complex_dep2_space
+        )
         self.check_lex(
-            complex_lex,
+            lex_key,
             "mvapich_foo "
             "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug ~ qt_4 "
             "^ stackwalker @ 8.1_1e",
@@ -837,3 +895,19 @@ def test_compare_abstract_specs(self):
         for a, b in itertools.product(specs, repeat=2):
             # Check that we can compare without raising an error
             assert a <= b or b < a

+    def test_git_ref_specs_with_variants(self):
+        spec_str = "develop-branch-version@git.{h}=develop+var1+var2".format(h="a" * 40)
+        self.check_parse(spec_str)
+
+    def test_git_ref_spec_equivalences(self, mock_packages, mock_stage):
+        s1 = sp.Spec("develop-branch-version@git.{hash}=develop".format(hash="a" * 40))
+        s2 = sp.Spec("develop-branch-version@git.{hash}=develop".format(hash="b" * 40))
+        s3 = sp.Spec("develop-branch-version@git.0.2.15=develop")
+        s_no_git = sp.Spec("develop-branch-version@develop")
+
+        assert s1.satisfies(s_no_git)
+        assert s2.satisfies(s_no_git)
+        assert not s_no_git.satisfies(s1)
+        assert not s2.satisfies(s1)
+        assert not s3.satisfies(s1)
@@ -22,6 +22,9 @@
     for ext in scomp.ALLOWED_ARCHIVE_TYPES
     if "TAR" not in ext
 ]
+# Spack does not use Python native handling for tarballs or zip
+# Don't test tarballs or zip in native test
+native_archive_list = [key for key in ext_archive.keys() if "tar" not in key and "zip" not in key]


 def support_stub():
@@ -30,10 +33,9 @@ def support_stub():

 @pytest.fixture
 def compr_support_check(monkeypatch):
-    monkeypatch.setattr(scomp, "lzma_support", support_stub)
-    monkeypatch.setattr(scomp, "tar_support", support_stub)
-    monkeypatch.setattr(scomp, "gzip_support", support_stub)
-    monkeypatch.setattr(scomp, "bz2_support", support_stub)
+    monkeypatch.setattr(scomp, "is_lzma_supported", support_stub)
+    monkeypatch.setattr(scomp, "is_gzip_supported", support_stub)
+    monkeypatch.setattr(scomp, "is_bz2_supported", support_stub)


 @pytest.fixture
@@ -46,10 +48,9 @@ def archive_file(tmpdir_factory, request):
     return os.path.join(str(tmpdir), "Foo.%s" % extension)


-@pytest.mark.parametrize("archive_file", ext_archive.keys(), indirect=True)
+@pytest.mark.parametrize("archive_file", native_archive_list, indirect=True)
 def test_native_unpacking(tmpdir_factory, archive_file):
-    extension = scomp.extension(archive_file)
-    util = scomp.decompressor_for(archive_file, extension)
+    util = scomp.decompressor_for(archive_file)
     tmpdir = tmpdir_factory.mktemp("comp_test")
     with working_dir(str(tmpdir)):
         assert not os.listdir(os.getcwd())
@@ -63,9 +64,8 @@ def test_native_unpacking(tmpdir_factory, archive_file):

 @pytest.mark.parametrize("archive_file", ext_archive.keys(), indirect=True)
 def test_system_unpacking(tmpdir_factory, archive_file, compr_support_check):
-    extension = scomp.extension(archive_file)
     # actually run test
-    util = scomp.decompressor_for(archive_file, extension)
+    util = scomp.decompressor_for(archive_file)
     tmpdir = tmpdir_factory.mktemp("system_comp_test")
     with working_dir(str(tmpdir)):
         assert not os.listdir(os.getcwd())
@@ -78,23 +78,25 @@ def test_system_unpacking(tmpdir_factory, archive_file, compr_support_check):


 def test_unallowed_extension():
-    bad_ext_archive = "Foo.py"
+    # use a cxx file as python files included for the test
+    # are picked up by the linter and break style checks
+    bad_ext_archive = "Foo.cxx"
     with pytest.raises(CommandNotFoundError):
-        scomp.decompressor_for(bad_ext_archive, "py")
+        scomp.decompressor_for(bad_ext_archive)


 @pytest.mark.parametrize("archive", ext_archive.values())
 def test_get_extension(archive):
-    ext = scomp.extension(archive)
+    ext = scomp.extension_from_path(archive)
     assert ext_archive[ext] == archive


 def test_get_bad_extension():
-    archive = "Foo.py"
-    ext = scomp.extension(archive)
+    archive = "Foo.cxx"
+    ext = scomp.extension_from_path(archive)
     assert ext is None


 @pytest.mark.parametrize("path", ext_archive.values())
-def test_allowed_archvie(path):
+def test_allowed_archive(path):
     assert scomp.allowed_archive(path)
@@ -2,13 +2,12 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import numbers

 import pytest

 import spack.error
+import spack.variant
 from spack.variant import (
     BoolValuedVariant,
     DuplicateVariantError,
@@ -737,3 +736,11 @@ def test_disjoint_set_fluent_methods():
     assert "none" not in d
     assert "none" not in [x for x in d]
     assert "none" not in d.feature_values
+
+
+@pytest.mark.regression("32694")
+@pytest.mark.parametrize("other", [True, False])
+def test_conditional_value_comparable_to_bool(other):
+    value = spack.variant.Value("98", when="@1.0")
+    comparison = value == other
+    assert comparison is False
@@ -36,6 +36,7 @@

 import spack.error
 import spack.util.compression as comp
+import spack.util.path as spath
 import spack.version

@@ -366,17 +367,15 @@ def split_url_extension(path):

     # Strip off sourceforge download suffix.
     # e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
-    match = re.search(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$", path)
-    if match:
-        prefix, suffix = match.groups()
+    prefix, suffix = spath.find_sourceforge_suffix(path)

-    ext = comp.extension(prefix)
+    ext = comp.extension_from_path(prefix)
     if ext is not None:
         prefix = comp.strip_extension(prefix)
     else:
         prefix, suf = strip_query_and_fragment(prefix)
-        ext = comp.extension(prefix)
+        ext = comp.extension_from_path(prefix)
         prefix = comp.strip_extension(prefix)
         suffix = suf + suffix
     if ext is None:
@@ -3,61 +3,67 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import inspect
+import io
 import os
 import re
 import shutil
 import sys
 from itertools import product

+from llnl.util import tty
+
+import spack.util.path as spath
 from spack.util.executable import CommandNotFoundError, which

 # Supported archive extensions.
 PRE_EXTS = ["tar", "TAR"]
 EXTS = ["gz", "bz2", "xz", "Z"]
-NOTAR_EXTS = ["zip", "tgz", "tbz", "tbz2", "txz"]
+NOTAR_EXTS = ["zip", "tgz", "tbz2", "tbz", "txz"]

 # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
 ALLOWED_ARCHIVE_TYPES = (
     [".".join(ext) for ext in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
 )

+ALLOWED_SINGLE_EXT_ARCHIVE_TYPES = PRE_EXTS + EXTS + NOTAR_EXTS
+
 is_windows = sys.platform == "win32"


-def bz2_support():
-    try:
-        import bz2  # noqa: F401
-
-        return True
-    except ImportError:
-        return False
+try:
+    import bz2  # noqa

+    _bz2_support = True
+except ImportError:
+    _bz2_support = False


-def gzip_support():
-    try:
-        import gzip  # noqa: F401
-
-        return True
-    except ImportError:
-        return False
+try:
+    import gzip  # noqa

+    _gzip_support = True
+except ImportError:
+    _gzip_support = False


-def lzma_support():
-    try:
-        import lzma  # noqa: F401  # novm
-
-        return True
-    except ImportError:
-        return False
+try:
+    import lzma  # noqa  # novermin

+    _lzma_support = True
+except ImportError:
+    _lzma_support = False


-def tar_support():
-    try:
-        import tarfile  # noqa: F401
-
-        return True
-    except ImportError:
-        return False
+def is_lzma_supported():
+    return _lzma_support
+
+
+def is_gzip_supported():
+    return _gzip_support
+
+
+def is_bz2_supported():
+    return _bz2_support


 def allowed_archive(path):
@@ -75,8 +81,7 @@ def _untar(archive_file):
         archive_file (str): absolute path to the archive to be extracted.
         Can be one of .tar(.[gz|bz2|xz|Z]) or .(tgz|tbz|tbz2|txz).
     """
-    _, ext = os.path.splitext(archive_file)
-    outfile = os.path.basename(archive_file.strip(ext))
+    outfile = os.path.basename(strip_extension(archive_file, "tar"))

     tar = which("tar", required=True)
     tar.add_default_arg("-oxf")
@@ -91,15 +96,12 @@ def _bunzip2(archive_file):
     Args:
         archive_file (str): absolute path to the bz2 archive to be decompressed
     """
-    _, ext = os.path.splitext(archive_file)
     compressed_file_name = os.path.basename(archive_file)
-    decompressed_file = os.path.basename(archive_file.strip(ext))
+    decompressed_file = os.path.basename(strip_extension(archive_file, "bz2"))
     working_dir = os.getcwd()
     archive_out = os.path.join(working_dir, decompressed_file)
     copy_path = os.path.join(working_dir, compressed_file_name)
-    if bz2_support():
-        import bz2
-
+    if is_bz2_supported():
         f_bz = bz2.BZ2File(archive_file, mode="rb")
         with open(archive_out, "wb") as ar:
             shutil.copyfileobj(f_bz, ar)
@@ -121,13 +123,10 @@ def _gunzip(archive_file):
     Args:
         archive_file (str): absolute path of the file to be decompressed
     """
-    _, ext = os.path.splitext(archive_file)
-    decompressed_file = os.path.basename(archive_file.strip(ext))
+    decompressed_file = os.path.basename(strip_extension(archive_file, "gz"))
     working_dir = os.getcwd()
     destination_abspath = os.path.join(working_dir, decompressed_file)
-    if gzip_support():
-        import gzip
-
+    if is_gzip_supported():
         f_in = gzip.open(archive_file, "rb")
         with open(destination_abspath, "wb") as f_out:
             shutil.copyfileobj(f_in, f_out)
@@ -138,8 +137,7 @@ def _gunzip(archive_file):


 def _system_gunzip(archive_file):
-    _, ext = os.path.splitext(archive_file)
-    decompressed_file = os.path.basename(archive_file.strip(ext))
+    decompressed_file = os.path.basename(strip_extension(archive_file, "gz"))
     working_dir = os.getcwd()
     destination_abspath = os.path.join(working_dir, decompressed_file)
     compressed_file = os.path.basename(archive_file)
@@ -159,17 +157,16 @@ def _unzip(archive_file):
     Args:
         archive_file (str): absolute path of the file to be decompressed
     """
-    destination_abspath = os.getcwd()
-    exe = "unzip"
-    arg = "-q"
-    if is_windows:
-        exe = "tar"
-        arg = "-xf"
+    extracted_file = os.path.basename(strip_extension(archive_file, "zip"))
+    if is_windows:
+        return _untar(archive_file)
+    else:
+        exe = "unzip"
+        arg = "-q"
     unzip = which(exe, required=True)
     unzip.add_default_arg(arg)
     unzip(archive_file)
-    return destination_abspath
+    return extracted_file


 def _unZ(archive_file):
@@ -185,11 +182,8 @@ def _lzma_decomp(archive_file):
     lzma module, but fall back on command line xz tooling
     to find available Python support. This is the xz command
     on Unix and 7z on Windows"""
-    if lzma_support():
-        import lzma  # novermin
-
-        _, ext = os.path.splitext(archive_file)
-        decompressed_file = os.path.basename(archive_file.strip(ext))
+    if is_lzma_supported():
+        decompressed_file = os.path.basename(strip_extension(archive_file, "xz"))
         archive_out = os.path.join(os.getcwd(), decompressed_file)
         with open(archive_out, "wb") as ar:
             with lzma.open(archive_file) as lar:
@@ -201,14 +195,41 @@ def _lzma_decomp(archive_file):
         return _xz(archive_file)


+def _win_compressed_tarball_handler(archive_file):
+    """Decompress and extract compressed tarballs on Windows.
+    This method uses 7zip in conjunction with the tar utility
+    to perform decompression and extraction in a two step process,
+    first using 7zip to decompress, and tar to extract.
+
+    The motivation for this method is the inability of 7zip
+    to directly decompress and extract compressed archives
+    in a single shot without undocumented workarounds, and
+    the Windows tar utility's lack of access to the xz tool (unsupported on Windows)
+    """
+    # perform intermediate extraction step
+    # record name of new archive so we can extract
+    # and later clean up
+    decomped_tarball = _7zip(archive_file)
+    # 7zip is able to one shot extract compressed archives
+    # that have been named .txz. If that is the case, there will
+    # be no intermediate archive to extract.
+    if check_extension(decomped_tarball, "tar"):
+        # run tar on newly decomped archive
+        outfile = _untar(decomped_tarball)
+        # clean intermediate archive to mimic end result
+        # produced by one shot decomp/extraction
+        os.remove(decomped_tarball)
+        return outfile
+    return decomped_tarball
+
+
 def _xz(archive_file):
     """Decompress lzma compressed .xz files via xz command line
     tool. Available only on Unix
     """
     if is_windows:
         raise RuntimeError("XZ tool unavailable on Windows")
-    _, ext = os.path.splitext(archive_file)
-    decompressed_file = os.path.basename(archive_file.strip(ext))
+    decompressed_file = os.path.basename(strip_extension(archive_file, "xz"))
     working_dir = os.getcwd()
     destination_abspath = os.path.join(working_dir, decompressed_file)
     compressed_file = os.path.basename(archive_file)
@@ -234,84 +255,399 @@ def _7zip(archive_file):
     Args:
         archive_file (str): absolute path of file to be unarchived
     """
-    _, ext = os.path.splitext(archive_file)
-    outfile = os.path.basename(archive_file.strip(ext))
+    outfile = os.path.basename(strip_last_extension(archive_file))
     _7z = which("7z")
     if not _7z:
         raise CommandNotFoundError(
             "7z unavailable,\
             unable to extract %s files. 7z can be installed via Spack"
-            % ext
+            % extension_from_path(archive_file)
         )
     _7z.add_default_arg("e")
     _7z(archive_file)
     return outfile

-def decompressor_for(path, ext):
+def decompressor_for(path, extension=None):
     """Returns a function pointer to appropriate decompression
     algorithm based on extension type.

     Args:
         path (str): path of the archive file requiring decompression
-        ext (str): Extension of archive file
     """
-    if not allowed_archive(ext):
+    if not extension:
+        extension = extension_from_file(path, decompress=True)
+
+    if not allowed_archive(extension):
         raise CommandNotFoundError(
             "Cannot extract archive, \
             unrecognized file extension: '%s'"
-            % ext
+            % extension
         )

-    if re.match(r"\.?zip$", ext) or path.endswith(".zip"):
+    if re.match(r"\.?zip$", extension) or path.endswith(".zip"):
         return _unzip

-    if re.match(r"gz", ext):
+    if re.match(r"gz", extension):
         return _gunzip

-    if re.match(r"bz2", ext):
+    if re.match(r"bz2", extension):
         return _bunzip2

     # Python does not have native support
     # of any kind for .Z files. In these cases,
     # we rely on external tools such as tar,
     # 7z, or uncompressZ
-    if re.match(r"Z$", ext):
+    if re.match(r"Z$", extension):
         return _unZ

     # Python and platform may not have support for lzma
     # compression. If no lzma support, use tools available on systems
     # 7zip on Windows and the xz tool on Unix systems.
-    if re.match(r"xz", ext):
+    if re.match(r"xz", extension):
         return _lzma_decomp

-    if ("xz" in ext or "Z" in ext) and is_windows:
-        return _7zip
+    # Catch tar.xz/tar.Z files here for Windows
+    # as the tar utility on Windows cannot handle such
+    # compression types directly
+    if ("xz" in extension or "Z" in extension) and is_windows:
+        return _win_compressed_tarball_handler

     return _untar
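
With the new signature the extension argument is optional; when omitted, `decompressor_for` derives it from the file itself, as the updated tests above do. A usage sketch (the path is hypothetical; any Spack-supported archive works):

```python
import spack.util.compression as scomp

# The extension is now inferred from the file's magic number, so callers
# no longer need to pre-compute and pass it.
decompress = scomp.decompressor_for("/tmp/archive.tar.gz")
decompress("/tmp/archive.tar.gz")  # extracts into the current working directory
```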
-def strip_extension(path):
+class FileTypeInterface:
+    """
+    Base interface class for describing and querying file type information.
+    FileType describes information about a single file type
+    such as extension, and byte header properties, and provides an interface
+    to check a given file against said type based on magic number.
+
+    This class should be subclassed each time a new type is to be
+    described.
+
+    Note: This class should not be used directly as it does not define any specific
+    file. Attempts to directly use this class will fail, as it does not define
+    a magic number or extension string.
+
+    Subclasses should each describe a different
+    type of file. In order to do so, they must define
+    the extension string, magic number, and header offset (if non zero).
+    If a class has multiple magic numbers, it will need to
+    override the method describing that file type's magic numbers and
+    the method that checks a type's magic numbers against a given file's.
+    """
+
+    OFFSET = 0
+    compressed = False
+
+    @staticmethod
+    def name():
+        raise NotImplementedError
+
+    @classmethod
+    def magic_number(cls):
+        """Return a list of all potential magic numbers for a filetype"""
+        return [x[1] for x in inspect.getmembers(cls) if x[0].startswith("_MAGIC_NUMBER")]
+
+    @classmethod
+    def header_size(cls):
+        """Return size of largest magic number associated with file type"""
+        return max([len(x) for x in cls.magic_number()])
+
+    @classmethod
+    def _bytes_check(cls, magic_bytes):
+        for magic in cls.magic_number():
+            if magic_bytes.startswith(magic):
+                return True
+        return False
+
+    @classmethod
+    def is_file_of_type(cls, iostream):
+        """Query byte stream for appropriate magic number
+
+        Args:
+            iostream: file byte stream
+
+        Returns:
+            Bool denoting whether file is of class file type
+            based on magic number
+        """
+        if not iostream:
+            return False
+        # move to location of magic bytes
+        iostream.seek(cls.OFFSET)
+        magic_bytes = iostream.read(cls.header_size())
+        # return to beginning of file
+        iostream.seek(0)
+        if cls._bytes_check(magic_bytes):
+            return True
+        return False
+
+
+class CompressedFileTypeInterface(FileTypeInterface):
+    """Interface class for FileTypes that include compression information"""
+
+    compressed = True
+
+    @staticmethod
+    def decomp_in_memory(stream):
+        """This method decompresses and loads the first 200 or so bytes of a compressed file
+        to check for compressed archives. This does not decompress the entire file and should
+        not be used for direct expansion of archives/compressed files
+        """
+        raise NotImplementedError("Implementation by compression subclass required")

+class BZipFileType(CompressedFileTypeInterface):
+    _MAGIC_NUMBER = b"\x42\x5a\x68"
+    extension = "bz2"
+
+    @staticmethod
+    def name():
+        return "bzip2 compressed data"
+
+    @staticmethod
+    def decomp_in_memory(stream):
+        if is_bz2_supported():
+            # checking for underlying archive, only decomp as many bytes
+            # as is absolutely necessary for largest archive header (tar)
+            comp_stream = stream.read(TarFileType.OFFSET + TarFileType.header_size())
+            return io.BytesIO(initial_bytes=bz2.BZ2Decompressor().decompress(comp_stream))
+        return None
+
+
+class ZCompressedFileType(CompressedFileTypeInterface):
+    _MAGIC_NUMBER_LZW = b"\x1f\x9d"
+    _MAGIC_NUMBER_LZH = b"\x1f\xa0"
+    extension = "Z"
+
+    @staticmethod
+    def name():
+        return "compress'd data"
+
+    @staticmethod
+    def decomp_in_memory(stream):
+        # python has no method of decompressing `.Z` files in memory
+        return None
+
+
+class GZipFileType(CompressedFileTypeInterface):
+    _MAGIC_NUMBER = b"\x1f\x8b\x08"
+    extension = "gz"
+
+    @staticmethod
+    def name():
+        return "gzip compressed data"
+
+    @staticmethod
+    def decomp_in_memory(stream):
+        if is_gzip_supported():
+            # checking for underlying archive, only decomp as many bytes
+            # as is absolutely necessary for largest archive header (tar)
+            return io.BytesIO(
+                initial_bytes=gzip.GzipFile(fileobj=stream).read(
+                    TarFileType.OFFSET + TarFileType.header_size()
+                )
+            )
+        return None
+
+
+class LzmaFileType(CompressedFileTypeInterface):
+    _MAGIC_NUMBER = b"\xfd7zXZ"
+    extension = "xz"
+
+    @staticmethod
+    def name():
+        return "xz compressed data"
+
+    @staticmethod
+    def decomp_in_memory(stream):
+        if is_lzma_supported():
+            # checking for underlying archive, only decomp as many bytes
+            # as is absolutely necessary for largest archive header (tar)
+            max_size = TarFileType.OFFSET + TarFileType.header_size()
+            return io.BytesIO(
+                initial_bytes=lzma.LZMADecompressor().decompress(
+                    stream.read(max_size), max_length=max_size
+                )
+            )
+        return None
+
+
+class TarFileType(FileTypeInterface):
+    OFFSET = 257
+    _MAGIC_NUMBER_GNU = b"ustar  \0"
+    _MAGIC_NUMBER_POSIX = b"ustar\x0000"
+    extension = "tar"
+
+    @staticmethod
+    def name():
+        return "tar archive"
+
+
+class ZipFleType(FileTypeInterface):
+    _MAGIC_NUMBER = b"PK\003\004"
+    extension = "zip"
+
+    @staticmethod
+    def name():
+        return "Zip archive data"
+
+
+# collection of valid Spack recognized archive and compression
+# file type identifier classes.
+VALID_FILETYPES = [
+    BZipFileType,
+    ZCompressedFileType,
+    GZipFileType,
+    LzmaFileType,
+    TarFileType,
+    ZipFleType,
+]

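A quick illustration of the magic-number interface above (a sketch, not part of the diff): bzip2 data starts with the bytes `BZh`, so a stream beginning with `BZipFileType`'s magic number is identified as bzip2, and `is_file_of_type` rewinds the stream after checking:

```python
import io

# Hypothetical byte streams used only for illustration.
bz2_stream = io.BytesIO(b"\x42\x5a\x68" + b"\x39" * 512)  # starts like bzip2 data
text_stream = io.BytesIO(b"hello world")

assert BZipFileType.is_file_of_type(bz2_stream) is True
assert BZipFileType.is_file_of_type(text_stream) is False
assert bz2_stream.tell() == 0  # the stream is rewound after the check
```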
+def extension_from_stream(stream, decompress=False):
+    """Return extension represented by stream corresponding to archive file
+    If stream does not represent an archive type recognized by Spack
+    (see `spack.util.compression.ALLOWED_ARCHIVE_TYPES`) method will return None
+
+    Extension type is derived by searching for identifying bytes
+    in file stream.
+
+    Args:
+        stream : stream representing a file on system
+        decompress (bool) : if True, compressed files are checked
+            for archive types beneath compression i.e. tar.gz
+            default is False, otherwise, return top level type i.e. gz
+
+    Return:
+        A string representing corresponding archive extension
+        or None as relevant.
+    """
+    for arc_type in VALID_FILETYPES:
+        if arc_type.is_file_of_type(stream):
+            suffix_ext = arc_type.extension
+            prefix_ext = ""
+            if arc_type.compressed and decompress:
+                # stream represents compressed file
+                # get decompressed stream (if possible)
+                decomp_stream = arc_type.decomp_in_memory(stream)
+                prefix_ext = extension_from_stream(decomp_stream, decompress=decompress)
+                if not prefix_ext:
+                    # We were unable to decompress or unable to derive
+                    # a nested extension from decompressed file.
+                    # Try to use filename parsing to check for
+                    # potential nested extensions if there are any
+                    tty.debug(
+                        "Cannot derive file extension from magic number;"
+                        " falling back to regex path parsing."
+                    )
+                    return extension_from_path(stream.name)
+            resultant_ext = suffix_ext if not prefix_ext else ".".join([prefix_ext, suffix_ext])
+            tty.debug("File extension %s successfully derived by magic number." % resultant_ext)
+            return resultant_ext
+    return None

+def extension_from_file(file, decompress=False):
+    """Return extension from archive file path
+    Extension is derived based on magic number parsing similar
+    to the `file` utility. Attempts to return abbreviated file extensions
+    whenever a file has an abbreviated extension such as `.tgz` or `.txz`.
+    This distinction in abbreviated extension names is accomplished
+    by string parsing.
+
+    Args:
+        file (os.PathLike): path describing file on system for which ext
+            will be determined.
+        decompress (bool): If True, method will peek into compressed
+            files to check for archive file types. default is False.
+            If false, method will be unable to distinguish `.tar.gz` from `.gz`
+            or similar.
+    Return:
+        Spack recognized archive file extension as determined by file's magic number and
+        file name. If file is not on system or is of a type not recognized by Spack as
+        an archive or compression type, None is returned.
+    """
+    if os.path.exists(file):
+        with open(file, "rb") as f:
+            ext = extension_from_stream(f, decompress)
+            # based on magic number, file is compressed
+            # tar archive. Check to see if file is abbreviated as
+            # t[xz|gz|bz2|bz]
+            if ext and ext.startswith("tar."):
+                suf = ext.split(".")[1]
+                abbr = "t" + suf
+                if check_extension(file, abbr):
+                    return abbr
+            if not ext:
+                # If unable to parse extension from stream,
+                # attempt to fall back to string parsing
+                ext = extension_from_path(file)
+            return ext
+    return None

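Hypothetical calls showing the effect of the `decompress` flag (file names are illustrative; behavior follows the code above):

```python
# A gzip-compressed tarball named with the full extension:
extension_from_file("/tmp/pkg.tar.gz", decompress=True)   # -> "tar.gz"
# The same content under an abbreviated name is reported abbreviated:
extension_from_file("/tmp/pkg.tgz", decompress=True)      # -> "tgz"
# Without decompress, only the outer compression layer is seen:
extension_from_file("/tmp/pkg.tar.gz", decompress=False)  # -> "gz"
# Unrecognized or missing files yield None:
extension_from_file("/tmp/README.txt", decompress=True)   # -> None
```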
+def extension_from_path(path):
+    """Get the allowed archive extension for a path.
+    If path does not include a valid archive extension
+    (see `spack.util.compression.ALLOWED_ARCHIVE_TYPES`) return None
+    """
+    if path is None:
+        raise ValueError("Can't call extension() on None")
+
+    for t in ALLOWED_ARCHIVE_TYPES:
+        if check_extension(path, t):
+            return t
+    return None

+def strip_last_extension(path):
+    """Strips last supported archive extension from path"""
+    if path:
+        for ext in ALLOWED_SINGLE_EXT_ARCHIVE_TYPES:
+            mod_path = check_and_remove_ext(path, ext)
+            if mod_path != path:
+                return mod_path
+    return path

def strip_extension(path, ext=None):
|
||||||
"""Get the part of a path that does not include its compressed
|
"""Get the part of a path that does not include its compressed
|
||||||
type extension."""
|
type extension."""
|
||||||
for type in ALLOWED_ARCHIVE_TYPES:
|
if ext:
|
||||||
suffix = r"\.%s$" % type
|
return check_and_remove_ext(path, ext)
|
||||||
if re.search(suffix, path):
|
for t in ALLOWED_ARCHIVE_TYPES:
|
||||||
|
mod_path = check_and_remove_ext(path, t)
|
||||||
|
if mod_path != path:
|
||||||
|
return mod_path
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def check_extension(path, ext):
|
||||||
|
"""Check if extension is present in path"""
|
||||||
|
# Strip sourceforge suffix.
|
||||||
|
prefix, _ = spath.find_sourceforge_suffix(path)
|
||||||
|
if not ext.startswith(r"\."):
|
||||||
|
ext = r"\.%s$" % ext
|
||||||
|
if re.search(ext, prefix):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def reg_remove_ext(path, ext):
    """Regex remove ext from path"""
    if path and ext:
        suffix = r"\.%s$" % ext
        return re.sub(suffix, "", path)
    return path
-def extension(path):
-    """Get the archive extension for a path."""
-    if path is None:
-        raise ValueError("Can't call extension() on None")
-
-    # Strip sourceforge suffix.
-    if re.search(r"((?:sourceforge.net|sf.net)/.*)/download$", path):
-        path = os.path.dirname(path)
-
-    for t in ALLOWED_ARCHIVE_TYPES:
-        suffix = r"\.%s$" % t
-        if re.search(suffix, path):
-            return t
-    return None
+def check_and_remove_ext(path, ext):
+    """If given extension is present in path, remove and return,
+    otherwise just return path"""
+    if check_extension(path, ext):
+        return reg_remove_ext(path, ext)
+    return path
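The string-parsing helpers above compose as follows; a minimal sketch, assuming `"tar.gz"` and `"tgz"` are both listed in `ALLOWED_ARCHIVE_TYPES`:

```python
# Sketch of the helpers defined above; outputs assume "tar.gz" and "tgz"
# appear in ALLOWED_ARCHIVE_TYPES.
from spack.util.compression import check_and_remove_ext, check_extension, strip_extension

print(check_extension("pkg-1.0.tar.gz", "tar.gz"))       # -> True
print(check_and_remove_ext("pkg-1.0.tar.gz", "tar.gz"))  # -> "pkg-1.0"
print(check_and_remove_ext("pkg-1.0.zip", "tar.gz"))     # -> "pkg-1.0.zip" (unchanged)
print(strip_extension("pkg-1.0.tgz"))                    # -> "pkg-1.0"
```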
@@ -10,6 +10,7 @@
 """
 import code
+import io
 import os
 import pdb
 import signal
@@ -53,7 +54,10 @@ class and use as a drop in for Pdb, although the syntax here is slightly different,
     the run of Spack.install, or any where else Spack spawns a child process.
     """

-    _original_stdin_fd = sys.stdin.fileno()
+    try:
+        _original_stdin_fd = sys.stdin.fileno()
+    except io.UnsupportedOperation:
+        _original_stdin_fd = None
     _original_stdin = None

     def __init__(self, stdout_fd=None, stderr_fd=None):
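The `io.UnsupportedOperation` guard matters because test harnesses often swap `sys.stdin` for an in-memory stream; a standalone reproduction of the failure mode being handled:

```python
import io
import sys

# When stdin is replaced by an in-memory stream (as pytest's capture does),
# fileno() raises io.UnsupportedOperation instead of returning a descriptor.
sys.stdin = io.StringIO("not a real tty")
try:
    fd = sys.stdin.fileno()
except io.UnsupportedOperation:
    fd = None
print(fd)  # -> None
```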
@@ -71,6 +71,15 @@ def win_exe_ext():
     return ".exe"


+def find_sourceforge_suffix(path):
+    """Find and match sourceforge filepath components.
+    Return the matched groups (prefix, "/download"), or (path, "") if no match."""
+    match = re.search(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$", path)
+    if match:
+        return match.groups()
+    return path, ""
+
+
 def path_to_os_path(*pths):
     """
     Takes an arbitrary number of positional parameters
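Behavior of the new helper on a matching and a non-matching URL (self-contained copy for illustration; the URLs are made up):

```python
import re

# Verbatim copy of the helper above, runnable on its own.
def find_sourceforge_suffix(path):
    match = re.search(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$", path)
    if match:
        return match.groups()
    return path, ""

print(find_sourceforge_suffix("https://sourceforge.net/projects/foo/files/foo-1.0.tar.gz/download"))
# -> ('https://sourceforge.net/projects/foo/files/foo-1.0.tar.gz', '/download')
print(find_sourceforge_suffix("https://example.com/foo-1.0.tar.gz"))
# -> ('https://example.com/foo-1.0.tar.gz', '')
```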
@@ -444,7 +444,8 @@ def url_exists(url, curl=None):
     try:
         read_from_url(url)
         return True
-    except (SpackWebError, URLError):
+    except (SpackWebError, URLError) as e:
+        tty.debug("Failure reading URL: " + str(e))
         return False
@@ -886,7 +886,7 @@ def __hash__(self):
         return hash(self.value)

     def __eq__(self, other):
-        if isinstance(other, six.string_types):
+        if isinstance(other, (six.string_types, bool)):
             return self.value == other
         return self.value == other.value
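Why widen the isinstance check to include `bool`: comparing against a plain boolean must take the direct-value branch, since a `bool` has no `.value` attribute. A toy stand-in (the real class is not shown in this hunk):

```python
import six

# Illustrative stand-in for the wrapped-value class edited above.
class Value:
    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        if isinstance(other, (six.string_types, bool)):
            return self.value == other
        return self.value == other.value  # would raise AttributeError on a bool

print(Value(True) == True)        # -> True (direct branch, no .value lookup)
print(Value("debug") == "debug")  # -> True
```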
@@ -504,7 +504,7 @@ class GitVersion(VersionBase):
     1) GitVersions instantiated with an associated reference version (e.g. 'git.foo=1.2')
     2) GitVersions requiring commit lookups

-    Git ref versions that are not paried with a known version
+    Git ref versions that are not paired with a known version
     are handled separately from all other version comparisons.
     When Spack identifies a git ref version, it associates a
     ``CommitLookup`` object with the version. This object
@@ -599,15 +599,33 @@ def satisfies(self, other):
         """A Version 'satisfies' another if it is at least as specific and has
         a common prefix. e.g., we want gcc@4.7.3 to satisfy a request for
         gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
-        a suitable compiler.
+        a suitable compiler. In the case of two GitVersions we require the ref_versions
+        to satisfy one another and the versions to be an exact match.
         """

         self_cmp = self._cmp(other.ref_lookup)
         other_cmp = other._cmp(self.ref_lookup)

+        if other.is_ref:
+            # if other is a ref then satisfaction requires an exact version match
+            # i.e. the GitRef must match this.version for satisfaction
+            # this creates an asymmetric comparison:
+            # - 'foo@main'.satisfies('foo@git.hash=main') == False
+            # - 'foo@git.hash=main'.satisfies('foo@main') == True
+            version_match = self.version == other.version
+        elif self.is_ref:
+            # other is not a ref then it is a version base and we need to compare
+            # this.ref
+            version_match = self.ref_version == other.version
+        else:
+            # neither is a git ref. We shouldn't ever be here, but if we are this variable
+            # is not meaningful and defaults to true
+            version_match = True
+
         # Do the final comparison
         nself = len(self_cmp)
         nother = len(other_cmp)
-        return nother <= nself and self_cmp[:nother] == other_cmp
+        return nother <= nself and self_cmp[:nother] == other_cmp and version_match

     def __repr__(self):
         return "GitVersion(" + repr(self.string) + ")"
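The asymmetry spelled out in the comments, restated as spec comparisons (illustrative only, not a doctest; assumes a package `foo` and the `git.<ref>=<version>` syntax from the comment):

```python
from spack.spec import Spec

# A plain version does not satisfy a git ref version...
print(Spec("foo@main").satisfies(Spec("foo@git.hash=main")))  # -> False
# ...but a git ref pinned to that version satisfies the plain request.
print(Spec("foo@git.hash=main").satisfies(Spec("foo@main")))  # -> True
```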
@@ -90,6 +90,7 @@ parallel = true
 concurrency = ["multiprocessing"]
 branch = true
 source = ["bin", "lib"]
+data_file = "./tests-coverage/.coverage"
 omit = [
     'lib/spack/spack/test/*',
     'lib/spack/docs/*',
@@ -70,12 +70,6 @@ spack:
   # - intel-oneapi-compilers@2022.1
   # - nvhpc

-  - cuda_specs:
-    # Depends on ctffind which embeds fsincos (x86-specific asm) within code. Will not build on ARM
-    #- relion +cuda cuda_arch=70
-    - raja +cuda cuda_arch=70
-    - mfem +cuda cuda_arch=70
-
   - app_specs:
     - bwa
     # Depends on simde which requires newer compiler?
@@ -133,11 +127,6 @@ spack:

   specs:

-  - matrix:
-    - - $cuda_specs
-    - - $compiler
-    - - $target
-
   - matrix:
     - - $app_specs
     - - $compiler
@@ -16,7 +16,6 @@ spack:
   - default_specs:
     - lz4  # MakefilePackage
    - mpich~fortran  # AutotoolsPackage
-    - tut  # WafPackage
     - py-setuptools  # PythonPackage
     - openjpeg  # CMakePackage
     - r-rcpp  # RPackage
@@ -178,6 +178,7 @@ spack:
     - mfem +cuda cuda_arch=80
     - papi +cuda
     - petsc +cuda cuda_arch=80
+    - py-torch +cuda cuda_arch=80
     - raja +cuda cuda_arch=80
     - slate +cuda cuda_arch=80
     - slepc +cuda cuda_arch=80
@@ -30,18 +30,12 @@ spack:
     - ascent
     - blt
     - caliper
-    - caliper +cuda cuda_arch=70
     - camp
-    - camp +cuda
     - chai
-    - chai +cuda +raja
     - mfem
     - mfem +superlu-dist+petsc+sundials
-    - mfem +cuda cuda_arch=70 ^hypre+cuda
     - raja
-    - raja +cuda cuda_arch=70
     - umpire
-    - umpire +cuda

   - compiler:
     - '%gcc@7.3.1'
@@ -46,21 +46,35 @@ $coverage_run $(which spack) python -c "import spack.pkg.builtin.mpileaks; repr(
 #-----------------------------------------------------------
 # Run unit tests with code coverage
 #-----------------------------------------------------------
-if [[ "$ONLY_PACKAGES" == "true" ]]; then
-    echo "ONLY PACKAGE RECIPES CHANGED [running only package sanity]"
-    export PYTEST_ADDOPTS='-k "test_all_virtual_packages_have_default_providers" -m "not maybeslow"'
-elif [[ "$SPACK_TEST_SOLVER" == "original" ]]; then
+if [[ "$SPACK_TEST_SOLVER" == "original" ]]; then
     echo "ORIGINAL CONCRETIZER [skipping slow unit tests]"
     export PYTEST_ADDOPTS='-m "not maybeslow"'
 fi

-$coverage_run $(which spack) unit-test -x --verbose
+# Check if xdist is available
+if python -m pytest --trace-config 2>&1 | grep xdist; then
+    export PYTEST_ADDOPTS="$PYTEST_ADDOPTS --dist loadfile --tx '${SPACK_TEST_PARALLEL:=3}*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python'"
+fi
+
+# We are running pytest-cov after the addition of pytest-xdist, since it integrates
+# other plugins for pytest automatically. We still need to use "coverage" explicitly
+# for the commands above.
+#
+# There is a need to pass the configuration file explicitly due to a bug:
+# https://github.com/pytest-dev/pytest-cov/issues/243
+# https://github.com/pytest-dev/pytest-cov/issues/237
+# where it seems that otherwise the configuration file might not be located by subprocesses
+# in some, not better specified, cases.
+if [[ "$UNIT_TEST_COVERAGE" == "true" ]]; then
+    $(which spack) unit-test -x --verbose --cov --cov-config=pyproject.toml
+else
+    $(which spack) unit-test -x --verbose
+fi

 bash "$QA_DIR/test-env-cfg.sh"

 # Delete the symlink going from ./lib/spack/docs/_spack_root back to
 # the initial directory, since it causes ELOOP errors with codecov/actions@2
-if [[ "$COVERAGE" == "true" ]]; then
-    rm lib/spack/docs/_spack_root
-fi
+rm lib/spack/docs/_spack_root
|
|||||||
_spack_python() {
|
_spack_python() {
|
||||||
if $list_options
|
if $list_options
|
||||||
then
|
then
|
||||||
SPACK_COMPREPLY="-h --help -V --version -c -i -m --path"
|
SPACK_COMPREPLY="-h --help -V --version -c -u -i -m --path"
|
||||||
else
|
else
|
||||||
SPACK_COMPREPLY=""
|
SPACK_COMPREPLY=""
|
||||||
fi
|
fi
|
||||||
|
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import sys

 from spack.package import *

@@ -14,13 +15,12 @@ class AttributesFoo(BundlePackage):
     provides("baz")

     def install(self, spec, prefix):
-        if "platform=windows" in spec:
-            lib_suffix = ".lib"
-        elif "platform=darwin" in spec:
-            lib_suffix = ".dylib"
-        else:
-            lib_suffix = ".so"
-
+        lib_suffix = ".so"
+        if sys.platform == "win32":
+            lib_suffix = ".dll"
+        elif sys.platform == "darwin":
+            lib_suffix = ".dylib"
         mkdirp(prefix.include)
         touch(prefix.include.join("foo.h"))
         mkdirp(prefix.include.bar)
@@ -0,0 +1,20 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class DefineCmakePrefixPaths(Package):
    """Package that defines cmake_prefix_paths"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/definecmakeprefixpaths-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    @property
    def cmake_prefix_paths(self):
        paths = [self.prefix.test]
        return paths
@@ -0,0 +1,17 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class DependsOnDefineCmakePrefixPaths(Package):
    """Package that depends on a package defining cmake_prefix_paths"""

    homepage = "http://www.example.com"
    url = "http://www.example.com/dependsonefinecmakeprefixpaths-1.0.tar.gz"

    version("1.0", "0123456789abcdef0123456789abcdef")

    depends_on("define-cmake-prefix-paths")
var/spack/repos/builtin/packages/apple-gl/package.py (new file, 57 lines)
@@ -0,0 +1,57 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


class AppleGl(Package):
    """Shim package for the core OpenGL library from Apple"""

    homepage = "https://developer.apple.com/library/archive/documentation/GraphicsImaging/Conceptual/OpenGL-MacProgGuide/opengl_intro/opengl_intro.html"

    maintainers = ["aphecetche"]

    has_code = False

    version("4.1.0")

    provides("gl@4.1")

    # Only supported on 'platform=darwin' and compiler=apple-clang
    conflicts("platform=linux")
    conflicts("platform=cray")
    conflicts("%gcc")
    conflicts("%clang")

    phases = []

    sdk_base = (
        "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/"
        "Developer/SDKs/MacOSX"
    )

    def setup_dependent_build_environment(self, env, dependent_spec):
        # We try to set up a build environment with enough hints
        # for the build system to pick up on the Apple framework version
        # of OpenGL.
        # - for a cmake build we actually need nothing at all as
        #   find_package(OpenGL) will do the right thing
        # - for the rest of the build systems we'll assume that
        #   setting the C_INCLUDE_PATH will be enough for the compilation phase
        #   and *** for the link phase.
        env.prepend_path("C_INCLUDE_PATH", self.sdk_base)

    @property
    def headers(self):
        return HeaderList(
            "{}.sdk/System/Library/Frameworks/OpenGL.framework/Headers".format(self.sdk_base)
        )

    @property
    def libs(self):
        return LibraryList(
            "{}.sdk/System/Library/Frameworks/OpenGL.framework".format(self.sdk_base)
        )
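A hypothetical dependent showing how the shim's `headers`/`libs` properties might be consumed (the package name and body are invented for illustration):

```python
from spack.package import *


class MyGlApp(Package):
    """Toy consumer of the 'gl' virtual provided by apple-gl."""

    depends_on("gl")

    def install(self, spec, prefix):
        gl = spec["gl"]
        # HeaderList/LibraryList expose ready-made compiler flags.
        print(gl.headers.include_flags)  # e.g. "-I<sdk>.sdk/.../OpenGL.framework/Headers"
        print(gl.libs.search_flags)      # e.g. "-L<sdk>.sdk/.../OpenGL.framework"
        mkdirp(prefix.bin)
```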
var/spack/repos/builtin/packages/apple-glu/package.py (new file, 57 lines)
@@ -0,0 +1,57 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


class AppleGlu(Package):
    """Shim package for Apple implementation of OpenGL Utility Library (GLU)"""

    homepage = ""

    maintainers = ["aphecetche"]

    has_code = False

    version("1.3.0")

    provides("glu@1.3")

    # Only supported on 'platform=darwin' and compiler=apple-clang
    conflicts("platform=linux")
    conflicts("platform=cray")
    conflicts("%gcc")
    conflicts("%clang")

    phases = []

    sdk_base = (
        "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/"
        "Developer/SDKs/MacOSX"
    )

    def setup_dependent_build_environment(self, env, dependent_spec):
        # We try to set up a build environment with enough hints
        # for the build system to pick up on the Apple framework version
        # of OpenGL.
        # - for a cmake build we actually need nothing at all as
        #   find_package(OpenGL) will do the right thing
        # - for the rest of the build systems we'll assume that
        #   setting the C_INCLUDE_PATH will be enough for the compilation phase
        #   and *** for the link phase.
        env.prepend_path("C_INCLUDE_PATH", self.sdk_base)

    @property
    def headers(self):
        return HeaderList(
            "{}.sdk/System/Library/Frameworks/OpenGL.framework/Headers".format(self.sdk_base)
        )

    @property
    def libs(self):
        return LibraryList(
            "{}.sdk/System/Library/Frameworks/OpenGL.framework".format(self.sdk_base)
        )
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import os
 import platform
 import subprocess
|
|||||||
description='Detect available PMU counters via "forge-probe" during install',
|
description='Detect available PMU counters via "forge-probe" during install',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
variant(
|
||||||
|
"accept-eula",
|
||||||
|
default=False,
|
||||||
|
description="Accept the EULA",
|
||||||
|
)
|
||||||
|
|
||||||
# forge-probe executes with "/usr/bin/env python"
|
# forge-probe executes with "/usr/bin/env python"
|
||||||
depends_on("python@2.7:", type="build", when="+probe")
|
depends_on("python@2.7:", type="build", when="+probe")
|
||||||
|
|
||||||
@ -143,7 +150,7 @@ class ArmForge(Package):
|
|||||||
"ALLINEA_LICENSE_FILE",
|
"ALLINEA_LICENSE_FILE",
|
||||||
"ALLINEA_LICENCE_FILE",
|
"ALLINEA_LICENCE_FILE",
|
||||||
]
|
]
|
||||||
license_url = "https://developer.arm.com/tools-and-software/server-and-hpc/help/help-and-tutorials/system-administration/licensing/arm-licence-server"
|
license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"
|
||||||
|
|
||||||
def url_for_version(self, version):
|
def url_for_version(self, version):
|
||||||
return "https://content.allinea.com/downloads/arm-forge-%s-linux-%s.tar" % (
|
return "https://content.allinea.com/downloads/arm-forge-%s-linux-%s.tar" % (
|
||||||
@ -151,8 +158,22 @@ def url_for_version(self, version):
|
|||||||
platform.machine(),
|
platform.machine(),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@run_before("install")
|
||||||
|
def abort_without_eula_acceptance(self):
|
||||||
|
install_example = "spack install arm-forge +accept-eula"
|
||||||
|
license_terms_path = os.path.join(self.stage.source_path, "license_terms")
|
||||||
|
if not self.spec.variants["accept-eula"].value:
|
||||||
|
raise InstallError(
|
||||||
|
"\n\n\nNOTE:\nUse +accept-eula "
|
||||||
|
+ "during installation "
|
||||||
|
+ "to accept the license terms in:\n"
|
||||||
|
+ " {0}\n".format(os.path.join(license_terms_path, "license_agreement.txt"))
|
||||||
|
+ " {0}\n\n".format(os.path.join(license_terms_path, "supplementary_terms.txt"))
|
||||||
|
+ "Example: '{0}'\n".format(install_example)
|
||||||
|
)
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
def install(self, spec, prefix):
|
||||||
subprocess.call(["./textinstall.sh", "--accept-licence", prefix])
|
subprocess.call(["./textinstall.sh", "--accept-license", prefix])
|
||||||
if spec.satisfies("+probe"):
|
if spec.satisfies("+probe"):
|
||||||
probe = join_path(prefix, "bin", "forge-probe")
|
probe = join_path(prefix, "bin", "forge-probe")
|
||||||
subprocess.call([probe, "--install", "global"])
|
subprocess.call([probe, "--install", "global"])
|
||||||
|
@@ -2,8 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 from spack.package import *
-from spack.pkg.builtin.boost import Boost


 class Arrow(CMakePackage, CudaPackage):
@@ -15,6 +15,7 @@ class Arrow(CMakePackage, CudaPackage):
     homepage = "https://arrow.apache.org"
     url = "https://github.com/apache/arrow/archive/apache-arrow-0.9.0.tar.gz"

+    version("9.0.0", sha256="bb187b4b0af8dcc027fffed3700a7b891c9f76c9b63ad8925b4afb8257a2bb1b")
     version("8.0.0", sha256="19ece12de48e51ce4287d2dee00dc358fbc5ff02f41629d16076f77b8579e272")
     version("7.0.0", sha256="57e13c62f27b710e1de54fd30faed612aefa22aa41fa2c0c3bacd204dd18a8f3")
     version("4.0.1", sha256="79d3e807df4a179cfab1e7a1ab5f79d95f7b72ac2c33aba030febd125d77eb3b")
@@ -28,32 +29,67 @@ class Arrow(CMakePackage, CudaPackage):
     version("0.9.0", sha256="65f89a3910b6df02ac71e4d4283db9b02c5b3f1e627346c7b6a5982ae994af91")
     version("0.8.0", sha256="c61a60c298c30546fc0b418a35be66ef330fb81b06c49928acca7f1a34671d54")

-    depends_on("boost@1.60:")
-    # TODO: replace this with an explicit list of components of Boost,
-    # for instance depends_on('boost +filesystem')
-    # See https://github.com/spack/spack/pull/22303 for reference
-    depends_on(Boost.with_default_variants)
+    depends_on("boost@1.60: +filesystem +system")
     depends_on("cmake@3.2.0:", type="build")
-    depends_on("flatbuffers build_type=Release")  # only Release contains flatc
-    depends_on("python", when="+python")
-    depends_on("py-numpy", when="+python")
-    depends_on("rapidjson")
-    depends_on("snappy~shared")
-    depends_on("zlib+pic")
-    depends_on("zstd")
-    depends_on("thrift+pic", when="+parquet")
+    depends_on("flatbuffers")
+    depends_on("llvm@:11 +clang", when="+gandiva @:3", type="build")
+    depends_on("llvm@:12 +clang", when="+gandiva @:4", type="build")
+    depends_on("llvm@:13 +clang", when="+gandiva @:7", type="build")
+    depends_on("llvm@:14 +clang", when="+gandiva @8:", type="build")
+    depends_on("lz4", when="+lz4")
+    depends_on("ninja", type="build")
+    depends_on("openssl", when="+gandiva @6.0.0:")
+    depends_on("openssl", when="@4.0.0:")
     depends_on("orc", when="+orc")
+    depends_on("protobuf", when="+gandiva")
+    depends_on("py-numpy", when="+python")
+    depends_on("python", when="+python")
+    depends_on("rapidjson")
+    depends_on("re2+shared", when="+compute")
+    depends_on("re2+shared", when="+gandiva")
+    depends_on("snappy~shared", when="+snappy @9:")
+    depends_on("snappy~shared", when="@:8")
+    depends_on("thrift+pic", when="+parquet")
+    depends_on("utf8proc@2.7.0: +shared", when="+compute")
+    depends_on("utf8proc@2.7.0: +shared", when="+gandiva")
+    depends_on("xsimd@8.1.0:", when="@9.0.0:")
+    depends_on("zlib+pic", when="+zlib @9:")
+    depends_on("zlib+pic", when="@:8")
+    depends_on("zstd", when="+zstd @9:")
+    depends_on("zstd", when="@:8")

+    variant("brotli", default=False, description="Build support for Brotli compression")
     variant(
         "build_type",
         default="Release",
         description="CMake build type",
         values=("Debug", "FastDebug", "Release"),
     )
-    variant("python", default=False, description="Build Python interface")
+    variant(
+        "compute", default=False, description="Computational kernel functions and other support"
+    )
+    variant("gandiva", default=False, description="Build Gandiva support")
+    variant(
+        "glog",
+        default=False,
+        description="Build libraries with glog support for pluggable logging",
+    )
+    variant(
+        "hdfs",
+        default=False,
+        description="Integration with libhdfs for accessing the Hadoop Filesystem",
+    )
+    variant("ipc", default=True, description="Build the Arrow IPC extensions")
+    variant("jemalloc", default=False, description="Build the Arrow jemalloc-based allocator")
+    variant("lz4", default=False, description="Build support for lz4 compression")
+    variant("orc", default=False, description="Build integration with Apache ORC")
     variant("parquet", default=False, description="Build Parquet interface")
-    variant("orc", default=False, description="Build ORC support")
+    variant("python", default=False, description="Build Python interface")
+    variant("shared", default=True, description="Build shared libs")
+    variant("snappy", default=False, description="Build support for Snappy compression")
+    variant("tensorflow", default=False, description="Build Arrow with TensorFlow support enabled")
+    variant("zlib", default=False, description="Build support for zlib (gzip) compression")
+    variant("zstd", default=False, description="Build support for ZSTD compression")

     root_cmakelists_dir = "cpp"
@@ -63,37 +99,54 @@ def patch(self):
             r"(include_directories\()SYSTEM ", r"\1", "cpp/cmake_modules/ThirdpartyToolchain.cmake"
         )

+        filter_file(
+            r'set\(ARROW_LLVM_VERSIONS "10" "9" "8" "7"\)',
+            'set(ARROW_LLVM_VERSIONS "11" "10" "9" "8" "7")',
+            "cpp/CMakeLists.txt",
+            when="@:2.0.0",
+        )
+
+        filter_file(
+            r"#include <llvm/Support/DynamicLibrary\.h>",
+            r"#include <llvm/Support/DynamicLibrary.h>" + "\n" + r"#include <llvm/Support/Host.h>",
+            "cpp/src/gandiva/engine.cc",
+            when="@2.0.0",
+        )
     def cmake_args(self):
-        args = [
-            "-DARROW_USE_SSE=ON",
-            "-DARROW_BUILD_SHARED=ON",
-            "-DARROW_BUILD_STATIC=OFF",
-            "-DARROW_BUILD_TESTS=OFF",
-            "-DARROW_WITH_BROTLI=OFF",
-            "-DARROW_WITH_LZ4=OFF",
-        ]
+        args = ["-DARROW_DEPENDENCY_SOURCE=SYSTEM", "-DARROW_NO_DEPRECATED_API=ON"]

-        if self.spec.satisfies("+cuda"):
-            args.append("-DARROW_CUDA:BOOL=ON")
-        else:
-            args.append("-DARROW_CUDA:BOOL=OFF")
+        if self.spec.satisfies("+shared"):
+            args.append(self.define("BUILD_SHARED", "ON"))
+        else:
+            args.append(self.define("BUILD_SHARED", "OFF"))
+            args.append(self.define("BUILD_STATIC", "ON"))

-        if self.spec.satisfies("+python"):
-            args.append("-DARROW_PYTHON:BOOL=ON")
-        else:
-            args.append("-DARROW_PYTHON:BOOL=OFF")
+        if self.spec.satisfies("@:0.11.99"):
+            # ARROW_USE_SSE was removed in 0.12
+            # see https://issues.apache.org/jira/browse/ARROW-3844
+            args.append(self.define("ARROW_USE_SSE", "ON"))

-        if self.spec.satisfies("+parquet"):
-            args.append("-DARROW_PARQUET:BOOL=ON")
-        else:
-            args.append("-DARROW_PARQUET:BOOL=OFF")
-
-        if self.spec.satisfies("+orc"):
-            args.append("-DARROW_ORC:BOOL=ON")
-        else:
-            args.append("-DARROW_ORC:BOOL=OFF")
+        args.append(self.define_from_variant("ARROW_COMPUTE", "compute"))
+        args.append(self.define_from_variant("ARROW_CUDA", "cuda"))
+        args.append(self.define_from_variant("ARROW_GANDIVA", "gandiva"))
+        args.append(self.define_from_variant("ARROW_GLOG", "glog"))
+        args.append(self.define_from_variant("ARROW_HDFS", "hdfs"))
+        args.append(self.define_from_variant("ARROW_IPC", "ipc"))
+        args.append(self.define_from_variant("ARROW_JEMALLOC", "jemalloc"))
+        args.append(self.define_from_variant("ARROW_ORC", "orc"))
+        args.append(self.define_from_variant("ARROW_PARQUET", "parquet"))
+        args.append(self.define_from_variant("ARROW_PYTHON", "python"))
+        args.append(self.define_from_variant("ARROW_TENSORFLOW", "tensorflow"))
+        args.append(self.define_from_variant("ARROW_WITH_BROTLI", "brotli"))
+        args.append(self.define_from_variant("ARROW_WITH_LZ4", "lz4"))
+        args.append(self.define_from_variant("ARROW_WITH_SNAPPY", "snappy"))
+        args.append(self.define_from_variant("ARROW_WITH_ZLIB", "zlib"))
+        args.append(self.define_from_variant("ARROW_WITH_ZSTD", "zstd"))

+        with when("@:8"):
             for dep in ("flatbuffers", "rapidjson", "snappy", "zlib", "zstd"):
                 args.append("-D{0}_HOME={1}".format(dep.upper(), self.spec[dep].prefix))
             args.append("-DZLIB_LIBRARIES={0}".format(self.spec["zlib"].libs))

         return args
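The rewrite replaces hand-rolled if/else blocks with `define_from_variant`, which derives the CMake flag from the spec's variant value. A minimal stand-in showing the mapping (the real implementation lives in Spack's CMakePackage; this sketch covers only boolean variants):

```python
# Stand-in for CMakePackage.define_from_variant, boolean-variant case only.
def define_from_variant(cmake_var, variant_on):
    return "-D{0}:BOOL={1}".format(cmake_var, "ON" if variant_on else "OFF")

# For a spec like "arrow+parquet~zstd":
print(define_from_variant("ARROW_PARQUET", True))     # -> -DARROW_PARQUET:BOOL=ON
print(define_from_variant("ARROW_WITH_ZSTD", False))  # -> -DARROW_WITH_ZSTD:BOOL=OFF
```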
@@ -0,0 +1,83 @@
From f3d2e44472e2f713d6a3fe7a9cfb0c6007632ad9 Mon Sep 17 00:00:00 2001
From: sreenivasa murthy kolam <sreenivasamurthy.kolam@amd.com>
Date: Mon, 15 Aug 2022 22:28:37 +0000
Subject: [PATCH] Remove direct reference to /usr/bin/rysnc for rsync command

---
 src/CMakeLists.txt                     | 4 ++--
 src/device_runtime/CMakeLists.txt      | 2 +-
 src/runtime/core/CMakeLists.txt        | 4 ++--
 src/runtime/interop/hsa/CMakeLists.txt | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index bbd3196..51a8119 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -72,7 +72,7 @@ endif()
 # make examples available in local build
 add_custom_command(
   OUTPUT examples
-  COMMAND /usr/bin/rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../examples .
+  COMMAND rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../examples .
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../examples/*
 )
 add_custom_target(example ALL DEPENDS examples)
@@ -80,7 +80,7 @@ add_custom_target(example ALL DEPENDS examples)
 # make bin available in local build
 add_custom_command(
   OUTPUT bin
-  COMMAND /usr/bin/rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../bin .
+  COMMAND rsync -rl ${CMAKE_CURRENT_SOURCE_DIR}/../bin .
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../bin/*
 )
 add_custom_target(script ALL DEPENDS bin)
diff --git a/src/device_runtime/CMakeLists.txt b/src/device_runtime/CMakeLists.txt
index 6688af2..6901e01 100644
--- a/src/device_runtime/CMakeLists.txt
+++ b/src/device_runtime/CMakeLists.txt
@@ -108,7 +108,7 @@ set (OUTPUT_INC_DIRECTORY ${ATMI_RUNTIME_PATH}/include)
 execute_process(COMMAND "/bin/mkdir" "-p" "${OUTPUT_INC_DIRECTORY}")
 add_custom_command(
   OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi_kl.h
-  COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../include/atmi_kl.h ${OUTPUT_INC_DIRECTORY}/atmi_kl.h
+  COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../include/atmi_kl.h ${OUTPUT_INC_DIRECTORY}/atmi_kl.h
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../include/*.h
 )
 add_custom_target(device_header ALL DEPENDS ${OUTPUT_INC_DIRECTORY}/atmi_kl.h)
diff --git a/src/runtime/core/CMakeLists.txt b/src/runtime/core/CMakeLists.txt
index 88b3a47..000153a 100644
--- a/src/runtime/core/CMakeLists.txt
+++ b/src/runtime/core/CMakeLists.txt
@@ -128,13 +128,13 @@ execute_process(COMMAND "/bin/mkdir" "-p" "${OUTPUT_INC_DIRECTORY}")

 add_custom_command(
   OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi.h
-  COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi.h ${OUTPUT_INC_DIRECTORY}/atmi.h
+  COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi.h ${OUTPUT_INC_DIRECTORY}/atmi.h
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/*.h
 )

 add_custom_command(
   OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi_runtime.h
-  COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi_runtime.h ${OUTPUT_INC_DIRECTORY}/atmi_runtime.h
+  COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/atmi_runtime.h ${OUTPUT_INC_DIRECTORY}/atmi_runtime.h
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../../include/*.h
 )

diff --git a/src/runtime/interop/hsa/CMakeLists.txt b/src/runtime/interop/hsa/CMakeLists.txt
index af1012d..c58b716 100644
--- a/src/runtime/interop/hsa/CMakeLists.txt
+++ b/src/runtime/interop/hsa/CMakeLists.txt
@@ -22,7 +22,7 @@ execute_process(COMMAND "/bin/mkdir" "-p" "${OUTPUT_INC_DIRECTORY}")

 add_custom_command(
   OUTPUT ${OUTPUT_INC_DIRECTORY}/atmi_interop_hsa.h
-  COMMAND /usr/bin/rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../../include/atmi_interop_hsa.h ${OUTPUT_INC_DIRECTORY}/atmi_interop_hsa.h
+  COMMAND rsync ${CMAKE_CURRENT_SOURCE_DIR}/../../../../include/atmi_interop_hsa.h ${OUTPUT_INC_DIRECTORY}/atmi_interop_hsa.h
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/../../../../include/*.h
 )

--
2.18.4
@@ -20,6 +20,8 @@ class Atmi(CMakePackage):

     maintainers = ["srekolam", "renjithravindrankannath"]

+    version("5.2.3", sha256="5f66c59e668cf968e86b556a0a52ee0202d1b370d8406e291a874cbfd200ee17")
+    version("5.2.1", sha256="6b33445aa67444c038cd756f855a58a72dd35db57e7b63da37fe78a8585b982b")
     version("5.2.0", sha256="33e77905a607734157d46c736c924c7c50b6b13f2b2ddbf711cb08e37f2efa4f")
     version("5.1.3", sha256="a43448d77705b2b07e1758ffe8035aa6ba146abc2167984e8cb0f1615797b341")
     version("5.1.0", sha256="6a758f5a8332e6774cd8e14a4e5ce05e43b1e05298d817b4068c35fa1793d333")
@@ -106,6 +108,8 @@ class Atmi(CMakePackage):
         "5.1.0",
         "5.1.3",
         "5.2.0",
+        "5.2.1",
+        "5.2.3",
     ]:
         depends_on("comgr@" + ver, type="link", when="@" + ver)
         depends_on("hsa-rocr-dev@" + ver, type="link", when="@" + ver)
|
|||||||
# Removing direct reference to /usr/bin/rysnc for rsync command.
|
# Removing direct reference to /usr/bin/rysnc for rsync command.
|
||||||
patch("0002-Remove-usr-bin-rsync-reference.patch", when="@4.0.0:5.0.0")
|
patch("0002-Remove-usr-bin-rsync-reference.patch", when="@4.0.0:5.0.0")
|
||||||
# Reset the installation path and remove direct reference to rsync.
|
# Reset the installation path and remove direct reference to rsync.
|
||||||
patch("0002-Remove-usr-bin-rsync-reference-5.2.0.patch", when="@5.0.2:")
|
patch("0002-Remove-usr-bin-rsync-reference-5.2.0.patch", when="@5.0.2:5.2.0")
|
||||||
|
# Remove direct reference to /usr/bin/rsync path for rsync command
|
||||||
|
patch(
|
||||||
|
"0002-Remove-direct-reference-to-usr-bin-rysnc-for-rsync-cmd-5.2.1.patch", when="@5.2.1:"
|
||||||
|
)
|
||||||
|
|
||||||
def cmake_args(self):
|
def cmake_args(self):
|
||||||
args = [self.define("ROCM_VERSION", self.spec.version)]
|
args = [self.define("ROCM_VERSION", self.spec.version)]
|
||||||
|
@@ -25,6 +25,7 @@ class Axl(CMakePackage):
     maintainers = ["CamStan", "gonsie"]

     version("main", branch="main")
+    version("0.7.1", sha256="526a055c072c85cc989beca656717e06b128f148fda8eb19d1d9b43a3325b399")
     version("0.7.0", sha256="840ef61eadc9aa277d128df08db4cdf6cfa46b8fcf47b0eee0972582a61fbc50")
     version("0.6.0", sha256="86edb35f99b63c0ffb9dd644a019a63b062923b4efc95c377e92a1b13e79f537")
     version("0.5.0", sha256="9f3bbb4de563896551bdb68e889ba93ea1984586961ad8c627ed766bff020acf")
@@ -45,7 +46,7 @@ class Axl(CMakePackage):
     depends_on("zlib", type="link")

     depends_on("kvtree@main", when="@main")
-    depends_on("kvtree@1.3.0", when="@0.6.0")
+    depends_on("kvtree@1.3.0", when="@0.6.0:")

     variant(
         "async_api",
@@ -15,7 +15,7 @@ class Benchmark(CMakePackage):

     # first properly installed CMake config packages in
     # 1.2.0 release: https://github.com/google/benchmark/issues/363
-    version("develop", branch="master")
+    version("main", branch="main")
     version("1.6.0", sha256="1f71c72ce08d2c1310011ea6436b31e39ccab8c2db94186d26657d41747c85d6")
     version("1.5.5", sha256="3bff5f237c317ddfd8d5a9b96b3eede7c0802e799db520d38ce756a2a46a18a0")
     version("1.5.4", sha256="e3adf8c98bb38a198822725c0fc6c0ae4711f16fbbf6aeb311d5ad11e5a081b5")
@@ -39,6 +39,11 @@ class BerkeleyDb(AutotoolsPackage):
     build_directory = "build_unix"

     patch("drop-docs.patch", when="~docs")
+    # Correct autoconf macro to detect TLS support.
+    # Patch developed by @eschnett. There is no upstream issue because
+    # Oracle's web site does not have instructions for submitting such
+    # an issue or pull request.
+    patch("tls.patch")

     conflicts("%clang@7:", when="@5.3.28")
     conflicts("%gcc@8:", when="@5.3.28")
var/spack/repos/builtin/packages/berkeley-db/tls.patch (new file, 24 lines)
@@ -0,0 +1,24 @@
--- a/dist/aclocal/tls.m4
+++ b/dist/aclocal/tls.m4
@@ -15,7 +15,8 @@
 	for ax_tls_defn_keyword in $ax_tls_keywords ""; do
 	   test -z "$ax_tls_decl_keyword" &&
 	   test -z "$ax_tls_defn_keyword" && continue
-	   AC_TRY_COMPILE([template <typename T>class TLSClass {
+	   AC_TRY_COMPILE([#include <stdlib.h>
+	   template <typename T>class TLSClass {
 	     public: static ] $ax_tls_decl_keyword [ T *tlsvar;
 	   };
 	   class TLSClass2 {
--- a/dist/configure
+++ b/dist/configure
@@ -19044,7 +19044,8 @@
 	   test -z "$ax_tls_defn_keyword" && continue
 	   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h. */
-template <typename T>class TLSClass {
+#include <stdlib.h>
+ template <typename T>class TLSClass {
 	     public: static $ax_tls_decl_keyword T *tlsvar;
 	   };
 	   class TLSClass2 {
@@ -22,10 +22,11 @@ class Butterflypack(CMakePackage):

     homepage = "https://github.com/liuyangzhuan/ButterflyPACK"
     git = "https://github.com/liuyangzhuan/ButterflyPACK.git"
-    url = "https://github.com/liuyangzhuan/ButterflyPACK/archive/v2.1.1.tar.gz"
+    url = "https://github.com/liuyangzhuan/ButterflyPACK/archive/v2.2.0.tar.gz"
     maintainers = ["liuyangzhuan"]

     version("master", branch="master")
+    version("2.2.0", sha256="1ce5b8461b3c4f488cee6396419e8a6f0a1bcf95254f24d7c27bfa53b391c30b")
     version("2.1.1", sha256="0d4a1ce540c84de37e4398f72ecf685ea0c4eabceba13015add5b445a4ca3a15")
     version("2.1.0", sha256="ac76cc8d431797c1a3641b23124e3de5eb8c3a3afb71c336e7ba69c6cdf150ef")
     version("2.0.0", sha256="84f0e5ac40997409f3c80324238a07f9c700a1263b84140ed97275d67b577b80")
@@ -17,6 +17,8 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):

     maintainers = ["sethrj"]

+    version("0.1.3", sha256="992c49a48adba884fe3933c9624da5bf480ef0694809430ae98903f2c28cc881")
+    version("0.1.2", sha256="d123ea2e34267adba387d46bae8c9a1146a2e047f87f2ea5f823878c1684678d")
     version("0.1.1", sha256="a1d58e29226e89a2330d69c40049d61e7c885cf991824e60ff8c9ccc95fc5ec6")
     version("0.1.0", sha256="46692977b9b31d73662252cc122d7f016f94139475788bca7fdcb97279b93af8")
var/spack/repos/builtin/packages/cepgen/package.py (new file, 30 lines)
@@ -0,0 +1,30 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class Cepgen(CMakePackage):
    """A generic central exclusive processes event generator"""

    homepage = "https://cepgen.hepforge.org/"
    url = "https://github.com/cepgen/cepgen/archive/refs/tags/1.0.2patch1.tar.gz"
    generator = "Ninja"

    tags = ["hep"]

    version(
        "1.0.2patch1", sha256="333bba0cb1965a98dec127e00c150eab1a515cd348a90f7b1d66d5cd8d206d21"
    )

    depends_on("gsl")
    depends_on("openblas")
    depends_on("hepmc")
    depends_on("hepmc3")
    depends_on("lhapdf")
    depends_on("pythia6")
    depends_on("root")

    depends_on("ninja", type="build")
@@ -52,7 +52,8 @@ class Cgal(CMakePackage):
     depends_on("cmake@2.8.11:", type="build")

     # Essential Third Party Libraries
-    depends_on("boost+exception+math+random+container")
+    depends_on("boost+exception+math+random+container", when="@5.0:")
+    depends_on("boost+thread+system", when="@:5.0")
     depends_on("gmp")
     depends_on("mpfr")
@@ -18,43 +18,85 @@ class Charliecloud(AutotoolsPackage):

     version("master", branch="master")
     version("0.29", sha256="c89562e9dce4c10027434ad52eaca2140e2ba8667aa1ec9eadf789b4d7c1a6db")
-    version("0.28", sha256="1ce43b012f475bddb514bb75993efeda9e58ffa93ddbdbd9b86d647f57254c3b")
-    version("0.27", sha256="1142938ce73ec8a5dfe3a19a241b1f1ffbb63b582ac63d459aebec842c3f4b72")
-    version("0.26", sha256="5e1e64e869c59905fac0cbbd6ceb82340ee54728415d28ef588fd5de5557038a")
-    version("0.25", sha256="62d6fd211e3a573f54578e1b01d5c298f9788b7eaf2db46ac94c2dcef604cc94")
-    version("0.24", sha256="63379bcbad7b90b33457251696d6720416e4acefcf2b49cd6cb495a567e511c2")
-    version("0.23", sha256="5e458b943ad0e27d1264bb089e48d4a676219179b0e96a7d761387a36c45b4d9")
-    version("0.22", sha256="f65e4111ce87e449c656032da69f3b1cfc70a5a416a5e410329c1b0b2e953907")
-    version("0.21", sha256="024884074d283c4a0387d899161610fa4ae739ac1efcc9e53d7d626ddc20359f")
-    version("0.19", sha256="99619fd86860cda18f7f7a7cf7391f702ec9ebd3193791320dea647769996447")
-    version("0.18", sha256="15ce63353afe1fc6bcc10979496a54fcd5628f997cb13c827c9fc7afb795bdc5")
+    version(
+        "0.28",
+        deprecated=True,
+        sha256="1ce43b012f475bddb514bb75993efeda9e58ffa93ddbdbd9b86d647f57254c3b",
+    )
+    version(
+        "0.27",
+        deprecated=True,
+        sha256="1142938ce73ec8a5dfe3a19a241b1f1ffbb63b582ac63d459aebec842c3f4b72",
+    )
+    version(
+        "0.26",
+        deprecated=True,
+        sha256="5e1e64e869c59905fac0cbbd6ceb82340ee54728415d28ef588fd5de5557038a",
+    )
+    version(
+        "0.25",
+        deprecated=True,
+        sha256="62d6fd211e3a573f54578e1b01d5c298f9788b7eaf2db46ac94c2dcef604cc94",
+    )
+    version(
+        "0.24",
+        deprecated=True,
+        sha256="63379bcbad7b90b33457251696d6720416e4acefcf2b49cd6cb495a567e511c2",
+    )
+    version(
+        "0.23",
+        deprecated=True,
+        sha256="5e458b943ad0e27d1264bb089e48d4a676219179b0e96a7d761387a36c45b4d9",
+    )
+    version(
+        "0.22",
+        deprecated=True,
+        sha256="f65e4111ce87e449c656032da69f3b1cfc70a5a416a5e410329c1b0b2e953907",
+    )
+    version(
+        "0.21",
+        deprecated=True,
+        sha256="024884074d283c4a0387d899161610fa4ae739ac1efcc9e53d7d626ddc20359f",
+    )
+    version(
+        "0.19",
+        deprecated=True,
+        sha256="99619fd86860cda18f7f7a7cf7391f702ec9ebd3193791320dea647769996447",
+    )
+    version(
+        "0.18",
+        deprecated=True,
+        sha256="15ce63353afe1fc6bcc10979496a54fcd5628f997cb13c827c9fc7afb795bdc5",
+    )
+    variant("docs", default=False, description="Build man pages and html docs")

+    # Autoconf.
     depends_on("m4", type="build")
     depends_on("autoconf", type="build")
     depends_on("automake", type="build")
     depends_on("libtool", type="build")

-    depends_on("python@3.5:", type="run")
-    # Version 0.25+ bundle the preferred lark version.
-    depends_on("py-lark", type="run", when="@:0.24")
+    # Image manipulation.
+    depends_on("python@3.6:", type="run")
     depends_on("py-requests", type="run")
-    # autogen.sh requires pip and wheel (only needed for git checkouts)
-    depends_on("py-pip@21.1.2:", type="build", when="@master")
-    depends_on("py-wheel", type="build", when="@master")
-    depends_on("git@2.28.1:", type="run", when="@0.29:")
+    depends_on("git@2.28.1:", type="run", when="@0.29:")  # build cache
+    depends_on("py-lark", type="run", when="@:0.24")  # 0.25+ bundles lark

-    # Man pages and html docs variant.
-    variant("docs", default=False, description="Build man pages and html docs")
+    # Man page and html docs.
     depends_on("rsync", type="build", when="+docs")
     depends_on("py-sphinx", type="build", when="+docs")
     depends_on("py-sphinx-rtd-theme", type="build", when="+docs")

-    # See https://github.com/spack/spack/pull/16049.
-    conflicts("platform=darwin", msg="This package does not build on macOS")
-
     # Bash automated testing harness (bats).
     depends_on("bats@0.4.0", type="test")

+    # Require pip and wheel for git checkout builds (master).
+    depends_on("py-pip@21.1.2:", type="build", when="@master")
+    depends_on("py-wheel", type="build", when="@master")
+
+    # See https://github.com/spack/spack/pull/16049.
+    conflicts("platform=darwin", msg="This package does not build on macOS")
+
     def autoreconf(self, spec, prefix):
         which("bash")("autogen.sh")
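What the `deprecated=True` flags above buy in practice: a toy model of preferring non-deprecated versions during selection (illustrative only; Spack's real concretizer logic is more involved, and explicitly requesting a deprecated version proceeds with a warning):

```python
# Toy chooser mirroring the deprecation policy: deprecated versions are
# skipped unless nothing else is available.
versions = {
    "0.29": {"deprecated": False},
    "0.28": {"deprecated": True},
    "0.18": {"deprecated": True},
}

def preferred(versions):
    live = [v for v, meta in versions.items() if not meta["deprecated"]]
    return max(live or versions)

print(preferred(versions))  # -> "0.29"
```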