Compare commits


179 Commits

Author SHA1 Message Date
Carson Woods
2b05d2bf5c Merge tag 'v0.15.1' into features/shared 2020-07-15 15:43:59 -04:00
Carson Woods
cc00619929 Remove unnecessary if/else logic for global upstream 2020-07-15 14:34:30 -04:00
Carson Woods
188a371595 Fix flake8 errors 2020-07-15 13:44:42 -04:00
Carson Woods
dce7be9932 Fix bug that caused packages installed upstream to install module files to user directory 2020-07-15 13:40:27 -04:00
Carson Woods
4bb26802ed Merge tag 'v0.15.0' into features/shared 2020-06-30 19:24:23 -05:00
Carson Woods
1965e1e606 Merge branch 'develop' into features/shared 2020-06-26 12:19:32 -04:00
Carson Woods
6ed3db6c14 Re-add testing code 2020-06-25 12:29:59 -04:00
Carson Woods
a8fbc96271 Add updated bash completion file 2020-06-24 16:00:19 -04:00
Carson Woods
d8956a3bbe Merge branch 'develop' into features/shared
Conflicts:
	share/spack/spack-completion.bash
2020-06-24 10:59:49 -06:00
Carson Woods
a807b95081 Merge branch 'develop' into features/shared 2020-06-23 11:37:54 -04:00
Carson Woods
1b608d6041 Merge branch 'develop' into features/shared 2020-04-20 11:02:54 -05:00
Carson Woods
be143d7dff Merge branch 'develop' into features/shared 2020-04-06 13:49:46 -04:00
Carson Woods
05fe92e086 Merge branch 'develop' into features/shared 2020-03-09 23:23:37 +00:00
Carson Woods
cd54fb95b8 Fix for infinite recursion bug that stemmed from duplicate code existing
after an improper merge with develop
2020-02-25 12:18:03 -05:00
Carson Woods
8b63c4555c Merge branch 'develop' into features/shared 2020-02-25 11:50:23 -05:00
Carson Woods
ec78160569 Update spack-completition.bash 2020-02-12 12:26:41 -05:00
Carson Woods
e1379f132d Merge branch 'develop' into features/shared 2020-02-12 12:21:19 -05:00
Carson Woods
cdcd3dcedd Merge branch 'develop' into features/shared 2020-01-27 15:28:28 -05:00
Carson Woods
7c1083916a Fix bash completion script 2020-01-24 11:36:26 -05:00
Carson Woods
c07bbe1a25 Fix flake8 error 2020-01-24 11:00:08 -05:00
Carson Woods
85032c6224 Resolve merge conflicts with develop 2020-01-24 10:41:44 -05:00
Carson Woods
7b7898a69c Merge branch 'develop' into features/shared 2020-01-21 18:55:21 -05:00
Carson Woods
84c5d76eae Merge branch 'develop' into features/shared 2020-01-18 13:57:50 -08:00
Carson Woods
bcd47f0bd6 Merge branch 'develop' into features/shared 2020-01-17 14:32:47 -08:00
Carson Woods
cb6a959cdb Merge branch 'develop' into features/shared 2020-01-15 14:41:14 -05:00
Carson Woods
32cd12bff7 Merge branch 'develop' into features/shared 2020-01-10 16:19:37 -08:00
Carson Woods
7021965159 Fix merge conflicts and repair broken unit test. 2020-01-09 20:12:39 -08:00
Carson Woods
5c5743ca33 Merge branch 'develop' into features/shared to support Spack 0.13.3 2019-12-26 21:00:09 -06:00
Carson Woods
034a7662ac Merge branch 'develop' into features/shared 2019-11-21 12:52:24 -07:00
Carson Woods
e6b6ac5898 Fixed error message to use proper --upstream rather than -u 2019-11-21 12:30:15 -07:00
Carson Woods
35037bf088 Merge branch 'develop' into features/shared 2019-11-17 16:37:47 -07:00
Carson Woods
d14c245411 Merge branch 'develop' into features/shared 2019-11-10 22:05:20 -05:00
Carson Woods
6e2ad01f20 Fix flake8 formatting 2019-11-06 13:25:15 -05:00
Carson Woods
ef9b5a8f74 Fix unit test failing 2019-11-06 13:24:10 -05:00
Carson Woods
4921ed29d5 Fix a few broken unit tests 2019-11-06 09:56:22 -05:00
Carson Woods
f4c720e902 Ensure feature supports Spack version 0.13.0-0.13.1 2019-11-05 16:38:18 -05:00
Carson Woods
0a71b1d5ac Merge branch 'develop' into features/shared 2019-10-31 21:29:33 -04:00
Carson Woods
3593a7be6a Better comment the purpose of new unit tests 2019-09-20 19:05:56 -04:00
Carson Woods
e4d2cf4441 Fix flake8 error to avoid failing CI testing 2019-09-20 15:29:46 -04:00
Carson Woods
911e51bd89 Merge branch 'develop' into features/shared
Done to resolve merge conflicts that had arisen since work on this
feature completed.
2019-09-20 15:28:44 -04:00
Carson Woods
6ec8aea6f7 Rebase branch 'features/shared' of github.com:carsonwoods/spack against develop 2019-08-07 18:57:48 -06:00
Carson Woods
5b11f7aa4c Fix bug where environments would ignore global path 2019-08-07 18:32:28 -06:00
Carson Woods
97e46981b9 Remove old doc from doc index 2019-08-07 18:32:28 -06:00
Carson Woods
873ac5e890 Remove old documentation for shared mode 2019-08-07 18:32:28 -06:00
Carson Woods
4d7dae5716 Remove old share command from tests 2019-08-07 18:32:28 -06:00
Carson Woods
b19f0fafcc Remove outdate share command 2019-08-07 18:32:28 -06:00
Carson Woods
11b1bdd119 Pep8 Compliance Fix 2019-08-07 18:32:28 -06:00
Carson Woods
f749821dc2 Pep8 Compliance 2019-08-07 18:32:28 -06:00
Carson Woods
5abb20dcab Rename test 2019-08-07 18:32:28 -06:00
Carson Woods
0c233bdd0f Add test for validating upstream database initialization 2019-08-07 18:32:28 -06:00
Carson Woods
0f171c7ded Replace space with = in command parameter 2019-08-07 18:32:28 -06:00
Carson Woods
b4c7520dd8 Flake8 Test Compliance 2019-08-07 18:32:28 -06:00
Carson Woods
9ab7d8f01d Add config parameter for active upstream to set install location for modules 2019-08-07 18:32:28 -06:00
Carson Woods
a7ad344c2a Add upstreams.yaml testing file so calls to upstreams['global] doesn't cause tests to fail 2019-08-07 18:32:28 -06:00
Carson Woods
deb2d3745c Fix .spack-db/index.json not being created in global upstream if previously uninitialized 2019-08-07 18:32:28 -06:00
Carson Woods
ff96ec430b Can now specify upstream of anyname through -u/--upstream flag 2019-08-07 18:32:28 -06:00
Carson Woods
d4a959736a Flake8 Compliance Changes 2019-08-07 18:32:28 -06:00
Carson Woods
5ba51a0be0 --global option now works for both install and uninstall 2019-08-07 18:32:28 -06:00
Carson Woods
27e1140df7 Reset active directory after each global install 2019-08-07 18:32:28 -06:00
Carson Woods
7ab6af8a3b Add scope to setting active tree to ensure that it is set at user level 2019-08-07 18:32:28 -06:00
Carson Woods
0e6e93eaac Fix unit test config.yaml 2019-08-07 18:32:28 -06:00
Carson Woods
38f8bdd2bb Home expansion was removed because it was no longer being used 2019-08-07 18:32:27 -06:00
Carson Woods
8e45a3fc2f Fix flake8 compliance 2019-08-07 18:32:27 -06:00
Carson Woods
c22af99b04 Fix how upstream db paths are canonicalized 2019-08-07 18:32:27 -06:00
Carson Woods
fc3a909fbc Set staging location to ~/.spack/var 2019-08-07 18:32:27 -06:00
Carson Woods
9665754eae Fix default install tree 2019-08-07 18:32:27 -06:00
Carson Woods
0f9f9f3a85 Revise default var path 2019-08-07 18:32:27 -06:00
Carson Woods
777a5682a6 Fix default install location 2019-08-07 18:32:27 -06:00
Carson Woods
8994b4aab6 Fix flake8 compliance 2019-08-07 18:32:27 -06:00
Carson Woods
98ec366470 Set root of store object to active tree 2019-08-07 18:32:27 -06:00
Carson Woods
c61f4d7c82 Add logic to set the active install tree 2019-08-07 18:32:27 -06:00
Carson Woods
811b304230 Remove old code 2019-08-07 18:32:27 -06:00
Carson Woods
8f0c9ad409 Change name of global parameter to install_global 2019-08-07 18:32:27 -06:00
Carson Woods
6a423a5d8a Typo fix 2019-08-07 18:32:27 -06:00
Carson Woods
23c37063bd Add default global upstream of /opt/spack 2019-08-07 18:32:27 -06:00
Carson Woods
478f3a5a99 Fix whitespace issue 2019-08-07 18:32:27 -06:00
Carson Woods
02afb30990 Remove unit testing for shared spack mode 2019-08-07 18:32:27 -06:00
Carson Woods
06e3f15e47 Remove old shared spack code 2019-08-07 18:32:27 -06:00
Carson Woods
f13ce3540d Add dest name of install_global to --global parameter 2019-08-07 18:32:27 -06:00
Carson Woods
7ae34087e3 Set remove old shared spack code 2019-08-07 18:32:27 -06:00
Carson Woods
f0fea97e88 Set source_cache to user's ~/.spack directory 2019-08-07 18:32:27 -06:00
Carson Woods
54893197ed Set staging location to be based out of users .spack directory 2019-08-07 18:32:27 -06:00
Carson Woods
80da1d50d1 Make var_path point to ~/.spack/var/spack 2019-08-07 18:32:27 -06:00
Carson Woods
944c5d75cd Add --global flag to install cmd to install to globally accessible location 2019-08-07 18:32:27 -06:00
Carson Woods
9ef4bc9d50 Add macro for expanding home directory 2019-08-07 18:32:27 -06:00
Carson Woods
a2af432833 Temporarily disable module file location overrride while feature is being implemented 2019-08-07 18:32:27 -06:00
Carson Woods
aefed311af Change modulefiles install location 2019-08-07 18:32:27 -06:00
Carson Woods
6ffacddcf4 Change default install tree to user's ~/.spack directory 2019-08-07 18:32:27 -06:00
Carson Woods
e17824f82f Remove shared mode set self as upstream 2019-08-07 18:32:27 -06:00
Carson Woods
57ca47f035 Remove testing for shared mode 2019-08-07 18:32:27 -06:00
Carson Woods
4532a56b4e Remove shared disable from unit testing 2019-08-07 18:32:27 -06:00
Carson Woods
86e69a48a2 Fix flake8 error 2019-08-07 18:32:27 -06:00
Carson Woods
2508295d81 Fix error caused by SPACK_PATH environment variable not existing 2019-08-07 18:32:27 -06:00
Carson Woods
1a041c051a Fix flake8 error 2019-08-07 18:32:27 -06:00
Carson Woods
2262ca2e67 Add test for install in shared mode 2019-08-07 18:32:27 -06:00
Carson Woods
2269771a91 Fix typo 2019-08-07 18:32:27 -06:00
Carson Woods
7f32574dd8 Fix shared cmd test file 2019-08-07 18:32:27 -06:00
Carson Woods
d15ac30f62 Add shared to toctree 2019-08-07 18:32:27 -06:00
Carson Woods
1f41347ab8 Share feature Unit testing 2019-08-07 18:32:27 -06:00
Carson Woods
1f4f01103b Add command interface for share feature 2019-08-07 18:32:27 -06:00
Carson Woods
8f46fcb512 When running tests, disable shared mode because it will break other tests. Custom tests must be written 2019-08-07 18:32:27 -06:00
Carson Woods
2d3b973ebc When shared mode is active store installed packages in SPACK_PATH 2019-08-07 18:32:27 -06:00
Carson Woods
7e62e0f27f When shared mode is active set stage path to SPACK_PATH 2019-08-07 18:32:27 -06:00
Carson Woods
ea0db4c0f9 Prevent packages from being installed upstream 2019-08-07 18:32:27 -06:00
Carson Woods
0afc68e60b Change module root path when shared mode is active 2019-08-07 18:32:27 -06:00
Carson Woods
8ad25d5013 Uninstall from SPACK_PATH when shared mode is active 2019-08-07 18:32:27 -06:00
Carson Woods
e90db68321 Install to SPACK_PATH when shared mode is active 2019-08-07 18:32:27 -06:00
Carson Woods
9e96b89f02 Add documentation for spack share command 2019-08-07 18:32:27 -06:00
Carson Woods
b4dae1b7fd When shared mode is active, spack treats the normal install directory as an upstream 2019-08-07 18:32:27 -06:00
Carson Woods
9e9adf1d2f When shared mode is active, set cache location to SPACK_PATH 2019-08-07 18:32:27 -06:00
Carson Woods
de9255247a Fix bug where environments would ignore global path 2019-08-06 17:49:17 -06:00
Carson Woods
de5d3e3229 Remove old doc from doc index 2019-07-26 08:54:12 -06:00
Carson Woods
e621aafc77 Remove old documentation for shared mode 2019-07-25 16:40:00 -06:00
Carson Woods
c53427c98d Remove old share command from tests 2019-07-25 14:22:43 -06:00
Carson Woods
7a75148d1b Remove outdate share command 2019-07-25 13:32:44 -06:00
Carson Woods
4210520c9d Pep8 Compliance Fix 2019-07-25 13:32:44 -06:00
Carson Woods
4f3fb50ae7 Pep8 Compliance 2019-07-25 13:32:44 -06:00
Carson Woods
7660659107 Rename test 2019-07-25 13:32:44 -06:00
Carson Woods
fcca2a518b Add test for validating upstream database initialization 2019-07-25 13:32:44 -06:00
Carson Woods
23e1cd7775 Replace space with = in command parameter 2019-07-25 13:32:44 -06:00
Carson Woods
58e794e95a Flake8 Test Compliance 2019-07-25 13:32:44 -06:00
Carson Woods
7ed59ed835 Add config parameter for active upstream to set install location for modules 2019-07-25 13:32:43 -06:00
Carson Woods
512726ae5b Add upstreams.yaml testing file so calls to upstreams['global] doesn't cause tests to fail 2019-07-25 13:32:43 -06:00
Carson Woods
20851a6e6c Fix .spack-db/index.json not being created in global upstream if previously uninitialized 2019-07-25 13:32:43 -06:00
Carson Woods
92bbbb9659 Can now specify upstream of anyname through -u/--upstream flag 2019-07-25 13:32:43 -06:00
Carson Woods
5f2f2bfb84 Flake8 Compliance Changes 2019-07-25 13:32:43 -06:00
Carson Woods
9b63f72d6b --global option now works for both install and uninstall 2019-07-25 13:32:43 -06:00
Carson Woods
4c60f01bae Reset active directory after each global install 2019-07-25 13:32:43 -06:00
Carson Woods
cd08308463 Add scope to setting active tree to ensure that it is set at user level 2019-07-25 13:32:43 -06:00
Carson Woods
fe69997043 Fix unit test config.yaml 2019-07-25 13:32:43 -06:00
Carson Woods
1584a6e3c6 Home expansion was removed because it was no longer being used 2019-07-25 13:32:43 -06:00
Carson Woods
c393880852 Fix flake8 compliance 2019-07-25 13:32:43 -06:00
Carson Woods
bbe9e6bf54 Fix how upstream db paths are canonicalized 2019-07-25 13:32:43 -06:00
Carson Woods
d7a00b71d4 Set staging location to ~/.spack/var 2019-07-25 13:32:43 -06:00
Carson Woods
6775d2546a Fix default install tree 2019-07-25 13:32:43 -06:00
Carson Woods
8a154333f2 Revise default var path 2019-07-25 13:32:43 -06:00
Carson Woods
5e637a04fd Fix default install location 2019-07-25 13:32:43 -06:00
Carson Woods
0213869439 Fix flake8 compliance 2019-07-25 13:32:43 -06:00
Carson Woods
22e9a9792a Set root of store object to active tree 2019-07-25 13:32:43 -06:00
Carson Woods
4f23da9d26 Add logic to set the active install tree 2019-07-25 13:32:43 -06:00
Carson Woods
f9430e2fd4 Remove old code 2019-07-25 13:32:43 -06:00
Carson Woods
a2f86d5d18 Change name of global parameter to install_global 2019-07-25 13:32:43 -06:00
Carson Woods
0efab6637c Typo fix 2019-07-25 13:32:43 -06:00
Carson Woods
2b11694b94 Add default global upstream of /opt/spack 2019-07-25 13:32:43 -06:00
Carson Woods
088798a727 Fix whitespace issue 2019-07-25 13:32:43 -06:00
Carson Woods
bddbb1c22e Remove unit testing for shared spack mode 2019-07-25 13:32:42 -06:00
Carson Woods
92f447cf1c Remove old shared spack code 2019-07-25 13:32:42 -06:00
Carson Woods
96f266c3e3 Add dest name of install_global to --global parameter 2019-07-25 13:32:42 -06:00
Carson Woods
d5093c20c5 Set remove old shared spack code 2019-07-25 13:32:42 -06:00
Carson Woods
2064241c37 Set source_cache to user's ~/.spack directory 2019-07-25 13:32:42 -06:00
Carson Woods
721742b764 Set staging location to be based out of users .spack directory 2019-07-25 13:32:42 -06:00
Carson Woods
c45bf153d8 Make var_path point to ~/.spack/var/spack 2019-07-25 13:32:42 -06:00
Carson Woods
b98e5e66e7 Add --global flag to install cmd to install to globally accessible location 2019-07-25 13:32:42 -06:00
Carson Woods
3d18bf345f Add macro for expanding home directory 2019-07-25 13:32:42 -06:00
Carson Woods
f8e9cf4081 Temporarily disable module file location overrride while feature is being implemented 2019-07-25 13:32:42 -06:00
Carson Woods
98e0f8b89b Change modulefiles install location 2019-07-25 13:32:42 -06:00
Carson Woods
263275b7ea Change default install tree to user's ~/.spack directory 2019-07-25 13:32:42 -06:00
Carson Woods
3e13002d7f Remove shared mode set self as upstream 2019-07-25 13:32:42 -06:00
Carson Woods
654e5cc924 Remove testing for shared mode 2019-07-25 13:32:42 -06:00
Carson Woods
04a72c1834 Remove shared disable from unit testing 2019-07-25 13:32:42 -06:00
Carson Woods
53cf6eb194 Fix flake8 error 2019-07-25 13:32:42 -06:00
Carson Woods
5a7f186176 Fix error caused by SPACK_PATH environment variable not existing 2019-07-25 13:32:42 -06:00
Carson Woods
987adfa9c9 Fix flake8 error 2019-07-25 13:32:42 -06:00
Carson Woods
e476bb1400 Add test for install in shared mode 2019-07-25 13:32:42 -06:00
Carson Woods
dc12233610 Fix typo 2019-07-25 13:32:42 -06:00
Carson Woods
29d21a0a5d Fix shared cmd test file 2019-07-25 13:32:42 -06:00
Carson Woods
762f505da5 Add shared to toctree 2019-07-25 13:32:42 -06:00
Carson Woods
8e1c326174 Share feature Unit testing 2019-07-25 13:32:42 -06:00
Carson Woods
0bac5d527d Add command interface for share feature 2019-07-25 13:32:42 -06:00
Carson Woods
79256eeb5c When running tests, disable shared mode because it will break other tests. Custom tests must be written 2019-07-25 13:32:42 -06:00
Carson Woods
de760942f2 When shared mode is active store installed packages in SPACK_PATH 2019-07-25 13:32:41 -06:00
Carson Woods
860641bfab When shared mode is active set stage path to SPACK_PATH 2019-07-25 13:32:41 -06:00
Carson Woods
673e55f14d Prevent packages from being installed upstream 2019-07-25 13:32:41 -06:00
Carson Woods
54777a4f3e Change module root path when shared mode is active 2019-07-25 13:32:41 -06:00
Carson Woods
db36e66592 Uninstall from SPACK_PATH when shared mode is active 2019-07-25 13:32:41 -06:00
Carson Woods
0d36e94407 Install to SPACK_PATH when shared mode is active 2019-07-25 13:32:41 -06:00
Carson Woods
92c3b5b8b2 Add documentation for spack share command 2019-07-25 13:32:41 -06:00
Carson Woods
71220a3656 When shared mode is active, spack treats the normal install directory as an upstream 2019-07-25 13:32:41 -06:00
Carson Woods
09bd29d816 When shared mode is active, set cache location to SPACK_PATH 2019-07-25 13:32:41 -06:00
85 changed files with 869 additions and 2639 deletions


@@ -3,12 +3,13 @@ name: linux builds
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
- releases/**
paths-ignore:
# Don't run if we only modified packages in the built-in repository
- 'var/spack/repos/builtin/**'
@@ -23,19 +24,23 @@ on:
jobs:
build:
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
package: [lz4, mpich, tut, py-setuptools, openjpeg, r-rcpp]
steps:
- uses: actions/checkout@v2
- uses: actions/cache@v2
- name: Cache ccache's store
uses: actions/cache@v1
with:
path: ~/.ccache
key: ccache-build-${{ matrix.package }}
restore-keys: |
ccache-build-${{ matrix.package }}
- uses: actions/setup-python@v2
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: 3.8
- name: Install System Packages


@@ -3,12 +3,13 @@ name: linux tests
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
- releases/**
jobs:
unittests:
runs-on: ubuntu-latest
@@ -18,9 +19,8 @@ jobs:
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -36,7 +36,9 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/setup_git.sh
git config --global user.email "spack@example.com"
git config --global user.name "Test User"
git fetch -u origin develop:develop
- name: Install kcov for bash script coverage
env:
KCOV_VERSION: 34
@@ -54,61 +56,7 @@ jobs:
share/spack/qa/run-unit-tests
coverage combine
coverage xml
- uses: codecov/codecov-action@v1
- name: Upload to codecov.io
uses: codecov/codecov-action@v1
with:
flags: unittests,linux
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get install -y coreutils gfortran gnupg2 mercurial ninja-build patchelf zsh fish
# Needed for kcov
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools codecov coverage
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/setup_git.sh
- name: Install kcov for bash script coverage
env:
KCOV_VERSION: 38
run: |
KCOV_ROOT=$(mktemp -d)
wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
mkdir -p ${KCOV_ROOT}/build
cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
- name: Run shell tests
env:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@v1
with:
flags: shelltests,linux
centos6:
# Test for Python2.6 run on Centos 6
runs-on: ubuntu-latest
container: spack/github-actions:centos6
steps:
- name: Run unit tests
env:
HOME: /home/spack-test
run: |
whoami && echo $HOME && cd $HOME
git clone https://github.com/spack/spack.git && cd spack
git fetch origin ${{ github.ref }}:test-branch
git checkout test-branch
share/spack/qa/run-unit-tests


@@ -3,22 +3,27 @@ name: macos tests
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
- releases/**
jobs:
build:
runs-on: macos-latest
strategy:
matrix:
python-version: [3.7]
steps:
- uses: actions/checkout@v2
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: 3.7
python-version: ${{ matrix.python-version }}
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools
@@ -32,12 +37,13 @@ jobs:
- name: Run unit tests
run: |
git --version
. .github/workflows/setup_git.sh
git fetch -u origin develop:develop
. share/spack/setup-env.sh
coverage run $(which spack) test
coverage combine
coverage xml
- uses: codecov/codecov-action@v1
- name: Upload to codecov.io
uses: codecov/codecov-action@v1
with:
file: ./coverage.xml
flags: unittests,macos


@@ -0,0 +1,31 @@
name: python version check
on:
push:
branches:
- master
- develop
- releases/**
pull_request:
branches:
- master
- develop
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install Python Packages
run: |
pip install --upgrade pip
pip install --upgrade vermin
- name: Minimum Version (Spack's Core)
run: vermin --backport argparse -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
- name: Minimum Version (Repositories)
run: vermin --backport argparse -t=2.6- -t=3.5- -v var/spack/repos


@@ -1,9 +0,0 @@
#!/usr/bin/env sh
git config --global user.email "spack@example.com"
git config --global user.name "Test User"
# With fetch-depth: 0 we have a remote develop
# but not a local branch. Don't do this on develop
if [ "$(git branch --show-current)" != "develop" ]
then
git branch develop origin/develop
fi


@@ -1,65 +0,0 @@
name: style and docs
on:
push:
branches:
- develop
- releases/**
pull_request:
branches:
- develop
- releases/**
jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Python Packages
run: |
pip install --upgrade pip
pip install --upgrade vermin
- name: Minimum Version (Spack's Core)
run: vermin --backport argparse -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
- name: Minimum Version (Repositories)
run: vermin --backport argparse -t=2.6- -t=3.5- -v var/spack/repos
flake8:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/setup_git.sh
- name: Run flake8 tests
run: |
share/spack/qa/run-flake8-tests
documentation:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get install -y coreutils ninja-build graphviz
- name: Install Python packages
run: |
pip install --upgrade pip six setuptools
pip install --upgrade -r lib/spack/docs/requirements.txt
- name: Build documentation
run: |
share/spack/qa/run-doc-tests

.travis.yml (new file, 152 lines)

@@ -0,0 +1,152 @@
#=============================================================================
# Project settings
#=============================================================================
# Only build master and develop on push; do not build every branch.
branches:
only:
- master
- develop
- /^releases\/.*$/
#=============================================================================
# Build matrix
#=============================================================================
dist: bionic
jobs:
fast_finish: true
include:
- stage: 'style checks'
python: '3.8'
os: linux
language: python
env: TEST_SUITE=flake8
- stage: 'unit tests + documentation'
python: '2.6'
dist: trusty
os: linux
language: python
addons:
apt:
# Everything but patchelf, that is not available for trusty
packages:
- ccache
- gfortran
- graphviz
- gnupg2
- kcov
- mercurial
- ninja-build
- realpath
- zsh
- fish
env: [ TEST_SUITE=unit, COVERAGE=true ]
- python: '3.8'
os: linux
language: python
env: [ TEST_SUITE=shell, COVERAGE=true, KCOV_VERSION=38 ]
- python: '3.8'
os: linux
language: python
env: TEST_SUITE=doc
stages:
- 'style checks'
- 'unit tests + documentation'
#=============================================================================
# Environment
#=============================================================================
# Docs need graphviz to build
addons:
# for Linux builds, we use APT
apt:
packages:
- ccache
- coreutils
- gfortran
- graphviz
- gnupg2
- mercurial
- ninja-build
- patchelf
- zsh
- fish
update: true
# ~/.ccache needs to be cached directly as Travis is not taking care of it
# (possibly because we use 'language: python' and not 'language: c')
cache:
pip: true
ccache: true
directories:
- ~/.ccache
before_install:
- ccache -M 2G && ccache -z
# Install kcov manually, since it's not packaged for bionic beaver
- if [[ "$KCOV_VERSION" ]]; then
sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev zlib1g-dev libdw-dev libiberty-dev;
KCOV_ROOT=$(mktemp -d);
wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz;
tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz;
mkdir -p ${KCOV_ROOT}/build;
cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd - ;
make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install;
fi
# Install various dependencies
install:
- pip install --upgrade pip
- pip install --upgrade six
- pip install --upgrade setuptools
- pip install --upgrade codecov coverage==4.5.4
- pip install --upgrade flake8
- pip install --upgrade pep8-naming
- if [[ "$TEST_SUITE" == "doc" ]]; then
pip install --upgrade -r lib/spack/docs/requirements.txt;
fi
before_script:
# Need this for the git tests to succeed.
- git config --global user.email "spack@example.com"
- git config --global user.name "Test User"
# Need this to be able to compute the list of changed files
- git fetch origin ${TRAVIS_BRANCH}:${TRAVIS_BRANCH}
#=============================================================================
# Building
#=============================================================================
script:
- share/spack/qa/run-$TEST_SUITE-tests
after_success:
- ccache -s
- case "$TEST_SUITE" in
unit)
if [[ "$COVERAGE" == "true" ]]; then
codecov --env PYTHON_VERSION
--required
--flags "${TEST_SUITE}${TRAVIS_OS_NAME}";
fi
;;
shell)
codecov --env PYTHON_VERSION
--required
--flags "${TEST_SUITE}${TRAVIS_OS_NAME}";
esac
#=============================================================================
# Notifications
#=============================================================================
notifications:
email:
recipients:
- tgamblin@llnl.gov
- massimiliano.culpo@gmail.com
on_success: change
on_failure: always


@@ -1,51 +1,3 @@
# v0.15.4 (2020-08-12)
This release contains one feature addition:
* Users can set `SPACK_GNUPGHOME` to override Spack's GPG path (#17139)
Several bugfixes for CUDA, binary packaging, and `spack -V`:
* CUDA package's `.libs` method searches for `libcudart` instead of `libcuda` (#18000)
* Don't set `CUDAHOSTCXX` in environments that contain CUDA (#17826)
* `buildcache create`: `NoOverwriteException` is a warning, not an error (#17832)
* Fix `spack buildcache list --allarch` (#17884)
* `spack -V` works with `releases/latest` tag and shallow clones (#17884)
And fixes for GitHub Actions and tests to ensure that CI passes on the
release branch (#15687, #17279, #17328, #17377, #17732).
# v0.15.3 (2020-07-28)
This release contains the following bugfixes:
* Fix handling of relative view paths (#17721)
* Fixes for binary relocation (#17418, #17455)
* Fix redundant printing of error messages in build environment (#17709)
It also adds a support script for Spack tutorials:
* Add a tutorial setup script to share/spack (#17705, #17722)
# v0.15.2 (2020-07-23)
This minor release includes two new features:
* Spack install verbosity is decreased, and more debug levels are added (#17546)
* The $spack/share/spack/keys directory contains public keys that may be optionally trusted for public binary mirrors (#17684)
This release also includes several important fixes:
* MPICC and related variables are now cleaned in the build environment (#17450)
* LLVM flang only builds CUDA offload components when +cuda (#17466)
* CI pipelines no longer upload user environments that can contain secrets to the internet (#17545)
* CI pipelines add bootstrapped compilers to the compiler config (#17536)
* `spack buildcache list` does not exit on first failure and lists later mirrors (#17565)
* Apple's "gcc" executable that is an apple-clang compiler does not generate a gcc compiler config (#17589)
* Mixed compiler toolchains are merged more naturally across different compiler suffixes (#17590)
* Cray Shasta platforms detect the OS properly (#17467)
* Additional minor fixes.
# v0.15.1 (2020-07-10)
This minor release includes several important fixes:


@@ -4,6 +4,7 @@
[![Linux Tests](https://github.com/spack/spack/workflows/linux%20tests/badge.svg)](https://github.com/spack/spack/actions)
[![Linux Builds](https://github.com/spack/spack/workflows/linux%20builds/badge.svg)](https://github.com/spack/spack/actions)
[![macOS Builds (nightly)](https://github.com/spack/spack/workflows/macOS%20builds%20nightly/badge.svg?branch=develop)](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
[![Build Status](https://travis-ci.com/spack/spack.svg?branch=develop)](https://travis-ci.com/spack/spack)
[![codecov](https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg)](https://codecov.io/gh/spack/spack)
[![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io)
[![Slack](https://spackpm.herokuapp.com/badge.svg)](https://spackpm.herokuapp.com)
@@ -73,31 +74,15 @@ When you send your request, make ``develop`` the destination branch on the
Your PR must pass Spack's unit tests and documentation tests, and must be
[PEP 8](https://www.python.org/dev/peps/pep-0008/) compliant. We enforce
these guidelines with our CI process. To run these tests locally, and for
helpful tips on git, see our
these guidelines with [Travis CI](https://travis-ci.org/spack/spack). To
run these tests locally, and for helpful tips on git, see our
[Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html).
Spack's `develop` branch has the latest contributions. Pull requests
should target `develop`, and users who want the latest package versions,
features, etc. can use `develop`.
Releases
--------
For multi-user site deployments or other use cases that need very stable
software installations, we recommend using Spack's
[stable releases](https://github.com/spack/spack/releases).
Each Spack release series also has a corresponding branch, e.g.
`releases/v0.14` has `0.14.x` versions of Spack, and `releases/v0.13` has
`0.13.x` versions. We backport important bug fixes to these branches but
we do not advance the package versions or make other changes that would
change the way Spack concretizes dependencies within a release branch.
So, you can base your Spack deployment on a release branch and `git pull`
to get fixes, without the package churn that comes with `develop`.
See the [docs on releases](https://spack.readthedocs.io/en/latest/developer_guide.html#releases)
for more details.
Spack uses a rough approximation of the
[Git Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the latest
stable release.
Code of Conduct
------------------------


@@ -16,7 +16,7 @@
config:
# This is the path to the root of the Spack install tree.
# You can use $spack here to refer to the root of the spack instance.
install_tree: $spack/opt/spack
install_tree: ~/.spack/opt/spack
# Locations where templates should be found
@@ -30,8 +30,8 @@ config:
# Locations where different types of modules should be installed.
module_roots:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
tcl: ~/.spack/share/spack/modules
lmod: ~/.spack/share/spack/lmod
# Temporary locations Spack can try to use for builds.
@@ -67,7 +67,7 @@ config:
# Cache directory for already downloaded source tarballs and archived
# repositories. This can be purged with `spack clean --downloads`.
source_cache: $spack/var/spack/cache
source_cache: ~/.spack/var/spack/cache
# Cache directory for miscellaneous files, like the package index.


@@ -0,0 +1,7 @@
upstreams:
global:
install_tree: $spack/opt/spack
modules:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
dotkit: $spack/share/spack/dotkit
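For illustration, here is a rough sketch of how a file like this could be read and its $spack placeholder expanded. This is not Spack's internal API: load_upstreams is a hypothetical helper written for this example, PyYAML is assumed available, and /opt/spack mirrors the default global upstream referenced in the commits above.

    import yaml  # PyYAML, assumed available

    def load_upstreams(path, spack_root):
        """Read an upstreams.yaml-style mapping, expanding $spack."""
        with open(path) as f:
            data = yaml.safe_load(f) or {}
        expanded = {}
        for name, cfg in data.get('upstreams', {}).items():
            expanded[name] = {
                'install_tree': cfg['install_tree'].replace('$spack', spack_root),
                'modules': {kind: root.replace('$spack', spack_root)
                            for kind, root in cfg.get('modules', {}).items()},
            }
        return expanded

    # Example: resolve the 'global' upstream against a root of /opt/spack.
    upstreams = load_upstreams('upstreams.yaml', '/opt/spack')
    print(upstreams['global']['install_tree'])  # -> /opt/spack/opt/spack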


@@ -27,28 +27,17 @@ correspond to one feature/bugfix/extension/etc. One can create PRs with
changes relevant to different ideas, however reviewing such PRs becomes tedious
and error prone. If possible, try to follow the **one-PR-one-package/feature** rule.
--------
Branches
--------
Spack's ``develop`` branch has the latest contributions. Nearly all pull
requests should start from ``develop`` and target ``develop``.
There is a branch for each major release series. Release branches
originate from ``develop`` and have tags for each point release in the
series. For example, ``releases/v0.14`` has tags for ``0.14.0``,
``0.14.1``, ``0.14.2``, etc. versions of Spack. We backport important bug
fixes to these branches, but we do not advance the package versions or
make other changes that would change the way Spack concretizes
dependencies. Currently, the maintainers manage these branches by
cherry-picking from ``develop``. See :ref:`releases` for more
information.
Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-successful-git-branching-model/>`_
branching model. The develop branch contains the latest contributions, and
master is always tagged and points to the latest stable release. Therefore, when
you send your request, make ``develop`` the destination branch on the
`Spack repository <https://github.com/spack/spack>`_.
----------------------
Continuous Integration
----------------------
Spack uses `Github Actions <https://docs.github.com/en/actions>`_ for Continuous Integration
Spack uses `Travis CI <https://travis-ci.org/spack/spack>`_ for Continuous Integration
testing. This means that every time you submit a pull request, a series of tests will
be run to make sure you didn't accidentally introduce any bugs into Spack. **Your PR
will not be accepted until it passes all of these tests.** While you can certainly wait
@@ -57,21 +46,22 @@ locally to speed up the review process.
.. note::
Oftentimes, CI will fail for reasons other than a problem with your PR.
Oftentimes, Travis will fail for reasons other than a problem with your PR.
For example, apt-get, pip, or homebrew will fail to download one of the
dependencies for the test suite, or a transient bug will cause the unit tests
to timeout. If any job fails, click the "Details" link and click on the test(s)
to timeout. If Travis fails, click the "Details" link and click on the test(s)
that is failing. If it doesn't look like it is failing for reasons related to
your PR, you have two options. If you have write permissions for the Spack
repository, you should see a "Restart workflow" button on the right-hand side. If
repository, you should see a "Restart job" button on the right-hand side. If
not, you can close and reopen your PR to rerun all of the tests. If the same
test keeps failing, there may be a problem with your PR. If you notice that
every recent PR is failing with the same error message, it may be that an issue
occurred with the CI infrastructure or one of Spack's dependencies put out a
new release that is causing problems. If this is the case, please file an issue.
every recent PR is failing with the same error message, it may be that Travis
is down or one of Spack's dependencies put out a new release that is causing
problems. If this is the case, please file an issue.
We currently test against Python 2.6, 2.7, and 3.5-3.7 on both macOS and Linux and
If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test
against Python 2.6, 2.7, and 3.4-3.7 on both macOS and Linux. We currently
perform 3 types of tests:
.. _cmd-spack-test:
@@ -105,7 +95,7 @@ time. For example, this would run all the tests in
.. code-block:: console
$ spack test lib/spack/spack/test/architecture.py
$ spack test architecture.py
And this would run the ``test_platform`` test from that file:
@@ -115,7 +105,7 @@ And this would run the ``test_platform`` test from that file:
This allows you to develop iteratively: make a change, test that change,
make another change, test that change, etc. We use `pytest
<http://pytest.org/>`_ as our tests framework, and these types of
<http://pytest.org/>`_ as our tests fromework, and these types of
arguments are just passed to the ``pytest`` command underneath. See `the
pytest docs
<http://doc.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests>`_
@@ -143,7 +133,7 @@ You can combine these with ``pytest`` arguments to restrict which tests
you want to know about. For example, to see just the tests in
``architecture.py``:
.. command-output:: spack test --list-long lib/spack/spack/test/architecture.py
.. command-output:: spack test --list-long architecture.py
You can also combine any of these options with a ``pytest`` keyword
search. For example, to see the names of all tests that have "spec"
@@ -159,7 +149,7 @@ argument to ``pytest``:
.. code-block:: console
$ spack test -s spack test --list-long lib/spack/spack/test/architecture.py::test_platform
$ spack test -s architecture.py::test_platform
Unit tests are crucial to making sure bugs aren't introduced into
Spack. If you are modifying core Spack libraries or adding new
@@ -172,7 +162,7 @@ how to write tests!
.. note::
You may notice the ``share/spack/qa/run-unit-tests`` script in the
repository. This script is designed for CI. It runs the unit
repository. This script is designed for Travis CI. It runs the unit
tests and reports coverage statistics back to Codecov. If you want to
run the unit tests yourself, we suggest you use ``spack test``.
@@ -245,7 +235,7 @@ to update them.
Try fixing flake8 errors in reverse order. This eliminates the need for
multiple runs of ``spack flake8`` just to re-compute line numbers and
makes it much easier to fix errors directly off of the CI output.
makes it much easier to fix errors directly off of the Travis output.
.. warning::
@@ -337,7 +327,7 @@ your PR is accepted.
There is also a ``run-doc-tests`` script in ``share/spack/qa``. The only
difference between running this script and running ``make`` by hand is that
the script will exit immediately if it encounters an error or warning. This
is necessary for CI. If you made a lot of documentation changes, it is
is necessary for Travis CI. If you made a lot of documentation changes, it is
much quicker to run ``make`` by hand so that you can see all of the warnings
at once.
@@ -401,7 +391,7 @@ and allow you to see coverage line-by-line when viewing the Spack repository.
If you are new to Spack, a great way to get started is to write unit tests to
increase coverage!
Unlike with CI on Github Actions Codecov tests are not required to pass in order for your
Unlike with Travis, Codecov tests are not required to pass in order for your
PR to be merged. If you modify core Spack libraries, we would greatly
appreciate unit tests that cover these changed lines. Otherwise, we have no
way of knowing whether or not your changes introduce a bug. If you make


@@ -495,370 +495,3 @@ The bottom of the output shows the top most time consuming functions,
slowest on top. The profiling support is from Python's built-in tool,
`cProfile
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
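As a generic aside illustrating the built-in profiler mentioned above (a minimal standalone sketch, not Spack's own profiling code):

.. code-block:: python

    import cProfile
    import pstats

    def work():
        # Stand-in workload for whatever you want to profile.
        return sum(i * i for i in range(1000000))

    profiler = cProfile.Profile()
    profiler.enable()
    work()
    profiler.disable()

    # Cumulative time, slowest functions on top, limited to 5 entries.
    pstats.Stats(profiler).sort_stats('cumulative').print_stats(5)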
.. _releases:
--------
Releases
--------
This section documents Spack's release process. It is intended for
project maintainers, as the tasks described here require maintainer
privileges on the Spack repository. For others, we hope this section at
least provides some insight into how the Spack project works.
.. _release-branches:
^^^^^^^^^^^^^^^^
Release branches
^^^^^^^^^^^^^^^^
There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.13.0``, ``0.14.0``, etc.) and :ref:`point releases
<point-releases>` (``0.13.1``, ``0.13.2``, ``0.13.3``, etc.). Here is a
diagram of how Spack release branches work::
o branch: develop (latest version)
|
o merge v0.14.1 into develop
|\
| o branch: releases/v0.14, tag: v0.14.1
o | merge v0.14.0 into develop
|\|
| o tag: v0.14.0
|/
o merge v0.13.2 into develop
|\
| o branch: releases/v0.13, tag: v0.13.2
o | merge v0.13.1 into develop
|\|
| o tag: v0.13.1
o | merge v0.13.0 into develop
|\|
| o tag: v0.13.0
o |
| o
|/
o
The ``develop`` branch has the latest contributions, and nearly all pull
requests target ``develop``.
Each Spack release series also has a corresponding branch, e.g.
``releases/v0.14`` has ``0.14.x`` versions of Spack, and
``releases/v0.13`` has ``0.13.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
To avoid version churn for users of a release series, minor releases
should **not** make changes that would change the concretization of
packages. They should generally only contain fixes to the Spack core.
Both major and minor releases are tagged. After each release, we merge
the release branch back into ``develop`` so that the version bump and any
other release-specific changes are visible in the mainline (see
:ref:`merging-releases-to-develop`).
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We schedule work for releases by creating `GitHub projects
<https://github.com/spack/spack/projects>`_. At any time, there may be
several open release projects. For example, here are two releases (from
some past version of the page linked above):
.. image:: images/projects.png
Here, there's one release in progress for ``0.15.1`` and another for
``0.16.0``. Each of these releases has a project board containing issues
and pull requests. GitHub shows a status bar with completed work in
green, work in progress in purple, and work not started yet in gray, so
it's fairly easy to see progress.
Spack's project boards are not firm commitments, and we move work between
releases frequently. If we need to make a release and some tasks are not
yet done, we will simply move them to next minor or major release, rather
than delaying the release to complete them.
For more on using GitHub project boards, see `GitHub's documentation
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
.. _major-releases:
^^^^^^^^^^^^^^^^^^^^^
Making Major Releases
^^^^^^^^^^^^^^^^^^^^^
Assuming you've already created a project board and completed the work
for a major release, the steps to make the release are as follows:
#. Create two new project boards:
* One for the next major release
* One for the next point release
#. Move any tasks that aren't done yet to one of the new project boards.
Small bugfixes should go to the next point release. Major features,
refactors, and changes that could affect concretization should go in
the next major release.
#. Create a branch for the release, based on ``develop``:
.. code-block:: console
$ git checkout -b releases/v0.15 develop
For a version ``vX.Y.Z``, the branch's name should be
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
branch if you are preparing the ``X.Y.0`` release.
#. Bump the version in ``lib/spack/spack/__init__.py``. See `this example from 0.13.0
<https://github.com/spack/spack/commit/8eeb64096c98b8a43d1c587f13ece743c864fba9>`_
#. Update the release version lists in these files to include the new version:
* ``lib/spack/spack/schema/container.py``
* ``lib/spack/spack/container/images.json``
**TODO**: We should get rid of this step in some future release.
#. Update ``CHANGELOG.md`` with major highlights in bullet form. Use
proper markdown formatting, like `this example from 0.15.0
<https://github.com/spack/spack/commit/d4bf70d9882fcfe88507e9cb444331d7dd7ba71c>`_.
#. Push the release branch to GitHub.
#. Make sure CI passes on the release branch, including:
* Regular unit tests
* Build tests
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
If CI is not passing, submit pull requests to ``develop`` as normal
and keep rebasing the release branch on ``develop`` until CI passes.
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`merging-releases-to-develop`.
#. Follow the steps in :ref:`announcing-releases`.
.. _point-releases:
^^^^^^^^^^^^^^^^^^^^^
Making Point Releases
^^^^^^^^^^^^^^^^^^^^^
This assumes you've already created a project board for a point release
and completed the work to be done for the release. To make a point
release:
#. Create one new project board for the next point release.
#. Move any cards that aren't done yet to the next project board.
#. Check out the release branch (it should already exist). For the
``X.Y.Z`` release, the release branch is called ``releases/vX.Y``. For
``v0.15.1``, you would check out ``releases/v0.15``:
.. code-block:: console
$ git checkout releases/v0.15
#. Cherry-pick each pull request in the ``Done`` column of the release
project onto the release branch.
This is **usually** fairly simple since we squash the commits from the
vast majority of pull requests, which means there is only one commit
per pull request to cherry-pick. For example, `this pull request
<https://github.com/spack/spack/pull/15777>`_ has three commits, but
they were squashed into a single commit on merge. You can see the
commit that was created here:
.. image:: images/pr-commit.png
You can easily cherry pick it like this (assuming you already have the
release branch checked out):
.. code-block:: console
$ git cherry-pick 7e46da7
For pull requests that were rebased, you'll need to cherry-pick each
rebased commit individually. There have not been any rebased PRs like
this in recent point releases.
.. warning::
It is important to cherry-pick commits in the order they happened,
otherwise you can get conflicts while cherry-picking. When
cherry-picking onto a point release, look at the merge date,
**not** the number of the pull request or the date it was opened.
Sometimes you may **still** get merge conflicts even if you have
cherry-picked all the commits in order. This generally means there
is some other intervening pull request that the one you're trying
to pick depends on. In these cases, you'll need to make a judgment
call:
1. If the dependency is small, you might just cherry-pick it, too.
If you do this, add it to the release board.
2. If it is large, then you may decide that this fix is not worth
including in a point release, in which case you should remove it
from the release project.
3. You can always decide to manually back-port the fix to the release
branch if neither of the above options makes sense, but this can
require a lot of work. It's seldom the right choice.
#. Bump the version in ``lib/spack/spack/__init__.py``. See `this example from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
#. Update the release version lists in these files to include the new version:
* ``lib/spack/spack/schema/container.py``
* ``lib/spack/spack/container/images.json``
**TODO**: We should get rid of this step in some future release.
#. Update ``CHANGELOG.md`` with a list of bugfixes. This is typically just a
summary of the commits you cherry-picked onto the release branch. See
`the changelog from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
#. Push the release branch to GitHub.
#. Make sure CI passes on the release branch, including:
* Regular unit tests
* Build tests
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
If CI does not pass, you'll need to figure out why, and make changes
to the release branch until it does. You can make more commits, modify
or remove cherry-picked commits, or cherry-pick **more** from
``develop`` to make this happen.
#. Follow the steps in :ref:`publishing-releases`.
#. Follow the steps in :ref:`merging-releases-to-develop`.
#. Follow the steps in :ref:`announcing-releases`.
.. _publishing-releases:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Publishing a release on GitHub
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#. Go to `github.com/spack/spack/releases
<https://github.com/spack/spack/releases>`_ and click ``Draft a new
release``. Set the following:
* ``Tag version`` should start with ``v`` and contain *all three*
parts of the version, e.g. ``v0.15.1``. This is the name of the tag
that will be created.
* ``Target`` should be the ``releases/vX.Y`` branch (e.g., ``releases/v0.15``).
* ``Release title`` should be ``vX.Y.Z`` (To match the tag, e.g., ``v0.15.1``).
* For the text, paste the latest release markdown from your ``CHANGELOG.md``.
You can save the draft and keep coming back to this as you prepare the release.
#. When you are done, click ``Publish release``.
#. Immediately after publishing, go back to
`github.com/spack/spack/releases
<https://github.com/spack/spack/releases>`_ and download the
auto-generated ``.tar.gz`` file for the release. It's the ``Source
code (tar.gz)`` link.
#. Click ``Edit`` on the release you just did and attach the downloaded
release tarball as a binary. This does two things:
#. Makes sure that the hash of our releases doesn't change over time.
GitHub sometimes annoyingly changes the way they generate
tarballs, and then hashes can change if you rely on the
auto-generated tarball links.
#. Gets us download counts on releases visible through the GitHub
API. GitHub tracks downloads of artifacts, but *not* the source
links. See the `releases
page <https://api.github.com/repos/spack/spack/releases>`_ and search
for ``download_count`` to see this.
.. _merging-releases-to-develop:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Merging back into ``develop``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Once each release is complete, make sure that it is merged back into
``develop`` with a merge commit:
.. code-block:: console
$ git checkout develop
$ git merge --no-ff releases/v0.15
$ git push
We merge back to ``develop`` because it:
* updates the version and ``CHANGELOG.md`` on ``develop``.
* ensures that your release tag is reachable from the head of
``develop``
We *must* use a real merge commit (via the ``--no-ff`` option) because it
ensures that the release tag is reachable from the tip of ``develop``.
This is necessary for ``spack -V`` to work properly -- it uses ``git
describe --tags`` to find the last reachable tag in the repository and
reports how far we are from it. For example:
.. code-block:: console
$ spack -V
0.14.2-1486-b80d5e74e5
This says that we are at commit ``b80d5e74e5``, which is 1,486 commits
ahead of the ``0.14.2`` release.
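As an aside, a minimal sketch of deriving that version string from ``git describe --tags`` (an illustration that assumes dash-free tag names; this is not Spack's actual implementation):

.. code-block:: python

    import subprocess

    def describe_version(repo_path='.'):
        # e.g. 'v0.14.2-1486-gb80d5e74e5' -> '0.14.2-1486-b80d5e74e5'
        out = subprocess.check_output(
            ['git', '-C', repo_path, 'describe', '--tags'],
            text=True).strip()
        parts = out.lstrip('v').split('-')
        if len(parts) == 3:
            tag, distance, commit = parts
            return '{0}-{1}-{2}'.format(tag, distance, commit.lstrip('g'))
        return parts[0]  # exactly on a tagged release

    print(describe_version())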
We put this step last in the process because it's best to do it only once
the release is complete and tagged. If you do it before you've tagged the
release and later decide you want to tag some later commit, you'll need
to merge again.
.. _announcing-releases:
^^^^^^^^^^^^^^^^^^^^
Announcing a release
^^^^^^^^^^^^^^^^^^^^
We announce releases in all of the major Spack communication channels.
Publishing the release takes care of GitHub. The remaining channels are
Twitter, Slack, and the mailing list. Here are the steps:
#. Make a tweet to announce the release. It should link to the release's
page on GitHub. You can base it on `this example tweet
<https://twitter.com/spackpm/status/1231761858182307840>`_.
#. Ping ``@channel`` in ``#general`` on Slack (`spackpm.slack.com
<https://spackpm.slack.com>`_) with a link to the tweet. The tweet
will be shown inline so that you do not have to retype your release
announcement.
#. Email the Spack mailing list to let them know about the release. As
with the tweet, you likely want to link to the release's page on
GitHub. It's also helpful to include some information directly in the
email. You can base yours on this `example email
<https://groups.google.com/forum/#!topic/spack/WT4CT9i_X4s>`_.
Once you've announced the release, congratulations, you're done! You've
finished making the release!


@@ -818,7 +818,7 @@ Git
Some Spack packages use ``git`` to download, which might not work on
some computers. For example, the following error was
encountered on a Macintosh during ``spack install julia@master``:
encountered on a Macintosh during ``spack install julia-master``:
.. code-block:: console

Two binary image files removed (previously 44 KiB and 68 KiB); content not shown.


@@ -174,9 +174,8 @@ def _lock(self, op, timeout=None):
# If the file were writable, we'd have opened it 'r+'
raise LockROFileError(self.path)
self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
.format(lock_type[op], self._start, self._length,
timeout))
tty.debug("{0} locking [{1}:{2}]: timeout {3} sec"
.format(lock_type[op], self._start, self._length, timeout))
poll_intervals = iter(Lock._poll_interval_generator())
start_time = time.time()
@@ -212,14 +211,14 @@ def _poll_lock(self, op):
# help for debugging distributed locking
if self.debug:
# All locks read the owner PID and host
self._read_log_debug_data()
self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
.format(lock_type[op], self.path,
self._start, self._length, self.pid))
self._read_debug_data()
tty.debug('{0} locked {1} [{2}:{3}] (owner={4})'
.format(lock_type[op], self.path,
self._start, self._length, self.pid))
# Exclusive locks write their PID/host
if op == fcntl.LOCK_EX:
self._write_log_debug_data()
self._write_debug_data()
return True
@@ -246,7 +245,7 @@ def _ensure_parent_directory(self):
raise
return parent
def _read_log_debug_data(self):
def _read_debug_data(self):
"""Read PID and host data out of the file if it is there."""
self.old_pid = self.pid
self.old_host = self.host
@@ -258,7 +257,7 @@ def _read_log_debug_data(self):
_, _, self.host = host.rpartition('=')
self.pid = int(self.pid)
def _write_log_debug_data(self):
def _write_debug_data(self):
"""Write PID and host data to the file, recording old values."""
self.old_pid = self.pid
self.old_host = self.host
@@ -474,6 +473,9 @@ def release_write(self, release_fn=None):
else:
return False
def _debug(self, *args):
tty.debug(*args)
def _get_counts_desc(self):
return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
if tty.is_verbose() else ''
@@ -482,50 +484,58 @@ def _log_acquired(self, locktype, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg(locktype, '{0}{1}'
.format(desc, attempts_part)))
self._debug(self._status_msg(locktype, '{0}{1}'.
format(desc, attempts_part)))
def _log_acquiring(self, locktype):
self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)
def _log_debug(self, *args, **kwargs):
"""Output lock debug messages."""
kwargs['level'] = kwargs.get('level', 2)
tty.debug(*args, **kwargs)
self._debug2(self._status_msg(locktype, 'Acquiring'))
def _log_downgraded(self, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
.format(desc, attempts_part)))
self._debug(self._status_msg('READ LOCK', '{0}{1}'
.format(desc, attempts_part)))
def _log_downgrading(self):
self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)
self._debug2(self._status_msg('WRITE LOCK', 'Downgrading'))
def _log_released(self, locktype):
now = datetime.now()
desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg(locktype, desc))
self._debug(self._status_msg(locktype, desc))
def _log_releasing(self, locktype):
self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)
self._debug2(self._status_msg(locktype, 'Releasing'))
def _log_upgraded(self, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
format(desc, attempts_part)))
self._debug(self._status_msg('WRITE LOCK', '{0}{1}'.
format(desc, attempts_part)))
def _log_upgrading(self):
self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)
self._debug2(self._status_msg('READ LOCK', 'Upgrading'))
def _status_msg(self, locktype, status):
status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
locktype, self, status_desc)
def _debug2(self, *args):
# TODO: Easy place to make a single, temporary change to the
# TODO: debug level associated with the more detailed messages.
# TODO:
# TODO: Someday it would be great if we could switch this to
# TODO: another level, perhaps _between_ debug and verbose, or
# TODO: some other form of filtering so the first level of
# TODO: debugging doesn't have to generate these messages. Using
# TODO: verbose here did not work as expected because tests like
# TODO: test_spec_json will write the verbose messages to the
# TODO: output that is used to check test correctness.
tty.debug(*args)
class LockTransaction(object):
"""Simple nested transaction context manager that uses a file lock.

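The hunks above also rename the lock debug helpers (`_read_log_debug_data` to `_read_debug_data`, and the level-aware `_log_debug` to plain `_debug`/`_debug2`). For context, the mechanism underneath is a byte-range `fcntl` lock retried on a poll interval until a timeout expires. A minimal sketch of that acquire loop, with hypothetical names and a whole-file lock instead of Spack's byte range:

```python
import fcntl
import os
import time

def acquire_lock(path, lock_type=fcntl.LOCK_EX, timeout=60.0, poll=0.1):
    """Poll for a non-blocking lock until acquired or the timeout expires."""
    fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o644)
    deadline = time.time() + timeout
    while True:
        try:
            # LOCK_NB keeps each attempt non-blocking so the loop owns retries
            fcntl.lockf(fd, lock_type | fcntl.LOCK_NB)
            return fd
        except OSError:
            if time.time() >= deadline:
                os.close(fd)
                raise
            time.sleep(poll)
```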
View File

@@ -19,8 +19,7 @@
from llnl.util.tty.color import cprint, cwrite, cescape, clen
# Globals
_debug = 0
_debug = False
_verbose = False
_stacktrace = False
_timestamp = False
@@ -30,26 +29,21 @@
indent = " "
def debug_level():
return _debug
def is_verbose():
return _verbose
def is_debug(level=1):
return _debug >= level
def is_debug():
return _debug
def is_stacktrace():
return _stacktrace
def set_debug(level=0):
def set_debug(flag):
global _debug
assert level >= 0, 'Debug level must be a positive value'
_debug = level
_debug = flag
def set_verbose(flag):
@@ -138,17 +132,12 @@ def process_stacktrace(countback):
return st_text
def show_pid():
return is_debug(2)
def get_timestamp(force=False):
"""Get a string timestamp"""
if _debug or _timestamp or force:
# Note inclusion of the PID is useful for parallel builds.
pid = ', {0}'.format(os.getpid()) if show_pid() else ''
return '[{0}{1}] '.format(
datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
return '[{0}, {1}] '.format(
datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), os.getpid())
else:
return ''
@@ -208,8 +197,7 @@ def verbose(message, *args, **kwargs):
def debug(message, *args, **kwargs):
level = kwargs.get('level', 1)
if is_debug(level):
if _debug:
kwargs.setdefault('format', 'g')
kwargs.setdefault('stream', sys.stderr)
info(message, *args, **kwargs)

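The tty hunks replace develop's integer debug *level* with the older boolean flag. A sketch reconstructing the level-based API from the deleted lines (module-level state as in the original; the assert message is corrected here to say "non-negative"):

```python
import sys

_debug = 0  # 0 disables debug output; higher values enable more detail

def set_debug(level=0):
    global _debug
    assert level >= 0, 'Debug level must be a non-negative value'
    _debug = level

def is_debug(level=1):
    return _debug >= level

def debug(message, level=1):
    # Print only messages at or below the configured verbosity
    if is_debug(level):
        print(message, file=sys.stderr)

set_debug(1)
debug('shown at -d')
debug('shown only at -dd or higher', level=2)
```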
View File

@@ -323,14 +323,14 @@ class log_output(object):
work within test frameworks like nose and pytest.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False):
def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
"""Create a new output log context manager.
Args:
file_like (str or stream): open file object or name of file where
output should be logged
echo (bool): whether to echo output in addition to logging it
debug (int): positive to enable tty debug mode during logging
debug (bool): whether to enable tty debug mode during logging
buffer (bool): pass buffer=True to skip unbuffering output; note
this doesn't set up any *new* buffering
@@ -355,7 +355,7 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False):
self._active = False # used to prevent re-entry
def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
"""This behaves the same as init. It allows a logger to be reused.
"""Thie behaves the same as init. It allows a logger to be reused.
Arguments are the same as for ``__init__()``. Args here take
precedence over those passed to ``__init__()``.

View File

@@ -5,7 +5,7 @@
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 15, 4)
spack_version_info = (0, 15, 1)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)

View File

@@ -436,12 +436,6 @@ def to_dict(self):
('target', self.target.to_dict_or_value())])
return syaml_dict([('arch', d)])
def to_spec(self):
"""Convert this Arch to an anonymous Spec with architecture defined."""
spec = spack.spec.Spec()
spec.architecture = spack.spec.ArchSpec(str(self))
return spec
@staticmethod
def from_dict(d):
spec = spack.spec.ArchSpec.from_dict(d)
@@ -524,14 +518,6 @@ def platform():
@memoized
def default_arch():
"""Default ``Arch`` object for this machine.
See ``sys_type()``.
"""
return Arch(platform(), 'default_os', 'default_target')
def sys_type():
"""Print out the "default" platform-os-target tuple for this machine.
@@ -544,7 +530,8 @@ def sys_type():
architectures.
"""
return str(default_arch())
arch = Arch(platform(), 'default_os', 'default_target')
return str(arch)
@memoized

View File

@@ -36,6 +36,7 @@
from spack.spec import Spec
from spack.stage import Stage
from spack.util.gpg import Gpg
import spack.architecture as architecture
_build_cache_relative_path = 'build_cache'
@@ -465,8 +466,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
web_util.push_to_url(
specfile_path, remote_specfile_path, keep_original=False)
tty.debug('Buildcache for "{0}" written to \n {1}'
.format(spec, remote_spackfile_path))
tty.msg('Buildache for "%s" written to \n %s' %
(spec, remote_spackfile_path))
try:
# create an index.html for the build_cache directory so specs can be
@@ -497,7 +498,6 @@ def download_tarball(spec):
# stage the tarball into standard place
stage = Stage(url, name="build_cache", keep=True)
stage.create()
try:
stage.fetch()
return stage.save_filename
@@ -602,11 +602,15 @@ def is_backup_file(file):
if not is_backup_file(text_name):
text_names.append(text_name)
# If we are not installing back to the same install tree do the relocation
# If we are installing back to the same location don't replace anything
if old_layout_root != new_layout_root:
files_to_relocate = [os.path.join(workdir, filename)
for filename in buildinfo.get('relocate_binaries')
]
paths_to_relocate = [old_spack_prefix, old_layout_root]
paths_to_relocate.extend(prefix_to_hash.keys())
files_to_relocate = list(filter(
lambda pathname: not relocate.file_is_relocatable(
pathname, paths_to_relocate=paths_to_relocate),
map(lambda filename: os.path.join(workdir, filename),
buildinfo['relocate_binaries'])))
# If the buildcache was not created with relativized rpaths
# do the relocation of path in binaries
if (spec.architecture.platform == 'darwin' or
@@ -642,13 +646,6 @@ def is_backup_file(file):
new_spack_prefix,
prefix_to_prefix)
paths_to_relocate = [old_prefix, old_layout_root]
paths_to_relocate.extend(prefix_to_hash.keys())
files_to_relocate = list(filter(
lambda pathname: not relocate.file_is_relocatable(
pathname, paths_to_relocate=paths_to_relocate),
map(lambda filename: os.path.join(workdir, filename),
buildinfo['relocate_binaries'])))
# relocate the install prefixes in binary files including dependencies
relocate.relocate_text_bin(files_to_relocate,
old_prefix, new_prefix,
@@ -656,17 +653,6 @@ def is_backup_file(file):
new_spack_prefix,
prefix_to_prefix)
# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
else:
if old_spack_prefix != new_spack_prefix:
relocate.relocate_text(text_names,
old_layout_root, new_layout_root,
old_prefix, new_prefix,
old_spack_prefix,
new_spack_prefix,
prefix_to_prefix)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
force=False):
@@ -842,24 +828,26 @@ def get_spec(spec=None, force=False):
mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.debug('Finding buildcaches in {0}'.format(mirror_dir))
tty.msg("Finding buildcaches in %s" % mirror_dir)
link = url_util.join(fetch_url_build_cache, specfile_name)
urls.add(link)
else:
tty.debug('Finding buildcaches at {0}'
.format(url_util.format(fetch_url_build_cache)))
tty.msg("Finding buildcaches at %s" %
url_util.format(fetch_url_build_cache))
link = url_util.join(fetch_url_build_cache, specfile_name)
urls.add(link)
return try_download_specs(urls=urls, force=force)
def get_specs():
def get_specs(allarch=False):
"""
Get spec.yaml's for build caches available on mirror
"""
global _cached_specs
arch = architecture.Arch(architecture.platform(),
'default_os', 'default_target')
if not spack.mirror.MirrorCollection():
tty.debug("No Spack mirrors are currently configured")
@@ -869,8 +857,8 @@ def get_specs():
fetch_url_build_cache = url_util.join(
mirror.fetch_url, _build_cache_relative_path)
tty.debug('Finding buildcaches at {0}'
.format(url_util.format(fetch_url_build_cache)))
tty.msg("Finding buildcaches at %s" %
url_util.format(fetch_url_build_cache))
index_url = url_util.join(fetch_url_build_cache, 'index.json')
@@ -879,9 +867,10 @@ def get_specs():
index_url, 'application/json')
index_object = codecs.getreader('utf-8')(file_stream).read()
except (URLError, web_util.SpackWebError) as url_err:
tty.debug('Failed to read index {0}'.format(index_url), url_err, 1)
# Continue on to the next mirror
continue
tty.error('Failed to read index {0}'.format(index_url))
tty.debug(url_err)
# Just return whatever specs we may already have cached
return _cached_specs
tmpdir = tempfile.mkdtemp()
index_file_path = os.path.join(tmpdir, 'index.json')
@@ -896,7 +885,9 @@ def get_specs():
spec_list = db.query_local(installed=False)
for indexed_spec in spec_list:
_cached_specs.add(indexed_spec)
spec_arch = architecture.arch_for_spec(indexed_spec.architecture)
if (allarch is True or spec_arch == arch):
_cached_specs.add(indexed_spec)
return _cached_specs
@@ -918,15 +909,15 @@ def get_keys(install=False, trust=False, force=False):
mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.debug('Finding public keys in {0}'.format(mirror_dir))
tty.msg("Finding public keys in %s" % mirror_dir)
files = os.listdir(str(mirror_dir))
for file in files:
if re.search(r'\.key', file) or re.search(r'\.pub', file):
link = url_util.join(fetch_url_build_cache, file)
keys.add(link)
else:
tty.debug('Finding public keys at {0}'
.format(url_util.format(fetch_url_build_cache)))
tty.msg("Finding public keys at %s" %
url_util.format(fetch_url_build_cache))
# For s3 mirror need to request index.html directly
p, links = web_util.spider(
url_util.join(fetch_url_build_cache, 'index.html'))
@@ -944,14 +935,14 @@ def get_keys(install=False, trust=False, force=False):
stage.fetch()
except fs.FetchError:
continue
tty.debug('Found key {0}'.format(link))
tty.msg('Found key %s' % link)
if install:
if trust:
Gpg.trust(stage.save_filename)
tty.debug('Added this key to trusted keys.')
tty.msg('Added this key to trusted keys.')
else:
tty.debug('Will not add this key to trusted keys.'
'Use -t to install all downloaded keys')
tty.msg('Will not add this key to trusted keys.'
'Use -t to install all downloaded keys')
def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
@@ -1038,7 +1029,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
"""
rebuilds = {}
for mirror in spack.mirror.MirrorCollection(mirrors).values():
tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url))
tty.msg('Checking for built specs at %s' % mirror.fetch_url)
rebuild_list = []

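Among the msg/debug churn, the substantive change in this file is `get_specs(allarch=False)`: cached buildcache specs are now filtered to the host architecture unless `allarch` is passed. The filtering idea as a standalone sketch, with a namedtuple standing in for `spack.spec.Spec`:

```python
from collections import namedtuple

# Stand-in for spack.spec.Spec; only the architecture field matters here
Spec = namedtuple('Spec', ['name', 'architecture'])

def filter_by_arch(indexed_specs, host_arch, allarch=False):
    """Keep only specs built for this host unless all arches are wanted."""
    if allarch:
        return set(indexed_specs)
    return {s for s in indexed_specs if s.architecture == host_arch}

specs = [Spec('zlib', 'linux-centos7-x86_64'),
         Spec('zlib', 'darwin-mojave-x86_64')]
print(filter_by_arch(specs, 'linux-centos7-x86_64'))  # the linux build only
```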
View File

@@ -174,14 +174,6 @@ def clean_environment():
for v in build_system_vars:
env.unset(v)
# Unset mpi environment vars. These flags should only be set by
# mpi providers for packages with mpi dependencies
mpi_vars = [
'MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90'
]
for v in mpi_vars:
env.unset(v)
build_lang = spack.config.get('config:build_language')
if build_lang:
# Override language-related variables. This can be used to force

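This deleted hunk stops scrubbing the MPI wrapper variables from the build environment. For illustration, the same scrub against plain `os.environ` (Spack routes it through its `env` modifications object instead):

```python
import os

# MPI wrapper variables that should only be set by MPI providers
mpi_vars = ['MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90']
for v in mpi_vars:
    os.environ.pop(v, None)  # unset only if present
```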
View File

@@ -42,6 +42,7 @@ def _fetch_cache():
building the same package different ways or multiple times.
"""
path = spack.config.get('config:source_cache')
if not path:
path = os.path.join(spack.paths.var_path, "cache")
path = spack.util.path.canonicalize_path(path)

View File

@@ -493,7 +493,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
after_script = None
if custom_spack_repo:
if not custom_spack_ref:
custom_spack_ref = 'develop'
custom_spack_ref = 'master'
before_script = [
('git clone "{0}"'.format(custom_spack_repo)),
'pushd ./spack && git checkout "{0}" && popd'.format(
@@ -613,7 +613,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
debug_flag = '-d '
job_scripts = [
'spack env activate --without-view .',
'spack env activate .',
'spack {0}ci rebuild'.format(debug_flag),
]
@@ -1043,10 +1043,17 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
tty.debug('job package: {0}'.format(job_pkg))
stage_dir = job_pkg.stage.path
tty.debug('stage dir: {0}'.format(stage_dir))
build_env_src = os.path.join(stage_dir, 'spack-build-env.txt')
build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
build_env_dst = os.path.join(
job_log_dir, 'spack-build-env.txt')
build_out_dst = os.path.join(
job_log_dir, 'spack-build-out.txt')
tty.debug('Copying build log ({0}) to artifacts ({1})'.format(
tty.debug('Copying logs to artifacts:')
tty.debug(' 1: {0} -> {1}'.format(
build_env_src, build_env_dst))
shutil.copyfile(build_env_src, build_env_dst)
tty.debug(' 2: {0} -> {1}'.format(
build_out_src, build_out_dst))
shutil.copyfile(build_out_src, build_out_dst)
except Exception as inst:

View File

@@ -8,7 +8,6 @@
import sys
import llnl.util.tty as tty
import spack.architecture
import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -26,7 +25,6 @@
from spack.error import SpecError
from spack.spec import Spec, save_dependency_spec_yamls
from spack.util.string import plural
from spack.cmd import display_specs
@@ -290,12 +288,8 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
specs = bindist.get_specs()
if not other_arch:
arch = spack.architecture.default_arch().to_spec()
specs = [s for s in specs if s.satisfies(arch)]
allarch = other_arch
specs = bindist.get_specs(allarch)
for pkg in pkgs:
matches = []
tty.msg("buildcache spec(s) matching %s \n" % pkg)
@@ -399,12 +393,9 @@ def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
for spec in specs:
tty.debug('creating binary cache file for package %s ' % spec.format())
try:
bindist.build_tarball(spec, outdir, force, make_relative,
unsigned, allow_root, signing_key,
rebuild_index)
except bindist.NoOverwriteException as e:
tty.warn(e)
bindist.build_tarball(spec, outdir, force, make_relative,
unsigned, allow_root, signing_key,
rebuild_index)
def createtarball(args):
@@ -497,20 +488,10 @@ def install_tarball(spec, args):
def listspecs(args):
"""list binary packages available from mirrors"""
specs = bindist.get_specs()
if not args.allarch:
arch = spack.architecture.default_arch().to_spec()
specs = [s for s in specs if s.satisfies(arch)]
specs = bindist.get_specs(args.allarch)
if args.specs:
constraints = set(args.specs)
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
if sys.stdout.isatty():
builds = len(specs)
tty.msg("%s." % plural(builds, 'cached build'))
if not builds and not args.allarch:
tty.msg("You can query all available architectures with:",
"spack buildcache list --allarch")
display_specs(specs, args, all_headers=True)

View File

@@ -351,9 +351,6 @@ def env_status(args):
% (ev.manifest_name, env.path))
else:
tty.msg('In environment %s' % env.name)
# Check if environment views can be safely activated
env.check_views()
else:
tty.msg('No active environment')

View File

@@ -41,6 +41,8 @@ def update_kwargs_from_args(args, kwargs):
'fake': args.fake,
'dirty': args.dirty,
'use_cache': args.use_cache,
'install_global': args.install_global,
'upstream': args.upstream,
'cache_only': args.cache_only,
'explicit': True, # Always true for install command
'stop_at': args.until,
@@ -127,6 +129,14 @@ def setup_parser(subparser):
'-f', '--file', action='append', default=[],
dest='specfiles', metavar='SPEC_YAML_FILE',
help="install from file. Read specs to install from .yaml files")
subparser.add_argument(
'--upstream', action='store', default=None,
dest='upstream', metavar='UPSTREAM_NAME',
help='specify which upstream spack to install to')
subparser.add_argument(
'-g', '--global', action='store_true', default=False,
dest='install_global',
help='install package to globally accessible location')
cd_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
@@ -220,7 +230,10 @@ def default_log_file(spec):
"""
fmt = 'test-{x.name}-{x.version}-{hash}.xml'
basename = fmt.format(x=spec, hash=spec.dag_hash())
dirname = fs.os.path.join(spack.paths.var_path, 'junit-report')
dirname = fs.os.path.join(spack.paths.user_config_path,
'var/spack',
'junit-report')
fs.mkdirp(dirname)
return fs.os.path.join(dirname, basename)
@@ -231,6 +244,7 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
try:
# handle active environment, if any
env = ev.get_env(cli_args, 'install')
if env:
with env.write_transaction():
concrete = env.concretize_and_add(
@@ -241,6 +255,10 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
env.regenerate_views()
else:
spec.package.do_install(**kwargs)
spack.config.set('config:active_tree', '~/.spack/opt/spack',
scope='user')
spack.config.set('config:active_upstream', None,
scope='user')
except spack.build_environment.InstallError as e:
if cli_args.show_log_on_error:
@@ -255,6 +273,30 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
def install(parser, args, **kwargs):
# Install Package to Global Upstream for multi-user use
if args.install_global:
spack.config.set('config:active_upstream', 'global',
scope='user')
global_root = spack.config.get('upstreams')
global_root = global_root['global']['install_tree']
global_root = spack.util.path.canonicalize_path(global_root)
spack.config.set('config:active_tree', global_root,
scope='user')
elif args.upstream:
if args.upstream not in spack.config.get('upstreams'):
tty.die("specified upstream does not exist")
spack.config.set('config:active_upstream', args.upstream,
scope='user')
root = spack.config.get('upstreams')
root = root[args.upstream]['install_tree']
root = spack.util.path.canonicalize_path(root)
spack.config.set('config:active_tree', root, scope='user')
else:
spack.config.set('config:active_upstream', None,
scope='user')
spack.config.set('config:active_tree',
spack.config.get('config:install_tree'),
scope='user')
if args.help_cdash:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,

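The new `--global` and `--upstream` flags decide which install tree `spack install` targets. The selection logic added above, condensed into a hedged standalone sketch with plain dicts in place of Spack's config API:

```python
def resolve_install_tree(config, install_global=False, upstream=None):
    """Return (active_upstream, install_tree) for the requested target."""
    upstreams = config.get('upstreams', {})
    if install_global:
        name = 'global'
    elif upstream:
        if upstream not in upstreams:
            raise SystemExit('specified upstream does not exist')
        name = upstream
    else:
        # No upstream requested: fall back to the user's own install tree
        return None, config['config:install_tree']
    return name, upstreams[name]['install_tree']

config = {'config:install_tree': '~/.spack/opt/spack',
          'upstreams': {'global': {'install_tree': '$spack/opt/spack'}}}
print(resolve_install_tree(config, install_global=True))
```

The real command additionally persists the choice via `spack.config.set('config:active_tree', ...)` and canonicalizes `$spack`, which the sketch leaves out.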
View File

@@ -5,6 +5,8 @@
from __future__ import print_function
import argparse
import copy
import sys
import itertools
@@ -15,6 +17,7 @@
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.store
import spack.spec
from spack.database import InstallStatuses
from llnl.util import tty
@@ -63,8 +66,24 @@ def setup_parser(subparser):
help="remove ALL installed packages that match each supplied spec"
)
subparser.add_argument(
'packages',
nargs=argparse.REMAINDER,
help="specs of packages to uninstall")
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
subparser.add_argument(
'-u', '--upstream', action='store', default=None,
dest='upstream', metavar='UPSTREAM_NAME',
help='specify which upstream spack to uninstall from')
subparser.add_argument(
'-g', '--global', action='store_true',
dest='global_uninstall',
help='uninstall packages installed to global upstream')
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
upstream=None, global_uninstall=False):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
@@ -76,6 +95,35 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
Return:
list of specs
"""
if global_uninstall:
spack.config.set('config:active_upstream', 'global',
scope='user')
global_root = spack.config.get('upstreams')
global_root = global_root['global']['install_tree']
global_root = spack.util.path.canonicalize_path(global_root)
spack.config.set('config:active_tree', global_root,
scope='user')
elif upstream:
if upstream not in spack.config.get('upstreams'):
tty.die("specified upstream does not exist")
spack.config.set('config:active_upstream', upstream,
scope='user')
root = spack.config.get('upstreams')
root = root[upstream]['install_tree']
root = spack.util.path.canonicalize_path(root)
spack.config.set('config:active_tree', root, scope='user')
else:
spack.config.set('config:active_upstream', None,
scope='user')
for spec in specs:
if isinstance(spec, spack.spec.Spec):
spec_name = str(spec)
spec_copy = (copy.deepcopy(spec))
spec_copy.concretize()
if spec_copy.package.installed_upstream:
tty.warn("{0} is installed upstream".format(spec_name))
tty.die("Use 'spack uninstall [--upstream upstream_name]'")
# constrain uninstall resolution to current environment if one is active
hashes = env.all_hashes() if env else None
@@ -233,11 +281,25 @@ def do_uninstall(env, specs, force):
for item in ready:
item.do_uninstall(force=force)
# write any changes made to the active environment
if env:
env.write()
spack.config.set('config:active_tree',
'~/.spack/opt/spack',
scope='user')
spack.config.set('config:active_upstream', None,
scope='user')
def get_uninstall_list(args, specs, env):
# Gets the list of installed specs that match the ones given via cli
# args.all takes care of the case where '-a' is given in the cli
uninstall_list = find_matching_specs(env, specs, args.all, args.force)
uninstall_list = find_matching_specs(env, specs, args.all, args.force,
upstream=args.upstream,
global_uninstall=args.global_uninstall
)
# Takes care of '-R'
active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
@@ -314,7 +376,7 @@ def uninstall_specs(args, specs):
anything_to_do = set(uninstall_list).union(set(remove_list))
if not anything_to_do:
tty.warn('There are no package to uninstall.')
tty.warn('There are no packages to uninstall.')
return
if not args.yes_to_all:

View File

@@ -28,7 +28,7 @@
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
"""Invokes the compiler at a given path passing a single
version argument and returns the output.
@@ -42,18 +42,6 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
return output
def get_compiler_version_output(compiler_path, *args, **kwargs):
"""Wrapper for _get_compiler_version_output()."""
# This ensures that we memoize compiler output by *absolute path*,
# not just executable name. If we don't do this, and the path changes
# (e.g., during testing), we can get incorrect results.
if not os.path.isabs(compiler_path):
compiler_path = spack.util.executable.which_string(
compiler_path, required=True)
return _get_compiler_version_output(compiler_path, *args, **kwargs)
def tokenize_flags(flags_str):
"""Given a compiler flag specification as a string, this returns a list
where the entries are the flags. For compiler options which set values

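The deleted wrapper documents why it existed: memoizing compiler version output by bare executable name returns stale results when the path changes (e.g., during testing). The same normalize-then-memoize pattern with standard-library tools, as an assumption-laden sketch rather than Spack's implementation:

```python
import functools
import os
import shutil
import subprocess

@functools.lru_cache(maxsize=None)
def _version_output(abs_path, version_arg):
    # The cache key is the absolute path, so PATH changes cannot alias entries
    return subprocess.check_output([abs_path, version_arg], text=True)

def version_output(compiler_path, version_arg='--version'):
    if not os.path.isabs(compiler_path):
        resolved = shutil.which(compiler_path)
        if resolved is None:
            raise FileNotFoundError(compiler_path)
        compiler_path = resolved
    return _version_output(compiler_path, version_arg)
```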
View File

@@ -650,18 +650,23 @@ def make_compiler_list(detected_versions):
Returns:
list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
# We don't sort on the path of the compiler
sort_fn = lambda x: (x.id, x.variation, x.language)
compilers_s = sorted(detected_versions, key=sort_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
for sort_key, group in itertools.groupby(compilers_s, key=sort_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
by_name_variation = by_compiler_id.setdefault(name_variation, {})
by_name_variation[language] = next(x.path for x in group)
def _default_make_compilers(cmp_id, paths):
# For each unique compiler id select the name variation with most entries
# i.e. the one that supports most languages
compilers = []
def _default(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
@@ -672,38 +677,16 @@ def _default_make_compilers(cmp_id, paths):
)
return [compiler]
# For compilers with the same compiler id:
#
# - Prefer with C compiler to without
# - Prefer with C++ compiler to without
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
'cc' not in by_compiler_id[variation], # None last
'cxx' not in by_compiler_id[variation], # None last
getattr(variation, 'prefix', None),
getattr(variation, 'suffix', None),
)
compilers = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# fill any missing parts from subsequent entries
for lang in ['cxx', 'f77', 'fc']:
if lang not in selected:
next_lang = next((
by_compiler_id[v][lang] for v in ordered
if lang in by_compiler_id[v]), None)
if next_lang:
selected[lang] = next_lang
_, selected_name_variation = max(
(len(by_compiler_id[variation]), variation)
for variation in by_compiler_id
)
# Add it to the list of compilers
selected = by_compiler_id[selected_name_variation]
operating_system, _, _ = compiler_id
make_compilers = getattr(
operating_system, 'make_compilers', _default_make_compilers)
make_compilers = getattr(operating_system, 'make_compilers', _default)
compilers.extend(make_compilers(compiler_id, selected))
return compilers

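The rewritten `make_compiler_list` sorts detected compilers by (id, name variation, language), groups them, and then keeps, per compiler id, the variation supporting the most languages; develop's version instead ranked variations and back-filled missing languages from the runners-up. The grouping-and-selection step in isolation, with tuples standing in for the detected-version records:

```python
import itertools

detected = [
    (('gcc', '9.3'), 'plain', 'cc', '/usr/bin/gcc'),
    (('gcc', '9.3'), 'plain', 'cxx', '/usr/bin/g++'),
    (('gcc', '9.3'), 'suffixed', 'cc', '/usr/bin/gcc-9'),
]

sort_fn = lambda rec: (rec[0], rec[1], rec[2])
compilers_d = {}
for (cmp_id, variation, lang), group in itertools.groupby(
        sorted(detected, key=sort_fn), key=sort_fn):
    by_id = compilers_d.setdefault(cmp_id, {})
    by_id.setdefault(variation, {})[lang] = next(rec[3] for rec in group)

for cmp_id, by_variation in compilers_d.items():
    # Prefer the name variation that covers the most languages
    _, best = max((len(by_variation[v]), v) for v in by_variation)
    print(cmp_id, best, by_variation[best])
```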
View File

@@ -23,12 +23,7 @@ def extract_version_from_output(cls, output):
ver = 'unknown'
match = re.search(
# Apple's LLVM compiler has its own versions, so suffix them.
r'^Apple (?:LLVM|clang) version ([^ )]+)',
output,
# Multi-line, since 'Apple clang' may not be on the first line
# in particular, when run as gcc, it seems to output
# "Configured with: --prefix=..." as the first line
re.M,
r'^Apple (?:LLVM|clang) version ([^ )]+)', output
)
if match:
ver = match.group(match.lastindex)

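The deleted `re.M` flag is not cosmetic: when Apple clang is run as `gcc` it prints a `Configured with: ...` line first, so a `^`-anchored pattern only matches in multi-line mode. A small demonstration (the banner text is illustrative):

```python
import re

# When run as gcc, Apple clang prints a 'Configured with:' line first
output = ('Configured with: --prefix=/Applications/Xcode.app/...\n'
          'Apple clang version 11.0.0 (clang-1100.0.33.17)\n')

without_m = re.search(r'^Apple (?:LLVM|clang) version ([^ )]+)', output)
with_m = re.search(r'^Apple (?:LLVM|clang) version ([^ )]+)', output, re.M)

print(without_m)        # None: '^' only matches the start of the string
print(with_m.group(1))  # 11.0.0
```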
View File

@@ -5,13 +5,13 @@
import re
import spack.compiler
import spack.compilers.apple_clang as apple_clang
import spack.compilers.clang
from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import ver
class Gcc(spack.compiler.Compiler):
class Gcc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['gcc']
@@ -64,8 +64,10 @@ def cxx98_flag(self):
@property
def cxx11_flag(self):
if self.version < ver('4.3'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", " < 4.3")
raise UnsupportedCompilerFlag(self,
"the C++11 standard",
"cxx11_flag",
" < 4.3")
elif self.version < ver('4.7'):
return "-std=c++0x"
else:
@@ -74,8 +76,10 @@ def cxx11_flag(self):
@property
def cxx14_flag(self):
if self.version < ver('4.8'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++14 standard", "cxx14_flag", "< 4.8")
raise UnsupportedCompilerFlag(self,
"the C++14 standard",
"cxx14_flag",
"< 4.8")
elif self.version < ver('4.9'):
return "-std=c++1y"
elif self.version < ver('6.0'):
@@ -86,8 +90,10 @@ def cxx14_flag(self):
@property
def cxx17_flag(self):
if self.version < ver('5.0'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++17 standard", "cxx17_flag", "< 5.0")
raise UnsupportedCompilerFlag(self,
"the C++17 standard",
"cxx17_flag",
"< 5.0")
elif self.version < ver('6.0'):
return "-std=c++1z"
else:
@@ -96,15 +102,19 @@ def cxx17_flag(self):
@property
def c99_flag(self):
if self.version < ver('4.5'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C99 standard", "c99_flag", "< 4.5")
raise UnsupportedCompilerFlag(self,
"the C99 standard",
"c99_flag",
"< 4.5")
return "-std=c99"
@property
def c11_flag(self):
if self.version < ver('4.7'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C11 standard", "c11_flag", "< 4.7")
raise UnsupportedCompilerFlag(self,
"the C11 standard",
"c11_flag",
"< 4.7")
return "-std=c11"
@property
@@ -142,10 +152,10 @@ def default_version(cls, cc):
7.2.0
"""
# Apple's gcc is actually apple clang, so skip it. Returning
# "unknown" ensures this compiler is not detected by default.
# Users can add it manually to compilers.yaml at their own risk.
if apple_clang.AppleClang.default_version(cc) != 'unknown':
# Skip any gcc versions that are actually clang, like Apple's gcc.
# Returning "unknown" makes them not detected by default.
# Users can add these manually to compilers.yaml at their own risk.
if spack.compilers.clang.Clang.default_version(cc) != 'unknown':
return 'unknown'
version = super(Gcc, cls).default_version(cc)

View File

@@ -13,10 +13,7 @@
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2",
"0.15.3": "0.15.3",
"0.15.4": "0.15.4"
"0.15.1": "0.15.1"
}
},
"ubuntu:16.04": {
@@ -33,10 +30,7 @@
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2",
"0.15.3": "0.15.3",
"0.15.4": "0.15.4"
"0.15.1": "0.15.1"
}
},
"centos:7": {
@@ -53,10 +47,7 @@
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2",
"0.15.3": "0.15.3",
"0.15.4": "0.15.4"
"0.15.1": "0.15.1"
}
},
"centos:6": {
@@ -73,10 +64,7 @@
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2",
"0.15.3": "0.15.3",
"0.15.4": "0.15.4"
"0.15.1": "0.15.1"
}
}
}

View File

@@ -365,7 +365,26 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
tty.debug('PACKAGE LOCK TIMEOUT: {0}'.format(
str(timeout_format_str)))
# Create .spack-db/index.json for global upstream if it doesn't exist
global_install_tree = spack.config.get(
'upstreams')['global']['install_tree']
global_install_tree = global_install_tree.replace(
'$spack', spack.paths.prefix)
if self.is_upstream:
if global_install_tree in self._db_dir:
if not os.path.isfile(self._index_path):
f = open(self._index_path, "w+")
database = {
'database': {
'installs': {},
'version': str(_db_version)
}
}
try:
sjson.dump(database, f)
except Exception as e:
raise Exception(
"error writing YAML database:", str(e))
self.lock = ForbiddenLock()
else:
self.lock = lk.Lock(self._lock_path,
@@ -1182,6 +1201,9 @@ def _remove(self, spec):
rec.installed = False
return rec.spec
if self.is_upstream:
return rec.spec
del self._data[key]
for dep in rec.spec.dependencies(_tracked_deps):
# FIXME: the two lines below needs to be updated once #11983 is

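The database hunk writes an empty `index.json` for the global upstream when none exists (note the added code's error message says "YAML" although the payload is JSON). The skeleton it writes, reconstructed from the lines above; the version string here is illustrative, as the real value comes from `_db_version`:

```python
import json

_db_version = '5'  # illustrative only; Spack stamps its own _db_version

database = {
    'database': {
        'installs': {},          # no packages recorded yet
        'version': _db_version,
    }
}

with open('index.json', 'w') as f:
    json.dump(database, f)
```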
View File

@@ -175,20 +175,9 @@ def activate(
# MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
# become PATH variables.
#
try:
if add_view and default_view_name in env.views:
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.error(e)
tty.die(
'Environment view is broken due to a missing package or repo.\n',
' To activate without views enabled, activate with:\n',
' spack env activate -V {0}\n'.format(env.name),
' To remove it and resolve the issue, '
'force concretize with the command:\n',
' spack -e {0} concretize --force'.format(env.name))
if add_view and default_view_name in env.views:
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
return cmds
@@ -241,15 +230,9 @@ def deactivate(shell='sh'):
cmds += ' unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
cmds += 'fi;\n'
try:
if default_view_name in _active_environment.views:
with spack.store.db.read_transaction():
cmds += _active_environment.rm_default_view_from_shell(shell)
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn('Could not fully deactivate view due to missing package '
'or repo, shell environment may be corrupt.')
if default_view_name in _active_environment.views:
with spack.store.db.read_transaction():
cmds += _active_environment.rm_default_view_from_shell(shell)
tty.debug("Deactivated environmennt '%s'" % _active_environment.name)
_active_environment = None
@@ -463,9 +446,8 @@ def _eval_conditional(string):
class ViewDescriptor(object):
def __init__(self, base_path, root, projections={}, select=[], exclude=[],
def __init__(self, root, projections={}, select=[], exclude=[],
link=default_view_link):
self.base = base_path
self.root = root
self.projections = projections
self.select = select
@@ -495,19 +477,15 @@ def to_dict(self):
return ret
@staticmethod
def from_dict(base_path, d):
return ViewDescriptor(base_path,
d['root'],
def from_dict(d):
return ViewDescriptor(d['root'],
d.get('projections', {}),
d.get('select', []),
d.get('exclude', []),
d.get('link', default_view_link))
def view(self):
root = self.root
if not os.path.isabs(root):
root = os.path.normpath(os.path.join(self.base, self.root))
return YamlFilesystemView(root, spack.store.layout,
return YamlFilesystemView(self.root, spack.store.layout,
ignore_conflicts=True,
projections=self.projections)
@@ -549,29 +527,20 @@ def regenerate(self, all_specs, roots):
installed_specs_for_view = set(
s for s in specs_for_view if s in self and s.package.installed)
# To ensure there are no conflicts with packages being installed
# that cannot be resolved or have repos that have been removed
# we always regenerate the view from scratch. We must first make
# sure the root directory exists for the very first time though.
root = self.root
if not os.path.isabs(root):
root = os.path.normpath(os.path.join(self.base, self.root))
fs.mkdirp(root)
with fs.replace_directory_transaction(root):
view = self.view()
view = self.view()
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
rm_specs = specs_in_view - installed_specs_for_view
add_specs = installed_specs_for_view - specs_in_view
rm_specs = specs_in_view - installed_specs_for_view
add_specs = installed_specs_for_view - specs_in_view
# pass all_specs in, as it's expensive to read all the
# spec.yaml files twice.
view.remove_specs(*rm_specs, with_dependents=False,
all_specs=specs_in_view)
view.add_specs(*add_specs, with_dependencies=False)
# pass all_specs in, as it's expensive to read all the
# spec.yaml files twice.
view.remove_specs(*rm_specs, with_dependents=False,
all_specs=specs_in_view)
view.add_specs(*add_specs, with_dependencies=False)
class Environment(object):
@@ -617,11 +586,9 @@ def __init__(self, path, init_file=None, with_view=None):
self.views = {}
elif with_view is True:
self.views = {
default_view_name: ViewDescriptor(self.path,
self.view_path_default)}
default_view_name: ViewDescriptor(self.view_path_default)}
elif isinstance(with_view, six.string_types):
self.views = {default_view_name: ViewDescriptor(self.path,
with_view)}
self.views = {default_view_name: ViewDescriptor(with_view)}
# If with_view is None, then defer to the view settings determined by
# the manifest file
@@ -692,14 +659,11 @@ def _read_manifest(self, f, raw_yaml=None):
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {
default_view_name: ViewDescriptor(self.path,
self.view_path_default)}
default_view_name: ViewDescriptor(self.view_path_default)}
elif isinstance(enable_view, six.string_types):
self.views = {default_view_name: ViewDescriptor(self.path,
enable_view)}
self.views = {default_view_name: ViewDescriptor(enable_view)}
elif enable_view:
path = self.path
self.views = dict((name, ViewDescriptor.from_dict(path, values))
self.views = dict((name, ViewDescriptor.from_dict(values))
for name, values in enable_view.items())
else:
self.views = {}
@@ -1133,7 +1097,7 @@ def update_default_view(self, viewpath):
if name in self.views:
self.default_view.root = viewpath
else:
self.views[name] = ViewDescriptor(self.path, viewpath)
self.views[name] = ViewDescriptor(viewpath)
else:
self.views.pop(name, None)
@@ -1147,24 +1111,6 @@ def regenerate_views(self):
for view in self.views.values():
view.regenerate(specs, self.roots())
def check_views(self):
"""Checks if the environments default view can be activated."""
try:
# This is effectively a no-op, but it touches all packages in the
# default view if they are installed.
for view_name, view in self.views.items():
for _, spec in self.concretized_specs():
if spec in view and spec.package.installed:
tty.debug(
'Spec %s in view %s' % (spec.name, view_name))
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn(
'Environment %s includes out of date packages or repos. '
'Loading the environment view will require reconcretization.'
% self.name)
def _env_modifications_for_default_view(self, reverse=False):
all_mods = spack.util.environment.EnvironmentModifications()
@@ -1544,10 +1490,9 @@ def write(self, regenerate_views=True):
default_name = default_view_name
if self.views and len(self.views) == 1 and default_name in self.views:
path = self.default_view.root
if self.default_view == ViewDescriptor(self.path,
self.view_path_default):
if self.default_view == ViewDescriptor(self.view_path_default):
view = True
elif self.default_view == ViewDescriptor(self.path, path):
elif self.default_view == ViewDescriptor(path):
view = path
else:
view = dict((name, view.to_dict())

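The removed `base_path` plumbing let a view's `root` be written relative to the environment directory. The resolution step being deleted, as a tiny standalone helper:

```python
import os

def resolve_view_root(base_path, root):
    # Absolute roots are used as-is; relative roots anchor to the environment
    if os.path.isabs(root):
        return root
    return os.path.normpath(os.path.join(base_path, root))

print(resolve_view_root('/home/me/myenv', '.spack-env/view'))
# -> /home/me/myenv/.spack-env/view
```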
View File

@@ -289,11 +289,10 @@ def candidate_urls(self):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug('Already downloaded {0}'.format(self.archive_file))
tty.msg("Already downloaded %s" % self.archive_file)
return
url = None
errors = []
for url in self.candidate_urls:
try:
partial_file, save_file = self._fetch_from_url(url)
@@ -301,10 +300,8 @@ def fetch(self):
os.rename(partial_file, save_file)
break
except FetchError as e:
errors.append(str(e))
for msg in errors:
tty.debug(msg)
tty.msg(str(e))
pass
if not self.archive_file:
raise FailedDownloadError(url)
@@ -315,7 +312,7 @@ def _fetch_from_url(self, url):
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'
tty.debug('Fetching {0}'.format(url))
tty.msg("Fetching %s" % url)
if partial_file:
save_args = ['-C',
'-', # continue partial downloads
@@ -330,8 +327,6 @@ def _fetch_from_url(self, url):
'-', # print out HTML headers
'-L', # resolve 3xx redirects
url,
'--stderr', # redirect stderr output
'-', # redirect to stdout
]
if not spack.config.get('config:verify_ssl'):
@@ -417,8 +412,8 @@ def cachable(self):
@_needs_stage
def expand(self):
if not self.expand_archive:
tty.debug('Staging unexpanded archive {0} in {1}'
.format(self.archive_file, self.stage.source_path))
tty.msg("Staging unexpanded archive %s in %s" % (
self.archive_file, self.stage.source_path))
if not self.stage.expanded:
mkdirp(self.stage.source_path)
dest = os.path.join(self.stage.source_path,
@@ -426,7 +421,7 @@ def expand(self):
shutil.move(self.archive_file, dest)
return
tty.debug('Staging archive: {0}'.format(self.archive_file))
tty.msg("Staging archive: %s" % self.archive_file)
if not self.archive_file:
raise NoArchiveFileError(
@@ -569,7 +564,7 @@ def fetch(self):
raise
# Notify the user how we fetched.
tty.debug('Using cached archive: {0}'.format(path))
tty.msg('Using cached archive: %s' % path)
class VCSFetchStrategy(FetchStrategy):
@@ -599,8 +594,7 @@ def __init__(self, **kwargs):
@_needs_stage
def check(self):
tty.debug('No checksum needed when fetching with {0}'
.format(self.url_attr))
tty.msg("No checksum needed when fetching with %s" % self.url_attr)
@_needs_stage
def expand(self):
@@ -678,7 +672,7 @@ def go(self):
@_needs_stage
def fetch(self):
tty.debug('Getting go resource: {0}'.format(self.url))
tty.msg("Getting go resource:", self.url)
with working_dir(self.stage.path):
try:
@@ -794,10 +788,10 @@ def _repo_info(self):
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug('Already fetched {0}'.format(self.stage.source_path))
tty.msg("Already fetched {0}".format(self.stage.source_path))
return
tty.debug('Cloning git repository: {0}'.format(self._repo_info()))
tty.msg("Cloning git repository: {0}".format(self._repo_info()))
git = self.git
if self.commit:
@@ -965,10 +959,10 @@ def mirror_id(self):
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug('Already fetched {0}'.format(self.stage.source_path))
tty.msg("Already fetched %s" % self.stage.source_path)
return
tty.debug('Checking out subversion repository: {0}'.format(self.url))
tty.msg("Checking out subversion repository: %s" % self.url)
args = ['checkout', '--force', '--quiet']
if self.revision:
@@ -1074,14 +1068,13 @@ def mirror_id(self):
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug('Already fetched {0}'.format(self.stage.source_path))
tty.msg("Already fetched %s" % self.stage.source_path)
return
args = []
if self.revision:
args.append('at revision %s' % self.revision)
tty.debug('Cloning mercurial repository: {0} {1}'
.format(self.url, args))
tty.msg("Cloning mercurial repository:", self.url, *args)
args = ['clone']
@@ -1137,7 +1130,7 @@ def __init__(self, *args, **kwargs):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug('Already downloaded {0}'.format(self.archive_file))
tty.msg("Already downloaded %s" % self.archive_file)
return
parsed_url = url_util.parse(self.url)
@@ -1145,7 +1138,7 @@ def fetch(self):
raise FetchError(
'S3FetchStrategy can only fetch from s3:// urls.')
tty.debug('Fetching {0}'.format(self.url))
tty.msg("Fetching %s" % self.url)
basename = os.path.basename(parsed_url.path)

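Besides the msg/debug swap, this hunk changes error handling in the candidate-URL loop: develop collects each per-URL failure and reports them via `tty.debug` only after every mirror fails, while this branch prints each immediately and moves on. The collect-then-report shape, sketched generically with a hypothetical `fetch_one` callable:

```python
class FetchError(Exception):
    pass

def fetch_first(candidate_urls, fetch_one):
    """Try each mirror in order; report all failures only if every one fails."""
    errors = []
    for url in candidate_urls:
        try:
            return fetch_one(url)
        except FetchError as e:
            errors.append('{0}: {1}'.format(url, e))
    raise FetchError('all candidate URLs failed:\n  ' + '\n  '.join(errors))

print(fetch_first(['https://mirror-a', 'https://mirror-b'],
                  lambda url: url + '/archive.tar.gz'))
```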
View File

@@ -215,18 +215,18 @@ def _hms(seconds):
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
"""
Extract the package from binary cache
Install the package from binary cache
Args:
pkg (PackageBase): the package to install from the binary cache
cache_only (bool): only extract from binary cache
cache_only (bool): only install from binary cache
explicit (bool): ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False``
unsigned (bool): ``True`` if binary package signatures are to be checked,
otherwise ``False``
Return:
(bool) ``True`` if the package was extract from binary cache,
(bool) ``True`` if the package was installed from binary cache,
``False`` otherwise
"""
installed_from_cache = _try_install_from_binary_cache(pkg, explicit,
@@ -237,10 +237,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
if cache_only:
tty.die('{0} when cache-only specified'.format(pre))
tty.msg('{0}: installing from source'.format(pre))
tty.debug('{0}: installing from source'.format(pre))
return False
tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
tty.debug('Successfully installed {0} from binary cache'.format(pkg_id))
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec)
return True
@@ -275,17 +275,17 @@ def _process_external_package(pkg, explicit):
if spec.external_module:
tty.msg('{0} has external module in {1}'
.format(pre, spec.external_module))
tty.debug('{0} is actually installed in {1}'
.format(pre, spec.external_path))
tty.msg('{0} is actually installed in {1}'
.format(pre, spec.external_path))
else:
tty.msg('{0} externally installed in {1}'
tty.msg("{0} externally installed in {1}"
.format(pre, spec.external_path))
try:
# Check if the package was already registered in the DB.
# If this is the case, then just exit.
rec = spack.store.db.get_record(spec)
tty.debug('{0} already registered in DB'.format(pre))
tty.msg('{0} already registered in DB'.format(pre))
# Update the value of rec.explicit if it is necessary
_update_explicit_entry_in_db(pkg, rec, explicit)
@@ -294,11 +294,11 @@ def _process_external_package(pkg, explicit):
# If not, register it and generate the module file.
# For external packages we just need to run
# post-install hooks to generate module files.
tty.debug('{0} generating module file'.format(pre))
tty.msg('{0} generating module file'.format(pre))
spack.hooks.post_install(spec)
# Add to the DB
tty.debug('{0} registering into DB'.format(pre))
tty.msg('{0} registering into DB'.format(pre))
spack.store.db.add(spec, None, explicit=explicit)
@@ -314,7 +314,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
otherwise, ``False``
Return:
(bool) ``True`` if the package was extracted from binary cache,
(bool) ``True`` if the package was installed from binary cache,
else ``False``
"""
tarball = binary_distribution.download_tarball(binary_spec)
@@ -325,7 +325,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
return False
pkg_id = package_id(pkg)
tty.msg('Extracting {0} from binary cache'.format(pkg_id))
tty.msg('Installing {0} from binary cache'.format(pkg_id))
binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
unsigned=unsigned, force=False)
pkg.installed_from_binary_cache = True
@@ -335,10 +335,10 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
"""
Try to extract the package from binary cache.
Try to install the package from binary cache.
Args:
pkg (PackageBase): the package to be extracted from binary cache
pkg (PackageBase): the package to be installed from binary cache
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures are to be checked,
otherwise ``False``
@@ -369,7 +369,7 @@ def _update_explicit_entry_in_db(pkg, rec, explicit):
with spack.store.db.write_transaction():
rec = spack.store.db.get_record(pkg.spec)
message = '{s.name}@{s.version} : marking the package explicit'
tty.debug(message.format(s=pkg.spec))
tty.msg(message.format(s=pkg.spec))
rec.explicit = True
@@ -452,8 +452,7 @@ def install_msg(name, pid):
Return:
(str) Colorized installing message
"""
pre = '{0}: '.format(pid) if tty.show_pid() else ''
return pre + colorize('@*{Installing} @*g{%s}' % name)
return '{0}: '.format(pid) + colorize('@*{Installing} @*g{%s}' % name)
def log(pkg):
@@ -1058,15 +1057,11 @@ def _install_task(self, task, **kwargs):
if use_cache and \
_install_from_cache(pkg, cache_only, explicit, unsigned):
self._update_installed(task)
if task.compiler:
spack.compilers.add_compilers_to_config(
spack.compilers.find_compilers([pkg.spec.prefix]))
return
pkg.run_tests = (tests is True or tests and pkg.name in tests)
pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
pre = '{0}{1}:'.format(pid, pkg.name)
pre = '{0}: {1}:'.format(self.pid, pkg.name)
def build_process():
"""
@@ -1085,8 +1080,8 @@ def build_process():
pkg.do_stage()
pkg_id = package_id(pkg)
tty.debug('{0} Building {1} [{2}]'
.format(pre, pkg_id, pkg.build_system_class))
tty.msg('{0} Building {1} [{2}]'
.format(pre, pkg_id, pkg.build_system_class))
# get verbosity from do_install() parameter or saved value
echo = verbose
@@ -1107,8 +1102,8 @@ def build_process():
if install_source and os.path.isdir(source_path):
src_target = os.path.join(pkg.spec.prefix, 'share',
pkg.name, 'src')
tty.debug('{0} Copying source to {1}'
.format(pre, src_target))
tty.msg('{0} Copying source to {1}'
.format(pre, src_target))
fs.install_tree(pkg.stage.source_path, src_target)
# Do the real install in the source directory.
@@ -1130,7 +1125,7 @@ def build_process():
pass
# cache debug settings
debug_level = tty.debug_level()
debug_enabled = tty.is_debug()
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
@@ -1139,11 +1134,11 @@ def build_process():
pkg.phases, pkg._InstallPhase_phases):
with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
inner_debug = tty.is_debug()
tty.set_debug(debug_enabled)
tty.msg("{0} Executing phase: '{1}'"
.format(pre, phase_name))
tty.set_debug(inner_debug_level)
tty.set_debug(inner_debug)
# Redirect stdout and stderr to daemon pipe
phase = getattr(pkg, phase_attr)
@@ -1159,11 +1154,11 @@ def build_process():
pkg._total_time = time.time() - start_time
build_time = pkg._total_time - pkg._fetch_time
tty.debug('{0} Successfully installed {1}'
.format(pre, pkg_id),
'Fetch: {0}. Build: {1}. Total: {2}.'
.format(_hms(pkg._fetch_time), _hms(build_time),
_hms(pkg._total_time)))
tty.msg('{0} Successfully installed {1}'
.format(pre, pkg_id),
'Fetch: {0}. Build: {1}. Total: {2}.'
.format(_hms(pkg._fetch_time), _hms(build_time),
_hms(pkg._total_time)))
_print_installed_pkg(pkg.prefix)
# preserve verbosity across runs
@@ -1194,8 +1189,7 @@ def build_process():
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
pre = '{0}'.format(self.pid) if tty.show_pid() else ''
tty.debug('{0}{1}'.format(pid, str(e)))
tty.debug('{0} {1}'.format(self.pid, str(e)))
tty.debug('Package stage directory : {0}'
.format(pkg.stage.source_path))
@@ -1568,14 +1562,9 @@ def install(self, **kwargs):
except (Exception, SystemExit) as exc:
# Best effort installs suppress the exception and mark the
# package as a failure UNLESS this is the explicit package.
if (not isinstance(exc, spack.error.SpackError) or
not exc.printed):
# SpackErrors can be printed by the build process or at
# lower levels -- skip printing if already printed.
# TODO: sort out this and SpackEror.print_context()
err = 'Failed to install {0} due to {1}: {2}'
tty.error(
err.format(pkg.name, exc.__class__.__name__, str(exc)))
err = 'Failed to install {0} due to {1}: {2}'
tty.error(err.format(pkg.name, exc.__class__.__name__,
str(exc)))
self._update_failed(task, True, exc)

View File

@@ -128,8 +128,8 @@ def get_version():
git = exe.which("git")
if git:
with fs.working_dir(spack.paths.prefix):
desc = git("describe", "--tags", "--match", "v*",
output=str, error=os.devnull, fail_on_error=False)
desc = git(
"describe", "--tags", output=str, fail_on_error=False)
if git.returncode == 0:
match = re.match(r"v([^-]+)-([^-]+)-g([a-f\d]+)", desc)
@@ -362,9 +362,8 @@ def make_argument_parser(**kwargs):
'-C', '--config-scope', dest='config_scopes', action='append',
metavar='DIR', help="add a custom configuration scope")
parser.add_argument(
'-d', '--debug', action='count', default=0,
help="write out debug messages "
"(more d's for more verbosity: -d, -dd, -ddd, etc.)")
'-d', '--debug', action='store_true',
help="write out debug logs during compile")
parser.add_argument(
'--timestamp', action='store_true',
help="Add a timestamp to tty output")
@@ -439,7 +438,7 @@ def setup_main_options(args):
tty.set_debug(args.debug)
tty.set_stacktrace(args.stacktrace)
# debug must be set first so that it can even affect behavior of
# debug must be set first so that it can even affect behvaior of
# errors raised by spack.config.
if args.debug:
spack.error.debug = True
@@ -645,6 +644,7 @@ def shell_set(var, value):
other_spack_instances = spack.config.get(
'upstreams') or {}
for install_properties in other_spack_instances.values():
upstream_module_roots = install_properties.get('modules', {})
upstream_module_roots = dict(
@@ -706,7 +706,7 @@ def main(argv=None):
if not args.no_env:
env = ev.find_environment(args)
if env:
ev.activate(env, args.use_env_repo, add_view=False)
ev.activate(env, args.use_env_repo)
# make spack.config aware of any command line configuration scopes
if args.config_scopes:

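The `-d` change is the CLI side of the debug-level removal: develop used argparse's `action='count'` so that `-d`, `-dd`, `-ddd` raise the level, and this branch reverts to a boolean flag. The counting behavior in miniature:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='count', default=0,
                    help="more d's for more verbosity")

print(parser.parse_args([]).debug)       # 0
print(parser.parse_args(['-d']).debug)   # 1
print(parser.parse_args(['-dd']).debug)  # 2
```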
View File

@@ -215,9 +215,18 @@ def root_path(name):
Returns:
root folder for module file installation
"""
# Root folders where the various module files should be written
roots = spack.config.get('config:module_roots', {})
path = roots.get(name, os.path.join(spack.paths.share_path, name))
active_upstream = spack.config.get('config:active_upstream')
if active_upstream is not None:
# Installs module files to upstream share directory.
# Extra logic is needed for determining this location.
roots = spack.config.get('upstreams')[active_upstream]['modules']
path = roots.get(name, os.path.join(spack.paths.user_share_path, name))
else:
# If no upstream is active install module file to user share directory.
roots = spack.config.get('config:module_roots', {})
path = roots.get(name, os.path.join(spack.paths.user_share_path, name))
return spack.util.path.canonicalize_path(path)
@@ -288,6 +297,7 @@ def read_module_indices():
module_type_to_index = {}
module_type_to_root = install_properties.get('modules', {})
for module_type, root in module_type_to_root.items():
root = spack.util.path.canonicalize_path(root)
module_type_to_index[module_type] = read_module_index(root)
module_indices.append(module_type_to_index)

View File

@@ -97,9 +97,6 @@ def __str__(self):
def _detect_crayos_version(cls):
if os.path.isfile(_cle_release_file):
release_attrs = read_cle_release_file()
if 'RELEASE' not in release_attrs:
# This Cray system uses a base OS not CLE/CNL
return None
v = spack.version.Version(release_attrs['RELEASE'])
return v[0]
elif os.path.isfile(_clerelease_file):

View File

@@ -1121,8 +1121,9 @@ def do_fetch(self, mirror_only=False):
raise ValueError("Can only fetch concrete packages.")
if not self.has_code:
tty.debug('No fetch required for {0}: package has no code.'
.format(self.name))
tty.msg(
"No fetch required for %s: package has no code." % self.name
)
start_time = time.time()
checksum = spack.config.get('config:checksum')
@@ -1138,8 +1139,7 @@ def do_fetch(self, mirror_only=False):
ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
default=False)
if ignore_checksum:
tty.debug('Fetching with no checksum. {0}'
.format(ck_msg))
tty.msg("Fetching with no checksum.", ck_msg)
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
@@ -1195,7 +1195,7 @@ def do_patch(self):
# If there are no patches, note it.
if not patches and not has_patch_fun:
tty.debug('No patches needed for {0}'.format(self.name))
tty.msg("No patches needed for %s" % self.name)
return
# Construct paths to special files in the archive dir used to
@@ -1208,15 +1208,15 @@ def do_patch(self):
# If we encounter an archive that failed to patch, restage it
# so that we can apply all the patches again.
if os.path.isfile(bad_file):
tty.debug('Patching failed last time. Restaging.')
tty.msg("Patching failed last time. Restaging.")
self.stage.restage()
# If this file exists, then we already applied all the patches.
if os.path.isfile(good_file):
tty.debug('Already patched {0}'.format(self.name))
tty.msg("Already patched %s" % self.name)
return
elif os.path.isfile(no_patches_file):
tty.debug('No patches needed for {0}'.format(self.name))
tty.msg("No patches needed for %s" % self.name)
return
# Apply all the patches for specs that match this one
@@ -1225,7 +1225,7 @@ def do_patch(self):
try:
with working_dir(self.stage.source_path):
patch.apply(self.stage)
tty.debug('Applied patch {0}'.format(patch.path_or_url))
tty.msg('Applied patch %s' % patch.path_or_url)
patched = True
except spack.error.SpackError as e:
tty.debug(e)
@@ -1239,7 +1239,7 @@ def do_patch(self):
try:
with working_dir(self.stage.source_path):
self.patch()
tty.debug('Ran patch() for {0}'.format(self.name))
tty.msg("Ran patch() for %s" % self.name)
patched = True
except spack.multimethod.NoSuchMethodError:
# We are running a multimethod without a default case.
@@ -1249,12 +1249,12 @@ def do_patch(self):
# directive, AND the patch function didn't apply, say
# no patches are needed. Otherwise, we already
# printed a message for each patch.
tty.debug('No patches needed for {0}'.format(self.name))
tty.msg("No patches needed for %s" % self.name)
except spack.error.SpackError as e:
tty.debug(e)
# Touch bad file if anything goes wrong.
tty.msg('patch() function failed for {0}'.format(self.name))
tty.msg("patch() function failed for %s" % self.name)
touch(bad_file)
raise
@@ -1341,7 +1341,7 @@ def _has_make_target(self, target):
if os.path.exists(makefile):
break
else:
tty.debug('No Makefile found in the build directory')
tty.msg('No Makefile found in the build directory')
return False
# Check if 'target' is a valid target.
@@ -1372,8 +1372,7 @@ def _has_make_target(self, target):
for missing_target_msg in missing_target_msgs:
if missing_target_msg.format(target) in stderr:
tty.debug("Target '{0}' not found in {1}"
.format(target, makefile))
tty.msg("Target '" + target + "' not found in " + makefile)
return False
return True
@@ -1401,7 +1400,7 @@ def _has_ninja_target(self, target):
# Check if we have a Ninja build script
if not os.path.exists('build.ninja'):
tty.debug('No Ninja build script found in the build directory')
tty.msg('No Ninja build script found in the build directory')
return False
# Get a list of all targets in the Ninja build script
@@ -1413,8 +1412,7 @@ def _has_ninja_target(self, target):
if line.startswith(target + ':')]
if not matches:
tty.debug("Target '{0}' not found in build.ninja"
.format(target))
tty.msg("Target '" + target + "' not found in build.ninja")
return False
return True
@@ -1721,12 +1719,11 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
if specs:
if deprecator:
spack.store.db.deprecate(specs[0], deprecator)
tty.debug('Deprecating stale DB entry for {0}'
.format(spec.short_spec))
tty.msg("Deprecating stale DB entry for "
"%s" % spec.short_spec)
else:
spack.store.db.remove(specs[0])
tty.debug('Removed stale DB entry for {0}'
.format(spec.short_spec))
tty.msg("Removed stale DB entry for %s" % spec.short_spec)
return
else:
raise InstallError(str(spec) + " is not installed.")
@@ -1770,7 +1767,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
if pkg is not None:
spack.hooks.post_uninstall(spec)
tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))
tty.msg("Successfully uninstalled %s" % spec.short_spec)
def do_uninstall(self, force=False):
"""Uninstall this package by spec."""

View File

@@ -16,6 +16,9 @@
#: This file lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
#: User configuration location
user_config_path = os.path.expanduser('~/.spack')
#: synonym for prefix
spack_root = prefix
@@ -38,16 +41,16 @@
test_path = os.path.join(module_path, "test")
hooks_path = os.path.join(module_path, "hooks")
var_path = os.path.join(prefix, "var", "spack")
user_var_path = os.path.join(user_config_path, "var", "spack")
stage_path = os.path.join(user_var_path, "stage")
repos_path = os.path.join(var_path, "repos")
share_path = os.path.join(prefix, "share", "spack")
user_share_path = os.path.join(user_config_path, "share", "spack")
# Paths to built-in Spack repositories.
packages_path = os.path.join(repos_path, "builtin")
mock_packages_path = os.path.join(repos_path, "builtin.mock")
#: User configuration location
user_config_path = os.path.expanduser('~/.spack')
opt_path = os.path.join(prefix, "opt")
etc_path = os.path.join(prefix, "etc")
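For context, a minimal sketch of how the user-scoped paths above compose, assuming only os.path semantics (the home directory is illustrative):

import os
user_config_path = os.path.expanduser('~/.spack')   # e.g. /home/alice/.spack
user_var_path = os.path.join(user_config_path, 'var', 'spack')
stage_path = os.path.join(user_var_path, 'stage')   # /home/alice/.spack/var/spack/stage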

View File

@@ -20,7 +20,7 @@
_craype_name_to_target_name = {
'x86-cascadelake': 'cascadelake',
'x86-naples': 'zen',
'x86-rome': 'zen2',
'x86-rome': 'zen', # Cheating because we have the wrong modules on rzcrayz
'x86-skylake': 'skylake_avx512',
'mic-knl': 'mic_knl',
'interlagos': 'bulldozer',

View File

@@ -804,17 +804,15 @@ def relocate_text(
where they should be relocated
"""
# TODO: reduce the number of arguments (8 seems too much)
orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
sbang_regex = r'#!/bin/bash {0}/bin/sbang'.format(orig_spack)
new_sbang = r'#!/bin/bash {0}/bin/sbang'.format(new_spack)
for file in files:
_replace_prefix_text(file, orig_install_prefix, new_install_prefix)
for orig_dep_prefix, new_dep_prefix in new_prefixes.items():
_replace_prefix_text(file, orig_dep_prefix, new_dep_prefix)
_replace_prefix_text(file, orig_layout_root, new_layout_root)
# relocate the sbang location only if the spack directory changed
if orig_spack != new_spack:
_replace_prefix_text(file, orig_sbang, new_sbang)
_replace_prefix_text(file, sbang_regex, new_sbang)
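# Hedged illustration: with orig_spack='/opt/spack-old', the pattern above is
#   '#!/bin/bash /opt/spack-old/bin/sbang'
# and matching shebang lines are rewritten to point at new_spack, but only
# when the two Spack prefixes actually differ.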
def relocate_text_bin(

View File

@@ -32,8 +32,7 @@
'enum': [
'develop',
'0.14', '0.14.0', '0.14.1', '0.14.2',
'0.15', '0.15.0', '0.15.1', '0.15.2',
'0.15.3', '0.15.4',
'0.15', '0.15.0', '0.15.1',
]
}
},

View File

@@ -154,6 +154,7 @@ def get_stage_root():
if _stage_root is None:
candidates = spack.config.get('config:build_stage')
if isinstance(candidates, string_types):
candidates = [candidates]
@@ -414,11 +415,10 @@ def fetch(self, mirror_only=False):
# Join URLs of mirror roots with mirror paths. Because
# urljoin() strips everything past the final '/' in the
# root, we add a '/' if it is not present.
mirror_urls = []
urls = []
for mirror in spack.mirror.MirrorCollection().values():
for rel_path in self.mirror_paths:
mirror_urls.append(
url_util.join(mirror.fetch_url, rel_path))
urls.append(url_util.join(mirror.fetch_url, rel_path))
# If this archive is normally fetched from a tarball URL,
# then use the same digest. `spack mirror` ensures that
@@ -436,8 +436,7 @@ def fetch(self, mirror_only=False):
self.skip_checksum_for_mirror = not bool(digest)
# Add URL strategies for all the mirrors with the digest
# Insert fetchers in the order that the URLs are provided.
for url in reversed(mirror_urls):
for url in urls:
fetchers.insert(
0, fs.from_url_scheme(
url, digest, expand=expand, extension=extension))
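For context, a short sketch of the urljoin() behavior the comment above works around (URLs hypothetical):

from urllib.parse import urljoin
urljoin('http://mirror.example.com/spack/path', 'rel')   # -> 'http://mirror.example.com/spack/rel'
urljoin('http://mirror.example.com/spack/path/', 'rel')  # -> 'http://mirror.example.com/spack/path/rel'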
@@ -459,11 +458,6 @@ def generate_fetchers():
for fetcher in dynamic_fetchers:
yield fetcher
def print_errors(errors):
for msg in errors:
tty.debug(msg)
errors = []
for fetcher in generate_fetchers():
try:
fetcher.stage = self
@@ -474,18 +468,14 @@ def print_errors(errors):
# Don't bother reporting when something is not cached.
continue
except spack.error.SpackError as e:
errors.append('Fetching from {0} failed.'.format(fetcher))
tty.msg("Fetching from %s failed." % fetcher)
tty.debug(e)
continue
else:
print_errors(errors)
err_msg = 'All fetchers failed for {0}'.format(self.name)
err_msg = "All fetchers failed for %s" % self.name
self.fetcher = self.default_fetcher
raise fs.FetchError(err_msg, None)
print_errors(errors)
def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
@@ -546,9 +536,9 @@ def expand_archive(self):
downloaded."""
if not self.expanded:
self.fetcher.expand()
tty.debug('Created stage in {0}'.format(self.path))
tty.msg("Created stage in %s" % self.path)
else:
tty.debug('Already staged {0} in {1}'.format(self.name, self.path))
tty.msg("Already staged %s in %s" % (self.name, self.path))
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands
@@ -719,13 +709,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
pass
def fetch(self, *args, **kwargs):
tty.debug('No need to fetch for DIY.')
tty.msg("No need to fetch for DIY.")
def check(self):
tty.debug('No checksum needed for DIY.')
tty.msg("No checksum needed for DIY.")
def expand_archive(self):
tty.debug('Using source directory: {0}'.format(self.source_path))
tty.msg("Using source directory: %s" % self.source_path)
@property
def expanded(self):
@@ -743,7 +733,7 @@ def destroy(self):
pass
def cache_local(self):
tty.debug('Sources for DIY stages are not cached')
tty.msg("Sources for DIY stages are not cached")
def ensure_access(file):
@@ -793,12 +783,12 @@ def get_checksums_for_versions(
max_len = max(len(str(v)) for v in sorted_versions)
num_ver = len(sorted_versions)
tty.debug('Found {0} version{1} of {2}:'.format(
num_ver, '' if num_ver == 1 else 's', name),
'',
*spack.cmd.elide_list(
['{0:{1}} {2}'.format(str(v), max_len, url_dict[v])
for v in sorted_versions]))
tty.msg("Found {0} version{1} of {2}:".format(
num_ver, '' if num_ver == 1 else 's', name),
"",
*spack.cmd.elide_list(
["{0:{1}} {2}".format(str(v), max_len, url_dict[v])
for v in sorted_versions]))
print()
if batch:
@@ -813,10 +803,9 @@ def get_checksums_for_versions(
versions = sorted_versions[:archives_to_fetch]
urls = [url_dict[v] for v in versions]
tty.debug('Downloading...')
tty.msg("Downloading...")
version_hashes = []
i = 0
errors = []
for url, version in zip(urls, versions):
try:
if fetch_options:
@@ -837,12 +826,10 @@ def get_checksums_for_versions(
hashlib.sha256, stage.archive_file)))
i += 1
except FailedDownloadError:
errors.append('Failed to fetch {0}'.format(url))
tty.msg("Failed to fetch {0}".format(url))
except Exception as e:
tty.msg('Something failed on {0}, skipping. ({1})'.format(url, e))
for msg in errors:
tty.debug(msg)
tty.msg("Something failed on {0}, skipping.".format(url),
" ({0})".format(e))
if not version_hashes:
tty.die("Could not fetch any versions for {0}".format(name))
@@ -857,8 +844,8 @@ def get_checksums_for_versions(
])
num_hash = len(version_hashes)
tty.debug('Checksummed {0} version{1} of {2}:'.format(
num_hash, '' if num_hash == 1 else 's', name))
tty.msg("Checksummed {0} version{1} of {2}:".format(
num_hash, '' if num_hash == 1 else 's', name))
return version_lines

View File

@@ -34,7 +34,7 @@
import spack.directory_layout
#: default installation root, relative to the Spack install path
default_root = os.path.join(spack.paths.opt_path, 'spack')
default_root = os.path.join(spack.paths.user_config_path, 'opt/spack')
class Store(object):
@@ -70,9 +70,10 @@ def reindex(self):
def _store():
"""Get the singleton store instance."""
root = spack.config.get('config:install_tree', default_root)
root = spack.util.path.canonicalize_path(root)
root = spack.config.get('config:active_tree', default_root)
# Canonicalize Path for Root regardless of origin
root = spack.util.path.canonicalize_path(root)
return Store(root,
spack.config.get('config:install_path_scheme'),
spack.config.get('config:install_hash_length'))
@@ -88,11 +89,19 @@ def _store():
def retrieve_upstream_dbs():
other_spack_instances = spack.config.get('upstreams', {})
global_fallback = {'global': {'install_tree': '$spack/opt/spack',
'modules':
{'tcl': '$spack/share/spack/modules',
'lmod': '$spack/share/spack/lmod',
'dotkit': '$spack/share/spack/dotkit'}}}
other_spack_instances = spack.config.get('upstreams',
global_fallback)
install_roots = []
for install_properties in other_spack_instances.values():
install_roots.append(install_properties['install_tree'])
install_roots.append(spack.util.path.canonicalize_path(
install_properties['install_tree']))
return _construct_upstream_dbs_from_install_roots(install_roots)
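For context: the fallback dict above mirrors the upstreams.yaml added later in this diff, and canonicalize_path() expands placeholders before the install roots are queried. A hedged sketch (the resulting prefix is illustrative):

import spack.util.path
spack.util.path.canonicalize_path('$spack/opt/spack')
# -> '/path/to/spack/opt/spack', an absolute path under the Spack prefix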

View File

@@ -1,471 +0,0 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
This test checks creating and install buildcaches
"""
import os
import py
import pytest
import argparse
import platform
import spack.repo
import spack.store
import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache
import spack.cmd.install as install
import spack.cmd.uninstall as uninstall
import spack.cmd.mirror as mirror
from spack.spec import Spec
from spack.directory_layout import YamlDirectoryLayout
def_install_path_scheme = '${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}' # noqa: E501
ndef_install_path_scheme = '${PACKAGE}/${VERSION}/${ARCHITECTURE}-${COMPILERNAME}-${COMPILERVER}-${HASH}' # noqa: E501
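# A hedged illustration (spec values hypothetical) of how the default scheme
# above expands for one concrete spec:
#   ${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}
#   -> linux-rhel7-x86_64/gcc-8.4.0/zlib-1.2.11-abcdef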
mirror_path_def = None
mirror_path_rel = None
@pytest.fixture(scope='function')
def cache_directory(tmpdir):
old_cache_path = spack.caches.fetch_cache
tmpdir.ensure('fetch_cache', dir=True)
fsc = spack.fetch_strategy.FsCache(str(tmpdir.join('fetch_cache')))
spack.caches.fetch_cache = fsc
yield spack.caches.fetch_cache
tmpdir.join('fetch_cache').remove()
spack.caches.fetch_cache = old_cache_path
@pytest.fixture(scope='session')
def session_mirror_def(tmpdir_factory):
dir = tmpdir_factory.mktemp('mirror')
global mirror_path_def
mirror_path_def = dir
dir.ensure('build_cache', dir=True)
yield dir
dir.join('build_cache').remove()
@pytest.fixture(scope='function')
def mirror_directory_def(session_mirror_def):
yield str(session_mirror_def)
@pytest.fixture(scope='session')
def session_mirror_rel(tmpdir_factory):
dir = tmpdir_factory.mktemp('mirror')
global mirror_path_rel
mirror_path_rel = dir
dir.ensure('build_cache', dir=True)
yield dir
dir.join('build_cache').remove()
@pytest.fixture(scope='function')
def mirror_directory_rel(session_mirror_rel):
yield str(session_mirror_rel)
@pytest.fixture(scope='session')
def config_directory(tmpdir_factory):
tmpdir = tmpdir_factory.mktemp('test_configs')
# restore some sane defaults for packages and config
config_path = py.path.local(spack.paths.etc_path)
modules_yaml = config_path.join('spack', 'defaults', 'modules.yaml')
os_modules_yaml = config_path.join('spack', 'defaults', '%s' %
platform.system().lower(),
'modules.yaml')
packages_yaml = config_path.join('spack', 'defaults', 'packages.yaml')
config_yaml = config_path.join('spack', 'defaults', 'config.yaml')
repos_yaml = config_path.join('spack', 'defaults', 'repos.yaml')
tmpdir.ensure('site', dir=True)
tmpdir.ensure('user', dir=True)
tmpdir.ensure('site/%s' % platform.system().lower(), dir=True)
modules_yaml.copy(tmpdir.join('site', 'modules.yaml'))
os_modules_yaml.copy(tmpdir.join('site/%s' % platform.system().lower(),
'modules.yaml'))
packages_yaml.copy(tmpdir.join('site', 'packages.yaml'))
config_yaml.copy(tmpdir.join('site', 'config.yaml'))
repos_yaml.copy(tmpdir.join('site', 'repos.yaml'))
yield tmpdir
tmpdir.remove()
@pytest.fixture(scope='function')
def default_config(tmpdir_factory, config_directory, monkeypatch):
mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
config_directory.copy(mutable_dir)
cfg = spack.config.Configuration(
*[spack.config.ConfigScope(name, str(mutable_dir))
for name in ['site/%s' % platform.system().lower(),
'site', 'user']])
monkeypatch.setattr(spack.config, 'config', cfg)
# This is essential, otherwise the cache will create weird side effects
# that will compromise subsequent tests if compilers.yaml is modified
monkeypatch.setattr(spack.compilers, '_cache_config_file', [])
njobs = spack.config.get('config:build_jobs')
if not njobs:
spack.config.set('config:build_jobs', 4, scope='user')
extensions = spack.config.get('config:template_dirs')
if not extensions:
spack.config.set('config:template_dirs',
[os.path.join(spack.paths.share_path, 'templates')],
scope='user')
mutable_dir.ensure('build_stage', dir=True)
build_stage = spack.config.get('config:build_stage')
if not build_stage:
spack.config.set('config:build_stage',
[str(mutable_dir.join('build_stage'))], scope='user')
timeout = spack.config.get('config:connect_timeout')
if not timeout:
spack.config.set('config:connect_timeout', 10, scope='user')
yield spack.config.config
mutable_dir.remove()
@pytest.fixture(scope='function')
def install_dir_default_layout(tmpdir):
"""Hooks a fake install directory with a default layout"""
real_store = spack.store.store
real_layout = spack.store.layout
spack.store.store = spack.store.Store(str(tmpdir.join('opt')))
spack.store.layout = YamlDirectoryLayout(str(tmpdir.join('opt')),
path_scheme=def_install_path_scheme) # noqa: E501
yield spack.store
spack.store.store = real_store
spack.store.layout = real_layout
@pytest.fixture(scope='function')
def install_dir_non_default_layout(tmpdir):
"""Hooks a fake install directory with a non-default layout"""
real_store = spack.store.store
real_layout = spack.store.layout
spack.store.store = spack.store.Store(str(tmpdir.join('opt')))
spack.store.layout = YamlDirectoryLayout(str(tmpdir.join('opt')),
path_scheme=ndef_install_path_scheme) # noqa: E501
yield spack.store
spack.store.store = real_store
spack.store.layout = real_layout
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file')
@pytest.mark.disable_clean_stage_check
@pytest.mark.maybeslow
@pytest.mark.usefixtures('default_config', 'cache_directory',
'install_dir_default_layout')
def test_default_rpaths_create_install_default_layout(tmpdir,
mirror_directory_def,
install_mockery):
"""
Test the creation and installation of buildcaches with default rpaths
into the default directory layout scheme.
"""
gspec = Spec('garply')
gspec.concretize()
cspec = Spec('corge')
cspec.concretize()
# Install patchelf, which is needed for relocation in the Linux test environment
iparser = argparse.ArgumentParser()
install.setup_parser(iparser)
# Install some packages with dependent packages
iargs = iparser.parse_args(['--no-cache', cspec.name])
install.install(iparser, iargs)
global mirror_path_def
mirror_path_def = mirror_directory_def
mparser = argparse.ArgumentParser()
mirror.setup_parser(mparser)
margs = mparser.parse_args(
['add', '--scope', 'site', 'test-mirror-def', 'file://%s' % mirror_path_def])
mirror.mirror(mparser, margs)
margs = mparser.parse_args(['list'])
mirror.mirror(mparser, margs)
# setup argument parser
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
# Set default buildcache args
create_args = ['create', '-a', '-u', '-d', str(mirror_path_def),
cspec.name]
install_args = ['install', '-a', '-u', cspec.name]
# Create a buildcache
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
# Test force overwrite create buildcache
create_args.insert(create_args.index('-a'), '-f')
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
# create mirror index
args = parser.parse_args(['update-index', '-d', 'file://%s' % str(mirror_path_def)])
buildcache.buildcache(parser, args)
# list the buildcaches in the mirror
args = parser.parse_args(['list', '-a', '-l', '-v'])
buildcache.buildcache(parser, args)
# Uninstall the package and deps
uparser = argparse.ArgumentParser()
uninstall.setup_parser(uparser)
uargs = uparser.parse_args(['-y', '--dependents', gspec.name])
uninstall.uninstall(uparser, uargs)
# test install
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
# This gives a warning that the spec is already installed
buildcache.buildcache(parser, args)
# test overwrite install
install_args.insert(install_args.index('-a'), '-f')
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
args = parser.parse_args(['keys', '-f'])
buildcache.buildcache(parser, args)
args = parser.parse_args(['list'])
buildcache.buildcache(parser, args)
args = parser.parse_args(['list', '-a'])
buildcache.buildcache(parser, args)
args = parser.parse_args(['list', '-l', '-v'])
buildcache.buildcache(parser, args)
bindist._cached_specs = set()
spack.stage.purge()
margs = mparser.parse_args(
['rm', '--scope', 'site', 'test-mirror-def'])
mirror.mirror(mparser, margs)
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file')
@pytest.mark.disable_clean_stage_check
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures('default_config', 'cache_directory',
'install_dir_non_default_layout')
def test_default_rpaths_install_nondefault_layout(tmpdir,
install_mockery):
"""
Test the creation and installation of buildcaches with default rpaths
into the non-default directory layout scheme.
"""
gspec = Spec('garply')
gspec.concretize()
cspec = Spec('corge')
cspec.concretize()
global mirror_path_def
mparser = argparse.ArgumentParser()
mirror.setup_parser(mparser)
margs = mparser.parse_args(
['add', '--scope', 'site', 'test-mirror-def', 'file://%s' % mirror_path_def])
mirror.mirror(mparser, margs)
# setup argument parser
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
# Set default buildcache args
install_args = ['install', '-a', '-u', '%s' % cspec.name]
# Install some packages with dependent packages
# test install in non-default install path scheme
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
# test force install in non-default install path scheme
install_args.insert(install_args.index('-a'), '-f')
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
bindist._cached_specs = set()
spack.stage.purge()
margs = mparser.parse_args(
['rm', '--scope', 'site', 'test-mirror-def'])
mirror.mirror(mparser, margs)
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file')
@pytest.mark.disable_clean_stage_check
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures('default_config', 'cache_directory',
'install_dir_default_layout')
def test_relative_rpaths_create_default_layout(tmpdir,
mirror_directory_rel,
install_mockery):
"""
Test the creation and installation of buildcaches with relative
rpaths into the default directory layout scheme.
"""
gspec = Spec('garply')
gspec.concretize()
cspec = Spec('corge')
cspec.concretize()
global mirror_path_rel
mirror_path_rel = mirror_directory_rel
# Install patchelf, which is needed for relocation in the Linux test environment
iparser = argparse.ArgumentParser()
install.setup_parser(iparser)
# Install some packages with dependent packages
iargs = iparser.parse_args(['--no-cache', cspec.name])
install.install(iparser, iargs)
# setup argument parser
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
# set default buildcache args
create_args = ['create', '-a', '-u', '-r', '-d',
str(mirror_path_rel),
cspec.name]
# create build cache with relativized rpaths
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
# create mirror index
args = parser.parse_args(['update-index', '-d', 'file://%s' % str(mirror_path_rel)])
buildcache.buildcache(parser, args)
# Uninstall the package and deps
uparser = argparse.ArgumentParser()
uninstall.setup_parser(uparser)
uargs = uparser.parse_args(['-y', '--dependents', gspec.name])
uninstall.uninstall(uparser, uargs)
bindist._cached_specs = set()
spack.stage.purge()
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file')
@pytest.mark.disable_clean_stage_check
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures('default_config', 'cache_directory',
'install_dir_default_layout')
def test_relative_rpaths_install_default_layout(tmpdir,
install_mockery):
"""
Test the creation and installation of buildcaches with relative
rpaths into the default directory layout scheme.
"""
gspec = Spec('garply')
gspec.concretize()
cspec = Spec('corge')
cspec.concretize()
global mirror_path_rel
mparser = argparse.ArgumentParser()
mirror.setup_parser(mparser)
margs = mparser.parse_args(
['add', '--scope', 'site', 'test-mirror-rel', 'file://%s' % mirror_path_rel])
mirror.mirror(mparser, margs)
# Install patchelf, which is needed for relocation in the Linux test environment
iparser = argparse.ArgumentParser()
install.setup_parser(iparser)
# setup argument parser
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
# set default buildcache args
install_args = ['install', '-a', '-u',
cspec.name]
# install buildcache created with relativized rpaths
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
# This gives a warning that the spec is already installed
buildcache.buildcache(parser, args)
# Uninstall the package and deps
uparser = argparse.ArgumentParser()
uninstall.setup_parser(uparser)
uargs = uparser.parse_args(['-y', '--dependents', gspec.name])
uninstall.uninstall(uparser, uargs)
# install build cache
buildcache.buildcache(parser, args)
# test overwrite install
install_args.insert(install_args.index('-a'), '-f')
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
bindist._cached_specs = set()
spack.stage.purge()
margs = mparser.parse_args(
['rm', '--scope', 'site', 'test-mirror-rel'])
mirror.mirror(mparser, margs)
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file')
@pytest.mark.disable_clean_stage_check
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures('default_config', 'cache_directory',
'install_dir_non_default_layout')
def test_relative_rpaths_install_nondefault(tmpdir,
install_mockery):
"""
Test the installation of buildcaches with relativized rpaths
into the non-default directory layout scheme.
"""
gspec = Spec('garply')
gspec.concretize()
cspec = Spec('corge')
cspec.concretize()
global mirror_path_rel
mparser = argparse.ArgumentParser()
mirror.setup_parser(mparser)
margs = mparser.parse_args(
['add', '--scope', 'site', 'test-mirror-rel', 'file://%s' % mirror_path_rel])
mirror.mirror(mparser, margs)
# Install patchelf, which is needed for relocation in the Linux test environment
iparser = argparse.ArgumentParser()
install.setup_parser(iparser)
# setup argument parser
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
# Set default buildcache args
install_args = ['install', '-a', '-u', '%s' % cspec.name]
# test install in non-default install path scheme and relative path
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
bindist._cached_specs = set()
spack.stage.purge()
margs = mparser.parse_args(
['rm', '--scope', 'site', 'test-mirror-rel'])
mirror.mirror(mparser, margs)

View File

@@ -1,36 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
from llnl.util.filesystem import mkdirp, touch
from spack.stage import Stage
from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
def test_fetch_missing_cache(tmpdir):
"""Ensure raise a missing cache file."""
testpath = str(tmpdir)
fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
with Stage(fetcher, path=testpath):
with pytest.raises(NoCacheError, match=r'No cache'):
fetcher.fetch()
def test_fetch(tmpdir):
"""Ensure a fetch after expanding is effectively a no-op."""
testpath = str(tmpdir)
cache = os.path.join(testpath, 'cache.tar.gz')
touch(cache)
url = 'file:///{0}'.format(cache)
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
mkdirp(source_path)
fetcher.fetch()

View File

@@ -12,7 +12,6 @@
import spack.main
import spack.binary_distribution
import spack.environment as ev
import spack.spec
from spack.spec import Spec
buildcache = spack.main.SpackCommand('buildcache')
@@ -25,22 +24,7 @@
def mock_get_specs(database, monkeypatch):
specs = database.query_local()
monkeypatch.setattr(
spack.binary_distribution, 'get_specs', lambda: specs
)
@pytest.fixture()
def mock_get_specs_multiarch(database, monkeypatch):
specs = [spec.copy() for spec in database.query_local()]
# make one spec that is NOT the test architecture
for spec in specs:
if spec.name == "mpileaks":
spec.architecture = spack.spec.ArchSpec('linux-rhel7-x86_64')
break
monkeypatch.setattr(
spack.binary_distribution, 'get_specs', lambda: specs
spack.binary_distribution, 'get_specs', lambda x: specs
)
@@ -53,6 +37,10 @@ def test_buildcache_preview_just_runs(database):
buildcache('preview', 'mpileaks')
@pytest.mark.skipif(
platform.system().lower() != 'linux',
reason='implementation for MacOS still missing'
)
@pytest.mark.db
@pytest.mark.regression('13757')
def test_buildcache_list_duplicates(mock_get_specs, capsys):
@@ -62,20 +50,6 @@ def test_buildcache_list_duplicates(mock_get_specs, capsys):
assert output.count('mpileaks') == 3
@pytest.mark.db
@pytest.mark.regression('17827')
def test_buildcache_list_allarch(database, mock_get_specs_multiarch, capsys):
with capsys.disabled():
output = buildcache('list', '--allarch')
assert output.count('mpileaks') == 3
with capsys.disabled():
output = buildcache('list')
assert output.count('mpileaks') == 2
def tests_buildcache_create(
install_mockery, mock_fetch, monkeypatch, tmpdir):
""""Ensure that buildcache create creates output files"""

View File

@@ -751,6 +751,7 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
logs_dir_list = os.listdir(logs_dir.strpath)
assert('spack-build-env.txt' in logs_dir_list)
assert('spack-build-out.txt' in logs_dir_list)
# Also just make sure that if something goes wrong with the

View File

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
import sys
import pytest
@@ -16,7 +14,7 @@
@pytest.fixture
def no_compilers_yaml(mutable_config):
def no_compilers_yaml(mutable_config, monkeypatch):
"""Creates a temporary configuration without compilers.yaml"""
for scope, local_config in mutable_config.scopes.items():
@@ -66,7 +64,7 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
with open('gcc', 'w') as f:
f.write("""\
#!/bin/sh
#!/bin/bash
echo "0.0.0"
""")
os.chmod('gcc', 0o700)
@@ -77,33 +75,6 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
assert 'gcc' in output
@pytest.mark.regression('17589')
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
# make a script to emulate apple gcc's version args
with open('gcc', 'w') as f:
f.write("""\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""")
os.chmod('gcc', 0o700)
os.environ['PATH'] = str(tmpdir)
output = compiler('find', '--scope=site')
assert 'gcc' not in output
def test_compiler_remove(mutable_config, mock_packages):
args = spack.util.pattern.Bunch(
all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None
@@ -132,121 +103,3 @@ def test_compiler_add(
new_compiler = new_compilers - old_compilers
assert any(c.version == spack.version.Version(mock_compiler_version)
for c in new_compiler)
@pytest.fixture
def clangdir(tmpdir):
"""Create a directory with some dummy compiler scripts in it.
Scripts are:
- clang
- clang++
- gcc
- g++
- gfortran-8
"""
with tmpdir.as_cwd():
with open('clang', 'w') as f:
f.write("""\
#!/bin/sh
if [ "$1" = "--version" ]; then
echo "clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
exit 1
fi
""")
shutil.copy('clang', 'clang++')
gcc_script = """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
echo "8.4.0"
elif [ "$1" = "--version" ]; then
echo "{0} (GCC) 8.4.0 20120313 (Red Hat 8.4.0-1)"
echo "Copyright (C) 2010 Free Software Foundation, Inc."
else
echo "{1}: fatal error: no input files"
echo "compilation terminated."
exit 1
fi
"""
with open('gcc-8', 'w') as f:
f.write(gcc_script.format('gcc', 'gcc-8'))
with open('g++-8', 'w') as f:
f.write(gcc_script.format('g++', 'g++-8'))
with open('gfortran-8', 'w') as f:
f.write(gcc_script.format('GNU Fortran', 'gfortran-8'))
os.chmod('clang', 0o700)
os.chmod('clang++', 0o700)
os.chmod('gcc-8', 0o700)
os.chmod('g++-8', 0o700)
os.chmod('gfortran-8', 0o700)
yield tmpdir
@pytest.mark.regression('17590')
def test_compiler_find_mixed_suffixes(
no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll mix compilers with different suffixes when necessary.
"""
os.environ['PATH'] = str(clangdir)
output = compiler('find', '--scope=site')
assert 'clang@11.0.0' in output
assert 'gcc@8.4.0' in output
config = spack.compilers.get_compiler_config('site', False)
clang = next(c['compiler'] for c in config
if c['compiler']['spec'] == 'clang@11.0.0')
gcc = next(c['compiler'] for c in config
if c['compiler']['spec'] == 'gcc@8.4.0')
gfortran_path = str(clangdir.join('gfortran-8'))
assert clang['paths'] == {
'cc': str(clangdir.join('clang')),
'cxx': str(clangdir.join('clang++')),
# we only auto-detect mixed clang on macos
'f77': gfortran_path if sys.platform == 'darwin' else None,
'fc': gfortran_path if sys.platform == 'darwin' else None,
}
assert gcc['paths'] == {
'cc': str(clangdir.join('gcc-8')),
'cxx': str(clangdir.join('g++-8')),
'f77': gfortran_path,
'fc': gfortran_path,
}
@pytest.mark.regression('17590')
def test_compiler_find_prefer_no_suffix(
no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice.
"""
with clangdir.as_cwd():
shutil.copy('clang', 'clang-gpu')
shutil.copy('clang++', 'clang++-gpu')
os.chmod('clang-gpu', 0o700)
os.chmod('clang++-gpu', 0o700)
os.environ['PATH'] = str(clangdir)
output = compiler('find', '--scope=site')
assert 'clang@11.0.0' in output
assert 'gcc@8.4.0' in output
config = spack.compilers.get_compiler_config('site', False)
clang = next(c['compiler'] for c in config
if c['compiler']['spec'] == 'clang@11.0.0')
assert clang['paths']['cc'] == str(clangdir.join('clang'))
assert clang['paths']['cxx'] == str(clangdir.join('clang++'))

View File

@@ -117,7 +117,7 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch,
non_deprecated = spack.store.db.query()
uninstall('-y', 'libelf@0.8.10')
uninstall('-y', '-g', 'libelf@0.8.10')
assert spack.store.db.query() == spack.store.db.query(installed=any)
assert spack.store.db.query() == non_deprecated

View File

@@ -16,7 +16,7 @@
from spack.cmd.env import _env_create
from spack.spec import Spec
from spack.main import SpackCommand, SpackCommandError
from spack.main import SpackCommand
from spack.stage import stage_prefix
from spack.util.mock_package import MockPackageMultiRepo
@@ -284,45 +284,6 @@ def test_environment_status(capsys, tmpdir):
assert 'in current directory' in env('status')
def test_env_status_broken_view(
mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
install_mockery
):
with ev.create('test'):
install('trivial-install-test-package')
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and warns the user
new_repo = MockPackageMultiRepo()
with spack.repo.swap(new_repo):
output = env('status')
assert 'In environment test' in output
assert 'Environment test includes out of date' in output
# Test that the warning goes away when it's fixed
output = env('status')
assert 'In environment test' in output
assert 'Environment test includes out of date' not in output
def test_env_activate_broken_view(
mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
install_mockery
):
with ev.create('test'):
install('trivial-install-test-package')
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and fails gracefully
new_repo = MockPackageMultiRepo()
with spack.repo.swap(new_repo):
with pytest.raises(SpackCommandError):
env('activate', '--sh', 'test')
# test replacing repo fixes it
env('activate', '--sh', 'test')
def test_to_lockfile_dict():
e = ev.create('test')
e.add('mpileaks')

View File

@@ -29,9 +29,6 @@
install = SpackCommand('install')
env = SpackCommand('env')
add = SpackCommand('add')
mirror = SpackCommand('mirror')
uninstall = SpackCommand('uninstall')
buildcache = SpackCommand('buildcache')
@pytest.fixture()
@@ -58,6 +55,46 @@ def test_install_package_and_dependency(
assert 'errors="0"' in content
def test_global_install_package_and_dependency(
tmpdir, mock_packages, mock_archive, mock_fetch, config,
install_mockery):
with tmpdir.as_cwd():
install('--global',
'--log-format=junit',
'--log-file=test.xml',
'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
assert filename in files
content = filename.open().read()
assert 'tests="2"' in content
assert 'failures="0"' in content
assert 'errors="0"' in content
def test_upstream_install_package_and_dependency(
tmpdir, mock_packages, mock_archive, mock_fetch, config,
install_mockery):
with tmpdir.as_cwd():
install('--upstream=global',
'--log-format=junit',
'--log-file=test.xml',
'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
assert filename in files
content = filename.open().read()
assert 'tests="2"' in content
assert 'failures="0"' in content
assert 'errors="0"' in content
@pytest.mark.disable_clean_stage_check
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
@@ -133,8 +170,8 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
# make sure that output from the actual package file appears in the
# right place in the build log.
assert "BEFORE INSTALL" in out
assert "AFTER INSTALL" in out
assert re.search(r"BEFORE INSTALL\n==>( \[.+\])? './configure'", out)
assert "'install'\nAFTER INSTALL" in out
@pytest.mark.disable_clean_stage_check
@@ -180,12 +217,10 @@ def test_show_log_on_error(mock_packages, mock_archive, mock_fetch,
assert install.error.pkg.name == 'build-error'
assert 'Full build log:' in out
print(out)
# Message shows up for ProcessError (1) and output (1)
# Message shows up for ProcessError (1), ChildError (1), and output (1)
errors = [line for line in out.split('\n')
if 'configure: error: cannot run C compiled programs' in line]
assert len(errors) == 2
assert len(errors) == 3
def test_install_overwrite(
@@ -738,40 +773,6 @@ def test_compiler_bootstrap(
install('a%gcc@2.0')
def test_compiler_bootstrap_from_binary_mirror(
install_mockery_mutable_config, mock_packages, mock_fetch,
mock_archive, mutable_config, monkeypatch, tmpdir):
"""Make sure installing compiler from buildcache registers compiler"""
# Create a temp mirror directory for buildcache usage
mirror_dir = tmpdir.join('mirror_dir')
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
# Install a compiler, because we want to put it in a buildcache
install('gcc@2.0')
# Put installed compiler in the buildcache
buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, 'gcc@2.0')
# Now uninstall the compiler
uninstall('-y', 'gcc@2.0')
monkeypatch.setattr(spack.concretize.Concretizer,
'check_for_compiler_existence', False)
spack.config.set('config:install_missing_compilers', True)
assert CompilerSpec('gcc@2.0') not in compilers.all_compiler_specs()
# Configure the mirror where we put that buildcache w/ the compiler
mirror('add', 'test-mirror', mirror_url)
# Now make sure that when the compiler is installed from binary mirror,
# it also gets configured as a compiler. Test succeeds if it does not
# raise an error
install('--no-check-signature', '--cache-only', '--only',
'dependencies', 'b%gcc@2.0')
install('--no-cache', '--only', 'package', 'b%gcc@2.0')
@pytest.mark.regression('16221')
def test_compiler_bootstrap_already_installed(
install_mockery_mutable_config, mock_packages, mock_fetch,

View File

@@ -81,6 +81,41 @@ def test_force_uninstall_spec_with_ref_count_not_zero(
@pytest.mark.db
@pytest.mark.usefixtures('mutable_database')
def test_global_recursive_uninstall():
"""Test recursive uninstall from global upstream"""
uninstall('-g', '-y', '-a', '--dependents', 'callpath')
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 8
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
assert len(mpileaks_specs) == 0
assert len(callpath_specs) == 0
assert len(mpi_specs) == 3
@pytest.mark.db
@pytest.mark.usefixtures('mutable_database')
def test_upstream_recursive_uninstall():
"""Test recursive uninstall from specified upstream"""
uninstall('--upstream=global', '-y', '-a', '--dependents', 'callpath')
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 8
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
assert len(mpileaks_specs) == 0
assert len(callpath_specs) == 0
assert len(mpi_specs) == 3
def test_force_uninstall_and_reinstall_by_hash(mutable_database):
"""Test forced uninstall and reinstall of old specs."""
# this is the spec to be removed

View File

@@ -2,9 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
import sys
import re
import pytest
import spack.repo
@@ -13,7 +12,6 @@
from spack.cmd.url import name_parsed_correctly, version_parsed_correctly
from spack.cmd.url import url_summary
url = SpackCommand('url')
@@ -72,11 +70,6 @@ def test_url_with_no_version_fails():
@pytest.mark.network
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="Python 2.6 tests are run in a container, where "
"networking is super slow"
)
def test_url_list():
out = url('list')
total_urls = len(out.split('\n'))
@@ -107,11 +100,6 @@ def test_url_list():
@pytest.mark.network
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="Python 2.6 tests are run in a container, where "
"networking is super slow"
)
def test_url_summary():
"""Test the URL summary command."""
# test url_summary, the internal function that does the work
@@ -138,11 +126,6 @@ def test_url_summary():
assert out_correct_versions == correct_versions
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="Python 2.6 tests are run in a container, where "
"networking is super slow"
)
def test_url_stats(capfd):
with capfd.disabled():
output = url('stats')

View File

@@ -1,5 +1,5 @@
config:
install_tree: $spack/opt/spack
install_tree: ~/.spack/opt/spack
template_dirs:
- $spack/share/spack/templates
- $spack/lib/spack/spack/test/data/templates
@@ -7,7 +7,7 @@ config:
build_stage:
- $tempdir/$user/spack-stage
- ~/.spack/stage
source_cache: $spack/var/spack/cache
source_cache: ~/.spack/var/spack/cache
misc_cache: ~/.spack/cache
verify_ssl: true
checksum: true

View File

@@ -0,0 +1,7 @@
upstreams:
global:
install_tree: $spack/opt/spack
modules:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
dotkit: $spack/share/spack/dotkit

View File

@@ -0,0 +1,7 @@
upstreams:
global:
install_tree: $spack/opt/spack
modules:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
dotkit: $spack/share/spack/dotkit

View File

@@ -13,6 +13,7 @@
import os
import pytest
import json
import shutil
try:
import uuid
_use_uuid = True
@@ -48,6 +49,19 @@ def test_store(tmpdir):
spack.store.store = real_store
@pytest.fixture()
def test_global_db_initialization():
global_store = spack.store.store
global_db_path = '$spack/opt/spack'
global_db_path = spack.util.path.canonicalize_path(global_db_path)
shutil.rmtree(os.path.join(global_db_path, '.spack-db'))
global_store = spack.store.Store(str(global_db_path))
yield
spack.store.store = global_store
@pytest.fixture()
def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))

View File

@@ -344,9 +344,10 @@ def test_nosource_pkg_install(
# Make sure install works even though there is no associated code.
pkg.do_install()
out = capfd.readouterr()
assert "Installing dependency-install" in out[0]
assert "Missing a source id for nosource" in out[1]
# Also make sure an error is raised if `do_fetch` is called.
pkg.do_fetch()
assert "No fetch required for nosource" in capfd.readouterr()[0]
def test_nosource_pkg_install_post_install(

View File

@@ -99,21 +99,10 @@ def test_hms(sec, result):
assert inst._hms(sec) == result
def test_install_msg(monkeypatch):
"""Test results of call to install_msg based on debug level."""
def test_install_msg():
name = 'some-package'
pid = 123456
install_msg = 'Installing {0}'.format(name)
monkeypatch.setattr(tty, '_debug', 0)
assert inst.install_msg(name, pid) == install_msg
monkeypatch.setattr(tty, '_debug', 1)
assert inst.install_msg(name, pid) == install_msg
# Expect the PID to be added at debug level 2
monkeypatch.setattr(tty, '_debug', 2)
expected = "{0}: {1}".format(pid, install_msg)
expected = "{0}: Installing {1}".format(pid, name)
assert inst.install_msg(name, pid) == expected
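A minimal sketch consistent with the updated expectation, assuming the simplest possible shape (a guess, not necessarily Spack's actual implementation):

def install_msg(name, pid):
    return '{0}: Installing {1}'.format(pid, name)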
@@ -162,6 +151,7 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd):
out = capfd.readouterr()[0]
assert 'has external module in {0}'.format(spec.external_module) in out
assert 'is actually installed in {0}'.format(spec.external_path) in out
def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
@@ -190,7 +180,7 @@ def _spec(spec):
spec = spack.spec.Spec('a').concretized()
assert inst._process_binary_cache_tarball(spec.package, spec, False, False)
assert 'Extracting a from binary cache' in capfd.readouterr()[0]
assert 'Installing a from binary cache' in capfd.readouterr()[0]
def test_try_install_from_binary_cache(install_mockery, mock_packages,

View File

@@ -1143,6 +1143,8 @@ def read():
assert vals['read'] == 1
@pytest.mark.skipif('macos' in os.environ.get('GITHUB_WORKFLOW', ''),
reason="Skip failing test for GA on MacOS")
def test_lock_debug_output(lock_path):
host = socket.getfqdn()

View File

@@ -389,10 +389,6 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
(mock_shell_v_v, nullcontext),
(mock_shell_v_v_no_termios, no_termios),
])
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="Python 2.6 tests are run in a container, where this fails often"
)
def test_foreground_background_output(
test_fn, capfd, termios_on_or_off, tmpdir):
"""Tests hitting 'v' toggles output, and that force_echo works."""

View File

@@ -1,87 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import llnl.util.tty as tty
def test_get_timestamp(monkeypatch):
"""Ensure the results of get_timestamp are reasonable."""
# Debug disabled should return an empty string
monkeypatch.setattr(tty, '_debug', 0)
assert not tty.get_timestamp(False), 'Expected an empty string'
# Debug disabled but force the timestamp should return a string
assert tty.get_timestamp(True), 'Expected a timestamp/non-empty string'
pid_str = ' {0}'.format(os.getpid())
# Level 1 debugging should return a timestamp WITHOUT the pid
monkeypatch.setattr(tty, '_debug', 1)
out_str = tty.get_timestamp(False)
assert out_str and pid_str not in out_str, 'Expected no PID in results'
# Level 2 debugging should also return a timestamp WITH the pid
monkeypatch.setattr(tty, '_debug', 2)
out_str = tty.get_timestamp(False)
assert out_str and pid_str in out_str, 'Expected PID in results'
@pytest.mark.parametrize('msg,enabled,trace,newline', [
('', False, False, False), # Nothing is output
(Exception(''), True, False, True), # Exception output
('trace', True, True, False), # stacktrace output
('newline', True, False, True), # newline in output
('no newline', True, False, False) # no newline output
])
def test_msg(capfd, monkeypatch, enabled, msg, trace, newline):
"""Ensure the output from msg with options is appropriate."""
# temporarily use the parameterized settings
monkeypatch.setattr(tty, '_msg_enabled', enabled)
monkeypatch.setattr(tty, '_stacktrace', trace)
expected = [msg if isinstance(msg, str) else 'Exception: ']
if newline:
expected[0] = '{0}\n'.format(expected[0])
if trace:
expected.insert(0, '.py')
tty.msg(msg, newline=newline)
out = capfd.readouterr()[0]
for msg in expected:
assert msg in out
@pytest.mark.parametrize('msg,trace,wrap', [
(Exception(''), False, False), # Exception output
('trace', True, False), # stacktrace output
('wrap', False, True), # wrap in output
])
def test_info(capfd, monkeypatch, msg, trace, wrap):
"""Ensure the output from info with options is appropriate."""
# temporarily use the parameterized settings
monkeypatch.setattr(tty, '_stacktrace', trace)
expected = [msg if isinstance(msg, str) else 'Exception: ']
if trace:
expected.insert(0, '.py')
extra = 'This extra argument *should* make for a sufficiently long line' \
' that needs to be wrapped if the option is enabled.'
args = [msg, extra]
num_newlines = 3 if wrap else 2
tty.info(*args, wrap=wrap, countback=3)
out = capfd.readouterr()[0]
for msg in expected:
assert msg in out
assert out.count('\n') == num_newlines

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import spack.fetch_strategy as spack_fs
@@ -28,19 +27,3 @@ def test_s3fetchstrategy_bad_url(tmpdir):
assert fetcher.archive_file is None
with pytest.raises(spack_fs.FetchError):
fetcher.fetch()
def test_s3fetchstrategy_downloaded(tmpdir):
"""Ensure fetch with archive file already downloaded is a noop."""
testpath = str(tmpdir)
archive = os.path.join(testpath, 's3.tar.gz')
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):
return archive
url = 's3:///{0}'.format(archive)
fetcher = Archived_S3FS(url=url)
with spack_stage.Stage(fetcher, path=testpath):
fetcher.fetch()

View File

@@ -6,10 +6,7 @@
import sys
import os
import pytest
import llnl.util.filesystem as fs
import spack
import spack.util.executable as ex
from spack.hooks.sbang import filter_shebangs_in_directory
@@ -38,18 +35,3 @@ def test_read_unicode(tmpdir, working_env):
# read the unicode back in and see whether things work
script = ex.Executable('./%s' % script_name)
assert u'\xc3' == script(output=str).strip()
def test_which(tmpdir):
os.environ["PATH"] = str(tmpdir)
assert ex.which("spack-test-exe") is None
with pytest.raises(ex.CommandNotFoundError):
ex.which("spack-test-exe", required=True)
with tmpdir.as_cwd():
fs.touch("spack-test-exe")
fs.set_executable('spack-test-exe')
exe = ex.which("spack-test-exe")
assert exe is not None
assert exe.path == str(tmpdir.join("spack-test-exe"))

View File

@@ -239,8 +239,7 @@ def which_string(*args, **kwargs):
return exe
if required:
raise CommandNotFoundError(
"spack requires '%s'. Make sure it is in your path." % args[0])
tty.die("spack requires '%s'. Make sure it is in your path." % args[0])
return None
@@ -267,7 +266,3 @@ def which(*args, **kwargs):
class ProcessError(spack.error.SpackError):
"""ProcessErrors are raised when Executables exit with an error code."""
class CommandNotFoundError(spack.error.SpackError):
"""Raised when ``which()`` can't find a required executable."""

View File

@@ -13,7 +13,7 @@
_gnupg_version_re = r"^gpg \(GnuPG\) (.*)$"
GNUPGHOME = os.getenv('SPACK_GNUPGHOME', spack.paths.gpg_path)
GNUPGHOME = spack.paths.gpg_path
def parse_keys_output(output):

View File

@@ -77,8 +77,6 @@ def __init__(self):
def get(self, spec):
if not isinstance(spec, spack.spec.Spec):
spec = Spec(spec)
if spec.name not in self.spec_to_pkg:
raise spack.repo.UnknownPackageError(spec.fullname)
return self.spec_to_pkg[spec.name]
def get_pkg_class(self, name):

View File

@@ -1,38 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQENBF1IgqcBCADqSIBM0TT4+6Acv6SUpQ2l1Ql+UVRtJ74VGFOw+8I8aBWcBryB
wNsS/Drxn9M9rX8il2aGtAmwc1dhTh0JvdZO7KqG8Q4vvWOytdLnGSE61LV4147q
S/dJiYH2DCvhMKpOByIsEiuoTrUHzd1EQBnEPSwAQV8oWPrc1++f3iYmRemsOBCT
BldAu7Y5RwjI3qQ6GazoCF5rd1uyiMYrpT4amEKFE91VRe+IG8XfEaSTapOc/hO3
Sw4fzPelA2qD12I+JMj56vM0fQy3TXD5qngIb+leb2jGI+0bTz8RGS0xSMYVvftA
upzQPaQIfzijVBt3tFSayx/NXKR0p+EuCqGBABEBAAG0MFNwYWNrIEJ1aWxkIFBp
cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQgAOBYhBDHI
4nh6FErErdiO0pX4aBGV4jnYBQJdSIKnAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B
AheAAAoJEJX4aBGV4jnYpf0IAJDYEjpm0h1pNswTvmnEhgNVbojCGRfAts7F5uf8
IFXGafKQsekMWZh0Ig0YXVn72jsOuNK/+keErMfXM3DFNTq0Ki7mcFedR9r5EfLf
4YW2n6mphsfMgsg8NwKVLFYWyhQQ4OzhdydPxkGVhEebHwfHNQ3aIcqbFmzkhxnX
CIYh2Flf3T306tKX4lXbhsXKG1L/bLtDiFRaMCBp66HGZ8u9Dbyy/W8aDwyx4duD
MG+y2OrhOf+zEu3ZPFyc/jsjmfnUtIfQVyRajh/8vh+i9fkvFlLaOQittNElt3z1
8+ybGjE9qWY/mvR2ZqnP8SVkGvxSpBVfVXiFFdepvuPAcLu5AQ0EXUiCpwEIAJ2s
npNBAVocDUSdOF/Z/eCRvy3epuYm5f1Ge1ao9K2qWYno2FatnsYxK4qqB5yGRkfj
sEzAGP8JtJvqDSuB5Xk7CIjRNOwoSB3hqvmxWh2h+HsITUhMl11FZ0Cllz+etXcK
APz2ZHSKnA3R8uf4JzIr1cHLS+gDBoj8NgBCZhcyva2b5UC///FLm1+/Lpvekd0U
n7B524hbXhFUG+UMfHO/U1c4TvCMt7RGMoWUtRzfO6XB1VQCwWJBVcVGl8Yy59Zk
3K76VbFWQWOq6fRBE0xHBAga7pOgCc9qrb+FGl1IHUT8aV8CzkxckHlNb3PlntmE
lXZLPcGFWaPtGtuIJVsAEQEAAYkCbAQYAQgAIBYhBDHI4nh6FErErdiO0pX4aBGV
4jnYBQJdSIKnAhsuAUAJEJX4aBGV4jnYwHQgBBkBCAAdFiEEneR3pKqi9Rnivv07
CYCNVr37XP0FAl1IgqcACgkQCYCNVr37XP13RQf/Ttxidgo9upF8jxrWnT5YhM6D
ozzGWzqE+/KDBX+o4f33o6uzozjESRXQUKdclC9ftDJQ84lFTMs3Z+/12ZDqCV2k
2qf0VfXg4e5xMq4tt6hojXUeYSfeGZXNU9LzjURCcMD+amIKjVztFg4kl3KHW3Pi
/aPTr4xWWgy2tZ1FDEuA5J6AZiKKJSVeoSPOGANouPqm4fNj273XFXQepIhQ5wve
4No0abxfXcLt5Yp3y06rNCBC9QdC++19N5+ajn2z9Qd2ZwztPb0mNuqHAok4vrlE
1c4WBWk93Nfy9fKImalGENpPDz0td2H9pNC9IafOWltGSWSINRrU1GeaNXS/uAOT
CADjcDN+emLbDTTReW4FLoQ0mPJ0tACgszGW50PtncTMPSj4uxSktQPWWk41oD9q
gpXm1Vgto4GvPWYs/ewR6Kyd8K0YkBxbRFyYOmycu3/zzYJnry+EHdvtQspwUDPg
QlI/avDrncERzICsbd86Jz0CMY4kzpg5v9dt/N6WnHlSk/S+vv4pPUDSz26Q4Ehh
iDvDavLGyzKSlVzWQ4bzzlQxXbDL6TZyVAQ4DBI4sI+WGtLbfD51EI5G9BfmDsbw
XJ0Dt2yEwRfDUx/lYbAMvhUnWEu2DSpYdJb8GG0GKTGqU4YpvO1JgTCsLSLIAHfT
tQMw04Gs+kORRNbggsdTD4sR
=N5Wp
-----END PGP PUBLIC KEY BLOCK-----

View File

@@ -18,7 +18,7 @@
ORIGINAL_PATH="$PATH"
. "$(dirname $0)/setup.sh"
check_dependencies $coverage kcov git hg svn
check_dependencies $coverage git hg svn
# Clean the environment by removing Spack from the path and getting rid of
# the spack shell function

View File

@@ -37,7 +37,11 @@ bin/spack -h
bin/spack help -a
# Profile and print top 20 lines for a simple call to spack spec
spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
spack -p --lines 20 spec openmpi
else
spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170
fi
#-----------------------------------------------------------
# Run unit tests with code coverage

View File

@@ -26,11 +26,14 @@ if [[ "$COVERAGE" == "true" ]]; then
coverage=coverage
coverage_run="coverage run"
# bash coverage depends on some other factors
mkdir -p coverage
cc_script="$SPACK_ROOT/lib/spack/env/cc"
bashcov=$(realpath ${QA_DIR}/bashcov)
sed -i~ "s@#\!/bin/bash@#\!${bashcov}@" "$cc_script"
# bash coverage depends on some other factors -- there are issues with
# kcov for Python 2.6, unit tests, and build tests.
if [[ $TRAVIS_PYTHON_VERSION != 2.6 ]]; then
mkdir -p coverage
cc_script="$SPACK_ROOT/lib/spack/env/cc"
bashcov=$(realpath ${QA_DIR}/bashcov)
sed -i~ "s@#\!/bin/bash@#\!${bashcov}@" "$cc_script"
fi
fi
#
@@ -71,9 +74,6 @@ check_dependencies() {
spack_package=mercurial
pip_package=mercurial
;;
kcov)
spack_package=kcov
;;
svn)
spack_package=subversion
;;

View File

@@ -1,129 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
###############################################################################
#
# This file is part of Spack and sets up the environment for the Spack tutorial
# It is intended to be run on ubuntu-18.04 or an ubuntu-18.04 container or AWS
# cloud9 environment
#
# Components:
# 1. apt installs for packages used in the tutorial
# these include compilers and externals used by the tutorial and
# basic spack requirements like python and curl
# 2. spack configuration files
# these set the default configuration for Spack to use x86_64 and suppress
# certain gpg warnings. The gpg warnings are not relevant for the tutorial
# and the default x86_64 architecture allows us to run the same tutorial on
# any x86_64 architecture without needing new binary packages.
# 3. aws cloud9 configuration to expand available storage
# when we run on aws cloud9 we have to expand the storage from 10G to 30G
# because we install too much software for a default cloud9 instance
###############################################################################
####
# Ensure we're on Ubuntu 18.04
####
if [ -f /etc/os-release ]; then
. /etc/os-release
fi
if [ x"$UBUNTU_CODENAME" != "xbionic" ]; then
echo "The tutorial setup script must be run on Ubuntu 18.04."
return 1 &>/dev/null || exit 1 # works if sourced or run
fi
####
# Install packages needed for tutorial
####
# compilers, basic system components, externals
# There are retries around these because apt fails frequently on new instances,
# due to unattended updates running in the background and taking the lock.
until sudo apt-get update -y; do
echo "==> apt-get update failed. retrying..."
sleep 5
done
until sudo apt-get install -y --no-install-recommends \
autoconf make python3 python3-pip \
build-essential ca-certificates curl git gnupg2 iproute2 emacs \
file openssh-server tcl unzip vim wget \
clang g++ g++-6 gcc gcc-6 gfortran gfortran-6 \
zlib1g zlib1g-dev mpich; do
echo "==> apt-get install failed. retrying..."
sleep 5
done
####
# Upgrade boto3 python package on AWS systems
####
pip3 install --upgrade boto3
####
# Spack configuration settings for tutorial
####
# create spack system config
sudo mkdir -p /etc/spack
# set default arch to x86_64
sudo tee /etc/spack/packages.yaml << EOF > /dev/null
packages:
all:
target: [x86_64]
EOF
# suppress gpg warnings
sudo tee /etc/spack/config.yaml << EOF > /dev/null
config:
suppress_gpg_warnings: true
EOF
####
# AWS set volume size to at least 30G
####
# Hardcode the specified size to 30G
SIZE=30
# Get the ID of the environment host Amazon EC2 instance.
INSTANCEID=$(curl http://169.254.169.254/latest/meta-data//instance-id)
# Get the ID of the Amazon EBS volume associated with the instance.
VOLUMEID=$(aws ec2 describe-instances \
--instance-id $INSTANCEID \
--query "Reservations[0].Instances[0].BlockDeviceMappings[0].Ebs.VolumeId" \
--output text)
# Resize the EBS volume.
aws ec2 modify-volume --volume-id $VOLUMEID --size $SIZE
# Wait for the resize to finish.
while [ \
    "$(aws ec2 describe-volumes-modifications \
        --volume-id "$VOLUMEID" \
        --filters Name=modification-state,Values="optimizing","completed" \
        --query "length(VolumesModifications)" \
        --output text)" != "1" ]; do
    sleep 1
done
if [ -e /dev/xvda1 ]
then
    # Rewrite the partition table so that the partition takes up all the space that it can.
    sudo growpart /dev/xvda 1
    # Expand the size of the file system.
    sudo resize2fs /dev/xvda1
else
    # Rewrite the partition table so that the partition takes up all the space that it can.
    sudo growpart /dev/nvme0n1 1
    # Expand the size of the file system.
    sudo resize2fs /dev/nvme0n1p1
fi
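Because of the 'return ... || exit' guard near the top, the script works both sourced and executed; for example (the file name here is hypothetical, the diff does not show it):

    source setup-tutorial-env.sh   # sourced: a failed check returns to the shell
    bash setup-tutorial-env.sh     # executed: a failed check exits the script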


@@ -962,7 +962,7 @@ _spack_info() {
_spack_install() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
        SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --upstream -g --global --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
    else
        _all_packages
    fi
@@ -1436,7 +1436,7 @@ _spack_test() {
_spack_uninstall() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all"
        SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all -u --upstream -g --global"
    else
        _installed_packages
    fi
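Both hunks above only teach the completions about the options this branch adds to 'spack install' and 'spack uninstall'. A hedged usage sketch (flag names come from the completion strings; argument shapes and semantics are inferred from this branch's commit messages, which are not shown here):

    spack install --global zlib                # hypothetical: install into the shared/global upstream
    spack install --upstream /opt/spack zlib   # hypothetical: install into a specific upstream path
    spack uninstall -g zlib                    # hypothetical: uninstall from the global upstream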


@@ -1,155 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os


class Corge(Package):
    """A toy package to test dependencies"""

    homepage = "https://www.example.com"
    url = "https://github.com/gartung/corge/archive/v3.0.0.tar.gz"

    version('3.0.0',
            sha256='5058861c3b887511387c725971984cec665a8307d660158915a04d7786fed6bc')

    depends_on('quux')

    def install(self, spec, prefix):
        corge_cc = '''#include <iostream>
#include <stdexcept>
#include "corge.h"
#include "corge_version.h"
#include "quux/quux.h"

const int Corge::version_major = corge_version_major;
const int Corge::version_minor = corge_version_minor;

Corge::Corge()
{
}

int
Corge::get_version() const
{
    return 10 * version_major + version_minor;
}

int
Corge::corgegate() const
{
    int corge_version = get_version();
    std::cout << "Corge::corgegate version " << corge_version
              << " invoked" << std::endl;
    std::cout << "Corge config directory = %s" << std::endl;
    Quux quux;
    int quux_version = quux.quuxify();
    if (quux_version != corge_version) {
        throw std::runtime_error(
            "Corge found an incompatible version of Quux.");
    }
    return corge_version;
}
'''
        corge_h = '''#ifndef CORGE_H_
#define CORGE_H_

class Corge
{
private:
    static const int version_major;
    static const int version_minor;

public:
    Corge();
    int get_version() const;
    int corgegate() const;
};

#endif  // CORGE_H_
'''
        corge_version_h = '''
const int corge_version_major = %s;
const int corge_version_minor = %s;
'''
        corgegator_cc = '''
#include <iostream>
#include "corge.h"

int
main(int argc, char* argv[])
{
    std::cout << "corgegator called with ";
    if (argc == 0) {
        std::cout << "no command-line arguments" << std::endl;
    } else {
        std::cout << "command-line arguments:";
        for (int i = 0; i < argc; ++i) {
            std::cout << " \"" << argv[i] << "\"";
        }
        std::cout << std::endl;
    }
    std::cout << "corgegating..." << std::endl;
    Corge corge;
    corge.corgegate();
    std::cout << "done." << std::endl;
    return 0;
}
'''
        mkdirp(prefix.lib64)
        mkdirp('%s/corge' % prefix.include)
        mkdirp('%s/corge' % self.stage.source_path)
        with open('%s/corge_version.h' % self.stage.source_path, 'w') as f:
            f.write(corge_version_h % (self.version[0], self.version[1:]))
        with open('%s/corge/corge.cc' % self.stage.source_path, 'w') as f:
            f.write(corge_cc % prefix.config)
        with open('%s/corge/corge.h' % self.stage.source_path, 'w') as f:
            f.write(corge_h)
        with open('%s/corge/corgegator.cc' % self.stage.source_path, 'w') as f:
            f.write(corgegator_cc)
        gpp = which('/usr/bin/g++')
        gpp('-Dcorge_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-I%s' % spec['quux'].prefix.include,
            '-I%s' % spec['garply'].prefix.include,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'corge.cc.o',
            '-c', 'corge/corge.cc')
        gpp('-Dcorge_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-I%s' % spec['quux'].prefix.include,
            '-I%s' % spec['garply'].prefix.include,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'corgegator.cc.o',
            '-c', 'corge/corgegator.cc')
        gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
            '-Wl,-soname,libcorge.so', '-o', 'libcorge.so', 'corge.cc.o',
            '-Wl,-rpath,%s:%s::::' %
            (spec['quux'].prefix.lib64, spec['garply'].prefix.lib64),
            '%s/libquux.so' % spec['quux'].prefix.lib64,
            '%s/libgarply.so' % spec['garply'].prefix.lib64)
        gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
            'corgegator.cc.o', '-o', 'corgegator',
            '-Wl,-rpath,%s:%s:%s:::' % (prefix.lib64,
                                        spec['quux'].prefix.lib64,
                                        spec['garply'].prefix.lib64),
            'libcorge.so',
            '%s/libquux.so' % spec['quux'].prefix.lib64,
            '%s/libgarply.so' % spec['garply'].prefix.lib64)
        copy('corgegator', '%s/corgegator' % prefix.lib64)
        copy('libcorge.so', '%s/libcorge.so' % prefix.lib64)
        copy('%s/corge/corge.h' % self.stage.source_path,
             '%s/corge/corge.h' % prefix.include)
        mkdirp(prefix.bin)
        copy('corge_version.h', '%s/corge_version.h' % prefix.bin)
        os.symlink('%s/corgegator' % prefix.lib64,
                   '%s/corgegator' % prefix.bin)
        os.symlink('%s/quuxifier' % spec['quux'].prefix.lib64,
                   '%s/quuxifier' % prefix.bin)
        os.symlink('%s/garplinator' % spec['garply'].prefix.lib64,
                   '%s/garplinator' % prefix.bin)
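One detail worth noting in the package above: corge_version_h is filled from Spack's Version indexing, so for v3.0.0 the major becomes 3 and the minor '0.0'. A quick way to confirm (spack python is a real subcommand; the -c form and exact output are assumptions):

    spack python -c "from spack.version import Version; v = Version('3.0.0'); print(v[0], v[1:])"
    # expected output: 3 0.0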


@@ -1,112 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os


class Garply(Package):
    """Toy package for testing dependencies"""

    homepage = "https://www.example.com"
    url = "https://github.com/gartung/garply/archive/v3.0.0.tar.gz"

    version('3.0.0',
            sha256='534ac8ba7a6fed7e8bbb543bd43ca04999e65337445a531bd296939f5ac2f33d')

    def install(self, spec, prefix):
        garply_h = '''#ifndef GARPLY_H_
#define GARPLY_H_

class Garply
{
private:
    static const int version_major;
    static const int version_minor;

public:
    Garply();
    int get_version() const;
    int garplinate() const;
};

#endif  // GARPLY_H_
'''
        garply_cc = '''#include "garply.h"
#include "garply_version.h"
#include <iostream>

const int Garply::version_major = garply_version_major;
const int Garply::version_minor = garply_version_minor;

Garply::Garply() {}

int
Garply::get_version() const
{
    return 10 * version_major + version_minor;
}

int
Garply::garplinate() const
{
    std::cout << "Garply::garplinate version " << get_version()
              << " invoked" << std::endl;
    std::cout << "Garply config dir = %s" << std::endl;
    return get_version();
}
'''
        garplinator_cc = '''#include "garply.h"
#include <iostream>

int
main()
{
    Garply garply;
    garply.garplinate();
    return 0;
}
'''
        garply_version_h = '''const int garply_version_major = %s;
const int garply_version_minor = %s;
'''
        mkdirp(prefix.lib64)
        mkdirp('%s/garply' % prefix.include)
        mkdirp('%s/garply' % self.stage.source_path)
        with open('%s/garply_version.h' % self.stage.source_path, 'w') as f:
            f.write(garply_version_h % (self.version[0], self.version[1:]))
        with open('%s/garply/garply.h' % self.stage.source_path, 'w') as f:
            f.write(garply_h)
        with open('%s/garply/garply.cc' % self.stage.source_path, 'w') as f:
            f.write(garply_cc % prefix.config)
        with open('%s/garply/garplinator.cc' %
                  self.stage.source_path, 'w') as f:
            f.write(garplinator_cc)
        gpp = which('/usr/bin/g++')
        gpp('-Dgarply_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'garply.cc.o',
            '-c', '%s/garply/garply.cc' % self.stage.source_path)
        gpp('-Dgarply_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'garplinator.cc.o',
            '-c', '%s/garply/garplinator.cc' % self.stage.source_path)
        gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
            '-Wl,-soname,libgarply.so', '-o', 'libgarply.so', 'garply.cc.o')
        gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
            'garplinator.cc.o', '-o', 'garplinator',
            '-Wl,-rpath,%s' % prefix.lib64,
            'libgarply.so')
        copy('libgarply.so', '%s/libgarply.so' % prefix.lib64)
        copy('garplinator', '%s/garplinator' % prefix.lib64)
        copy('%s/garply/garply.h' % self.stage.source_path,
             '%s/garply/garply.h' % prefix.include)
        mkdirp(prefix.bin)
        copy('garply_version.h', '%s/garply_version.h' % prefix.bin)
        os.symlink('%s/garplinator' % prefix.lib64,
                   '%s/garplinator' % prefix.bin)


@@ -7,17 +7,16 @@
class Patchelf(AutotoolsPackage):
    """PatchELF is a small utility to modify the dynamic linker and RPATH of
    ELF executables."""
    """
    PatchELF is a small utility to modify the
    dynamic linker and RPATH of ELF executables.
    """

    homepage = "https://nixos.org/patchelf.html"
    url      = "https://nixos.org/releases/patchelf/patchelf-0.10/patchelf-0.10.tar.gz"
    list_url = "https://nixos.org/releases/patchelf/"
    url      = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"
    list_url = "http://nixos.org/releases/patchelf/"
    list_depth = 1

    version('0.10', sha256='b2deabce05c34ce98558c0efb965f209de592197b2c88e930298d740ead09019')
    version('0.9', sha256='f2aa40a6148cb3b0ca807a1bf836b081793e55ec9e5540a5356d800132be7e0a')
    version('0.8', sha256='14af06a2da688d577d64ff8dac065bb8903bbffbe01d30c62df7af9bf4ce72fe')

    def install(self, spec, prefix):
        install_tree(self.stage.source_path, prefix)

    version('0.9', '3c265508526760f233620f35d79c79fc')
    version('0.8', '407b229e6a681ffb0e2cdd5915cb2d01')
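The hunk above swaps between sha256 and older md5-style version lines; either set can be regenerated with spack checksum (a real subcommand; it needs network access to fetch the tarballs):

    spack checksum patchelf 0.10 0.9 0.8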


@@ -1,132 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os


class Quux(Package):
    """Toy package for testing dependencies"""

    homepage = "https://www.example.com"
    url = "https://github.com/gartung/quux/archive/v3.0.0.tar.gz"

    version('3.0.0',
            sha256='b91bc96fb746495786bddac2c527039177499f2f76d3fa9dcf0b393859e68484')

    depends_on('garply')

    def install(self, spec, prefix):
        quux_cc = '''#include "quux.h"
#include "garply/garply.h"
#include "quux_version.h"
#include <iostream>
#include <stdexcept>

const int Quux::version_major = quux_version_major;
const int Quux::version_minor = quux_version_minor;

Quux::Quux() {}

int
Quux::get_version() const
{
    return 10 * version_major + version_minor;
}

int
Quux::quuxify() const
{
    int quux_version = get_version();
    std::cout << "Quux::quuxify version " << quux_version
              << " invoked" << std::endl;
    std::cout << "Quux config directory is %s" << std::endl;
    Garply garply;
    int garply_version = garply.garplinate();
    if (garply_version != quux_version) {
        throw std::runtime_error(
            "Quux found an incompatible version of Garply.");
    }
    return quux_version;
}
'''
        quux_h = '''#ifndef QUUX_H_
#define QUUX_H_

class Quux
{
private:
    static const int version_major;
    static const int version_minor;

public:
    Quux();
    int get_version() const;
    int quuxify() const;
};

#endif  // QUUX_H_
'''
        quuxifier_cc = '''
#include "quux.h"
#include <iostream>

int
main()
{
    Quux quux;
    quux.quuxify();
    return 0;
}
'''
        quux_version_h = '''const int quux_version_major = %s;
const int quux_version_minor = %s;
'''
        mkdirp(prefix.lib64)
        mkdirp('%s/quux' % prefix.include)
        # create the quux source subdirectory before writing into it
        mkdirp('%s/quux' % self.stage.source_path)
        with open('%s/quux_version.h' % self.stage.source_path, 'w') as f:
            f.write(quux_version_h % (self.version[0], self.version[1:]))
        with open('%s/quux/quux.cc' % self.stage.source_path, 'w') as f:
            f.write(quux_cc % (prefix.config))
        with open('%s/quux/quux.h' % self.stage.source_path, 'w') as f:
            f.write(quux_h)
        with open('%s/quux/quuxifier.cc' % self.stage.source_path, 'w') as f:
            f.write(quuxifier_cc)
        gpp = which('/usr/bin/g++')
        gpp('-Dquux_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-I%s' % spec['garply'].prefix.include,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'quux.cc.o',
            '-c', 'quux/quux.cc')
        gpp('-Dquux_EXPORTS',
            '-I%s' % self.stage.source_path,
            '-I%s' % spec['garply'].prefix.include,
            '-O2', '-g', '-DNDEBUG', '-fPIC',
            '-o', 'quuxifier.cc.o',
            '-c', 'quux/quuxifier.cc')
        gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
            '-Wl,-soname,libquux.so', '-o', 'libquux.so', 'quux.cc.o',
            '-Wl,-rpath,%s:%s::::' % (prefix.lib64,
                                      spec['garply'].prefix.lib64),
            '%s/libgarply.so' % spec['garply'].prefix.lib64)
        gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
            'quuxifier.cc.o', '-o', 'quuxifier',
            '-Wl,-rpath,%s:%s::::' % (prefix.lib64,
                                      spec['garply'].prefix.lib64),
            'libquux.so',
            '%s/libgarply.so' % spec['garply'].prefix.lib64)
        copy('libquux.so', '%s/libquux.so' % prefix.lib64)
        copy('quuxifier', '%s/quuxifier' % prefix.lib64)
        copy('%s/quux/quux.h' % self.stage.source_path,
             '%s/quux/quux.h' % prefix.include)
        mkdirp(prefix.bin)
        copy('quux_version.h', '%s/quux_version.h' % prefix.bin)
        os.symlink('%s/quuxifier' % prefix.lib64, '%s/quuxifier' % prefix.bin)
        os.symlink('%s/garplinator' % spec['garply'].prefix.lib64,
                   '%s/garplinator' % prefix.bin)


@@ -15,6 +15,6 @@ class AdeptUtils(CMakePackage):
    version('1.0.1', sha256='259f777aeb368ede3583d3617bb779f0fde778319bf2122fdd216bdf223c015e')
    version('1.0', sha256='fed29366c9bcf5f3799220ae3b351d2cb338e2aa42133d61584ea650aa8d6ff7')

    depends_on('boost@:1.72.0')
    depends_on('boost')
    depends_on('mpi')
    depends_on('cmake@2.8:', type='build')
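The 'boost@:1.72.0' line above pins boost to 1.72.0 or older; the effect is easy to observe at concretization time (a hypothetical check):

    spack spec adept-utils ^boost@1.72.0   # satisfiable with the pin in place
    spack spec adept-utils ^boost@1.73.0   # should fail while the pin is in effect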


@@ -77,16 +77,15 @@ class Cuda(Package):
    depends_on('libxml2', when='@10.1.243:')

    def setup_build_environment(self, env):
        env.set('CUDAHOSTCXX', self.compiler.cxx)
        if self.spec.satisfies('@10.1.243:'):
            libxml2_home = self.spec['libxml2'].prefix
            env.set('LIBXML2HOME', libxml2_home)
            env.append_path('LD_LIBRARY_PATH', libxml2_home.lib)

    def setup_dependent_build_environment(self, env, dependent_spec):
        env.set('CUDAHOSTCXX', dependent_spec.package.compiler.cxx)

    def setup_run_environment(self, env):
        env.set('CUDA_HOME', self.prefix)
        env.set('CUDAHOSTCXX', self.compiler.cxx)

    def install(self, spec, prefix):
        if os.path.exists('/tmp/cuda-installer.log'):
@@ -129,7 +128,7 @@ def install(self, spec, prefix):
    @property
    def libs(self):
        libs = find_libraries('libcudart', root=self.prefix, shared=True,
        libs = find_libraries('libcuda', root=self.prefix, shared=True,
                              recursive=True)
        filtered_libs = []


@@ -238,7 +238,6 @@ def post_install(self):
                spec['libelf'].prefix.include,
                spec['hwloc'].prefix.include))

        # Only build if offload target.
        cmake(*args)
        make()
        make('install')
        cmake(*args)
        make()
        make('install')