Compare commits


175 Commits

Author SHA1 Message Date
Carson Woods
4bb26802ed Merge tag 'v0.15.0' into features/shared 2020-06-30 19:24:23 -05:00
Carson Woods
1965e1e606 Merge branch 'develop' into features/shared 2020-06-26 12:19:32 -04:00
Carson Woods
6ed3db6c14 Re-add testing code 2020-06-25 12:29:59 -04:00
Carson Woods
a8fbc96271 Add updated bash completion file 2020-06-24 16:00:19 -04:00
Carson Woods
d8956a3bbe Merge branch 'develop' into features/shared
Conflicts:
	share/spack/spack-completion.bash
2020-06-24 10:59:49 -06:00
Carson Woods
a807b95081 Merge branch 'develop' into features/shared 2020-06-23 11:37:54 -04:00
Carson Woods
1b608d6041 Merge branch 'develop' into features/shared 2020-04-20 11:02:54 -05:00
Carson Woods
be143d7dff Merge branch 'develop' into features/shared 2020-04-06 13:49:46 -04:00
Carson Woods
05fe92e086 Merge branch 'develop' into features/shared 2020-03-09 23:23:37 +00:00
Carson Woods
cd54fb95b8 Fix for infinite recursion bug that stemmed from duplicate code existing
after an improper merge with develop
2020-02-25 12:18:03 -05:00
Carson Woods
8b63c4555c Merge branch 'develop' into features/shared 2020-02-25 11:50:23 -05:00
Carson Woods
ec78160569 Update spack-completition.bash 2020-02-12 12:26:41 -05:00
Carson Woods
e1379f132d Merge branch 'develop' into features/shared 2020-02-12 12:21:19 -05:00
Carson Woods
cdcd3dcedd Merge branch 'develop' into features/shared 2020-01-27 15:28:28 -05:00
Carson Woods
7c1083916a Fix bash completion script 2020-01-24 11:36:26 -05:00
Carson Woods
c07bbe1a25 Fix flake8 error 2020-01-24 11:00:08 -05:00
Carson Woods
85032c6224 Resolve merge conflicts with develop 2020-01-24 10:41:44 -05:00
Carson Woods
7b7898a69c Merge branch 'develop' into features/shared 2020-01-21 18:55:21 -05:00
Carson Woods
84c5d76eae Merge branch 'develop' into features/shared 2020-01-18 13:57:50 -08:00
Carson Woods
bcd47f0bd6 Merge branch 'develop' into features/shared 2020-01-17 14:32:47 -08:00
Carson Woods
cb6a959cdb Merge branch 'develop' into features/shared 2020-01-15 14:41:14 -05:00
Carson Woods
32cd12bff7 Merge branch 'develop' into features/shared 2020-01-10 16:19:37 -08:00
Carson Woods
7021965159 Fix merge conflicts and repair broken unit test. 2020-01-09 20:12:39 -08:00
Carson Woods
5c5743ca33 Merge branch 'develop' into features/shared to support Spack 0.13.3 2019-12-26 21:00:09 -06:00
Carson Woods
034a7662ac Merge branch 'develop' into features/shared 2019-11-21 12:52:24 -07:00
Carson Woods
e6b6ac5898 Fixed error message to use proper --upstream rather than -u 2019-11-21 12:30:15 -07:00
Carson Woods
35037bf088 Merge branch 'develop' into features/shared 2019-11-17 16:37:47 -07:00
Carson Woods
d14c245411 Merge branch 'develop' into features/shared 2019-11-10 22:05:20 -05:00
Carson Woods
6e2ad01f20 Fix flake8 formatting 2019-11-06 13:25:15 -05:00
Carson Woods
ef9b5a8f74 Fix unit test failing 2019-11-06 13:24:10 -05:00
Carson Woods
4921ed29d5 Fix a few broken unit tests 2019-11-06 09:56:22 -05:00
Carson Woods
f4c720e902 Ensure feature supports Spack version 0.13.0-0.13.1 2019-11-05 16:38:18 -05:00
Carson Woods
0a71b1d5ac Merge branch 'develop' into features/shared 2019-10-31 21:29:33 -04:00
Carson Woods
3593a7be6a Better comment the purpose of new unit tests 2019-09-20 19:05:56 -04:00
Carson Woods
e4d2cf4441 Fix flake8 error to avoid failing CI testing 2019-09-20 15:29:46 -04:00
Carson Woods
911e51bd89 Merge branch 'develop' into features/shared
Done to resolve merge conflicts that had arisen since work on this
feature completed.
2019-09-20 15:28:44 -04:00
Carson Woods
6ec8aea6f7 Rebase branch 'features/shared' of github.com:carsonwoods/spack against develop 2019-08-07 18:57:48 -06:00
Carson Woods
5b11f7aa4c Fix bug where environments would ignore global path 2019-08-07 18:32:28 -06:00
Carson Woods
97e46981b9 Remove old doc from doc index 2019-08-07 18:32:28 -06:00
Carson Woods
873ac5e890 Remove old documentation for shared mode 2019-08-07 18:32:28 -06:00
Carson Woods
4d7dae5716 Remove old share command from tests 2019-08-07 18:32:28 -06:00
Carson Woods
b19f0fafcc Remove outdate share command 2019-08-07 18:32:28 -06:00
Carson Woods
11b1bdd119 Pep8 Compliance Fix 2019-08-07 18:32:28 -06:00
Carson Woods
f749821dc2 Pep8 Compliance 2019-08-07 18:32:28 -06:00
Carson Woods
5abb20dcab Rename test 2019-08-07 18:32:28 -06:00
Carson Woods
0c233bdd0f Add test for validating upstream database initialization 2019-08-07 18:32:28 -06:00
Carson Woods
0f171c7ded Replace space with = in command parameter 2019-08-07 18:32:28 -06:00
Carson Woods
b4c7520dd8 Flake8 Test Compliance 2019-08-07 18:32:28 -06:00
Carson Woods
9ab7d8f01d Add config parameter for active upstream to set install location for modules 2019-08-07 18:32:28 -06:00
Carson Woods
a7ad344c2a Add upstreams.yaml testing file so calls to upstreams['global] doesn't cause tests to fail 2019-08-07 18:32:28 -06:00
Carson Woods
deb2d3745c Fix .spack-db/index.json not being created in global upstream if previously uninitialized 2019-08-07 18:32:28 -06:00
Carson Woods
ff96ec430b Can now specify upstream of anyname through -u/--upstream flag 2019-08-07 18:32:28 -06:00
Carson Woods
d4a959736a Flake8 Compliance Changes 2019-08-07 18:32:28 -06:00
Carson Woods
5ba51a0be0 --global option now works for both install and uninstall 2019-08-07 18:32:28 -06:00
Carson Woods
27e1140df7 Reset active directory after each global install 2019-08-07 18:32:28 -06:00
Carson Woods
7ab6af8a3b Add scope to setting active tree to ensure that it is set at user level 2019-08-07 18:32:28 -06:00
Carson Woods
0e6e93eaac Fix unit test config.yaml 2019-08-07 18:32:28 -06:00
Carson Woods
38f8bdd2bb Home expansion was removed because it was no longer being used 2019-08-07 18:32:27 -06:00
Carson Woods
8e45a3fc2f Fix flake8 compliance 2019-08-07 18:32:27 -06:00
Carson Woods
c22af99b04 Fix how upstream db paths are canonicalized 2019-08-07 18:32:27 -06:00
Carson Woods
fc3a909fbc Set staging location to ~/.spack/var 2019-08-07 18:32:27 -06:00
Carson Woods
9665754eae Fix default install tree 2019-08-07 18:32:27 -06:00
Carson Woods
0f9f9f3a85 Revise default var path 2019-08-07 18:32:27 -06:00
Carson Woods
777a5682a6 Fix default install location 2019-08-07 18:32:27 -06:00
Carson Woods
8994b4aab6 Fix flake8 compliance 2019-08-07 18:32:27 -06:00
Carson Woods
98ec366470 Set root of store object to active tree 2019-08-07 18:32:27 -06:00
Carson Woods
c61f4d7c82 Add logic to set the active install tree 2019-08-07 18:32:27 -06:00
Carson Woods
811b304230 Remove old code 2019-08-07 18:32:27 -06:00
Carson Woods
8f0c9ad409 Change name of global parameter to install_global 2019-08-07 18:32:27 -06:00
Carson Woods
6a423a5d8a Typo fix 2019-08-07 18:32:27 -06:00
Carson Woods
23c37063bd Add default global upstream of /opt/spack 2019-08-07 18:32:27 -06:00
Carson Woods
478f3a5a99 Fix whitespace issue 2019-08-07 18:32:27 -06:00
Carson Woods
02afb30990 Remove unit testing for shared spack mode 2019-08-07 18:32:27 -06:00
Carson Woods
06e3f15e47 Remove old shared spack code 2019-08-07 18:32:27 -06:00
Carson Woods
f13ce3540d Add dest name of install_global to --global parameter 2019-08-07 18:32:27 -06:00
Carson Woods
7ae34087e3 Set remove old shared spack code 2019-08-07 18:32:27 -06:00
Carson Woods
f0fea97e88 Set source_cache to user's ~/.spack directory 2019-08-07 18:32:27 -06:00
Carson Woods
54893197ed Set staging location to be based out of users .spack directory 2019-08-07 18:32:27 -06:00
Carson Woods
80da1d50d1 Make var_path point to ~/.spack/var/spack 2019-08-07 18:32:27 -06:00
Carson Woods
944c5d75cd Add --global flag to install cmd to install to globally accessible location 2019-08-07 18:32:27 -06:00
Carson Woods
9ef4bc9d50 Add macro for expanding home directory 2019-08-07 18:32:27 -06:00
Carson Woods
a2af432833 Temporarily disable module file location overrride while feature is being implemented 2019-08-07 18:32:27 -06:00
Carson Woods
aefed311af Change modulefiles install location 2019-08-07 18:32:27 -06:00
Carson Woods
6ffacddcf4 Change default install tree to user's ~/.spack directory 2019-08-07 18:32:27 -06:00
Carson Woods
e17824f82f Remove shared mode set self as upstream 2019-08-07 18:32:27 -06:00
Carson Woods
57ca47f035 Remove testing for shared mode 2019-08-07 18:32:27 -06:00
Carson Woods
4532a56b4e Remove shared disable from unit testing 2019-08-07 18:32:27 -06:00
Carson Woods
86e69a48a2 Fix flake8 error 2019-08-07 18:32:27 -06:00
Carson Woods
2508295d81 Fix error caused by SPACK_PATH environment variable not existing 2019-08-07 18:32:27 -06:00
Carson Woods
1a041c051a Fix flake8 error 2019-08-07 18:32:27 -06:00
Carson Woods
2262ca2e67 Add test for install in shared mode 2019-08-07 18:32:27 -06:00
Carson Woods
2269771a91 Fix typo 2019-08-07 18:32:27 -06:00
Carson Woods
7f32574dd8 Fix shared cmd test file 2019-08-07 18:32:27 -06:00
Carson Woods
d15ac30f62 Add shared to toctree 2019-08-07 18:32:27 -06:00
Carson Woods
1f41347ab8 Share feature Unit testing 2019-08-07 18:32:27 -06:00
Carson Woods
1f4f01103b Add command interface for share feature 2019-08-07 18:32:27 -06:00
Carson Woods
8f46fcb512 When running tests, disable shared mode because it will break other tests. Custom tests must be written 2019-08-07 18:32:27 -06:00
Carson Woods
2d3b973ebc When shared mode is active store installed packages in SPACK_PATH 2019-08-07 18:32:27 -06:00
Carson Woods
7e62e0f27f When shared mode is active set stage path to SPACK_PATH 2019-08-07 18:32:27 -06:00
Carson Woods
ea0db4c0f9 Prevent packages from being installed upstream 2019-08-07 18:32:27 -06:00
Carson Woods
0afc68e60b Change module root path when shared mode is active 2019-08-07 18:32:27 -06:00
Carson Woods
8ad25d5013 Uninstall from SPACK_PATH when shared mode is active 2019-08-07 18:32:27 -06:00
Carson Woods
e90db68321 Install to SPACK_PATH when shared mode is active 2019-08-07 18:32:27 -06:00
Carson Woods
9e96b89f02 Add documentation for spack share command 2019-08-07 18:32:27 -06:00
Carson Woods
b4dae1b7fd When shared mode is active, spack treats the normal install directory as an upstream 2019-08-07 18:32:27 -06:00
Carson Woods
9e9adf1d2f When shared mode is active, set cache location to SPACK_PATH 2019-08-07 18:32:27 -06:00
Carson Woods
de9255247a Fix bug where environments would ignore global path 2019-08-06 17:49:17 -06:00
Carson Woods
de5d3e3229 Remove old doc from doc index 2019-07-26 08:54:12 -06:00
Carson Woods
e621aafc77 Remove old documentation for shared mode 2019-07-25 16:40:00 -06:00
Carson Woods
c53427c98d Remove old share command from tests 2019-07-25 14:22:43 -06:00
Carson Woods
7a75148d1b Remove outdate share command 2019-07-25 13:32:44 -06:00
Carson Woods
4210520c9d Pep8 Compliance Fix 2019-07-25 13:32:44 -06:00
Carson Woods
4f3fb50ae7 Pep8 Compliance 2019-07-25 13:32:44 -06:00
Carson Woods
7660659107 Rename test 2019-07-25 13:32:44 -06:00
Carson Woods
fcca2a518b Add test for validating upstream database initialization 2019-07-25 13:32:44 -06:00
Carson Woods
23e1cd7775 Replace space with = in command parameter 2019-07-25 13:32:44 -06:00
Carson Woods
58e794e95a Flake8 Test Compliance 2019-07-25 13:32:44 -06:00
Carson Woods
7ed59ed835 Add config parameter for active upstream to set install location for modules 2019-07-25 13:32:43 -06:00
Carson Woods
512726ae5b Add upstreams.yaml testing file so calls to upstreams['global] doesn't cause tests to fail 2019-07-25 13:32:43 -06:00
Carson Woods
20851a6e6c Fix .spack-db/index.json not being created in global upstream if previously uninitialized 2019-07-25 13:32:43 -06:00
Carson Woods
92bbbb9659 Can now specify upstream of anyname through -u/--upstream flag 2019-07-25 13:32:43 -06:00
Carson Woods
5f2f2bfb84 Flake8 Compliance Changes 2019-07-25 13:32:43 -06:00
Carson Woods
9b63f72d6b --global option now works for both install and uninstall 2019-07-25 13:32:43 -06:00
Carson Woods
4c60f01bae Reset active directory after each global install 2019-07-25 13:32:43 -06:00
Carson Woods
cd08308463 Add scope to setting active tree to ensure that it is set at user level 2019-07-25 13:32:43 -06:00
Carson Woods
fe69997043 Fix unit test config.yaml 2019-07-25 13:32:43 -06:00
Carson Woods
1584a6e3c6 Home expansion was removed because it was no longer being used 2019-07-25 13:32:43 -06:00
Carson Woods
c393880852 Fix flake8 compliance 2019-07-25 13:32:43 -06:00
Carson Woods
bbe9e6bf54 Fix how upstream db paths are canonicalized 2019-07-25 13:32:43 -06:00
Carson Woods
d7a00b71d4 Set staging location to ~/.spack/var 2019-07-25 13:32:43 -06:00
Carson Woods
6775d2546a Fix default install tree 2019-07-25 13:32:43 -06:00
Carson Woods
8a154333f2 Revise default var path 2019-07-25 13:32:43 -06:00
Carson Woods
5e637a04fd Fix default install location 2019-07-25 13:32:43 -06:00
Carson Woods
0213869439 Fix flake8 compliance 2019-07-25 13:32:43 -06:00
Carson Woods
22e9a9792a Set root of store object to active tree 2019-07-25 13:32:43 -06:00
Carson Woods
4f23da9d26 Add logic to set the active install tree 2019-07-25 13:32:43 -06:00
Carson Woods
f9430e2fd4 Remove old code 2019-07-25 13:32:43 -06:00
Carson Woods
a2f86d5d18 Change name of global parameter to install_global 2019-07-25 13:32:43 -06:00
Carson Woods
0efab6637c Typo fix 2019-07-25 13:32:43 -06:00
Carson Woods
2b11694b94 Add default global upstream of /opt/spack 2019-07-25 13:32:43 -06:00
Carson Woods
088798a727 Fix whitespace issue 2019-07-25 13:32:43 -06:00
Carson Woods
bddbb1c22e Remove unit testing for shared spack mode 2019-07-25 13:32:42 -06:00
Carson Woods
92f447cf1c Remove old shared spack code 2019-07-25 13:32:42 -06:00
Carson Woods
96f266c3e3 Add dest name of install_global to --global parameter 2019-07-25 13:32:42 -06:00
Carson Woods
d5093c20c5 Set remove old shared spack code 2019-07-25 13:32:42 -06:00
Carson Woods
2064241c37 Set source_cache to user's ~/.spack directory 2019-07-25 13:32:42 -06:00
Carson Woods
721742b764 Set staging location to be based out of users .spack directory 2019-07-25 13:32:42 -06:00
Carson Woods
c45bf153d8 Make var_path point to ~/.spack/var/spack 2019-07-25 13:32:42 -06:00
Carson Woods
b98e5e66e7 Add --global flag to install cmd to install to globally accessible location 2019-07-25 13:32:42 -06:00
Carson Woods
3d18bf345f Add macro for expanding home directory 2019-07-25 13:32:42 -06:00
Carson Woods
f8e9cf4081 Temporarily disable module file location overrride while feature is being implemented 2019-07-25 13:32:42 -06:00
Carson Woods
98e0f8b89b Change modulefiles install location 2019-07-25 13:32:42 -06:00
Carson Woods
263275b7ea Change default install tree to user's ~/.spack directory 2019-07-25 13:32:42 -06:00
Carson Woods
3e13002d7f Remove shared mode set self as upstream 2019-07-25 13:32:42 -06:00
Carson Woods
654e5cc924 Remove testing for shared mode 2019-07-25 13:32:42 -06:00
Carson Woods
04a72c1834 Remove shared disable from unit testing 2019-07-25 13:32:42 -06:00
Carson Woods
53cf6eb194 Fix flake8 error 2019-07-25 13:32:42 -06:00
Carson Woods
5a7f186176 Fix error caused by SPACK_PATH environment variable not existing 2019-07-25 13:32:42 -06:00
Carson Woods
987adfa9c9 Fix flake8 error 2019-07-25 13:32:42 -06:00
Carson Woods
e476bb1400 Add test for install in shared mode 2019-07-25 13:32:42 -06:00
Carson Woods
dc12233610 Fix typo 2019-07-25 13:32:42 -06:00
Carson Woods
29d21a0a5d Fix shared cmd test file 2019-07-25 13:32:42 -06:00
Carson Woods
762f505da5 Add shared to toctree 2019-07-25 13:32:42 -06:00
Carson Woods
8e1c326174 Share feature Unit testing 2019-07-25 13:32:42 -06:00
Carson Woods
0bac5d527d Add command interface for share feature 2019-07-25 13:32:42 -06:00
Carson Woods
79256eeb5c When running tests, disable shared mode because it will break other tests. Custom tests must be written 2019-07-25 13:32:42 -06:00
Carson Woods
de760942f2 When shared mode is active store installed packages in SPACK_PATH 2019-07-25 13:32:41 -06:00
Carson Woods
860641bfab When shared mode is active set stage path to SPACK_PATH 2019-07-25 13:32:41 -06:00
Carson Woods
673e55f14d Prevent packages from being installed upstream 2019-07-25 13:32:41 -06:00
Carson Woods
54777a4f3e Change module root path when shared mode is active 2019-07-25 13:32:41 -06:00
Carson Woods
db36e66592 Uninstall from SPACK_PATH when shared mode is active 2019-07-25 13:32:41 -06:00
Carson Woods
0d36e94407 Install to SPACK_PATH when shared mode is active 2019-07-25 13:32:41 -06:00
Carson Woods
92c3b5b8b2 Add documentation for spack share command 2019-07-25 13:32:41 -06:00
Carson Woods
71220a3656 When shared mode is active, spack treats the normal install directory as an upstream 2019-07-25 13:32:41 -06:00
Carson Woods
09bd29d816 When shared mode is active, set cache location to SPACK_PATH 2019-07-25 13:32:41 -06:00
61 changed files with 546 additions and 944 deletions

View File

@@ -1,38 +1,3 @@
# v0.15.2 (2020-07-23)
This minor release includes two new features:
* Spack install verbosity is decreased, and more debug levels are added (#17546)
* The $spack/share/spack/keys directory contains public keys that may be optionally trusted for public binary mirrors (#17684)
This release also includes several important fixes:
* MPICC and related variables are now cleaned in the build environment (#17450)
* LLVM flang only builds CUDA offload components when +cuda (#17466)
* CI pipelines no longer upload user environments that can contain secrets to the internet (#17545)
* CI pipelines add bootstrapped compilers to the compiler config (#17536)
* `spack buildcache list` does not exit on first failure and lists later mirrors (#17565)
* Apple's "gcc" executable that is an apple-clang compiler does not generate a gcc compiler config (#17589)
* Mixed compiler toolchains are merged more naturally across different compiler suffixes (#17590)
* Cray Shasta platforms detect the OS properly (#17467)
* Additional minor fixes.
# v0.15.1 (2020-07-10)
This minor release includes several important fixes:
* Fix shell support on Cray (#17386)
* Fix use of externals installed with other Spack instances (#16954)
* Fix gcc+binutils build (#9024)
* Fixes for usage of intel-mpi (#17378 and #17382)
* Fixes to Autotools config.guess detection (#17333 and #17356)
* Update `spack install` message to prompt user when an environment is not
explicitly activated (#17454)
This release also adds a mirror for all sources that are
fetched in Spack (#17077). It is expected to be useful when the
official website for a Spack package is unavailable.
# v0.15.0 (2020-06-28)
`v0.15.0` is a major feature release.

View File

@@ -16,7 +16,7 @@
config:
# This is the path to the root of the Spack install tree.
# You can use $spack here to refer to the root of the spack instance.
install_tree: $spack/opt/spack
install_tree: ~/.spack/opt/spack
# Locations where templates should be found
@@ -30,8 +30,8 @@ config:
# Locations where different types of modules should be installed.
module_roots:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
tcl: ~/.spack/share/spack/modules
lmod: ~/.spack/share/spack/lmod
# Temporary locations Spack can try to use for builds.
@@ -67,7 +67,7 @@ config:
# Cache directory for already downloaded source tarballs and archived
# repositories. This can be purged with `spack clean --downloads`.
source_cache: $spack/var/spack/cache
source_cache: ~/.spack/var/spack/cache
# Cache directory for miscellaneous files, like the package index.

View File

@@ -1,2 +0,0 @@
mirrors:
spack-public: https://spack-llnl-mirror.s3-us-west-2.amazonaws.com/

View File

@@ -0,0 +1,7 @@
upstreams:
global:
install_tree: $spack/opt/spack
modules:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
dotkit: $spack/share/spack/dotkit
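
This new upstreams.yaml registers a single 'global' upstream. The $spack placeholder refers to the root of the Spack instance (per the config.yaml comment above); a minimal sketch of the expansion, mirroring the replace done in the database change further below:

    import spack.paths

    # '$spack' in upstreams.yaml refers to the root of the Spack instance,
    # so the default global install tree lives inside the Spack prefix itself.
    global_tree = '$spack/opt/spack'.replace('$spack', spack.paths.prefix)
    print(global_tree)   # -> <spack prefix>/opt/spack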

View File

@@ -174,9 +174,8 @@ def _lock(self, op, timeout=None):
# If the file were writable, we'd have opened it 'r+'
raise LockROFileError(self.path)
self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
.format(lock_type[op], self._start, self._length,
timeout))
tty.debug("{0} locking [{1}:{2}]: timeout {3} sec"
.format(lock_type[op], self._start, self._length, timeout))
poll_intervals = iter(Lock._poll_interval_generator())
start_time = time.time()
@@ -212,14 +211,14 @@ def _poll_lock(self, op):
# help for debugging distributed locking
if self.debug:
# All locks read the owner PID and host
self._read_log_debug_data()
self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
.format(lock_type[op], self.path,
self._start, self._length, self.pid))
self._read_debug_data()
tty.debug('{0} locked {1} [{2}:{3}] (owner={4})'
.format(lock_type[op], self.path,
self._start, self._length, self.pid))
# Exclusive locks write their PID/host
if op == fcntl.LOCK_EX:
self._write_log_debug_data()
self._write_debug_data()
return True
@@ -246,7 +245,7 @@ def _ensure_parent_directory(self):
raise
return parent
def _read_log_debug_data(self):
def _read_debug_data(self):
"""Read PID and host data out of the file if it is there."""
self.old_pid = self.pid
self.old_host = self.host
@@ -258,7 +257,7 @@ def _read_log_debug_data(self):
_, _, self.host = host.rpartition('=')
self.pid = int(self.pid)
def _write_log_debug_data(self):
def _write_debug_data(self):
"""Write PID and host data to the file, recording old values."""
self.old_pid = self.pid
self.old_host = self.host
@@ -474,6 +473,9 @@ def release_write(self, release_fn=None):
else:
return False
def _debug(self, *args):
tty.debug(*args)
def _get_counts_desc(self):
return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
if tty.is_verbose() else ''
@@ -482,50 +484,58 @@ def _log_acquired(self, locktype, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg(locktype, '{0}{1}'
.format(desc, attempts_part)))
self._debug(self._status_msg(locktype, '{0}{1}'.
format(desc, attempts_part)))
def _log_acquiring(self, locktype):
self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)
def _log_debug(self, *args, **kwargs):
"""Output lock debug messages."""
kwargs['level'] = kwargs.get('level', 2)
tty.debug(*args, **kwargs)
self._debug2(self._status_msg(locktype, 'Acquiring'))
def _log_downgraded(self, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
.format(desc, attempts_part)))
self._debug(self._status_msg('READ LOCK', '{0}{1}'
.format(desc, attempts_part)))
def _log_downgrading(self):
self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)
self._debug2(self._status_msg('WRITE LOCK', 'Downgrading'))
def _log_released(self, locktype):
now = datetime.now()
desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg(locktype, desc))
self._debug(self._status_msg(locktype, desc))
def _log_releasing(self, locktype):
self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)
self._debug2(self._status_msg(locktype, 'Releasing'))
def _log_upgraded(self, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
format(desc, attempts_part)))
self._debug(self._status_msg('WRITE LOCK', '{0}{1}'.
format(desc, attempts_part)))
def _log_upgrading(self):
self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)
self._debug2(self._status_msg('READ LOCK', 'Upgrading'))
def _status_msg(self, locktype, status):
status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
locktype, self, status_desc)
def _debug2(self, *args):
# TODO: Easy place to make a single, temporary change to the
# TODO: debug level associated with the more detailed messages.
# TODO:
# TODO: Someday it would be great if we could switch this to
# TODO: another level, perhaps _between_ debug and verbose, or
# TODO: some other form of filtering so the first level of
# TODO: debugging doesn't have to generate these messages. Using
# TODO: verbose here did not work as expected because tests like
# TODO: test_spec_json will write the verbose messages to the
# TODO: output that is used to check test correctness.
tty.debug(*args)
class LockTransaction(object):
"""Simple nested transaction context manager that uses a file lock.

View File

@@ -19,8 +19,7 @@
from llnl.util.tty.color import cprint, cwrite, cescape, clen
# Globals
_debug = 0
_debug = False
_verbose = False
_stacktrace = False
_timestamp = False
@@ -30,26 +29,21 @@
indent = " "
def debug_level():
return _debug
def is_verbose():
return _verbose
def is_debug(level=1):
return _debug >= level
def is_debug():
return _debug
def is_stacktrace():
return _stacktrace
def set_debug(level=0):
def set_debug(flag):
global _debug
assert level >= 0, 'Debug level must be a positive value'
_debug = level
_debug = flag
def set_verbose(flag):
@@ -138,17 +132,12 @@ def process_stacktrace(countback):
return st_text
def show_pid():
return is_debug(2)
def get_timestamp(force=False):
"""Get a string timestamp"""
if _debug or _timestamp or force:
# Note inclusion of the PID is useful for parallel builds.
pid = ', {0}'.format(os.getpid()) if show_pid() else ''
return '[{0}{1}] '.format(
datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
return '[{0}, {1}] '.format(
datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), os.getpid())
else:
return ''
@@ -208,8 +197,7 @@ def verbose(message, *args, **kwargs):
def debug(message, *args, **kwargs):
level = kwargs.get('level', 1)
if is_debug(level):
if _debug:
kwargs.setdefault('format', 'g')
kwargs.setdefault('stream', sys.stderr)
info(message, *args, **kwargs)
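
The leveled variant of the debug API in this hunk takes an integer verbosity rather than a boolean; a minimal usage sketch, assuming the set_debug(level) and debug(message, level=n) signatures shown above:

    import llnl.util.tty as tty

    tty.set_debug(2)                    # show debug messages up to level 2
    tty.debug('coarse progress')        # level defaults to 1, so this prints
    tty.debug('lock detail', level=3)   # suppressed: 3 > current level 2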

View File

@@ -323,14 +323,14 @@ class log_output(object):
work within test frameworks like nose and pytest.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False):
def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
"""Create a new output log context manager.
Args:
file_like (str or stream): open file object or name of file where
output should be logged
echo (bool): whether to echo output in addition to logging it
debug (int): positive to enable tty debug mode during logging
debug (bool): whether to enable tty debug mode during logging
buffer (bool): pass buffer=True to skip unbuffering output; note
this doesn't set up any *new* buffering
@@ -355,7 +355,7 @@ def __init__(self, file_like=None, echo=False, debug=0, buffer=False):
self._active = False # used to prevent re-entry
def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
"""This behaves the same as init. It allows a logger to be reused.
"""Thie behaves the same as init. It allows a logger to be reused.
Arguments are the same as for ``__init__()``. Args here take
precedence over those passed to ``__init__()``.

View File

@@ -5,7 +5,7 @@
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 15, 2)
spack_version_info = (0, 15, 0)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)

View File

@@ -466,8 +466,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
web_util.push_to_url(
specfile_path, remote_specfile_path, keep_original=False)
tty.debug('Buildcache for "{0}" written to \n {1}'
.format(spec, remote_spackfile_path))
tty.msg('Buildache for "%s" written to \n %s' %
(spec, remote_spackfile_path))
try:
# create an index.html for the build_cache directory so specs can be
@@ -828,13 +828,13 @@ def get_spec(spec=None, force=False):
mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.debug('Finding buildcaches in {0}'.format(mirror_dir))
tty.msg("Finding buildcaches in %s" % mirror_dir)
link = url_util.join(fetch_url_build_cache, specfile_name)
urls.add(link)
else:
tty.debug('Finding buildcaches at {0}'
.format(url_util.format(fetch_url_build_cache)))
tty.msg("Finding buildcaches at %s" %
url_util.format(fetch_url_build_cache))
link = url_util.join(fetch_url_build_cache, specfile_name)
urls.add(link)
@@ -857,8 +857,8 @@ def get_specs(allarch=False):
fetch_url_build_cache = url_util.join(
mirror.fetch_url, _build_cache_relative_path)
tty.debug('Finding buildcaches at {0}'
.format(url_util.format(fetch_url_build_cache)))
tty.msg("Finding buildcaches at %s" %
url_util.format(fetch_url_build_cache))
index_url = url_util.join(fetch_url_build_cache, 'index.json')
@@ -869,8 +869,8 @@ def get_specs(allarch=False):
except (URLError, web_util.SpackWebError) as url_err:
tty.error('Failed to read index {0}'.format(index_url))
tty.debug(url_err)
# Continue on to the next mirror
continue
# Just return whatever specs we may already have cached
return _cached_specs
tmpdir = tempfile.mkdtemp()
index_file_path = os.path.join(tmpdir, 'index.json')
@@ -909,15 +909,15 @@ def get_keys(install=False, trust=False, force=False):
mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.debug('Finding public keys in {0}'.format(mirror_dir))
tty.msg("Finding public keys in %s" % mirror_dir)
files = os.listdir(str(mirror_dir))
for file in files:
if re.search(r'\.key', file) or re.search(r'\.pub', file):
link = url_util.join(fetch_url_build_cache, file)
keys.add(link)
else:
tty.debug('Finding public keys at {0}'
.format(url_util.format(fetch_url_build_cache)))
tty.msg("Finding public keys at %s" %
url_util.format(fetch_url_build_cache))
# For s3 mirror need to request index.html directly
p, links = web_util.spider(
url_util.join(fetch_url_build_cache, 'index.html'))
@@ -935,14 +935,14 @@ def get_keys(install=False, trust=False, force=False):
stage.fetch()
except fs.FetchError:
continue
tty.debug('Found key {0}'.format(link))
tty.msg('Found key %s' % link)
if install:
if trust:
Gpg.trust(stage.save_filename)
tty.debug('Added this key to trusted keys.')
tty.msg('Added this key to trusted keys.')
else:
tty.debug('Will not add this key to trusted keys.'
'Use -t to install all downloaded keys')
tty.msg('Will not add this key to trusted keys.'
'Use -t to install all downloaded keys')
def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
@@ -1029,7 +1029,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
"""
rebuilds = {}
for mirror in spack.mirror.MirrorCollection(mirrors).values():
tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url))
tty.msg('Checking for built specs at %s' % mirror.fetch_url)
rebuild_list = []

View File

@@ -174,14 +174,6 @@ def clean_environment():
for v in build_system_vars:
env.unset(v)
# Unset mpi environment vars. These flags should only be set by
# mpi providers for packages with mpi dependencies
mpi_vars = [
'MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90'
]
for v in mpi_vars:
env.unset(v)
build_lang = spack.config.get('config:build_language')
if build_lang:
# Override language-related variables. This can be used to force

View File

@@ -118,13 +118,15 @@ def _do_patch_config_files(self):
config_file = 'config.{0}'.format(config_name)
if os.path.exists(config_file):
# First search the top-level source directory
my_config_files[config_name] = os.path.abspath(config_file)
my_config_files[config_name] = os.path.join(
self.configure_directory, config_file)
else:
# Then search in all sub directories recursively.
# We would like to use AC_CONFIG_AUX_DIR, but not all packages
# ship with their configure.in or configure.ac.
config_path = next((os.path.abspath(os.path.join(r, f))
for r, ds, fs in os.walk('.') for f in fs
config_path = next((os.path.join(r, f)
for r, ds, fs in os.walk(
self.configure_directory) for f in fs
if f == config_file), None)
my_config_files[config_name] = config_path

View File

@@ -42,6 +42,7 @@ def _fetch_cache():
building the same package different ways or multiple times.
"""
path = spack.config.get('config:source_cache')
if not path:
path = os.path.join(spack.paths.var_path, "cache")
path = spack.util.path.canonicalize_path(path)

View File

@@ -613,7 +613,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
debug_flag = '-d '
job_scripts = [
'spack env activate --without-view .',
'spack env activate .',
'spack {0}ci rebuild'.format(debug_flag),
]
@@ -1043,10 +1043,17 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
tty.debug('job package: {0}'.format(job_pkg))
stage_dir = job_pkg.stage.path
tty.debug('stage dir: {0}'.format(stage_dir))
build_env_src = os.path.join(stage_dir, 'spack-build-env.txt')
build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
build_env_dst = os.path.join(
job_log_dir, 'spack-build-env.txt')
build_out_dst = os.path.join(
job_log_dir, 'spack-build-out.txt')
tty.debug('Copying build log ({0}) to artifacts ({1})'.format(
tty.debug('Copying logs to artifacts:')
tty.debug(' 1: {0} -> {1}'.format(
build_env_src, build_env_dst))
shutil.copyfile(build_env_src, build_env_dst)
tty.debug(' 2: {0} -> {1}'.format(
build_out_src, build_out_dst))
shutil.copyfile(build_out_src, build_out_dst)
except Exception as inst:

View File

@@ -351,9 +351,6 @@ def env_status(args):
% (ev.manifest_name, env.path))
else:
tty.msg('In environment %s' % env.name)
# Check if environment views can be safely activated
env.check_views()
else:
tty.msg('No active environment')

View File

@@ -41,6 +41,8 @@ def update_kwargs_from_args(args, kwargs):
'fake': args.fake,
'dirty': args.dirty,
'use_cache': args.use_cache,
'install_global': args.install_global,
'upstream': args.upstream,
'cache_only': args.cache_only,
'explicit': True, # Always true for install command
'stop_at': args.until,
@@ -127,6 +129,14 @@ def setup_parser(subparser):
'-f', '--file', action='append', default=[],
dest='specfiles', metavar='SPEC_YAML_FILE',
help="install from file. Read specs to install from .yaml files")
subparser.add_argument(
'--upstream', action='store', default=None,
dest='upstream', metavar='UPSTREAM_NAME',
help='specify which upstream spack to install to')
subparser.add_argument(
'-g', '--global', action='store_true', default=False,
dest='install_global',
help='install package to globally accessible location')
cd_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
@@ -220,7 +230,10 @@ def default_log_file(spec):
"""
fmt = 'test-{x.name}-{x.version}-{hash}.xml'
basename = fmt.format(x=spec, hash=spec.dag_hash())
dirname = fs.os.path.join(spack.paths.var_path, 'junit-report')
dirname = fs.os.path.join(spack.paths.user_config_path,
'var/spack',
'junit-report')
fs.mkdirp(dirname)
return fs.os.path.join(dirname, basename)
@@ -231,6 +244,7 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
try:
# handle active environment, if any
env = ev.get_env(cli_args, 'install')
if env:
with env.write_transaction():
concrete = env.concretize_and_add(
@@ -241,6 +255,10 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
env.regenerate_views()
else:
spec.package.do_install(**kwargs)
spack.config.set('config:active_tree', '~/.spack/opt/spack',
scope='user')
spack.config.set('config:active_upstream', None,
scope='user')
except spack.build_environment.InstallError as e:
if cli_args.show_log_on_error:
@@ -255,6 +273,30 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
def install(parser, args, **kwargs):
# Install Package to Global Upstream for multi-user use
if args.install_global:
spack.config.set('config:active_upstream', 'global',
scope='user')
global_root = spack.config.get('upstreams')
global_root = global_root['global']['install_tree']
global_root = spack.util.path.canonicalize_path(global_root)
spack.config.set('config:active_tree', global_root,
scope='user')
elif args.upstream:
if args.upstream not in spack.config.get('upstreams'):
tty.die("specified upstream does not exist")
spack.config.set('config:active_upstream', args.upstream,
scope='user')
root = spack.config.get('upstreams')
root = root[args.upstream]['install_tree']
root = spack.util.path.canonicalize_path(root)
spack.config.set('config:active_tree', root, scope='user')
else:
spack.config.set('config:active_upstream', None,
scope='user')
spack.config.set('config:active_tree',
spack.config.get('config:install_tree'),
scope='user')
if args.help_cdash:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
@@ -268,7 +310,7 @@ def install(parser, args, **kwargs):
return
if not args.spec and not args.specfiles:
# if there are no args but an active environment
# if there are no args but an active environment or spack.yaml file
# then install the packages from it.
env = ev.get_env(args, 'install')
if env:
@@ -289,18 +331,7 @@ def install(parser, args, **kwargs):
env.regenerate_views()
return
else:
msg = "install requires a package argument or active environment"
if 'spack.yaml' in os.listdir(os.getcwd()):
# There's a spack.yaml file in the working dir, the user may
# have intended to use that
msg += "\n\n"
msg += "Did you mean to install using the `spack.yaml`"
msg += " in this directory? Try: \n"
msg += " spack env activate .\n"
msg += " spack install\n"
msg += " OR\n"
msg += " spack --env . install"
tty.die(msg)
tty.die("install requires a package argument or a spack.yaml file")
if args.no_checksum:
spack.config.set('config:checksum', False, scope='command_line')
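
Both new flags ultimately reduce to the same two config writes before installation proceeds. A condensed sketch of that selection, mirroring the branch code above (the activate_upstream helper is illustrative, not part of the diff):

    import spack.config
    import spack.util.path

    def activate_upstream(name):
        # Mirror `spack install --upstream=NAME`: validate the name, then
        # point the active tree at that upstream's install_tree.
        upstreams = spack.config.get('upstreams')
        if name not in upstreams:
            raise ValueError('specified upstream does not exist')
        root = spack.util.path.canonicalize_path(
            upstreams[name]['install_tree'])
        spack.config.set('config:active_upstream', name, scope='user')
        spack.config.set('config:active_tree', root, scope='user')

`spack install --global <spec>` is the same operation with the name fixed to 'global'.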

View File

@@ -5,6 +5,8 @@
from __future__ import print_function
import argparse
import copy
import sys
import itertools
@@ -15,6 +17,7 @@
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.store
import spack.spec
from spack.database import InstallStatuses
from llnl.util import tty
@@ -63,8 +66,24 @@ def setup_parser(subparser):
help="remove ALL installed packages that match each supplied spec"
)
subparser.add_argument(
'packages',
nargs=argparse.REMAINDER,
help="specs of packages to uninstall")
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
subparser.add_argument(
'-u', '--upstream', action='store', default=None,
dest='upstream', metavar='UPSTREAM_NAME',
help='specify which upstream spack to uninstall from')
subparser.add_argument(
'-g', '--global', action='store_true',
dest='global_uninstall',
help='uninstall packages installed to global upstream')
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
upstream=None, global_uninstall=False):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
@@ -76,6 +95,35 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
Return:
list of specs
"""
if global_uninstall:
spack.config.set('config:active_upstream', 'global',
scope='user')
global_root = spack.config.get('upstreams')
global_root = global_root['global']['install_tree']
global_root = spack.util.path.canonicalize_path(global_root)
spack.config.set('config:active_tree', global_root,
scope='user')
elif upstream:
if upstream not in spack.config.get('upstreams'):
tty.die("specified upstream does not exist")
spack.config.set('config:active_upstream', upstream,
scope='user')
root = spack.config.get('upstreams')
root = root[upstream]['install_tree']
root = spack.util.path.canonicalize_path(root)
spack.config.set('config:active_tree', root, scope='user')
else:
spack.config.set('config:active_upstream', None,
scope='user')
for spec in specs:
if isinstance(spec, spack.spec.Spec):
spec_name = str(spec)
spec_copy = (copy.deepcopy(spec))
spec_copy.concretize()
if spec_copy.package.installed_upstream:
tty.warn("{0} is installed upstream".format(spec_name))
tty.die("Use 'spack uninstall [--upstream upstream_name]'")
# constrain uninstall resolution to current environment if one is active
hashes = env.all_hashes() if env else None
@@ -233,11 +281,25 @@ def do_uninstall(env, specs, force):
for item in ready:
item.do_uninstall(force=force)
# write any changes made to the active environment
if env:
env.write()
spack.config.set('config:active_tree',
'~/.spack/opt/spack',
scope='user')
spack.config.set('config:active_upstream', None,
scope='user')
def get_uninstall_list(args, specs, env):
# Gets the list of installed specs that match the ones give via cli
# args.all takes care of the case where '-a' is given in the cli
uninstall_list = find_matching_specs(env, specs, args.all, args.force)
uninstall_list = find_matching_specs(env, specs, args.all, args.force,
upstream=args.upstream,
global_uninstall=args.global_uninstall
)
# Takes care of '-R'
active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
@@ -314,7 +376,7 @@ def uninstall_specs(args, specs):
anything_to_do = set(uninstall_list).union(set(remove_list))
if not anything_to_do:
tty.warn('There are no package to uninstall.')
tty.warn('There are no packages to uninstall.')
return
if not args.yes_to_all:

View File

@@ -28,7 +28,7 @@
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
"""Invokes the compiler at a given path passing a single
version argument and returns the output.
@@ -42,18 +42,6 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
return output
def get_compiler_version_output(compiler_path, *args, **kwargs):
"""Wrapper for _get_compiler_version_output()."""
# This ensures that we memoize compiler output by *absolute path*,
# not just executable name. If we don't do this, and the path changes
# (e.g., during testing), we can get incorrect results.
if not os.path.isabs(compiler_path):
compiler_path = spack.util.executable.which_string(
compiler_path, required=True)
return _get_compiler_version_output(compiler_path, *args, **kwargs)
def tokenize_flags(flags_str):
"""Given a compiler flag specification as a string, this returns a list
where the entries are the flags. For compiler options which set values
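
The wrapper shown above exists so compiler version output is memoized by absolute path, not by bare executable name. A generic sketch of the same pattern using a plain dict cache and shutil.which in place of Spack's helpers (run_version_cmd is a hypothetical runner, not a Spack function):

    import os
    from shutil import which

    _cache = {}

    def version_output(compiler_path, version_arg):
        # Normalize to an absolute path first; otherwise a PATH change
        # (e.g., during testing) could return stale cached output.
        if not os.path.isabs(compiler_path):
            compiler_path = which(compiler_path)
        key = (compiler_path, version_arg)
        if key not in _cache:
            _cache[key] = run_version_cmd(compiler_path, version_arg)
        return _cache[key]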

View File

@@ -650,18 +650,23 @@ def make_compiler_list(detected_versions):
Returns:
list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
# We don't sort on the path of the compiler
sort_fn = lambda x: (x.id, x.variation, x.language)
compilers_s = sorted(detected_versions, key=sort_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
for sort_key, group in itertools.groupby(compilers_s, key=sort_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
by_name_variation = by_compiler_id.setdefault(name_variation, {})
by_name_variation[language] = next(x.path for x in group)
def _default_make_compilers(cmp_id, paths):
# For each unique compiler id select the name variation with most entries
# i.e. the one that supports most languages
compilers = []
def _default(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
@@ -672,38 +677,16 @@ def _default_make_compilers(cmp_id, paths):
)
return [compiler]
# For compilers with the same compiler id:
#
# - Prefer with C compiler to without
# - Prefer with C++ compiler to without
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
'cc' not in by_compiler_id[variation], # None last
'cxx' not in by_compiler_id[variation], # None last
getattr(variation, 'prefix', None),
getattr(variation, 'suffix', None),
)
compilers = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# fill any missing parts from subsequent entries
for lang in ['cxx', 'f77', 'fc']:
if lang not in selected:
next_lang = next((
by_compiler_id[v][lang] for v in ordered
if lang in by_compiler_id[v]), None)
if next_lang:
selected[lang] = next_lang
_, selected_name_variation = max(
(len(by_compiler_id[variation]), variation)
for variation in by_compiler_id
)
# Add it to the list of compilers
selected = by_compiler_id[selected_name_variation]
operating_system, _, _ = compiler_id
make_compilers = getattr(
operating_system, 'make_compilers', _default_make_compilers)
make_compilers = getattr(operating_system, 'make_compilers', _default)
compilers.extend(make_compilers(compiler_id, selected))
return compilers

View File

@@ -23,12 +23,7 @@ def extract_version_from_output(cls, output):
ver = 'unknown'
match = re.search(
# Apple's LLVM compiler has its own versions, so suffix them.
r'^Apple (?:LLVM|clang) version ([^ )]+)',
output,
# Multi-line, since 'Apple clang' may not be on the first line
# in particular, when run as gcc, it seems to output
# "Configured with: --prefix=..." as the first line
re.M,
r'^Apple (?:LLVM|clang) version ([^ )]+)', output
)
if match:
ver = match.group(match.lastindex)

View File

@@ -5,13 +5,13 @@
import re
import spack.compiler
import spack.compilers.apple_clang as apple_clang
import spack.compilers.clang
from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import ver
class Gcc(spack.compiler.Compiler):
class Gcc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['gcc']
@@ -64,8 +64,10 @@ def cxx98_flag(self):
@property
def cxx11_flag(self):
if self.version < ver('4.3'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", " < 4.3")
raise UnsupportedCompilerFlag(self,
"the C++11 standard",
"cxx11_flag",
" < 4.3")
elif self.version < ver('4.7'):
return "-std=c++0x"
else:
@@ -74,8 +76,10 @@ def cxx11_flag(self):
@property
def cxx14_flag(self):
if self.version < ver('4.8'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++14 standard", "cxx14_flag", "< 4.8")
raise UnsupportedCompilerFlag(self,
"the C++14 standard",
"cxx14_flag",
"< 4.8")
elif self.version < ver('4.9'):
return "-std=c++1y"
elif self.version < ver('6.0'):
@@ -86,8 +90,10 @@ def cxx14_flag(self):
@property
def cxx17_flag(self):
if self.version < ver('5.0'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++17 standard", "cxx17_flag", "< 5.0")
raise UnsupportedCompilerFlag(self,
"the C++17 standard",
"cxx17_flag",
"< 5.0")
elif self.version < ver('6.0'):
return "-std=c++1z"
else:
@@ -96,15 +102,19 @@ def cxx17_flag(self):
@property
def c99_flag(self):
if self.version < ver('4.5'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C99 standard", "c99_flag", "< 4.5")
raise UnsupportedCompilerFlag(self,
"the C99 standard",
"c99_flag",
"< 4.5")
return "-std=c99"
@property
def c11_flag(self):
if self.version < ver('4.7'):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C11 standard", "c11_flag", "< 4.7")
raise UnsupportedCompilerFlag(self,
"the C11 standard",
"c11_flag",
"< 4.7")
return "-std=c11"
@property
@@ -142,10 +152,10 @@ def default_version(cls, cc):
7.2.0
"""
# Apple's gcc is actually apple clang, so skip it. Returning
# "unknown" ensures this compiler is not detected by default.
# Users can add it manually to compilers.yaml at their own risk.
if apple_clang.AppleClang.default_version(cc) != 'unknown':
# Skip any gcc versions that are actually clang, like Apple's gcc.
# Returning "unknown" makes them not detected by default.
# Users can add these manually to compilers.yaml at their own risk.
if spack.compilers.clang.Clang.default_version(cc) != 'unknown':
return 'unknown'
version = super(Gcc, cls).default_version(cc)

View File

@@ -12,9 +12,7 @@
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2"
"0.15.0": "0.15.0"
}
},
"ubuntu:16.04": {
@@ -30,9 +28,7 @@
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2"
"0.15.0": "0.15.0"
}
},
"centos:7": {
@@ -48,9 +44,7 @@
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2"
"0.15.0": "0.15.0"
}
},
"centos:6": {
@@ -66,9 +60,7 @@
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.15": "0.15",
"0.15.0": "0.15.0",
"0.15.1": "0.15.1",
"0.15.2": "0.15.2"
"0.15.0": "0.15.0"
}
}
}

View File

@@ -365,7 +365,26 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
tty.debug('PACKAGE LOCK TIMEOUT: {0}'.format(
str(timeout_format_str)))
# Create .spack-db/index.json for the global upstream if it doesn't exist
global_install_tree = spack.config.get(
'upstreams')['global']['install_tree']
global_install_tree = global_install_tree.replace(
'$spack', spack.paths.prefix)
if self.is_upstream:
if global_install_tree in self._db_dir:
if not os.path.isfile(self._index_path):
f = open(self._index_path, "w+")
database = {
'database': {
'installs': {},
'version': str(_db_version)
}
}
try:
sjson.dump(database, f)
except Exception as e:
raise Exception(
"error writing YAML database:", str(e))
self.lock = ForbiddenLock()
else:
self.lock = lk.Lock(self._lock_path,
@@ -1182,6 +1201,9 @@ def _remove(self, spec):
rec.installed = False
return rec.spec
if self.is_upstream:
return rec.spec
del self._data[key]
for dep in rec.spec.dependencies(_tracked_deps):
# FIXME: the two lines below needs to be updated once #11983 is
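
The bootstrap block in the first hunk above writes an empty index when the global upstream database has never been initialized. A sketch of the payload it produces, with the version string as an illustrative placeholder for _db_version:

    import os
    import spack.util.spack_json as sjson

    def write_empty_index(db_dir):
        # db_dir: the upstream's .spack-db directory (hypothetical argument)
        empty_index = {
            'database': {
                'installs': {},   # no packages recorded yet
                'version': '5',   # illustrative; the real value is _db_version
            }
        }
        with open(os.path.join(db_dir, 'index.json'), 'w+') as f:
            sjson.dump(empty_index, f)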

View File

@@ -175,20 +175,9 @@ def activate(
# MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
# become PATH variables.
#
try:
if add_view and default_view_name in env.views:
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.error(e)
tty.die(
'Environment view is broken due to a missing package or repo.\n',
' To activate without views enabled, activate with:\n',
' spack env activate -V {0}\n'.format(env.name),
' To remove it and resolve the issue, '
'force concretize with the command:\n',
' spack -e {0} concretize --force'.format(env.name))
if add_view and default_view_name in env.views:
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
return cmds
@@ -241,15 +230,9 @@ def deactivate(shell='sh'):
cmds += ' unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
cmds += 'fi;\n'
try:
if default_view_name in _active_environment.views:
with spack.store.db.read_transaction():
cmds += _active_environment.rm_default_view_from_shell(shell)
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn('Could not fully deactivate view due to missing package '
'or repo, shell environment may be corrupt.')
if default_view_name in _active_environment.views:
with spack.store.db.read_transaction():
cmds += _active_environment.rm_default_view_from_shell(shell)
tty.debug("Deactivated environmennt '%s'" % _active_environment.name)
_active_environment = None
@@ -544,26 +527,20 @@ def regenerate(self, all_specs, roots):
installed_specs_for_view = set(
s for s in specs_for_view if s in self and s.package.installed)
# To ensure there are no conflicts with packages being installed
# that cannot be resolved or have repos that have been removed
# we always regenerate the view from scratch. We must first make
# sure the root directory exists for the very first time though.
fs.mkdirp(self.root)
with fs.replace_directory_transaction(self.root):
view = self.view()
view = self.view()
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
view.clean()
specs_in_view = set(view.get_all_specs())
tty.msg("Updating view at {0}".format(self.root))
rm_specs = specs_in_view - installed_specs_for_view
add_specs = installed_specs_for_view - specs_in_view
rm_specs = specs_in_view - installed_specs_for_view
add_specs = installed_specs_for_view - specs_in_view
# pass all_specs in, as it's expensive to read all the
# spec.yaml files twice.
view.remove_specs(*rm_specs, with_dependents=False,
all_specs=specs_in_view)
view.add_specs(*add_specs, with_dependencies=False)
# pass all_specs in, as it's expensive to read all the
# spec.yaml files twice.
view.remove_specs(*rm_specs, with_dependents=False,
all_specs=specs_in_view)
view.add_specs(*add_specs, with_dependencies=False)
class Environment(object):
@@ -1134,24 +1111,6 @@ def regenerate_views(self):
for view in self.views.values():
view.regenerate(specs, self.roots())
def check_views(self):
"""Checks if the environments default view can be activated."""
try:
# This is effectively a no-op, but it touches all packages in the
# default view if they are installed.
for view_name, view in self.views.items():
for _, spec in self.concretized_specs():
if spec in view and spec.package.installed:
tty.debug(
'Spec %s in view %s' % (spec.name, view_name))
except (spack.repo.UnknownPackageError,
spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn(
'Environment %s includes out of date packages or repos. '
'Loading the environment view will require reconcretization.'
% self.name)
def _env_modifications_for_default_view(self, reverse=False):
all_mods = spack.util.environment.EnvironmentModifications()

View File

@@ -289,11 +289,10 @@ def candidate_urls(self):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug('Already downloaded {0}'.format(self.archive_file))
tty.msg("Already downloaded %s" % self.archive_file)
return
url = None
errors = []
for url in self.candidate_urls:
try:
partial_file, save_file = self._fetch_from_url(url)
@@ -301,10 +300,8 @@ def fetch(self):
os.rename(partial_file, save_file)
break
except FetchError as e:
errors.append(str(e))
for msg in errors:
tty.debug(msg)
tty.msg(str(e))
pass
if not self.archive_file:
raise FailedDownloadError(url)
@@ -315,7 +312,7 @@ def _fetch_from_url(self, url):
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'
tty.debug('Fetching {0}'.format(url))
tty.msg("Fetching %s" % url)
if partial_file:
save_args = ['-C',
'-', # continue partial downloads
@@ -330,8 +327,6 @@ def _fetch_from_url(self, url):
'-', # print out HTML headers
'-L', # resolve 3xx redirects
url,
'--stderr', # redirect stderr output
'-', # redirect to stdout
]
if not spack.config.get('config:verify_ssl'):
@@ -417,8 +412,8 @@ def cachable(self):
@_needs_stage
def expand(self):
if not self.expand_archive:
tty.debug('Staging unexpanded archive {0} in {1}'
.format(self.archive_file, self.stage.source_path))
tty.msg("Staging unexpanded archive %s in %s" % (
self.archive_file, self.stage.source_path))
if not self.stage.expanded:
mkdirp(self.stage.source_path)
dest = os.path.join(self.stage.source_path,
@@ -426,7 +421,7 @@ def expand(self):
shutil.move(self.archive_file, dest)
return
tty.debug('Staging archive: {0}'.format(self.archive_file))
tty.msg("Staging archive: %s" % self.archive_file)
if not self.archive_file:
raise NoArchiveFileError(
@@ -569,7 +564,7 @@ def fetch(self):
raise
# Notify the user how we fetched.
tty.debug('Using cached archive: {0}'.format(path))
tty.msg('Using cached archive: %s' % path)
class VCSFetchStrategy(FetchStrategy):
@@ -599,8 +594,7 @@ def __init__(self, **kwargs):
@_needs_stage
def check(self):
tty.debug('No checksum needed when fetching with {0}'
.format(self.url_attr))
tty.msg("No checksum needed when fetching with %s" % self.url_attr)
@_needs_stage
def expand(self):
@@ -678,7 +672,7 @@ def go(self):
@_needs_stage
def fetch(self):
tty.debug('Getting go resource: {0}'.format(self.url))
tty.msg("Getting go resource:", self.url)
with working_dir(self.stage.path):
try:
@@ -794,10 +788,10 @@ def _repo_info(self):
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug('Already fetched {0}'.format(self.stage.source_path))
tty.msg("Already fetched {0}".format(self.stage.source_path))
return
tty.debug('Cloning git repository: {0}'.format(self._repo_info()))
tty.msg("Cloning git repository: {0}".format(self._repo_info()))
git = self.git
if self.commit:
@@ -965,10 +959,10 @@ def mirror_id(self):
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug('Already fetched {0}'.format(self.stage.source_path))
tty.msg("Already fetched %s" % self.stage.source_path)
return
tty.debug('Checking out subversion repository: {0}'.format(self.url))
tty.msg("Checking out subversion repository: %s" % self.url)
args = ['checkout', '--force', '--quiet']
if self.revision:
@@ -1074,14 +1068,13 @@ def mirror_id(self):
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug('Already fetched {0}'.format(self.stage.source_path))
tty.msg("Already fetched %s" % self.stage.source_path)
return
args = []
if self.revision:
args.append('at revision %s' % self.revision)
tty.debug('Cloning mercurial repository: {0} {1}'
.format(self.url, args))
tty.msg("Cloning mercurial repository:", self.url, *args)
args = ['clone']
@@ -1137,7 +1130,7 @@ def __init__(self, *args, **kwargs):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug('Already downloaded {0}'.format(self.archive_file))
tty.msg("Already downloaded %s" % self.archive_file)
return
parsed_url = url_util.parse(self.url)
@@ -1145,7 +1138,7 @@ def fetch(self):
raise FetchError(
'S3FetchStrategy can only fetch from s3:// urls.')
tty.debug('Fetching {0}'.format(self.url))
tty.msg("Fetching %s" % self.url)
basename = os.path.basename(parsed_url.path)


@@ -215,18 +215,18 @@ def _hms(seconds):
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
"""
Extract the package from binary cache
Install the package from binary cache
Args:
pkg (PackageBase): the package to install from the binary cache
cache_only (bool): only extract from binary cache
cache_only (bool): only install from binary cache
explicit (bool): ``True`` if installing the package was explicitly
requested by the user, otherwise, ``False``
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
Return:
(bool) ``True`` if the package was extract from binary cache,
(bool) ``True`` if the package was installed from binary cache,
``False`` otherwise
"""
installed_from_cache = _try_install_from_binary_cache(pkg, explicit,
@@ -237,10 +237,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
if cache_only:
tty.die('{0} when cache-only specified'.format(pre))
tty.msg('{0}: installing from source'.format(pre))
tty.debug('{0}: installing from source'.format(pre))
return False
tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
tty.debug('Successfully installed {0} from binary cache'.format(pkg_id))
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec)
return True
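For orientation, here is a minimal runnable sketch of the cache-first install flow that both sides of these hunks implement (only the log wording differs). `try_install_from_binary_cache` and `install_from_source` are hypothetical stand-ins, not the real Spack helpers:

    def try_install_from_binary_cache(pkg, explicit, unsigned):
        return False        # stand-in: pretend the cache had nothing

    def install_from_source(pkg):
        print('building {0} from source'.format(pkg))

    def install(pkg, cache_only=False, explicit=False, unsigned=False):
        if try_install_from_binary_cache(pkg, explicit, unsigned):
            return True                       # cache hit: nothing left to do
        if cache_only:
            # --cache-only was requested but no binary was found
            raise RuntimeError('{0} when cache-only specified'.format(pkg))
        install_from_source(pkg)              # fall back to a source build
        return False

    install('zlib')   # -> building zlib from source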
@@ -275,17 +275,17 @@ def _process_external_package(pkg, explicit):
if spec.external_module:
tty.msg('{0} has external module in {1}'
.format(pre, spec.external_module))
tty.debug('{0} is actually installed in {1}'
.format(pre, spec.external_path))
tty.msg('{0} is actually installed in {1}'
.format(pre, spec.external_path))
else:
tty.msg('{0} externally installed in {1}'
tty.msg("{0} externally installed in {1}"
.format(pre, spec.external_path))
try:
# Check if the package was already registered in the DB.
# If this is the case, then just exit.
rec = spack.store.db.get_record(spec)
tty.debug('{0} already registered in DB'.format(pre))
tty.msg('{0} already registered in DB'.format(pre))
# Update the value of rec.explicit if it is necessary
_update_explicit_entry_in_db(pkg, rec, explicit)
@@ -294,11 +294,11 @@ def _process_external_package(pkg, explicit):
# If not, register it and generate the module file.
# For external packages we just need to run
# post-install hooks to generate module files.
tty.debug('{0} generating module file'.format(pre))
tty.msg('{0} generating module file'.format(pre))
spack.hooks.post_install(spec)
# Add to the DB
tty.debug('{0} registering into DB'.format(pre))
tty.msg('{0} registering into DB'.format(pre))
spack.store.db.add(spec, None, explicit=explicit)
@@ -314,7 +314,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
otherwise, ``False``
Return:
(bool) ``True`` if the package was extracted from binary cache,
(bool) ``True`` if the package was installed from binary cache,
else ``False``
"""
tarball = binary_distribution.download_tarball(binary_spec)
@@ -325,7 +325,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
return False
pkg_id = package_id(pkg)
tty.msg('Extracting {0} from binary cache'.format(pkg_id))
tty.msg('Installing {0} from binary cache'.format(pkg_id))
binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
unsigned=unsigned, force=False)
pkg.installed_from_binary_cache = True
@@ -335,10 +335,10 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
"""
Try to extract the package from binary cache.
Try to install the package from binary cache.
Args:
pkg (PackageBase): the package to be extracted from binary cache
pkg (PackageBase): the package to be installed from binary cache
explicit (bool): the package was explicitly requested by the user
unsigned (bool): ``True`` if binary package signatures to be checked,
otherwise, ``False``
@@ -369,7 +369,7 @@ def _update_explicit_entry_in_db(pkg, rec, explicit):
with spack.store.db.write_transaction():
rec = spack.store.db.get_record(pkg.spec)
message = '{s.name}@{s.version} : marking the package explicit'
tty.debug(message.format(s=pkg.spec))
tty.msg(message.format(s=pkg.spec))
rec.explicit = True
@@ -405,14 +405,9 @@ def dump_packages(spec, path):
source = spack.store.layout.build_packages_path(node)
source_repo_root = os.path.join(source, node.namespace)
# If there's no provenance installed for the package, skip it.
# If it's external, skip it because it either:
# 1) it wasn't built with Spack, so it has no Spack metadata
# 2) it was built by another Spack instance, and we do not
# (currently) use Spack metadata to associate repos with externals
# built by other Spack instances.
# Spack can always get something current from the builtin repo.
if node.external or not os.path.isdir(source_repo_root):
# There's no provenance installed for the source package. Skip it.
# User can always get something current from the builtin repo.
if not os.path.isdir(source_repo_root):
continue
# Create a source repo and get the pkg directory out of it.
@@ -452,8 +447,7 @@ def install_msg(name, pid):
Return:
(str) Colorized installing message
"""
pre = '{0}: '.format(pid) if tty.show_pid() else ''
return pre + colorize('@*{Installing} @*g{%s}' % name)
return '{0}: '.format(pid) + colorize('@*{Installing} @*g{%s}' % name)
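The two `install_msg` variants differ only in whether the PID prefix is gated on the debug level; a sketch of both behaviors, with `show_pid` standing in for `tty.show_pid()` and colorization omitted:

    def install_msg_gated(name, pid, show_pid):
        # PID prefix only when the tty debug level asks for it
        pre = '{0}: '.format(pid) if show_pid else ''
        return pre + 'Installing {0}'.format(name)

    def install_msg_always(name, pid):
        # PID prefix unconditionally
        return '{0}: Installing {1}'.format(pid, name)

    print(install_msg_gated('zlib', 123456, False))   # -> Installing zlib
    print(install_msg_always('zlib', 123456))         # -> 123456: Installing zlib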
def log(pkg):
@@ -1058,15 +1052,11 @@ def _install_task(self, task, **kwargs):
if use_cache and \
_install_from_cache(pkg, cache_only, explicit, unsigned):
self._update_installed(task)
if task.compiler:
spack.compilers.add_compilers_to_config(
spack.compilers.find_compilers([pkg.spec.prefix]))
return
pkg.run_tests = (tests is True or tests and pkg.name in tests)
pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
pre = '{0}{1}:'.format(pid, pkg.name)
pre = '{0}: {1}:'.format(self.pid, pkg.name)
def build_process():
"""
@@ -1085,8 +1075,8 @@ def build_process():
pkg.do_stage()
pkg_id = package_id(pkg)
tty.debug('{0} Building {1} [{2}]'
.format(pre, pkg_id, pkg.build_system_class))
tty.msg('{0} Building {1} [{2}]'
.format(pre, pkg_id, pkg.build_system_class))
# get verbosity from do_install() parameter or saved value
echo = verbose
@@ -1107,8 +1097,8 @@ def build_process():
if install_source and os.path.isdir(source_path):
src_target = os.path.join(pkg.spec.prefix, 'share',
pkg.name, 'src')
tty.debug('{0} Copying source to {1}'
.format(pre, src_target))
tty.msg('{0} Copying source to {1}'
.format(pre, src_target))
fs.install_tree(pkg.stage.source_path, src_target)
# Do the real install in the source directory.
@@ -1130,7 +1120,7 @@ def build_process():
pass
# cache debug settings
debug_level = tty.debug_level()
debug_enabled = tty.is_debug()
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
@@ -1139,11 +1129,11 @@ def build_process():
pkg.phases, pkg._InstallPhase_phases):
with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
inner_debug = tty.is_debug()
tty.set_debug(debug_enabled)
tty.msg("{0} Executing phase: '{1}'"
.format(pre, phase_name))
tty.set_debug(inner_debug_level)
tty.set_debug(inner_debug)
# Redirect stdout and stderr to daemon pipe
phase = getattr(pkg, phase_attr)
@@ -1159,11 +1149,11 @@ def build_process():
pkg._total_time = time.time() - start_time
build_time = pkg._total_time - pkg._fetch_time
tty.debug('{0} Successfully installed {1}'
.format(pre, pkg_id),
'Fetch: {0}. Build: {1}. Total: {2}.'
.format(_hms(pkg._fetch_time), _hms(build_time),
_hms(pkg._total_time)))
tty.msg('{0} Successfully installed {1}'
.format(pre, pkg_id),
'Fetch: {0}. Build: {1}. Total: {2}.'
.format(_hms(pkg._fetch_time), _hms(build_time),
_hms(pkg._total_time)))
_print_installed_pkg(pkg.prefix)
# preserve verbosity across runs
@@ -1194,8 +1184,7 @@ def build_process():
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
pre = '{0}'.format(self.pid) if tty.show_pid() else ''
tty.debug('{0}{1}'.format(pid, str(e)))
tty.debug('{0} {1}'.format(self.pid, str(e)))
tty.debug('Package stage directory : {0}'
.format(pkg.stage.source_path))


@@ -362,9 +362,8 @@ def make_argument_parser(**kwargs):
'-C', '--config-scope', dest='config_scopes', action='append',
metavar='DIR', help="add a custom configuration scope")
parser.add_argument(
'-d', '--debug', action='count', default=0,
help="write out debug messages "
"(more d's for more verbosity: -d, -dd, -ddd, etc.)")
'-d', '--debug', action='store_true',
help="write out debug logs during compile")
parser.add_argument(
'--timestamp', action='store_true',
help="Add a timestamp to tty output")
@@ -439,7 +438,7 @@ def setup_main_options(args):
tty.set_debug(args.debug)
tty.set_stacktrace(args.stacktrace)
# debug must be set first so that it can even affect behavior of
# debug must be set first so that it can even affect behvaior of
# errors raised by spack.config.
if args.debug:
spack.error.debug = True
@@ -645,6 +644,7 @@ def shell_set(var, value):
other_spack_instances = spack.config.get(
'upstreams') or {}
for install_properties in other_spack_instances.values():
upstream_module_roots = install_properties.get('modules', {})
upstream_module_roots = dict(
@@ -706,7 +706,7 @@ def main(argv=None):
if not args.no_env:
env = ev.find_environment(args)
if env:
ev.activate(env, args.use_env_repo, add_view=False)
ev.activate(env, args.use_env_repo)
# make spack.config aware of any command line configuration scopes
if args.config_scopes:


@@ -215,6 +215,7 @@ def root_path(name):
Returns:
root folder for module file installation
"""
# Root folders where the various module files should be written
roots = spack.config.get('config:module_roots', {})
path = roots.get(name, os.path.join(spack.paths.share_path, name))
@@ -288,6 +289,7 @@ def read_module_indices():
module_type_to_index = {}
module_type_to_root = install_properties.get('modules', {})
for module_type, root in module_type_to_root.items():
root = spack.util.path.canonicalize_path(root)
module_type_to_index[module_type] = read_module_index(root)
module_indices.append(module_type_to_index)
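A runnable sketch of the per-upstream index gathering shown above; `canonicalize_path` and `read_module_index` are simplified stand-ins for the Spack helpers:

    import os

    def canonicalize_path(path):
        # simplified stand-in: expand ~ and make absolute
        return os.path.abspath(os.path.expanduser(path))

    def read_module_index(root):
        # hypothetical stand-in: the real helper reads an index file at root
        return {'index_root': root}

    def read_module_indices(upstreams):
        module_indices = []
        for install_properties in upstreams.values():
            module_type_to_index = {}
            modules = install_properties.get('modules', {})
            for module_type, root in modules.items():
                module_type_to_index[module_type] = read_module_index(
                    canonicalize_path(root))
            module_indices.append(module_type_to_index)
        return module_indices

    print(read_module_indices(
        {'global': {'modules': {'tcl': '~/spack/share/spack/modules'}}}))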


@@ -97,9 +97,6 @@ def __str__(self):
def _detect_crayos_version(cls):
if os.path.isfile(_cle_release_file):
release_attrs = read_cle_release_file()
if 'RELEASE' not in release_attrs:
# This Cray system uses a base OS not CLE/CNL
return None
v = spack.version.Version(release_attrs['RELEASE'])
return v[0]
elif os.path.isfile(_clerelease_file):


@@ -1121,8 +1121,9 @@ def do_fetch(self, mirror_only=False):
raise ValueError("Can only fetch concrete packages.")
if not self.has_code:
tty.debug('No fetch required for {0}: package has no code.'
.format(self.name))
tty.msg(
"No fetch required for %s: package has no code." % self.name
)
start_time = time.time()
checksum = spack.config.get('config:checksum')
@@ -1138,8 +1139,7 @@ def do_fetch(self, mirror_only=False):
ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
default=False)
if ignore_checksum:
tty.debug('Fetching with no checksum. {0}'
.format(ck_msg))
tty.msg("Fetching with no checksum.", ck_msg)
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
@@ -1195,7 +1195,7 @@ def do_patch(self):
# If there are no patches, note it.
if not patches and not has_patch_fun:
tty.debug('No patches needed for {0}'.format(self.name))
tty.msg("No patches needed for %s" % self.name)
return
# Construct paths to special files in the archive dir used to
@@ -1208,15 +1208,15 @@ def do_patch(self):
# If we encounter an archive that failed to patch, restage it
# so that we can apply all the patches again.
if os.path.isfile(bad_file):
tty.debug('Patching failed last time. Restaging.')
tty.msg("Patching failed last time. Restaging.")
self.stage.restage()
# If this file exists, then we already applied all the patches.
if os.path.isfile(good_file):
tty.debug('Already patched {0}'.format(self.name))
tty.msg("Already patched %s" % self.name)
return
elif os.path.isfile(no_patches_file):
tty.debug('No patches needed for {0}'.format(self.name))
tty.msg("No patches needed for %s" % self.name)
return
# Apply all the patches for specs that match this one
@@ -1225,7 +1225,7 @@ def do_patch(self):
try:
with working_dir(self.stage.source_path):
patch.apply(self.stage)
tty.debug('Applied patch {0}'.format(patch.path_or_url))
tty.msg('Applied patch %s' % patch.path_or_url)
patched = True
except spack.error.SpackError as e:
tty.debug(e)
@@ -1239,7 +1239,7 @@ def do_patch(self):
try:
with working_dir(self.stage.source_path):
self.patch()
tty.debug('Ran patch() for {0}'.format(self.name))
tty.msg("Ran patch() for %s" % self.name)
patched = True
except spack.multimethod.NoSuchMethodError:
# We are running a multimethod without a default case.
@@ -1249,12 +1249,12 @@ def do_patch(self):
# directive, AND the patch function didn't apply, say
# no patches are needed. Otherwise, we already
# printed a message for each patch.
tty.debug('No patches needed for {0}'.format(self.name))
tty.msg("No patches needed for %s" % self.name)
except spack.error.SpackError as e:
tty.debug(e)
# Touch bad file if anything goes wrong.
tty.msg('patch() function failed for {0}'.format(self.name))
tty.msg("patch() function failed for %s" % self.name)
touch(bad_file)
raise
@@ -1341,7 +1341,7 @@ def _has_make_target(self, target):
if os.path.exists(makefile):
break
else:
tty.debug('No Makefile found in the build directory')
tty.msg('No Makefile found in the build directory')
return False
# Check if 'target' is a valid target.
@@ -1372,8 +1372,7 @@ def _has_make_target(self, target):
for missing_target_msg in missing_target_msgs:
if missing_target_msg.format(target) in stderr:
tty.debug("Target '{0}' not found in {1}"
.format(target, makefile))
tty.msg("Target '" + target + "' not found in " + makefile)
return False
return True
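A rough, runnable approximation of the target probe above: dry-run make and scan stderr for GNU make's "missing target" complaint (requires `make` on PATH; the message text is what GNU make prints, other make implementations may differ):

    import subprocess

    def has_make_target(target, cwd='.'):
        # 'make -n' prints the recipe without running it; a missing target
        # produces "No rule to make target '<name>'" on stderr
        proc = subprocess.run(['make', '-n', target], cwd=cwd,
                              capture_output=True, text=True)
        return "No rule to make target" not in proc.stderr

    # has_make_target('install') -> True only if the Makefile defines it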
@@ -1401,7 +1400,7 @@ def _has_ninja_target(self, target):
# Check if we have a Ninja build script
if not os.path.exists('build.ninja'):
tty.debug('No Ninja build script found in the build directory')
tty.msg('No Ninja build script found in the build directory')
return False
# Get a list of all targets in the Ninja build script
@@ -1413,8 +1412,7 @@ def _has_ninja_target(self, target):
if line.startswith(target + ':')]
if not matches:
tty.debug("Target '{0}' not found in build.ninja"
.format(target))
tty.msg("Target '" + target + "' not found in build.ninja")
return False
return True
@@ -1721,12 +1719,11 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
if specs:
if deprecator:
spack.store.db.deprecate(specs[0], deprecator)
tty.debug('Deprecating stale DB entry for {0}'
.format(spec.short_spec))
tty.msg("Deprecating stale DB entry for "
"%s" % spec.short_spec)
else:
spack.store.db.remove(specs[0])
tty.debug('Removed stale DB entry for {0}'
.format(spec.short_spec))
tty.msg("Removed stale DB entry for %s" % spec.short_spec)
return
else:
raise InstallError(str(spec) + " is not installed.")
@@ -1770,7 +1767,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
if pkg is not None:
spack.hooks.post_uninstall(spec)
tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))
tty.msg("Successfully uninstalled %s" % spec.short_spec)
def do_uninstall(self, force=False):
"""Uninstall this package by spec."""


@@ -16,6 +16,9 @@
#: This file lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
#: User configuration location
user_config_path = os.path.expanduser('~/.spack')
#: synonym for prefix
spack_root = prefix
@@ -38,6 +41,8 @@
test_path = os.path.join(module_path, "test")
hooks_path = os.path.join(module_path, "hooks")
var_path = os.path.join(prefix, "var", "spack")
user_var_path = os.path.join(user_config_path, "var", "spack")
stage_path = os.path.join(user_var_path, "stage")
repos_path = os.path.join(var_path, "repos")
share_path = os.path.join(prefix, "share", "spack")
@@ -45,9 +50,6 @@
packages_path = os.path.join(repos_path, "builtin")
mock_packages_path = os.path.join(repos_path, "builtin.mock")
#: User configuration location
user_config_path = os.path.expanduser('~/.spack')
opt_path = os.path.join(prefix, "opt")
etc_path = os.path.join(prefix, "etc")


@@ -20,7 +20,7 @@
_craype_name_to_target_name = {
'x86-cascadelake': 'cascadelake',
'x86-naples': 'zen',
'x86-rome': 'zen2',
'x86-rome': 'zen', # Cheating because we have the wrong modules on rzcrayz
'x86-skylake': 'skylake_avx512',
'mic-knl': 'mic_knl',
'interlagos': 'bulldozer',


@@ -32,7 +32,7 @@
'enum': [
'develop',
'0.14', '0.14.0', '0.14.1', '0.14.2',
'0.15', '0.15.0', '0.15.1', '0.15.2',
'0.15', '0.15.0',
]
}
},


@@ -154,6 +154,7 @@ def get_stage_root():
if _stage_root is None:
candidates = spack.config.get('config:build_stage')
if isinstance(candidates, string_types):
candidates = [candidates]
@@ -414,11 +415,10 @@ def fetch(self, mirror_only=False):
# Join URLs of mirror roots with mirror paths. Because
# urljoin() will strip everything past the final '/' in
# the root, so we add a '/' if it is not present.
mirror_urls = []
urls = []
for mirror in spack.mirror.MirrorCollection().values():
for rel_path in self.mirror_paths:
mirror_urls.append(
url_util.join(mirror.fetch_url, rel_path))
urls.append(url_util.join(mirror.fetch_url, rel_path))
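The `urljoin()` pitfall the comment above refers to is easy to demonstrate with the standard library (the mirror URL is a made-up example):

    from urllib.parse import urljoin

    # Without a trailing '/', everything past the last '/' is dropped:
    print(urljoin('https://mirror.example.com/spack', 'libelf-0.8.13.tar.gz'))
    # -> https://mirror.example.com/libelf-0.8.13.tar.gz

    # With the trailing '/', the relative path lands inside the directory:
    print(urljoin('https://mirror.example.com/spack/', 'libelf-0.8.13.tar.gz'))
    # -> https://mirror.example.com/spack/libelf-0.8.13.tar.gz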
# If this archive is normally fetched from a tarball URL,
# then use the same digest. `spack mirror` ensures that
@@ -436,8 +436,7 @@ def fetch(self, mirror_only=False):
self.skip_checksum_for_mirror = not bool(digest)
# Add URL strategies for all the mirrors with the digest
# Insert fetchers in the order that the URLs are provided.
for url in reversed(mirror_urls):
for url in urls:
fetchers.insert(
0, fs.from_url_scheme(
url, digest, expand=expand, extension=extension))
@@ -459,11 +458,6 @@ def generate_fetchers():
for fetcher in dynamic_fetchers:
yield fetcher
def print_errors(errors):
for msg in errors:
tty.debug(msg)
errors = []
for fetcher in generate_fetchers():
try:
fetcher.stage = self
@@ -474,18 +468,14 @@ def print_errors(errors):
# Don't bother reporting when something is not cached.
continue
except spack.error.SpackError as e:
errors.append('Fetching from {0} failed.'.format(fetcher))
tty.msg("Fetching from %s failed." % fetcher)
tty.debug(e)
continue
else:
print_errors(errors)
err_msg = 'All fetchers failed for {0}'.format(self.name)
err_msg = "All fetchers failed for %s" % self.name
self.fetcher = self.default_fetcher
raise fs.FetchError(err_msg, None)
print_errors(errors)
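Both variants of this hunk walk the same loop: try each fetcher in turn, remember failures, and only surface them if nothing succeeds. A compact sketch under that reading, with stand-in names:

    class FetchError(Exception):
        pass

    def fetch_first(fetchers, debug=print):
        errors = []
        for fetch in fetchers:
            try:
                fetch()
                return fetch              # first success wins
            except Exception as e:
                errors.append('Fetching with {0} failed: {1}'.format(fetch, e))
        for msg in errors:                # reported only when all fail
            debug(msg)
        raise FetchError('All fetchers failed')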
def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
@@ -546,9 +536,9 @@ def expand_archive(self):
downloaded."""
if not self.expanded:
self.fetcher.expand()
tty.debug('Created stage in {0}'.format(self.path))
tty.msg("Created stage in %s" % self.path)
else:
tty.debug('Already staged {0} in {1}'.format(self.name, self.path))
tty.msg("Already staged %s in %s" % (self.name, self.path))
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands
@@ -719,13 +709,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
pass
def fetch(self, *args, **kwargs):
tty.debug('No need to fetch for DIY.')
tty.msg("No need to fetch for DIY.")
def check(self):
tty.debug('No checksum needed for DIY.')
tty.msg("No checksum needed for DIY.")
def expand_archive(self):
tty.debug('Using source directory: {0}'.format(self.source_path))
tty.msg("Using source directory: %s" % self.source_path)
@property
def expanded(self):
@@ -743,7 +733,7 @@ def destroy(self):
pass
def cache_local(self):
tty.debug('Sources for DIY stages are not cached')
tty.msg("Sources for DIY stages are not cached")
def ensure_access(file):
@@ -793,12 +783,12 @@ def get_checksums_for_versions(
max_len = max(len(str(v)) for v in sorted_versions)
num_ver = len(sorted_versions)
tty.debug('Found {0} version{1} of {2}:'.format(
num_ver, '' if num_ver == 1 else 's', name),
'',
*spack.cmd.elide_list(
['{0:{1}} {2}'.format(str(v), max_len, url_dict[v])
for v in sorted_versions]))
tty.msg("Found {0} version{1} of {2}:".format(
num_ver, '' if num_ver == 1 else 's', name),
"",
*spack.cmd.elide_list(
["{0:{1}} {2}".format(str(v), max_len, url_dict[v])
for v in sorted_versions]))
print()
if batch:
@@ -813,10 +803,9 @@ def get_checksums_for_versions(
versions = sorted_versions[:archives_to_fetch]
urls = [url_dict[v] for v in versions]
tty.debug('Downloading...')
tty.msg("Downloading...")
version_hashes = []
i = 0
errors = []
for url, version in zip(urls, versions):
try:
if fetch_options:
@@ -837,12 +826,10 @@ def get_checksums_for_versions(
hashlib.sha256, stage.archive_file)))
i += 1
except FailedDownloadError:
errors.append('Failed to fetch {0}'.format(url))
tty.msg("Failed to fetch {0}".format(url))
except Exception as e:
tty.msg('Something failed on {0}, skipping. ({1})'.format(url, e))
for msg in errors:
tty.debug(msg)
tty.msg("Something failed on {0}, skipping.".format(url),
" ({0})".format(e))
if not version_hashes:
tty.die("Could not fetch any versions for {0}".format(name))
@@ -857,8 +844,8 @@ def get_checksums_for_versions(
])
num_hash = len(version_hashes)
tty.debug('Checksummed {0} version{1} of {2}:'.format(
num_hash, '' if num_hash == 1 else 's', name))
tty.msg("Checksummed {0} version{1} of {2}:".format(
num_hash, '' if num_hash == 1 else 's', name))
return version_lines


@@ -34,7 +34,7 @@
import spack.directory_layout
#: default installation root, relative to the Spack install path
default_root = os.path.join(spack.paths.opt_path, 'spack')
default_root = os.path.join(spack.paths.user_config_path, 'opt/spack')
class Store(object):
@@ -70,9 +70,10 @@ def reindex(self):
def _store():
"""Get the singleton store instance."""
root = spack.config.get('config:install_tree', default_root)
root = spack.util.path.canonicalize_path(root)
root = spack.config.get('config:active_tree', default_root)
# Canonicalize Path for Root regardless of origin
root = spack.util.path.canonicalize_path(root)
return Store(root,
spack.config.get('config:install_path_scheme'),
spack.config.get('config:install_hash_length'))
@@ -88,11 +89,19 @@ def _store():
def retrieve_upstream_dbs():
other_spack_instances = spack.config.get('upstreams', {})
global_fallback = {'global': {'install_tree': '$spack/opt/spack',
'modules':
{'tcl': '$spack/share/spack/modules',
'lmod': '$spack/share/spack/lmod',
'dotkit': '$spack/share/spack/dotkit'}}}
other_spack_instances = spack.config.get('upstreams',
global_fallback)
install_roots = []
for install_properties in other_spack_instances.values():
install_roots.append(install_properties['install_tree'])
install_roots.append(spack.util.path.canonicalize_path(
install_properties['install_tree']))
return _construct_upstream_dbs_from_install_roots(install_roots)
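The fallback dict above means a stock checkout always has a 'global' upstream even with no upstreams.yaml configured. A sketch of the lookup-with-default pattern plus path canonicalization (the `$spack` expansion and prefix are simplified assumptions, not Spack's real helper):

    import os

    def canonicalize_path(path, spack_prefix='/opt/spack-src'):
        # simplified stand-in: expand '$spack' and '~'
        return os.path.expanduser(path.replace('$spack', spack_prefix))

    global_fallback = {'global': {'install_tree': '$spack/opt/spack'}}

    def upstream_install_roots(configured_upstreams=None):
        upstreams = configured_upstreams or global_fallback
        return [canonicalize_path(props['install_tree'])
                for props in upstreams.values()]

    print(upstream_install_roots())   # -> ['/opt/spack-src/opt/spack']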


@@ -1,36 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
from llnl.util.filesystem import mkdirp, touch
from spack.stage import Stage
from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
def test_fetch_missing_cache(tmpdir):
"""Ensure raise a missing cache file."""
testpath = str(tmpdir)
fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
with Stage(fetcher, path=testpath):
with pytest.raises(NoCacheError, match=r'No cache'):
fetcher.fetch()
def test_fetch(tmpdir):
"""Ensure a fetch after expanding is effectively a no-op."""
testpath = str(tmpdir)
cache = os.path.join(testpath, 'cache.tar.gz')
touch(cache)
url = 'file:///{0}'.format(cache)
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
mkdirp(source_path)
fetcher.fetch()


@@ -751,6 +751,7 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
logs_dir_list = os.listdir(logs_dir.strpath)
assert('spack-build-env.txt' in logs_dir_list)
assert('spack-build-out.txt' in logs_dir_list)
# Also just make sure that if something goes wrong with the


@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
import sys
import pytest
@@ -16,7 +14,7 @@
@pytest.fixture
def no_compilers_yaml(mutable_config):
def no_compilers_yaml(mutable_config, monkeypatch):
"""Creates a temporary configuration without compilers.yaml"""
for scope, local_config in mutable_config.scopes.items():
@@ -66,7 +64,7 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
with open('gcc', 'w') as f:
f.write("""\
#!/bin/sh
#!/bin/bash
echo "0.0.0"
""")
os.chmod('gcc', 0o700)
@@ -77,33 +75,6 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
assert 'gcc' in output
@pytest.mark.regression('17589')
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
# make a script to emulate apple gcc's version args
with open('gcc', 'w') as f:
f.write("""\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""")
os.chmod('gcc', 0o700)
os.environ['PATH'] = str(tmpdir)
output = compiler('find', '--scope=site')
assert 'gcc' not in output
def test_compiler_remove(mutable_config, mock_packages):
args = spack.util.pattern.Bunch(
all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None
@@ -132,121 +103,3 @@ def test_compiler_add(
new_compiler = new_compilers - old_compilers
assert any(c.version == spack.version.Version(mock_compiler_version)
for c in new_compiler)
@pytest.fixture
def clangdir(tmpdir):
"""Create a directory with some dummy compiler scripts in it.
Scripts are:
- clang
- clang++
- gcc
- g++
- gfortran-8
"""
with tmpdir.as_cwd():
with open('clang', 'w') as f:
f.write("""\
#!/bin/sh
if [ "$1" = "--version" ]; then
echo "clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
exit 1
fi
""")
shutil.copy('clang', 'clang++')
gcc_script = """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
echo "8.4.0"
elif [ "$1" = "--version" ]; then
echo "{0} (GCC) 8.4.0 20120313 (Red Hat 8.4.0-1)"
echo "Copyright (C) 2010 Free Software Foundation, Inc."
else
echo "{1}: fatal error: no input files"
echo "compilation terminated."
exit 1
fi
"""
with open('gcc-8', 'w') as f:
f.write(gcc_script.format('gcc', 'gcc-8'))
with open('g++-8', 'w') as f:
f.write(gcc_script.format('g++', 'g++-8'))
with open('gfortran-8', 'w') as f:
f.write(gcc_script.format('GNU Fortran', 'gfortran-8'))
os.chmod('clang', 0o700)
os.chmod('clang++', 0o700)
os.chmod('gcc-8', 0o700)
os.chmod('g++-8', 0o700)
os.chmod('gfortran-8', 0o700)
yield tmpdir
@pytest.mark.regression('17590')
def test_compiler_find_mixed_suffixes(
no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll mix compilers with different suffixes when necessary.
"""
os.environ['PATH'] = str(clangdir)
output = compiler('find', '--scope=site')
assert 'clang@11.0.0' in output
assert 'gcc@8.4.0' in output
config = spack.compilers.get_compiler_config('site', False)
clang = next(c['compiler'] for c in config
if c['compiler']['spec'] == 'clang@11.0.0')
gcc = next(c['compiler'] for c in config
if c['compiler']['spec'] == 'gcc@8.4.0')
gfortran_path = str(clangdir.join('gfortran-8'))
assert clang['paths'] == {
'cc': str(clangdir.join('clang')),
'cxx': str(clangdir.join('clang++')),
# we only auto-detect mixed clang on macos
'f77': gfortran_path if sys.platform == 'darwin' else None,
'fc': gfortran_path if sys.platform == 'darwin' else None,
}
assert gcc['paths'] == {
'cc': str(clangdir.join('gcc-8')),
'cxx': str(clangdir.join('g++-8')),
'f77': gfortran_path,
'fc': gfortran_path,
}
@pytest.mark.regression('17590')
def test_compiler_find_prefer_no_suffix(
no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice.
"""
with clangdir.as_cwd():
shutil.copy('clang', 'clang-gpu')
shutil.copy('clang++', 'clang++-gpu')
os.chmod('clang-gpu', 0o700)
os.chmod('clang++-gpu', 0o700)
os.environ['PATH'] = str(clangdir)
output = compiler('find', '--scope=site')
assert 'clang@11.0.0' in output
assert 'gcc@8.4.0' in output
config = spack.compilers.get_compiler_config('site', False)
clang = next(c['compiler'] for c in config
if c['compiler']['spec'] == 'clang@11.0.0')
assert clang['paths']['cc'] == str(clangdir.join('clang'))
assert clang['paths']['cxx'] == str(clangdir.join('clang++'))


@@ -117,7 +117,7 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch,
non_deprecated = spack.store.db.query()
uninstall('-y', 'libelf@0.8.10')
uninstall('-y', '-g', 'libelf@0.8.10')
assert spack.store.db.query() == spack.store.db.query(installed=any)
assert spack.store.db.query() == non_deprecated


@@ -16,7 +16,7 @@
from spack.cmd.env import _env_create
from spack.spec import Spec
from spack.main import SpackCommand, SpackCommandError
from spack.main import SpackCommand
from spack.stage import stage_prefix
from spack.util.mock_package import MockPackageMultiRepo
@@ -284,45 +284,6 @@ def test_environment_status(capsys, tmpdir):
assert 'in current directory' in env('status')
def test_env_status_broken_view(
mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
install_mockery
):
with ev.create('test'):
install('trivial-install-test-package')
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and warns the user
new_repo = MockPackageMultiRepo()
with spack.repo.swap(new_repo):
output = env('status')
assert 'In environment test' in output
assert 'Environment test includes out of date' in output
# Test that the warning goes away when it's fixed
output = env('status')
assert 'In environment test' in output
assert 'Environment test includes out of date' not in output
def test_env_activate_broken_view(
mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
install_mockery
):
with ev.create('test'):
install('trivial-install-test-package')
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and fails gracefully
new_repo = MockPackageMultiRepo()
with spack.repo.swap(new_repo):
with pytest.raises(SpackCommandError):
env('activate', '--sh', 'test')
# test replacing repo fixes it
env('activate', '--sh', 'test')
def test_to_lockfile_dict():
e = ev.create('test')
e.add('mpileaks')


@@ -29,9 +29,6 @@
install = SpackCommand('install')
env = SpackCommand('env')
add = SpackCommand('add')
mirror = SpackCommand('mirror')
uninstall = SpackCommand('uninstall')
buildcache = SpackCommand('buildcache')
@pytest.fixture()
@@ -58,6 +55,46 @@ def test_install_package_and_dependency(
assert 'errors="0"' in content
def test_global_install_package_and_dependency(
tmpdir, mock_packages, mock_archive, mock_fetch, config,
install_mockery):
with tmpdir.as_cwd():
install('--global',
'--log-format=junit',
'--log-file=test.xml',
'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
assert filename in files
content = filename.open().read()
assert 'tests="2"' in content
assert 'failures="0"' in content
assert 'errors="0"' in content
def test_upstream_install_package_and_dependency(
tmpdir, mock_packages, mock_archive, mock_fetch, config,
install_mockery):
with tmpdir.as_cwd():
install('--upstream=global',
'--log-format=junit',
'--log-file=test.xml',
'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
assert filename in files
content = filename.open().read()
assert 'tests="2"' in content
assert 'failures="0"' in content
assert 'errors="0"' in content
@pytest.mark.disable_clean_stage_check
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
@@ -133,8 +170,8 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
# make sure that output from the actual package file appears in the
# right place in the build log.
assert "BEFORE INSTALL" in out
assert "AFTER INSTALL" in out
assert re.search(r"BEFORE INSTALL\n==>( \[.+\])? './configure'", out)
assert "'install'\nAFTER INSTALL" in out
@pytest.mark.disable_clean_stage_check
@@ -736,40 +773,6 @@ def test_compiler_bootstrap(
install('a%gcc@2.0')
def test_compiler_bootstrap_from_binary_mirror(
install_mockery_mutable_config, mock_packages, mock_fetch,
mock_archive, mutable_config, monkeypatch, tmpdir):
"""Make sure installing compiler from buildcache registers compiler"""
# Create a temp mirror directory for buildcache usage
mirror_dir = tmpdir.join('mirror_dir')
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
# Install a compiler, because we want to put it in a buildcache
install('gcc@2.0')
# Put installed compiler in the buildcache
buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, 'gcc@2.0')
# Now uninstall the compiler
uninstall('-y', 'gcc@2.0')
monkeypatch.setattr(spack.concretize.Concretizer,
'check_for_compiler_existence', False)
spack.config.set('config:install_missing_compilers', True)
assert CompilerSpec('gcc@2.0') not in compilers.all_compiler_specs()
# Configure the mirror where we put that buildcache w/ the compiler
mirror('add', 'test-mirror', mirror_url)
# Now make sure that when the compiler is installed from binary mirror,
# it also gets configured as a compiler. Test succeeds if it does not
# raise an error
install('--no-check-signature', '--cache-only', '--only',
'dependencies', 'b%gcc@2.0')
install('--no-cache', '--only', 'package', 'b%gcc@2.0')
@pytest.mark.regression('16221')
def test_compiler_bootstrap_already_installed(
install_mockery_mutable_config, mock_packages, mock_fetch,
@@ -783,27 +786,3 @@ def test_compiler_bootstrap_already_installed(
# Test succeeds if it does not raise an error
install('gcc@2.0')
install('a%gcc@2.0')
def test_install_fails_no_args(tmpdir):
# ensure no spack.yaml in directory
with tmpdir.as_cwd():
output = install(fail_on_error=False)
# check we got the short version of the error message with no spack.yaml
assert 'requires a package argument or active environment' in output
assert 'spack env activate .' not in output
assert 'using the `spack.yaml` in this directory' not in output
def test_install_fails_no_args_suggests_env_activation(tmpdir):
# ensure spack.yaml in directory
tmpdir.ensure('spack.yaml')
with tmpdir.as_cwd():
output = install(fail_on_error=False)
# check we got the long version of the error message with spack.yaml
assert 'requires a package argument or active environment' in output
assert 'spack env activate .' in output
assert 'using the `spack.yaml` in this directory' in output


@@ -81,6 +81,41 @@ def test_force_uninstall_spec_with_ref_count_not_zero(
@pytest.mark.db
@pytest.mark.usefixtures('mutable_database')
def test_global_recursive_uninstall():
"""Test recursive uninstall from global upstream"""
uninstall('-g', '-y', '-a', '--dependents', 'callpath')
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 8
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
assert len(mpileaks_specs) == 0
assert len(callpath_specs) == 0
assert len(mpi_specs) == 3
@pytest.mark.db
@pytest.mark.usefixtures('mutable_database')
def test_upstream_recursive_uninstall():
"""Test recursive uninstall from specified upstream"""
uninstall('--upstream=global', '-y', '-a', '--dependents', 'callpath')
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 8
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
assert len(mpileaks_specs) == 0
assert len(callpath_specs) == 0
assert len(mpi_specs) == 3
def test_force_uninstall_and_reinstall_by_hash(mutable_database):
"""Test forced uninstall and reinstall of old specs."""
# this is the spec to be removed


@@ -1,5 +1,5 @@
config:
install_tree: $spack/opt/spack
install_tree: ~/.spack/opt/spack
template_dirs:
- $spack/share/spack/templates
- $spack/lib/spack/spack/test/data/templates
@@ -7,7 +7,7 @@ config:
build_stage:
- $tempdir/$user/spack-stage
- ~/.spack/stage
source_cache: $spack/var/spack/cache
source_cache: ~/.spack/var/spack/cache
misc_cache: ~/.spack/cache
verify_ssl: true
checksum: true


@@ -0,0 +1,7 @@
upstreams:
global:
install_tree: $spack/opt/spack
modules:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
dotkit: $spack/share/spack/dotkit


@@ -0,0 +1,7 @@
upstreams:
global:
install_tree: $spack/opt/spack
modules:
tcl: $spack/share/spack/modules
lmod: $spack/share/spack/lmod
dotkit: $spack/share/spack/dotkit


@@ -13,6 +13,7 @@
import os
import pytest
import json
import shutil
try:
import uuid
_use_uuid = True
@@ -48,6 +49,19 @@ def test_store(tmpdir):
spack.store.store = real_store
@pytest.fixture()
def test_global_db_initializtion():
global_store = spack.store.store
global_db_path = '$spack/opt/spack'
global_db_path = spack.util.path.canonicalize_path(global_db_path)
shutil.rmtree(os.path.join(global_db_path, '.spack-db'))
global_store = spack.store.Store(str(global_db_path))
yield
spack.store.store = global_store
@pytest.fixture()
def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))


@@ -344,9 +344,10 @@ def test_nosource_pkg_install(
# Make sure install works even though there is no associated code.
pkg.do_install()
out = capfd.readouterr()
assert "Installing dependency-install" in out[0]
assert "Missing a source id for nosource" in out[1]
# Also make sure an error is raised if `do_fetch` is called.
pkg.do_fetch()
assert "No fetch required for nosource" in capfd.readouterr()[0]
def test_nosource_pkg_install_post_install(


@@ -99,21 +99,10 @@ def test_hms(sec, result):
assert inst._hms(sec) == result
def test_install_msg(monkeypatch):
"""Test results of call to install_msg based on debug level."""
def test_install_msg():
name = 'some-package'
pid = 123456
install_msg = 'Installing {0}'.format(name)
monkeypatch.setattr(tty, '_debug', 0)
assert inst.install_msg(name, pid) == install_msg
monkeypatch.setattr(tty, '_debug', 1)
assert inst.install_msg(name, pid) == install_msg
# Expect the PID to be added at debug level 2
monkeypatch.setattr(tty, '_debug', 2)
expected = "{0}: {1}".format(pid, install_msg)
expected = "{0}: Installing {1}".format(pid, name)
assert inst.install_msg(name, pid) == expected
@@ -162,6 +151,7 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd):
out = capfd.readouterr()[0]
assert 'has external module in {0}'.format(spec.external_module) in out
assert 'is actually installed in {0}'.format(spec.external_path) in out
def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
@@ -190,7 +180,7 @@ def _spec(spec):
spec = spack.spec.Spec('a').concretized()
assert inst._process_binary_cache_tarball(spec.package, spec, False, False)
assert 'Extracting a from binary cache' in capfd.readouterr()[0]
assert 'Installing a from binary cache' in capfd.readouterr()[0]
def test_try_install_from_binary_cache(install_mockery, mock_packages,


@@ -1,87 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import llnl.util.tty as tty
def test_get_timestamp(monkeypatch):
"""Ensure the results of get_timestamp are reasonable."""
# Debug disabled should return an empty string
monkeypatch.setattr(tty, '_debug', 0)
assert not tty.get_timestamp(False), 'Expected an empty string'
# Debug disabled but force the timestamp should return a string
assert tty.get_timestamp(True), 'Expected a timestamp/non-empty string'
pid_str = ' {0}'.format(os.getpid())
# Level 1 debugging should return a timestamp WITHOUT the pid
monkeypatch.setattr(tty, '_debug', 1)
out_str = tty.get_timestamp(False)
assert out_str and pid_str not in out_str, 'Expected no PID in results'
# Level 2 debugging should also return a timestamp WITH the pid
monkeypatch.setattr(tty, '_debug', 2)
out_str = tty.get_timestamp(False)
assert out_str and pid_str in out_str, 'Expected PID in results'
@pytest.mark.parametrize('msg,enabled,trace,newline', [
('', False, False, False), # Nothing is output
(Exception(''), True, False, True), # Exception output
('trace', True, True, False), # stacktrace output
('newline', True, False, True), # newline in output
('no newline', True, False, False) # no newline output
])
def test_msg(capfd, monkeypatch, enabled, msg, trace, newline):
"""Ensure the output from msg with options is appropriate."""
# temporarily use the parameterized settings
monkeypatch.setattr(tty, '_msg_enabled', enabled)
monkeypatch.setattr(tty, '_stacktrace', trace)
expected = [msg if isinstance(msg, str) else 'Exception: ']
if newline:
expected[0] = '{0}\n'.format(expected[0])
if trace:
expected.insert(0, '.py')
tty.msg(msg, newline=newline)
out = capfd.readouterr()[0]
for msg in expected:
assert msg in out
@pytest.mark.parametrize('msg,trace,wrap', [
(Exception(''), False, False), # Exception output
('trace', True, False), # stacktrace output
('wrap', False, True), # wrap in output
])
def test_info(capfd, monkeypatch, msg, trace, wrap):
"""Ensure the output from info with options is appropriate."""
# temporarily use the parameterized settings
monkeypatch.setattr(tty, '_stacktrace', trace)
expected = [msg if isinstance(msg, str) else 'Exception: ']
if trace:
expected.insert(0, '.py')
extra = 'This extra argument *should* make for a sufficiently long line' \
' that needs to be wrapped if the option is enabled.'
args = [msg, extra]
num_newlines = 3 if wrap else 2
tty.info(*args, wrap=wrap, countback=3)
out = capfd.readouterr()[0]
for msg in expected:
assert msg in out
assert out.count('\n') == num_newlines


@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import spack.fetch_strategy as spack_fs
@@ -28,19 +27,3 @@ def test_s3fetchstrategy_bad_url(tmpdir):
assert fetcher.archive_file is None
with pytest.raises(spack_fs.FetchError):
fetcher.fetch()
def test_s3fetchstrategy_downloaded(tmpdir):
"""Ensure fetch with archive file already downloaded is a noop."""
testpath = str(tmpdir)
archive = os.path.join(testpath, 's3.tar.gz')
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):
return archive
url = 's3:///{0}'.format(archive)
fetcher = Archived_S3FS(url=url)
with spack_stage.Stage(fetcher, path=testpath):
fetcher.fetch()


@@ -6,10 +6,7 @@
import sys
import os
import pytest
import llnl.util.filesystem as fs
import spack
import spack.util.executable as ex
from spack.hooks.sbang import filter_shebangs_in_directory
@@ -38,18 +35,3 @@ def test_read_unicode(tmpdir, working_env):
# read the unicode back in and see whether things work
script = ex.Executable('./%s' % script_name)
assert u'\xc3' == script(output=str).strip()
def test_which(tmpdir):
os.environ["PATH"] = str(tmpdir)
assert ex.which("spack-test-exe") is None
with pytest.raises(ex.CommandNotFoundError):
ex.which("spack-test-exe", required=True)
with tmpdir.as_cwd():
fs.touch("spack-test-exe")
fs.set_executable('spack-test-exe')
exe = ex.which("spack-test-exe")
assert exe is not None
assert exe.path == str(tmpdir.join("spack-test-exe"))


@@ -239,8 +239,7 @@ def which_string(*args, **kwargs):
return exe
if required:
raise CommandNotFoundError(
"spack requires '%s'. Make sure it is in your path." % args[0])
tty.die("spack requires '%s'. Make sure it is in your path." % args[0])
return None
@@ -267,7 +266,3 @@ def which(*args, **kwargs):
class ProcessError(spack.error.SpackError):
"""ProcessErrors are raised when Executables exit with an error code."""
class CommandNotFoundError(spack.error.SpackError):
"""Raised when ``which()`` can't find a required executable."""


@@ -77,8 +77,6 @@ def __init__(self):
def get(self, spec):
if not isinstance(spec, spack.spec.Spec):
spec = Spec(spec)
if spec.name not in self.spec_to_pkg:
raise spack.repo.UnknownPackageError(spec.fullname)
return self.spec_to_pkg[spec.name]
def get_pkg_class(self, name):


@@ -1,38 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQENBF1IgqcBCADqSIBM0TT4+6Acv6SUpQ2l1Ql+UVRtJ74VGFOw+8I8aBWcBryB
wNsS/Drxn9M9rX8il2aGtAmwc1dhTh0JvdZO7KqG8Q4vvWOytdLnGSE61LV4147q
S/dJiYH2DCvhMKpOByIsEiuoTrUHzd1EQBnEPSwAQV8oWPrc1++f3iYmRemsOBCT
BldAu7Y5RwjI3qQ6GazoCF5rd1uyiMYrpT4amEKFE91VRe+IG8XfEaSTapOc/hO3
Sw4fzPelA2qD12I+JMj56vM0fQy3TXD5qngIb+leb2jGI+0bTz8RGS0xSMYVvftA
upzQPaQIfzijVBt3tFSayx/NXKR0p+EuCqGBABEBAAG0MFNwYWNrIEJ1aWxkIFBp
cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQgAOBYhBDHI
4nh6FErErdiO0pX4aBGV4jnYBQJdSIKnAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B
AheAAAoJEJX4aBGV4jnYpf0IAJDYEjpm0h1pNswTvmnEhgNVbojCGRfAts7F5uf8
IFXGafKQsekMWZh0Ig0YXVn72jsOuNK/+keErMfXM3DFNTq0Ki7mcFedR9r5EfLf
4YW2n6mphsfMgsg8NwKVLFYWyhQQ4OzhdydPxkGVhEebHwfHNQ3aIcqbFmzkhxnX
CIYh2Flf3T306tKX4lXbhsXKG1L/bLtDiFRaMCBp66HGZ8u9Dbyy/W8aDwyx4duD
MG+y2OrhOf+zEu3ZPFyc/jsjmfnUtIfQVyRajh/8vh+i9fkvFlLaOQittNElt3z1
8+ybGjE9qWY/mvR2ZqnP8SVkGvxSpBVfVXiFFdepvuPAcLu5AQ0EXUiCpwEIAJ2s
npNBAVocDUSdOF/Z/eCRvy3epuYm5f1Ge1ao9K2qWYno2FatnsYxK4qqB5yGRkfj
sEzAGP8JtJvqDSuB5Xk7CIjRNOwoSB3hqvmxWh2h+HsITUhMl11FZ0Cllz+etXcK
APz2ZHSKnA3R8uf4JzIr1cHLS+gDBoj8NgBCZhcyva2b5UC///FLm1+/Lpvekd0U
n7B524hbXhFUG+UMfHO/U1c4TvCMt7RGMoWUtRzfO6XB1VQCwWJBVcVGl8Yy59Zk
3K76VbFWQWOq6fRBE0xHBAga7pOgCc9qrb+FGl1IHUT8aV8CzkxckHlNb3PlntmE
lXZLPcGFWaPtGtuIJVsAEQEAAYkCbAQYAQgAIBYhBDHI4nh6FErErdiO0pX4aBGV
4jnYBQJdSIKnAhsuAUAJEJX4aBGV4jnYwHQgBBkBCAAdFiEEneR3pKqi9Rnivv07
CYCNVr37XP0FAl1IgqcACgkQCYCNVr37XP13RQf/Ttxidgo9upF8jxrWnT5YhM6D
ozzGWzqE+/KDBX+o4f33o6uzozjESRXQUKdclC9ftDJQ84lFTMs3Z+/12ZDqCV2k
2qf0VfXg4e5xMq4tt6hojXUeYSfeGZXNU9LzjURCcMD+amIKjVztFg4kl3KHW3Pi
/aPTr4xWWgy2tZ1FDEuA5J6AZiKKJSVeoSPOGANouPqm4fNj273XFXQepIhQ5wve
4No0abxfXcLt5Yp3y06rNCBC9QdC++19N5+ajn2z9Qd2ZwztPb0mNuqHAok4vrlE
1c4WBWk93Nfy9fKImalGENpPDz0td2H9pNC9IafOWltGSWSINRrU1GeaNXS/uAOT
CADjcDN+emLbDTTReW4FLoQ0mPJ0tACgszGW50PtncTMPSj4uxSktQPWWk41oD9q
gpXm1Vgto4GvPWYs/ewR6Kyd8K0YkBxbRFyYOmycu3/zzYJnry+EHdvtQspwUDPg
QlI/avDrncERzICsbd86Jz0CMY4kzpg5v9dt/N6WnHlSk/S+vv4pPUDSz26Q4Ehh
iDvDavLGyzKSlVzWQ4bzzlQxXbDL6TZyVAQ4DBI4sI+WGtLbfD51EI5G9BfmDsbw
XJ0Dt2yEwRfDUx/lYbAMvhUnWEu2DSpYdJb8GG0GKTGqU4YpvO1JgTCsLSLIAHfT
tQMw04Gs+kORRNbggsdTD4sR
=N5Wp
-----END PGP PUBLIC KEY BLOCK-----


@@ -12,13 +12,6 @@
# setenv SPACK_ROOT /path/to/spack
# source $SPACK_ROOT/share/spack/setup-env.csh
#
# prevent infinite recursion when spack shells out (e.g., on cray for modules)
if ($?_sp_initializing) then
exit 0
endif
setenv _sp_initializing true
if ($?SPACK_ROOT) then
set _spack_source_file = $SPACK_ROOT/share/spack/setup-env.csh
set _spack_share_dir = $SPACK_ROOT/share/spack
@@ -45,6 +38,3 @@ else
echo "ERROR: Sourcing spack setup-env.csh requires setting SPACK_ROOT to "
echo " the root of your spack installation."
endif
# done: unset sentinel variable as we're no longer initializing
unsetenv _sp_initializing


@@ -36,12 +36,6 @@
# to come up with a user-friendly naming scheme for spack dotfiles.
#################################################################################
# prevent infinite recursion when spack shells out (e.g., on cray for modules)
if test -n "$_sp_initializing"
exit 0
end
set -x _sp_initializing true
#
# Test for STDERR-NOCARET feature: if this is off, fish will redirect stderr to
@@ -727,6 +721,3 @@ sp_multi_pathadd MODULEPATH $_sp_tcl_roots
# [3]: When the test in the if statement fails, the `status` flag is set to 1.
# `true` here manually resets the value of `status` to 0. Since `set`
# passes `status` along, we thus avoid the function returning 1 by mistake.
# done: unset sentinel variable as we're no longer initializing
set -e _sp_initializing


@@ -39,12 +39,6 @@
# spack module files.
########################################################################
# prevent infinite recursion when spack shells out (e.g., on cray for modules)
if [ -n "${_sp_initializing:-}" ]; then
exit 0
fi
export _sp_initializing=true
spack() {
# Store LD_LIBRARY_PATH variables from spack shell function
# This is necessary because MacOS System Integrity Protection clears
@@ -363,7 +357,3 @@ _sp_multi_pathadd MODULEPATH "$_sp_tcl_roots"
if [ "$_sp_shell" = bash ]; then
source $_sp_share_dir/spack-completion.bash
fi
# done: unset sentinel variable as we're no longer initializing
unset _sp_initializing
export _sp_initializing


@@ -962,7 +962,7 @@ _spack_info() {
_spack_install() {
if $list_options
then
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --upstream -g --global --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
else
_all_packages
fi
@@ -1436,7 +1436,7 @@ _spack_test() {
_spack_uninstall() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all"
SPACK_COMPREPLY="-h --help -f --force -R --dependents -y --yes-to-all -a --all -u --upstream -g --global"
else
_installed_packages
fi


@@ -15,6 +15,6 @@ class AdeptUtils(CMakePackage):
version('1.0.1', sha256='259f777aeb368ede3583d3617bb779f0fde778319bf2122fdd216bdf223c015e')
version('1.0', sha256='fed29366c9bcf5f3799220ae3b351d2cb338e2aa42133d61584ea650aa8d6ff7')
depends_on('boost@:1.72.0')
depends_on('boost')
depends_on('mpi')
depends_on('cmake@2.8:', type='build')


@@ -115,7 +115,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
depends_on('zstd', when='@10:')
depends_on('iconv', when='platform=darwin')
depends_on('gnat', when='languages=ada')
depends_on('binutils~libiberty', when='+binutils', type=('build', 'link', 'run'))
depends_on('binutils~libiberty', when='+binutils')
depends_on('zip', type='build', when='languages=java')
depends_on('cuda', when='+nvptx')
@@ -373,6 +373,8 @@ def configure_args(self):
# enable appropriate bootstrapping flags
stage1_ldflags = str(self.rpath_args)
boot_ldflags = stage1_ldflags + ' -static-libstdc++ -static-libgcc'
if '%gcc' in spec:
stage1_ldflags = boot_ldflags
options.append('--with-stage1-ldflags=' + stage1_ldflags)
options.append('--with-boot-ldflags=' + boot_ldflags)


@@ -65,7 +65,7 @@ def setup_dependent_build_environment(self, *args):
})
def setup_run_environment(self, env):
super(IntelMpi, self).setup_run_environment(env)
super(IntelMPI, self).setup_run_environment(env)
for name, value in self.mpi_compiler_wrappers.items():
for name, value in self.mpi_compiler.wrappers.items():
env.set(name, value)


@@ -238,7 +238,6 @@ def post_install(self):
spec['libelf'].prefix.include,
spec['hwloc'].prefix.include))
# Only build if offload target.
cmake(*args)
make()
make('install')
cmake(*args)
make()
make('install')