Compare commits

...

255 Commits

Author SHA1 Message Date
Todd Gamblin
09151785b3 SPACK-87: enable disabled git fetch tests. 2015-05-18 11:32:15 -07:00
Todd Gamblin
b2f46c703d Fix SPACK-87: Tag checkout fails for older git versions. 2015-05-18 11:26:32 -07:00
Todd Gamblin
76cb3eb7d2 Add help on specs to top of spack -h 2015-05-17 22:29:48 -07:00
Gregory L. Lee
50970b72d8 added py-pandas 2015-05-12 13:47:48 -07:00
Todd Gamblin
25af6478c5 Fix SPACK-67: 'spack help stage' was broken.
- remove spurious mutually exclusive group from subparser.
2015-05-10 12:46:33 -07:00
Todd Gamblin
8e87b2176a Updated Sphinx RTD documentation theme supports 4 levels 2015-05-09 13:35:05 -05:00
Todd Gamblin
d6a5266976 Fix 'ç' in François's name. 2015-05-09 13:30:09 -05:00
Todd Gamblin
5b32101ef2 Merge branch 'features/xl-support' into develop 2015-05-07 01:54:30 -05:00
François Bissey
3cfbc0520a Basic support for IBM XL compilers 2015-05-07 01:54:18 -05:00
Todd Gamblin
42c88aa3ca Merge branch 'features/diy' into develop 2015-05-07 01:53:26 -05:00
Todd Gamblin
bbc973cc5f Merge pull request #41 from cyrush/code_chk_pkgs
added simple spack package files for uncrustify and cppcheck
2015-05-01 11:07:34 -07:00
Cyrus Harrison
8c4fe6d249 added simple spack package files for uncrustify and cppcheck 2015-04-30 22:42:53 -07:00
Todd Gamblin
1d6524f299 Merge branch 'develop' of github.com:scalability-llnl/spack into develop 2015-04-14 15:12:11 -07:00
Todd Gamblin
5c04b3c480 Fix for GitHub #30 and SPACK-70. 2015-04-14 15:11:41 -07:00
Todd Gamblin
8b14a46201 documentation updates 2015-04-14 15:11:01 -07:00
Todd Gamblin
97a20cac58 Merge pull request #29 from alfredo-gimenez/develop
MemAxes and Mitos updates to remove VTK dependency.
2015-04-09 20:19:05 -07:00
Alfredo Gimenez
0f5b26c9cd MemAxes and Mitos 2015-04-09 14:46:33 -07:00
Todd Gamblin
314120212d Complete diy command. 2015-04-08 00:22:46 -07:00
Todd Gamblin
5077a2a343 Add a DIY stage class. 2015-04-08 00:22:34 -07:00
Todd Gamblin
ab40d0b1a4 Factor edit_package function out of edit command. 2015-04-08 00:22:07 -07:00
Todd Gamblin
c4a6d04131 Start of DIY command. 2015-04-08 00:20:53 -07:00
Todd Gamblin
203fd861aa Allow spack install -j to throttle make jobs. 2015-04-07 22:21:19 -07:00
Todd Gamblin
37a5c3b09f Version bump for adept-utils 2015-04-01 00:05:26 -07:00
Todd Gamblin
634abdbcea Merge pull request #26 from mplegendre/binutils
Binutils as spack package
2015-03-30 20:39:35 -07:00
Matthew LeGendre
89731d4a7d Add binutils as a gcc dependency so it doesn't use the system linker/assembler. 2015-03-30 14:23:02 -07:00
Todd Gamblin
2b9dd718ae Merge pull request #25 from mplegendre/qtfix
Fix for QT with gcc 4.9 build error
2015-03-27 17:48:51 -07:00
Matthew LeGendre
50687e08cb Fix QT with gcc 4.9 build error where QT's -Wl,--no-undefined interfered with gcc's implicit libgcc_s.so library 2015-03-27 17:32:37 -07:00
Todd Gamblin
ad62b93df9 Merge pull request #23 from mplegendre/rpath_cleanup
Only convert -L<path> to -Wl,-rpath,<path> in compiler scripts if <path>...
2015-03-27 14:14:49 -07:00
Matthew LeGendre
a4cce9eb69 Only convert -L<path> to -Wl,-rpath,<path> in compiler scripts if <path> points into the spack install area
This specifically fixes problems with building gcc, as build and system directories were turning in in gcc library rpaths.
2015-03-27 13:58:57 -07:00
Todd Gamblin
b2f54bac95 Add contributions section to README.md 2015-03-24 09:58:32 -07:00
Todd Gamblin
744df77691 add mailing list links to README 2015-03-24 09:37:38 -07:00
Todd Gamblin
8364f0404c Add some options to spec command. 2015-03-24 08:40:45 -07:00
Todd Gamblin
af92250c7e Add google analytics to docs. 2015-03-15 00:08:58 -07:00
Todd Gamblin
32244ac733 MPICH provides environment vars to make MPI compilers behave like spack. 2015-03-13 13:32:24 -07:00
Todd Gamblin
b80e091f46 metis package. 2015-03-13 00:03:33 -07:00
Todd Gamblin
f97966d63a SPACK-18: simpler build error messages
Suppress python stack trace on build error UNLESS in debug mode (spack -d).
Now spack shows errors with a single red arrow, and it's easier to find the actual build output.
2015-03-12 23:50:07 -07:00
Todd Gamblin
132aa690d8 Remove superfluous env scripts (avoid specifically named ones). 2015-03-12 10:39:26 -07:00
Todd Gamblin
560f2c299a Add install_tree, force_symlink helper functions. 2015-03-12 10:32:29 -07:00
Todd Gamblin
32e2f21e8d Working scotch package. 2015-03-12 08:49:45 -07:00
Todd Gamblin
81d518438c Point ravel download at the github release page. 2015-03-12 06:54:15 -07:00
Todd Gamblin
b24aca6da2 Add -Wno-dev to avoid cmake policy warnings. 2015-03-12 06:54:15 -07:00
Todd Gamblin
e624ebec07 Working ravel package. 2015-03-12 06:54:15 -07:00
Todd Gamblin
14a79a2ddf Add OTF package. 2015-03-12 06:54:14 -07:00
Todd Gamblin
d6c14f40b2 SAMRAI builds on cab with new URL. 2015-03-11 19:14:43 -07:00
Todd Gamblin
f8044e0290 SAMRAI URL update. 2015-03-11 18:58:29 -07:00
Todd Gamblin
fbebb94cd6 Add cram package. 2015-03-05 11:05:42 -08:00
Todd Gamblin
0394adcf8d Minor doc tweak. 2015-02-25 10:41:12 -08:00
Todd Gamblin
ffdb90f39a Last minute Qt bugfix. 2015-02-24 10:42:35 -08:00
Todd Gamblin
5eb7e46654 Spell check docs 2015-02-24 10:26:26 -08:00
Todd Gamblin
447e295947 SPACK-62: fix for site docs. 2015-02-24 02:45:21 -08:00
Todd Gamblin
daa38d2ff4 SPACK-59: Documentation updates, bugfix in fetching. 2015-02-24 02:45:07 -08:00
Todd Gamblin
049808a34f Merge remote-tracking branch 'origin/features/SPACK-46' into develop
Conflicts:
	lib/spack/docs/packaging_guide.rst
2015-02-23 10:46:58 -08:00
Todd Gamblin
daef78f538 Update packaging documentation. 2015-02-23 10:31:22 -08:00
Todd Gamblin
5699cbb597 Fix SPACK-60: 0.8.15 basic docs. 2015-02-23 01:23:30 -08:00
Todd Gamblin
6dab133d9f Same package add icon on mac and linux. 2015-02-23 01:23:30 -08:00
Todd Gamblin
d49c98188a Add an override to colify so we can set terminal dimensions. 2015-02-23 01:23:30 -08:00
Todd Gamblin
065e5ccd1a Update contributors list. 2015-02-18 20:51:50 -08:00
Todd Gamblin
27e9bfb5aa Merge pull request #19 from psaravan/develop
Added netcdf package support.
2015-02-18 20:38:04 -08:00
Saravan Pantham
14e70ad689 Added netcdf package support. 2015-02-18 18:05:57 -08:00
Todd Gamblin
02e316e772 Convert ValueErrors to SpackError subclass. 2015-02-18 16:45:54 -08:00
Todd Gamblin
2374eb4dca Fix for SPACK-62
- deactivate -a errors if arg is not activated
- deactivate -af does not.
2015-02-18 16:45:12 -08:00
Todd Gamblin
2eda01c703 uninstall -f ignores nonexisting packages. 2015-02-18 16:21:15 -08:00
Todd Gamblin
2755171e08 Update documentation to reflect new restage/clean behavior. 2015-02-18 14:46:00 -08:00
Todd Gamblin
e67655c31a docs autodetect version. 2015-02-18 14:33:21 -08:00
Todd Gamblin
c7b8a4e25c Fix for SPACK-46: cleanup spack clean, spack restage. 2015-02-18 14:00:37 -08:00
Todd Gamblin
db11373351 Resurrect combined qt4/5 package from b7dacb 2015-02-18 13:16:22 -08:00
Gregory L. Lee
59198e29f9 Merge branch 'develop' of ssh://cz-stash.llnl.gov:7999/scale/spack into develop 2015-02-18 13:14:26 -08:00
Gregory L. Lee
6e13d0985c fixed deps for python packages 2015-02-18 13:13:19 -08:00
Todd Gamblin
3e5aa4b0f5 llvm/clang version bump 2015-02-18 10:59:03 -08:00
Todd Gamblin
959ce4f985 Downgrade standard version of ImageMagick to a non-changing URL.
- bleeding edge still available but commented by default.
2015-02-18 10:59:03 -08:00
Todd Gamblin
14097e39cc Suppress download status meter when routing I/O to a file. 2015-02-18 10:59:03 -08:00
Gregory L. Lee
44003449d5 fixed install steps for version 4 2015-02-17 16:26:00 -08:00
Todd Gamblin
17ac609d23 Merge branch 'features/memaxes' into develop
Conflicts:
	var/spack/packages/libpng/package.py
2015-02-17 00:49:52 -08:00
Todd Gamblin
724b72bdaf take out dyninst 8.2 for now.
- doesn't build correctly with boost 1.55
2015-02-17 00:47:35 -08:00
Todd Gamblin
4af85441db Merge branch 'features/python-modules' into develop 2015-02-17 00:44:02 -08:00
Todd Gamblin
d800c23cec Better activate/deactivate logic.
spack activate
  - now activates dependency extensions
  - ensures dependencies are activated in the python installation.
  - -f/--force option still allows the old activate behavior.

spack deactivate
  - checks for dependents before deactivating (like uninstall)
  - deactivate -a/--all <extension> will deactivate a package and ALL
    of its dependency extensions.
  - deactivate -a/--all <extendee> activates all extensions of <extendee>
    e.g.: spack deactivate -a python
  - deactivate -f/--force option allows removing regardless of dependents.
    - deactivate -f can be run EVEN if a package is not activated.
    - allows for cleanup of activations gone wrong.
2015-02-17 00:24:58 -08:00
Todd Gamblin
57f331e2ac Ignore conflicting nose tests in py-nose and py-matplotlib. 2015-02-17 00:22:18 -08:00
Todd Gamblin
67db8ddca8 Factor ignore logic into a predicate builder. 2015-02-17 00:21:15 -08:00
Todd Gamblin
06d6b0b205 More py-setuptools dependencies added. 2015-02-16 21:53:55 -08:00
Todd Gamblin
13376efafc Add package-specific rpath back to shiboken and pyside. 2015-02-16 21:53:34 -08:00
Todd Gamblin
e6b2c27011 Factor out forking logic to build_environment.py. 2015-02-16 21:41:31 -08:00
Todd Gamblin
614c22fc1b Allow forced deactivation -- best effort unlinking
spack deactivate -f will unlink even if Spack thinks the package isn't enabled.
Made deactivate routines idempotent.
2015-02-16 12:41:22 -08:00
Todd Gamblin
8aa3afcfde Python package cleanup.
- Added a number of dependencies to python packages.
- Python packages may still not build without some OS support.
- Example: Numpy needs ATLAS, and will use a system ATLAS install.
  - Atlas requires turning off CPU throttling to build.
  - can't do this as a regular user -- how to build ATLAS with Spack
  - currently relying on a system ATLAS install.
2015-02-15 23:04:20 -08:00
Todd Gamblin
847ed8ad39 Add libxslt, cleanup libxml2. 2015-02-15 23:04:04 -08:00
Todd Gamblin
b86eb69552 libgcrypt and libgpg-error packages. 2015-02-15 23:03:33 -08:00
Todd Gamblin
65d60f92f5 qhull package. 2015-02-15 23:02:51 -08:00
Todd Gamblin
36579844d9 Add Tcl/Tk packages. 2015-02-15 23:02:36 -08:00
Todd Gamblin
ce011501f9 Add R package. 2015-02-15 23:02:21 -08:00
Todd Gamblin
b11061f99d Rename py-pyqt4 to py-pyqt. 2015-02-15 12:40:02 -08:00
Todd Gamblin
2f67cdaf10 Better time output on build completion. 2015-02-15 12:39:10 -08:00
Todd Gamblin
d1e03329c5 Memoize all_specs() and exists() for better performance.
- Real bottleneck is calling normalize() for every spec when we read it.
- Need to store graph information in spec files to avoid the need for this.
  - Also, normalizing old specs isn't always possible, so we need to do this anyway.
2015-02-15 11:50:13 -08:00
Todd Gamblin
3c0048dd89 py-sip installs properly into a prefix 2015-02-15 01:59:36 -08:00
Todd Gamblin
c0c0879924 Better extension activation/deactivation 2015-02-15 01:58:35 -08:00
Todd Gamblin
82dc935a50 installed_extensions_for no longer fails when nothing known about pkg 2015-02-15 01:49:50 -08:00
Todd Gamblin
93067d0d63 Add profile option to spack script. 2015-02-15 01:45:05 -08:00
Todd Gamblin
0c94a6e2b0 Merge branch 'features/python-modules' into features/memaxes
Conflicts:
	var/spack/packages/qt/package.py
2015-02-12 10:01:58 -08:00
Gregory L. Lee
5c2608b032 typo: Self -> self 2015-02-09 15:55:18 -08:00
Todd Gamblin
25af341954 Python package improvements. 2015-02-09 02:54:49 -08:00
Todd Gamblin
d1d0b85d80 Add Alfredo to contributors. 2015-02-09 01:13:56 -08:00
Todd Gamblin
1e5bbe60f7 Merge pull request #18 from ch4i/features/memaxes
Features/memaxes
2015-02-09 01:06:06 -08:00
Alfredo Gimenez
27617670f0 qt with hardware accelerated opengl working 2015-02-09 00:01:07 -08:00
Todd Gamblin
aae364b4c9 "spack extensions" shows total extension count. 2015-02-08 23:26:15 -08:00
Todd Gamblin
c077f05705 Move dependency environment setup to build_environment. 2015-02-08 22:01:00 -08:00
Todd Gamblin
f81b136547 import fix in cmd/clean 2015-02-08 19:43:10 -08:00
Todd Gamblin
20ec80295d setup_extension_environment is now setup_dependent_environment.
- other packages, like Qt, can now use this to set up relevant build
  variables and env vars for their dependencies.

- not just extensions anymore.
2015-02-08 19:41:17 -08:00
Todd Gamblin
60a385d4a4 Minor textual error in extensions command. 2015-02-08 19:40:28 -08:00
Todd Gamblin
e51e01f4f0 Cleaned up python to remove redundant line. 2015-02-08 19:39:36 -08:00
Todd Gamblin
befe72b9b9 directory_layout now raises an error when an install fails. 2015-02-08 19:36:30 -08:00
Alfredo Gimenez
9e878075ac mesa 8.0.5 working 2015-02-08 16:09:13 -08:00
Alfredo Gimenez
cc684a3ebe older mesa for 2.6 kernel (not workin yet) 2015-02-08 13:34:45 -08:00
Alfredo Gimenez
1605e04d44 mesa and systemd (systemd not working yet) 2015-02-07 22:08:50 -08:00
Alfredo Gimenez
932f3930f4 util-linux added 2015-02-07 09:18:34 -08:00
Alfredo Gimenez
676cc84c9e more mesa dependencies 2015-02-06 17:24:55 -08:00
Alfredo Gimenez
5fdf5438ea flex and bison 2015-02-06 16:55:48 -08:00
Alfredo Gimenez
d95d48bbe6 py-mako and fix for setup-env.sh 2015-02-06 16:43:21 -08:00
Gregory L. Lee
5cc369c2b8 add dependent packages to PYTHONPATH for build 2015-02-06 16:35:35 -08:00
Alfredo Gimenez
a4ac1977a4 merge with python-modules 2015-02-06 16:27:33 -08:00
Todd Gamblin
457f2d1d51 Fix libpng to use a better URL
Sourceforge URLs like this eventually die when the libpng version is bumped:
    http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download

But ones like this give you a "permanently moved", which curl -L will follow:
    http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz
2015-02-06 08:37:22 -08:00
Todd Gamblin
3a3e4d4391 Do not automatically activate extensions on install. 2015-02-04 15:47:03 -08:00
Todd Gamblin
a9e189972a Bugfix in spack extensions 2015-02-04 15:42:41 -08:00
Todd Gamblin
5bde8359e8 More information in extensions command. 2015-02-02 11:20:36 -08:00
Todd Gamblin
2d9190d264 Add extensions command. 2015-02-02 11:20:36 -08:00
Todd Gamblin
6b90017efa Fixed dumb link_tree bug, added test for link tree. 2015-02-02 11:20:35 -08:00
Todd Gamblin
6400ace901 Add "spack extensions" command to list activated extensions. 2015-02-02 11:19:54 -08:00
Todd Gamblin
70c8bf44b8 Fix for install sanity check -- don't count hidden dir layout files. 2015-02-02 11:19:54 -08:00
Gregory L. Lee
48f1ff87f8 added more Python modules 2015-02-02 11:19:54 -08:00
Gregory L. Lee
2bc3f74df2 added more Python modules 2015-02-02 11:19:54 -08:00
Todd Gamblin
de91c95e8e Ability to ignore files in activate/deactivate for extensions. 2015-02-02 11:19:54 -08:00
Todd Gamblin
ff9cb94f4f Add arguements to extends() and activate/deactivate. 2015-02-02 11:19:54 -08:00
Gregory L. Lee
9fa489b7f2 added several modules 2015-02-02 11:19:54 -08:00
Gregory L. Lee
7992f415fe added py-nose 2015-02-02 11:19:54 -08:00
Todd Gamblin
2ae7f53b83 Bugfix: Extension hooks should only run for extensions. 2015-02-02 11:19:54 -08:00
Todd Gamblin
89ccdf92cd Add activate and deactivate commands for extensions. 2015-02-02 11:19:54 -08:00
Todd Gamblin
acc62abbd0 Rework do_activate/activate and do_deactivate/deactivate semantics.
- packages can now extend only one other package.
- do_activate() and do_deactivate() are now called on the extension,
  and they automatically find the extendee
- activate() and deactivate() are still called on the extendee and are
  passed the extension.
2015-02-02 11:19:53 -08:00
Todd Gamblin
d13bbeb605 Add PYTHONPATH to modules for python extensions. 2015-02-02 11:19:53 -08:00
Todd Gamblin
bcccf02020 Add setup_extension_environment() method.
- lets packages do some setup before their extensions run install()
2015-02-02 11:19:53 -08:00
Todd Gamblin
82946d2914 Move symlink tree routines to LinkTree class. 2015-02-02 11:19:52 -08:00
Todd Gamblin
9977543478 Added feature: package extensions
- packages can be "extended" by others
- allows extension to be symlinked into extendee's prefix.
- used for python modules.
  - first module: py-setuptools
2015-02-02 11:19:00 -08:00
Todd Gamblin
7215aee224 do_install() passes kwargs to dependencies. 2015-02-02 11:16:24 -08:00
Todd Gamblin
2c1eda66c4 First python extension package: setuptools 2015-02-02 11:16:23 -08:00
Todd Gamblin
adb7d614e6 Add pre-install and pre-uninstall hooks. 2015-02-02 11:16:23 -08:00
Todd Gamblin
ebe0c1d83a New "extends" relation adds another special list to the package class. 2015-02-02 11:16:23 -08:00
Todd Gamblin
88afad3e46 Directory layout can now track installed extensions per package. 2015-02-02 11:16:23 -08:00
Todd Gamblin
ba593ccb26 Fix bug in mirror path construction. 2015-02-02 11:15:24 -08:00
Todd Gamblin
81a4d89e94 Merge pull request #15 from ch4i/features/memaxes
Mitos package
2015-01-25 12:29:15 -08:00
Alfredo Gimenez
6a496ef620 PSAPI v0.6 -> Mitos v0.7 2015-01-23 16:58:15 -08:00
Todd Gamblin
0ac6ffb3ef Add extra gcc dependencies.
- not used until optional/conditional deps are implemented.
2015-01-23 00:05:23 -08:00
Todd Gamblin
3e37903ffd Packages have rpath property. 2015-01-23 00:03:51 -08:00
Todd Gamblin
e6b4530234 Add is_exe function to filesystem. 2015-01-22 13:52:28 -08:00
Alfredo Gimenez
e97db785d6 psapi v0.6 2015-01-21 20:42:44 -08:00
Todd Gamblin
51ed0d3f6f Properly set install RPATHS for cmake builds. 2015-01-19 20:59:23 -08:00
Todd Gamblin
2a0e33876e Add PSAPI 2015-01-19 20:45:27 -08:00
Todd Gamblin
d08c0703a0 Initial build of MemAxes GUI. 2015-01-19 14:07:41 -08:00
Todd Gamblin
b7dacb427d Qt5 & VTK builds. VTK works with Qt 4 and 5. 2015-01-19 14:07:09 -08:00
Todd Gamblin
0211adbdb6 version bump libpng 2015-01-19 14:06:25 -08:00
Todd Gamblin
53c8b4249a Make dbus put a machine id file in the right place. 2015-01-19 14:06:09 -08:00
Todd Gamblin
f35b8b8db4 Better location error output. 2015-01-19 14:05:48 -08:00
Todd Gamblin
a4c19eee14 Qt5 webkit requires gperf 2015-01-19 14:00:54 -08:00
Todd Gamblin
4e3662f318 Dyninst 8.2 works. 2015-01-17 17:09:42 -08:00
Todd Gamblin
c6351b5d00 Fix #11: bug in ProviderIndex
- packages that provided same spec (e.g. mpe) were overwritten in the index
  - Index now has a set of providers instead of a single provider per provided spec.
- see https://github.com/scalability-llnl/spack/issues/11
2015-01-14 00:18:29 -08:00
Todd Gamblin
f73abe6849 Merge branch 'features/dep-graph' into develop 2015-01-13 01:00:55 -08:00
Todd Gamblin
fa67d69585 Merge branch 'develop' of github.com:scalability-llnl/spack into develop 2015-01-13 00:53:04 -08:00
Todd Gamblin
917d82be0d Add list_url for ompss 2015-01-13 00:45:12 -08:00
George Todd Gamblin
1324b32665 Merge pull request #26 in SCALE/spack from ~JAULMES1/spack:develop to develop
# By Luc Jaulmes
# Via Luc Jaulmes
* commit '844c0838487529c0f2edc6f09e6ef86f12364716':
  Updated versions in OmpSs and Extrae, which resolves version-dependency problems with MPI
2015-01-12 22:33:26 -08:00
Luc Jaulmes
844c083848 Updated versions in OmpSs and Extrae, which resolves version-dependency problems with MPI 2015-01-12 20:38:32 +01:00
Todd Gamblin
9db967be98 Fix bug when all deps are back edges.
- Happened with the graph for SAMRAI
2015-01-10 19:23:07 -08:00
Todd Gamblin
011f71a442 Fix bug in STAT graph 2015-01-10 19:09:03 -08:00
Todd Gamblin
36198c525b Merge pull request #10 from justintoo/rose
Add Packages for ROSE and JDK
2015-01-08 09:10:52 -08:00
Justin Too
3a07ec6c7d (Package) Add ROSE compiler package 2015-01-07 14:07:35 -08:00
Justin Too
cd9e4b5b7f (Package) Add Oracle JDK package 2015-01-07 14:07:02 -08:00
Todd Gamblin
935eba2357 Allow commands to return error codes. 2015-01-05 02:33:15 -05:00
Todd Gamblin
5d033fbd0a Expansion works properly, simplified graph code. 2015-01-04 18:49:22 -08:00
Todd Gamblin
b4b8339d0d bugfix for dot graphs of virtual packages. 2015-01-03 17:58:37 -08:00
Todd Gamblin
0a0291678e Factor graph code out into its own module, rework spack graph. 2015-01-03 17:45:54 -08:00
Todd Gamblin
478af54cce Color graph edges. 2014-12-31 14:55:35 -08:00
Todd Gamblin
dba5d020cd Pipelining back edges works, saves more space. 2014-12-30 18:05:47 -08:00
Todd Gamblin
bb3dafa3b5 Reduce number of immediate expand/contracts. 2014-12-29 21:11:28 -08:00
Todd Gamblin
daf1e229f7 More compact graphs: do back edges before forward expansion. 2014-12-29 14:29:44 -08:00
Todd Gamblin
226de0a42d Spec graph works without color. 2014-12-29 01:52:03 -08:00
Todd Gamblin
a6e00f6086 Fix ColorStream 2014-12-29 01:05:21 -08:00
Todd Gamblin
6ffcdc1166 Partially working ASCII dependency graph. 2014-12-29 00:03:35 -08:00
Todd Gamblin
860f834aad spack graph allows plotting specific packages. 2014-12-26 13:52:49 -08:00
Todd Gamblin
9dabcc8703 Git package. 2014-12-26 00:07:15 -08:00
Todd Gamblin
d3e52d9f9a Fix lack of sorting in version concretization. 2014-12-25 23:13:44 -08:00
Todd Gamblin
b0ce1b81ba Fix SPINDLE and SCR download URLs. 2014-12-25 18:42:03 -08:00
Todd Gamblin
b80a0e1da5 Merge branch 'features/qt' into develop 2014-12-25 18:01:51 -08:00
Todd Gamblin
37bdbdd990 URLFetchStrategy now contains exploding tarballs. 2014-12-25 17:57:55 -08:00
Todd Gamblin
0bc861db6e Fix up bzip2 install 2014-12-25 17:55:19 -08:00
Todd Gamblin
d98e475361 Qt4 builds successfully with proper RPATHs. 2014-12-25 16:09:42 -08:00
Todd Gamblin
20388ece86 Clearer code in filter_file. 2014-12-25 16:07:39 -08:00
Todd Gamblin
7b71e6fb5a spack env command
spack env allows regular commands to be run with a spack build environment.
It also displays the spack build environment for a package.
2014-12-25 16:06:30 -08:00
Todd Gamblin
b3042db755 Add patch function to Package, so that packages can define custom patch functions. 2014-12-25 16:05:45 -08:00
Todd Gamblin
852c1dc286 Print out fetch, build, and total time for builds. 2014-12-23 16:35:54 -08:00
Todd Gamblin
01ca61c7cc Updates for Qt dependencies 2014-12-23 14:43:05 -08:00
Todd Gamblin
8edf299dd2 gnutls, nettle, wget, dbus 2014-12-23 14:43:05 -08:00
Todd Gamblin
e0b5890ab5 Initial versions of Qt and some dependencies. 2014-12-23 14:43:04 -08:00
Todd Gamblin
887c29ddc4 Merge branch 'features/better-mirror-support' into develop 2014-12-22 23:28:07 -08:00
Todd Gamblin
983f35f32a Tweak extrae indentation. 2014-12-22 23:24:21 -08:00
Todd Gamblin
c8d2097bae Merge branch 'features/gperftools' into develop 2014-12-22 23:15:44 -08:00
Todd Gamblin
ab3bf61903 Fix for SPACK-50
Bad format string in version check.
2014-12-19 11:09:37 -08:00
Todd Gamblin
5a3a838fe5 Merge branch 'bugfix/load-hooks-fix' into develop 2014-12-18 21:40:20 -08:00
Todd Gamblin
5cd4ddaf08 Fix for SPACK-49.
- name conflict in imp.load_source caused this to fail.
- Python modules loaded by imp have unique names now.
2014-12-18 21:38:25 -08:00
Todd Gamblin
08f1701e35 Allow fake installations (just make the directory).
- Use for debugging.
2014-12-18 15:52:45 -08:00
Adam Moody
a9be5e7239 add gperftools (tcmalloc and friends) 2014-12-18 11:31:58 -08:00
Todd Gamblin
f1c5e64c23 Partial fix for SPACK-48.
- Try to accommodate packages that have grown dependencies better.
- This will only get fully fixed when optional dependencies are supported
  and some extra functionality is added to the spec syntax.
2014-12-15 14:46:34 -08:00
Todd Gamblin
722e73f309 Better mirror path calculation.
- Add support in spack.url for extrapolating actual file type for URL
- Move mirror path computation to mirror.py from package.py
2014-12-12 14:53:55 -08:00
Todd Gamblin
2f90068661 Handle cases where tarball is in the URL query string. 2014-12-12 14:48:59 -08:00
Todd Gamblin
e309b41972 Add support for URLs with query strings
- support tarballs from raw github URLs
2014-12-09 01:07:48 -08:00
Todd Gamblin
c3fce7b77f Bugfix in create and checksum 2014-12-08 22:49:49 -08:00
Todd Gamblin
105420f372 Merge branch 'bugfix/ncurses-pkgconfig' into develop
Fixes #7 on github.
2014-12-05 09:19:00 -08:00
Todd Gamblin
588955a987 Disable pkgconfig files until I support this better. 2014-12-05 08:45:51 -08:00
Todd Gamblin
7dc90c7097 Add experimental gasnet package for legion. 2014-12-04 10:53:52 -08:00
Todd Gamblin
ba53ccb6b3 Minor tweak: use self.git everywhere in get fetch strategy. 2014-12-04 10:51:23 -08:00
Todd Gamblin
c774455fc5 Bugfix in create command. 2014-12-04 10:47:01 -08:00
Todd Gamblin
c19347a055 Merge branch 'features/mpibash' into develop 2014-12-02 23:00:11 -08:00
Adam Moody
0f04f75fa3 add autoconf and libcircle dependencies, call autoconf before configure 2014-12-02 22:59:33 -08:00
Todd Gamblin
652b761894 Merge branch 'features/better-find' into develop 2014-12-02 22:55:11 -08:00
Todd Gamblin
fdc6081244 CLI improvements to find and list. 2014-12-02 22:53:11 -08:00
Todd Gamblin
11cffff943 colify handles ansi color input directly; no more decorator. 2014-12-02 22:32:15 -08:00
Todd Gamblin
908400bfc5 Fix dyninst 8.1.1 checksum. 2014-12-02 21:57:37 -08:00
Todd Gamblin
0c12e26026 Bugfix in boost build.
- b2 used to be called bjam
2014-12-02 14:25:52 -08:00
Todd Gamblin
e71cf672f1 Fail fast in stage if all fetch strategies fail for a package. 2014-12-02 09:58:30 -08:00
Todd Gamblin
40b4fa5443 Better spack find view. 2014-12-01 23:14:06 -08:00
Todd Gamblin
e15316e825 index_by supports compound index keys. 2014-12-01 23:13:09 -08:00
Todd Gamblin
72c753b93e Colify now supports fixing the number of columns. 2014-12-01 21:29:01 -08:00
Todd Gamblin
22e4d11010 Cleanup code in colify. 2014-11-23 16:19:26 -08:00
Todd Gamblin
287b04e50a Bugfix in terminal_size() 2014-11-23 17:55:37 -06:00
Todd Gamblin
d2fe038caf Minor bugfix for 404 error on fetch. 2014-11-17 15:03:48 -08:00
Todd Gamblin
321a3a55c7 Prompt the user about checksums only if interactive. 2014-11-16 15:26:00 -08:00
Todd Gamblin
eba13b8653 Checksum warning now prompts for override. 2014-11-08 23:20:01 -08:00
Todd Gamblin
79414947ae Merge branch 'features/gcc' into develop
Conflicts:
	lib/spack/spack/package.py
2014-11-08 22:30:46 -08:00
Todd Gamblin
0d044cdc1b Shorter help strings. 2014-11-08 22:18:20 -08:00
Todd Gamblin
1a424c124c Python 2.6 fix for Mac OS 2014-11-08 22:18:08 -08:00
Todd Gamblin
1da5d12bdd 'spack urls' debugging command, more consistent URL extrapolation.
- spack urls inspects all URLs in packages, prints out colors to show how they are parsed.
- URL extrapolation test added.
- Extrapolation is more consistent now.
- Extrapolation handles more complex URLs.
- More test cases for extrapolation.
2014-11-08 22:08:15 -08:00
Todd Gamblin
57076f6ca4 URL parsing improvements 2014-11-08 11:42:54 -08:00
Todd Gamblin
9033ae6460 Add package for Sandia QThreads. 2014-11-07 00:20:39 -08:00
Todd Gamblin
55bf243f16 Improved website scraping. 2014-11-07 00:17:25 -08:00
Todd Gamblin
d78ece658b Change to faster gcc mirror that allows spidering. 2014-11-07 00:13:52 -08:00
Todd Gamblin
3112096651 Merge branch 'hotfix/vcs-not-required' into develop 2014-11-06 13:41:54 -08:00
Todd Gamblin
193eddda5e Fix for missing format_doc in package-list command. 2014-11-06 11:46:43 -08:00
Todd Gamblin
b97ee67a4b Working GCC package. 2014-11-05 09:54:43 -08:00
Todd Gamblin
a37828bafb Packages for gcc and its dependencies. 2014-11-04 13:42:47 -08:00
Todd Gamblin
488a6737b7 Merge branch 'features/python' into develop 2014-11-03 14:20:37 -08:00
Todd Gamblin
7905b50dcb Bump ImageMagick version 2014-11-03 14:19:24 -08:00
Todd Gamblin
6c4bac2ed8 Update libmonitor URL to point to google code. 2014-11-03 14:17:10 -08:00
Todd Gamblin
6c8c41da98 Working Python 2.7.8, ncurses, readline 2014-11-03 14:12:16 -08:00
Todd Gamblin
0f3b80cddb Fix for SPACK-11: Spack compiler wrapper is now in bash.
- Startup is much faster
- Added test for compiler wrapper parsing.
- Removed old compilation module that had to be imported by old cc.
- Removed cc from python version checks now that it's bash.
2014-11-03 14:12:16 -08:00
Todd Gamblin
1656f62a12 Add bzip2 package and spack pkg add command. 2014-11-03 14:12:16 -08:00
Todd Gamblin
8c8fc749be Initial versions of python and libffi. 2014-11-03 14:12:06 -08:00
Todd Gamblin
8f9de17869 "spack info -r" is now "spack package-list"
- too much going on in this command, and it made subcommand parsing weird.
- information printed is the same but info and package-list are really different commands.
2014-11-01 16:03:09 -07:00
Todd Gamblin
a5859b0b05 Add ability to get subparser by name from argparse 2014-11-01 15:59:29 -07:00
Todd Gamblin
3db22a4e33 Sane self.url for packages (reflects current version) 2014-11-01 15:01:01 -07:00
Todd Gamblin
85a14b68b7 spack compiler add checks for access before listing directories. 2014-10-31 10:30:58 -07:00
Todd Gamblin
f60fd330cb Better error messages for extension() 2014-10-30 15:00:02 -07:00
Todd Gamblin
132c32076a Add Muster parallel clustering library. 2014-10-28 16:44:35 -07:00
248 changed files with 10470 additions and 1816 deletions

View File

@@ -32,20 +32,53 @@ Documentation
[Full documentation](http://scalability-llnl.github.io/spack)
for Spack is also available.
Get Involved!
------------------------
Spack is an open source project. Questions, discussion, and
contributions are welcome. Contributions can be anything from new
packages to bugfixes, or even new core features.
### Mailing list
If you are interested in contributing to spack, the first step is to
join the mailing list. We're currently using LLNL's old-fashioned
mailing list software, so you'll need to click the links below and
send the resulting email to subscribe or unsubscribe:
* **[Subscribe](mailto:majordomo@lists.llnl.gov?subject=subscribe&body=subscribe%20spack)**
* **[Unsubscribe](mailto:majordomo@lists.llnl.gov?subject=unsubscribe&body=unsubscribe%20spack)**
### Contributions
At the moment, contributing to Spack is relatively simple. Just send us
a [pull request](https://help.github.com/articles/using-pull-requests/).
When you send your request, make ``develop`` the destination branch.
Spack is using a rough approximation of the [Git
Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the
latest stable release.
Authors
----------------
Spack was written by Todd Gamblin, tgamblin@llnl.gov.
Significant contributions were also made by the following awesome
people:
Significant contributions were also made by:
* David Beckingsale
* David Boehme
* Alfredo Gimenez
* Luc Jaulmes
* Matt Legendre
* Greg Lee
* Adam Moody
* Saravan Pantham
* Joachim Protze
* Bob Robey
* Justin Too
Release
----------------

103
bin/spack
View File

@@ -25,7 +25,8 @@
##############################################################################
import sys
if not sys.version_info[:2] >= (2,6):
sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info)
v_info = sys.version_info[:3]
sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info)
import os
@@ -48,23 +49,39 @@ except OSError:
# clean up the scope and start using spack package instead.
del SPACK_FILE, SPACK_PREFIX, SPACK_LIB_PATH
import llnl.util.tty as tty
from llnl.util.tty.color import *
import spack
from spack.error import SpackError
from external import argparse
# Command parsing
parser = argparse.ArgumentParser(
description='Spack: the Supercomputing PACKage Manager.')
formatter_class=argparse.RawTextHelpFormatter,
description="Spack: the Supercomputing PACKage Manager." + colorize("""
spec expressions:
PACKAGE [CONSTRAINTS]
CONSTRAINTS:
@c{@version}
@g{%compiler @compiler_version}
@B{+variant}
@r{-variant} or @r{~variant}
@m{=architecture}
[^DEPENDENCY [CONSTRAINTS] ...]"""))
parser.add_argument('-d', '--debug', action='store_true',
help="Write out debug logs during compile")
parser.add_argument('-k', '--insecure', action='store_true',
help="Do not check ssl certificates when downloading.")
parser.add_argument('-m', '--mock', action='store_true',
help="Use mock packages instead of real ones.")
parser.add_argument('-p', '--profile', action='store_true',
help="Profile execution using cProfile.")
parser.add_argument('-v', '--verbose', action='store_true',
help="Print additional output during builds")
parser.add_argument('-V', '--version', action='version',
version="%s" % spack.spack_version)
parser.add_argument('-v', '--verbose', action='store_true', dest='verbose',
help="Print additional output during builds")
parser.add_argument('-d', '--debug', action='store_true', dest='debug',
help="Write out debug logs during compile")
parser.add_argument('-k', '--insecure', action='store_true', dest='insecure',
help="Do not check ssl certificates when downloading archives.")
parser.add_argument('-m', '--mock', action='store_true', dest='mock',
help="Use mock packages instead of real ones.")
# each command module implements a parser() function, to which we pass its
# subparser for setup.
@@ -84,33 +101,49 @@ if len(sys.argv) == 1:
# actually parse the args.
args = parser.parse_args()
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
spack.debug = args.debug
def main():
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
spack.debug = args.debug
spack.spack_working_dir = working_dir
if args.mock:
from spack.packages import PackageDB
spack.db = PackageDB(spack.mock_packages_path)
spack.spack_working_dir = working_dir
if args.mock:
from spack.packages import PackageDB
spack.db = PackageDB(spack.mock_packages_path)
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.")
spack.curl.add_default_arg('-k')
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.")
spack.curl.add_default_arg('-k')
# Try to load the particular command asked for and run it
command = spack.cmd.get_command(args.command)
try:
command(parser, args)
except SpackError, e:
if spack.debug:
# In debug mode, raise with a full stack trace.
raise
elif e.long_message:
tty.die(e.message, e.long_message)
# Try to load the particular command asked for and run it
command = spack.cmd.get_command(args.command)
try:
return_val = command(parser, args)
except SpackError, e:
if spack.debug:
# In debug mode, raise with a full stack trace.
raise
elif e.long_message:
tty.die(e.message, e.long_message)
else:
tty.die(e.message)
except KeyboardInterrupt:
sys.stderr.write('\n')
tty.die("Keyboard interrupt.")
# Allow commands to return values if they want to exit with some ohter code.
if return_val is None:
sys.exit(0)
elif isinstance(return_val, int):
sys.exit(return_val)
else:
tty.die(e.message)
tty.die("Bad return value from command %s: %s" % (args.command, return_val))
except KeyboardInterrupt:
tty.die("Keyboard interrupt.")
if args.profile:
import cProfile
cProfile.run('main()', sort='tottime')
else:
main()

View File

@@ -1,3 +1,4 @@
package_list.rst
command_index.rst
spack*.rst
_build

View File

@@ -25,7 +25,19 @@ all: html
# This autogenerates a package list.
#
package_list:
spack info -r > package_list.rst
spack package-list > package_list.rst
#
# Generate a command index
#
command_index:
cp command_index.in command_index.rst
echo >> command_index.rst
grep -ho '.. _spack-.*:' *rst \
| perl -pe 's/.. _([^:]*):/ * :ref:`\1`/' \
| sort >> command_index.rst
custom_targets: package_list command_index
#
# This creates a git repository and commits generated html docs.
@@ -77,10 +89,10 @@ help:
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
-rm -f package_list.rst
-rm -f package_list.rst command_index.rst
-rm -rf $(BUILDDIR)/* $(APIDOC_FILES)
html: apidoc package_list
html: apidoc custom_targets
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

View File

@@ -5,7 +5,7 @@
"""
import os
VERSION = (0, 1, 5)
VERSION = (0, 1, 8)
__version__ = ".".join(str(v) for v in VERSION)
__version_full__ = __version__

View File

@@ -1,15 +1,23 @@
<ul class="wy-breadcrumbs">
<li><a href="{{ pathto(master_doc) }}">Docs</a> &raquo;</li>
<li><a href="">{{ title }}</a></li>
<li class="wy-breadcrumbs-aside">
{% if display_github %}
<a href="https://github.com/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}.rst" class="icon icon-github"> Edit on GitHub</a>
{% elif display_bitbucket %}
<a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}.rst'" class="icon icon-bitbucket"> Edit on Bitbucket</a>
{% elif show_source and has_source and sourcename %}
<a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a>
{% endif %}
</li>
</ul>
<hr/>
<div role="navigation" aria-label="breadcrumbs navigation">
<ul class="wy-breadcrumbs">
<li><a href="{{ pathto(master_doc) }}">Docs</a> &raquo;</li>
{% for doc in parents %}
<li><a href="{{ doc.link|e }}">{{ doc.title }}</a> &raquo;</li>
{% endfor %}
<li>{{ title }}</li>
<li class="wy-breadcrumbs-aside">
{% if pagename != "search" %}
{% if display_github %}
<a href="https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}{{ source_suffix }}" class="fa fa-github"> Edit on GitHub</a>
{% elif display_bitbucket %}
<a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}{{ source_suffix }}" class="fa fa-bitbucket"> Edit on Bitbucket</a>
{% elif show_source and source_url_prefix %}
<a href="{{ source_url_prefix }}{{ pagename }}{{ source_suffix }}">View page source</a>
{% elif show_source and has_source and sourcename %}
<a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> View page source</a>
{% endif %}
{% endif %}
</li>
</ul>
<hr/>
</div>

View File

@@ -1,28 +1,36 @@
<footer>
{% if next or prev %}
<div class="rst-footer-buttons">
<div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
{% if next %}
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}"/>Next <span class="icon icon-circle-arrow-right"></span></a>
<a href="{{ next.link|e }}" class="btn btn-neutral float-right" title="{{ next.title|striptags|e }}" accesskey="n">Next <span class="fa fa-arrow-circle-right"></span></a>
{% endif %}
{% if prev %}
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>
<a href="{{ prev.link|e }}" class="btn btn-neutral" title="{{ prev.title|striptags|e }}" accesskey="p"><span class="fa fa-arrow-circle-left"></span> Previous</a>
{% endif %}
</div>
{% endif %}
<hr/>
<p>
&copy; Copyright 2013-2014,
<a href="https://scalability.llnl.gov/">Lawrence Livermore National Laboratory</a>.
<br/>
Written by Todd Gamblin, <a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>, LLNL-CODE-647188
<br/>
<div role="contentinfo">
<p>
{%- if show_copyright %}
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}&copy; <a href="{{ path }}">Copyright</a> {{ copyright }}.{% endtrans %}
{%- else %}
{% trans copyright=copyright|e %}&copy; Copyright {{ copyright }}.{% endtrans %}
{%- endif %}
{%- endif %}
{%- if last_updated %}
{% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
{%- if last_updated %}
{% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %}
{%- endif %}
</p>
</div>
{%- if show_sphinx %}
{% trans %}Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>{% endtrans %}.
{%- endif %}
&nbsp;&nbsp;
{% trans %}<br/><a href="https://www.github.com/snide/sphinx_rtd_theme">Sphinx theme</a> provided by <a href="http://readthedocs.org">Read the Docs</a>{% endtrans %}
</p>
</footer>

View File

@@ -12,6 +12,7 @@
<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
<head>
<meta charset="utf-8">
{{ metatags }}
<meta name="viewport" content="width=device-width, initial-scale=1.0">
{% block htmltitle %}
<title>{{ title|striptags|e }}{{ titlesuffix }}</title>
@@ -23,40 +24,28 @@
{% endif %}
{# CSS #}
<link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
{# JS #}
{# OPENSEARCH #}
{% if not embedded %}
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT:'{{ url_root }}',
VERSION:'{{ release|e }}',
COLLAPSE_INDEX:false,
FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}',
HAS_SOURCE: {{ has_source|lower }}
};
</script>
{%- for scriptfile in script_files %}
<script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
{%- endfor %}
{% if use_opensearch %}
<link rel="search" type="application/opensearchdescription+xml" title="{% trans docstitle=docstitle|e %}Search within {{ docstitle }}{% endtrans %}" href="{{ pathto('_static/opensearch.xml', 1) }}"/>
{% endif %}
{% endif %}
{# RTD hosts these file themselves, so just load on non RTD builds #}
{# RTD hosts this file, so just load on non RTD builds #}
{% if not READTHEDOCS %}
<link rel="stylesheet" href="{{ pathto('_static/' + style, 1) }}" type="text/css" />
<script type="text/javascript" src="_static/js/theme.js"></script>
{% endif %}
{% for cssfile in css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{% for cssfile in extra_css_files %}
<link rel="stylesheet" href="{{ pathto(cssfile, 1) }}" type="text/css" />
{% endfor %}
{%- block linktags %}
{%- if hasdoc('about') %}
<link rel="author" title="{{ _('About these documents') }}"
@@ -85,29 +74,47 @@
{%- endblock %}
{%- block extrahead %} {% endblock %}
<script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/2.6.2/modernizr.min.js"></script>
{# Keep modernizr in head - http://modernizr.com/docs/#installing #}
<script src="_static/js/modernizr.min.js"></script>
</head>
<body class="wy-body-for-nav">
<body class="wy-body-for-nav" role="document">
<div class="wy-grid-for-nav">
{# SIDE NAV, TOGGLES ON MOBILE #}
<nav data-toggle="wy-nav-shift" class="wy-nav-side">
<div class="wy-side-nav-search">
<a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}</a>
{% block sidebartitle %}
{% if logo and theme_logo_only %}
<a href="{{ pathto(master_doc) }}">
{% else %}
<a href="{{ pathto(master_doc) }}" class="icon icon-home"> {{ project }}
{% endif %}
{% if logo %}
{# Not strictly valid HTML, but it's the only way to display/scale it properly, without weird scripting or heaps of work #}
<img src="{{ pathto('_static/' + logo, 1) }}" class="logo" />
{% endif %}
</a>
{% include "searchbox.html" %}
{% endblock %}
</div>
<div class="wy-menu wy-menu-vertical" data-spy="affix">
{% set toctree = toctree(maxdepth=2, collapse=False, includehidden=True) %}
{% if toctree %}
{{ toctree }}
{% else %}
<!-- Local TOC -->
<div class="local-toc">{{ toc }}</div>
{% endif %}
<div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
{% block menu %}
{% set toctree = toctree(maxdepth=4, collapse=False, includehidden=True) %}
{% if toctree %}
{{ toctree }}
{% else %}
<!-- Local TOC -->
<div class="local-toc">{{ toc }}</div>
{% endif %}
{% endblock %}
</div>
&nbsp;
</nav>
@@ -115,8 +122,8 @@
<section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
{# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #}
<nav class="wy-nav-top">
<i data-toggle="wy-nav-top" class="icon icon-reorder"></i>
<nav class="wy-nav-top" role="navigation" aria-label="top navigation">
<i data-toggle="wy-nav-top" class="fa fa-bars"></i>
<a href="{{ pathto(master_doc) }}">{{ project }}</a>
</nav>
@@ -125,7 +132,9 @@
<div class="wy-nav-content">
<div class="rst-content">
{% include "breadcrumbs.html" %}
{% block body %}{% endblock %}
<div role="main" class="document">
{% block body %}{% endblock %}
</div>
{% include "footer.html" %}
</div>
</div>
@@ -134,5 +143,39 @@
</div>
{% include "versions.html" %}
{% if not embedded %}
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT:'{{ url_root }}',
VERSION:'{{ release|e }}',
COLLAPSE_INDEX:false,
FILE_SUFFIX:'{{ '' if no_search_suffix else file_suffix }}',
HAS_SOURCE: {{ has_source|lower }}
};
</script>
{%- for scriptfile in script_files %}
<script type="text/javascript" src="{{ pathto(scriptfile, 1) }}"></script>
{%- endfor %}
{% endif %}
{# RTD hosts this file, so just load on non RTD builds #}
{% if not READTHEDOCS %}
<script type="text/javascript" src="{{ pathto('_static/js/theme.js', 1) }}"></script>
{% endif %}
{# STICKY NAVIGATION #}
{% if theme_sticky_navigation %}
<script type="text/javascript">
jQuery(function () {
SphinxRtdTheme.StickyNav.enable();
});
</script>
{% endif %}
{%- block footer %} {% endblock %}
</body>
</html>

View File

@@ -10,7 +10,7 @@
{%- extends "layout.html" %}
{% set title = _('Search') %}
{% set script_files = script_files + ['_static/searchtools.js'] %}
{% block extrahead %}
{% block footer %}
<script type="text/javascript">
jQuery(function() { Search.loadIndex("{{ pathto('searchindex.js', 1) }}"); });
</script>

View File

@@ -1,5 +1,9 @@
<form id ="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
{%- if builder != 'singlehtml' %}
<div role="search">
<form id="rtd-search-form" class="wy-form" action="{{ pathto('search') }}" method="get">
<input type="text" name="q" placeholder="Search docs" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
{%- endif %}

View File

@@ -1 +1,2 @@
.font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions 
.rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}
.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:0.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version 
.fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}}
/*# sourceMappingURL=badge_only.css.map */

View File

@@ -0,0 +1,7 @@
{
"version": 3,
"mappings": "CAyDA,SAAY,EACV,qBAAsB,EAAE,UAAW,EAqDrC,QAAS,EARP,IAAK,EAAE,AAAC,EACR,+BAAS,EAEP,MAAO,EAAE,IAAK,EACd,MAAO,EAAE,CAAE,EACb,cAAO,EACL,IAAK,EAAE,GAAI,EC1Gb,SAkBC,EAjBC,UAAW,ECFJ,UAAW,EDGlB,UAAW,EAHqC,KAAM,EAItD,SAAU,EAJsD,KAAM,EAapE,EAAG,EAAE,qCAAwB,EAC7B,EAAG,EAAE,0PAAyE,ECZpF,SAAU,EACR,MAAO,EAAE,WAAY,EACrB,UAAW,EAAE,UAAW,EACxB,SAAU,EAAE,KAAM,EAClB,UAAW,EAAE,KAAM,EACnB,UAAW,EAAE,AAAC,EACd,cAAe,EAAE,MAAO,EAG1B,IAAK,EACH,MAAO,EAAE,WAAY,EACrB,cAAe,EAAE,MAAO,EAIxB,KAAG,EACD,MAAO,EAAE,WAAY,EACvB,sCAAiB,EAGf,IAAK,EAAE,MAAY,EAEvB,KAAM,EACJ,cAAe,EAAE,GAAI,EACrB,UAAW,EAAE,EAAG,EAChB,UAAW,EAAE,KAAM,EAEjB,YAAG,EACD,IAAK,EAAE,IAAI,EACb,oDAAiB,EAGf,aAAc,EAAE,OAAQ,EAG9B,cAAe,EACb,MAAO,EAAE,EAAO,EAElB,gBAAiB,EACf,MAAO,EAAE,EAAO,EAElB,oBAAqB,EACnB,MAAO,EAAE,EAAO,EAElB,sBAAuB,EACrB,MAAO,EAAE,EAAO,EAElB,kBAAmB,EACjB,MAAO,EAAE,EAAO,EAElB,oBAAqB,EACnB,MAAO,EAAE,EAAO,EAElB,oBAAqB,EACnB,MAAO,EAAE,EAAO,EAElB,sBAAuB,EACrB,MAAO,EAAE,EAAO,EAElB,qBAAsB,EACpB,MAAO,EAAE,EAAO,EAElB,uBAAwB,EACtB,MAAO,EAAE,EAAO,ECnElB,YAAa,EACX,OAAQ,EAAE,IAAK,EACf,KAAM,EAAE,AAAC,EACT,GAAI,EAAE,AAAC,EACP,IAAK,EC6E+B,IAAK,ED5EzC,IAAK,ECE+B,MAAyB,EDD7D,SAAU,EAAE,MAAkC,EAC9C,SAAU,EAAE,iBAAiC,EAC7C,UAAW,EEAyB,sDAAM,EFC1C,MAAO,EC+E6B,EAAG,ED9EvC,cAAC,EACC,IAAK,ECqE6B,MAAW,EDpE7C,cAAe,EAAE,GAAI,EACvB,6BAAgB,EACd,MAAO,EAAE,GAAI,EACf,iCAAoB,EAClB,MAAO,EAAE,GAAqB,EAC9B,eAAgB,EAAE,MAAkC,EACpD,MAAO,EAAE,IAAK,EACd,SAAU,EAAE,IAAK,EACjB,QAAS,EAAE,EAAG,EACd,KAAM,EAAE,MAAO,EACf,IAAK,ECiD6B,MAAM,EJgC1C,IAAK,EAAE,AAAC,EACR,iFAAS,EAEP,MAAO,EAAE,IAAK,EACd,MAAO,EAAE,CAAE,EACb,uCAAO,EACL,IAAK,EAAE,GAAI,EGrFX,qCAAG,EACD,IAAK,EClB2B,MAAyB,EDmB3D,0CAAQ,EACN,IAAK,EAAE,GAAI,EACb,4CAAU,EACR,IAAK,EAAE,GAAI,EACb,iDAAiB,EACf,eAAgB,ECQgB,MAAI,EDPpC,IAAK,EC0B2B,GAAM,EDzBxC,wDAAwB,EACtB,eAAgB,ECXgB,MAAO,EDYvC,IAAK,ECzB2B,GAAI,ED0BxC,yCAA8B,EAC5B,MAAO,EAAE,IAAK,EAChB,gCAAmB,EACjB,QAAS,EAAE,EAAG,EACd,MAAO,EAAE,GAAqB,EAC9B,IAAK,ECE6B,GAAwB,EDD1D,MAAO,EAAE,GAAI,EACb,mCAAE,EACA,MAAO,EAAE,IAAK,EACd,KAAM,EAAE,EAAG,EACX,KAAM,EAAE,AAAC,EACT,KAAM,E
AAE,KAAM,EACd,MAAO,EAAE,AAAC,EACV,SAAU,EAAE,gBAA6C,EAC3D,mCAAE,EACA,MAAO,EAAE,WAAY,EACrB,KAAM,EAAE,AAAC,EACT,qCAAC,EACC,MAAO,EAAE,WAAY,EACrB,MAAO,EAAE,EAAqB,EAC9B,IAAK,ECjDyB,MAAyB,EDkD7D,sBAAW,EACT,IAAK,EAAE,GAAI,EACX,KAAM,EAAE,GAAI,EACZ,IAAK,EAAE,GAAI,EACX,GAAI,EAAE,GAAI,EACV,KAAM,EAAE,GAAI,EACZ,QAAS,ECkByB,IAAK,EDjBvC,iCAAU,EACR,IAAK,EAAE,GAAI,EACb,+BAAQ,EACN,IAAK,EAAE,GAAI,EACb,oDAA+B,EAC7B,SAAU,EAAE,IAAK,EACjB,6DAAQ,EACN,IAAK,EAAE,GAAI,EACb,+DAAU,EACR,IAAK,EAAE,GAAI,EACf,2CAAoB,EAClB,IAAK,EAAE,GAAI,EACX,KAAM,EAAE,GAAI,EACZ,UAAW,EAAE,GAAI,EACjB,MAAO,EAAE,IAAuB,EAChC,MAAO,EAAE,IAAK,EACd,SAAU,EAAE,KAAM,EGhDpB,mCAAsB,EHmDxB,YAAa,EACX,IAAK,EAAE,EAAG,EACV,MAAO,EAAE,GAAI,EACb,kBAAO,EACL,MAAO,EAAE,IAAK,EAClB,EAAG,EACD,IAAK,EAAE,GAAI,EACX,KAAM,EAAE,GAAI",
"sources": ["../../../bower_components/wyrm/sass/wyrm_core/_mixin.sass","../../../bower_components/bourbon/dist/css3/_font-face.scss","../../../sass/_theme_badge_fa.sass","../../../sass/_theme_badge.sass","../../../bower_components/wyrm/sass/wyrm_core/_wy_variables.sass","../../../sass/_theme_variables.sass","../../../bower_components/neat/app/assets/stylesheets/grid/_media.scss"],
"names": [],
"file": "badge_only.css"
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -280,8 +280,8 @@
<glyph unicode="&#xf113;" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 t-170 -47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 q0 -87 -27 -168q136 -160 136 -398z" />
<glyph unicode="&#xf114;" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" />
<glyph unicode="&#xf115;" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z " />
<glyph unicode="&#xf116;" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" />
<glyph unicode="&#xf117;" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" />
<glyph unicode="&#xf116;" horiz-adv-x="1792" />
<glyph unicode="&#xf117;" horiz-adv-x="1792" />
<glyph unicode="&#xf118;" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf119;" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf11a;" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
@@ -310,7 +310,7 @@
<glyph unicode="&#xf133;" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" />
<glyph unicode="&#xf134;" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" />
<glyph unicode="&#xf135;" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" />
<glyph unicode="&#xf136;" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" />
<glyph unicode="&#xf136;" horiz-adv-x="1792" d="M1745 763l-164 -763h-334l178 832q13 56 -15 88q-27 33 -83 33h-169l-204 -953h-334l204 953h-286l-204 -953h-334l204 953l-153 327h1276q101 0 189.5 -40.5t147.5 -113.5q60 -73 81 -168.5t0 -194.5z" />
<glyph unicode="&#xf137;" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf138;" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf139;" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
@@ -342,7 +342,7 @@
<glyph unicode="&#xf155;" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 t53 -63.5t31.5 -76.5t13 -94z" />
<glyph unicode="&#xf156;" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf157;" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" />
<glyph unicode="&#xf158;" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t-23 -9h-224v-512q0 -26 19 -45t45 -19h128q26 0 45 19t19 45 v64q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1280 1376v-160q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h960q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf158;" horiz-adv-x="1280" d="M1043 971q0 100 -65 162t-171 62h-320v-448h320q106 0 171 62t65 162zM1280 971q0 -193 -126.5 -315t-326.5 -122h-340v-118h505q14 0 23 -9t9 -23v-128q0 -14 -9 -23t-23 -9h-505v-192q0 -14 -9.5 -23t-22.5 -9h-167q-14 0 -23 9t-9 23v192h-224q-14 0 -23 9t-9 23v128 q0 14 9 23t23 9h224v118h-224q-14 0 -23 9t-9 23v149q0 13 9 22.5t23 9.5h224v629q0 14 9 23t23 9h539q200 0 326.5 -122t126.5 -315z" />
<glyph unicode="&#xf159;" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf15a;" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" />
<glyph unicode="&#xf15b;" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" />
@@ -390,10 +390,25 @@
<glyph unicode="&#xf188;" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 -19t19 -45t-19 -45l-173 -173v-294h224q26 0 45 -19 t19 -45zM1152 1152h-640q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5z" />
<glyph unicode="&#xf189;" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91t-106 151t-122.5 211t-130.5 272q-6 16 -6 27t3 16l4 6 q15 19 57 19l274 2q12 -2 23 -6.5t16 -8.5l5 -3q16 -11 24 -32q20 -50 46 -103.5t41 -81.5l16 -29q29 -60 56 -104t48.5 -68.5t41.5 -38.5t34 -14t27 5q2 1 5 5t12 22t13.5 47t9.5 81t0 125q-2 40 -9 73t-14 46l-6 12q-25 34 -85 43q-13 2 5 24q17 19 38 30q53 26 239 24 q82 -1 135 -13q20 -5 33.5 -13.5t20.5 -24t10.5 -32t3.5 -45.5t-1 -55t-2.5 -70.5t-1.5 -82.5q0 -11 -1 -42t-0.5 -48t3.5 -40.5t11.5 -39t22.5 -24.5q8 -2 17 -4t26 11t38 34.5t52 67t68 107.5q60 104 107 225q4 10 10 17.5t11 10.5l4 3l5 2.5t13 3t20 0.5l288 2 q39 5 64 -2.5t31 -16.5z" />
<glyph unicode="&#xf18a;" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 369.5 141.5t132.5 264.5zM1563 422 q0 -68 -37 -139.5t-109 -137t-168.5 -117.5t-226 -83t-270.5 -31t-275 33.5t-240.5 93t-171.5 151t-65 199.5q0 115 69.5 245t197.5 258q169 169 341.5 236t246.5 -7q65 -64 20 -209q-4 -14 -1 -20t10 -7t14.5 0.5t13.5 3.5l6 2q139 59 246 59t153 -61q45 -63 0 -178 q-2 -13 -4.5 -20t4.5 -12.5t12 -7.5t17 -6q57 -18 103 -47t80 -81.5t34 -116.5zM1489 1046q42 -47 54.5 -108.5t-6.5 -117.5q-8 -23 -29.5 -34t-44.5 -4q-23 8 -34 29.5t-4 44.5q20 63 -24 111t-107 35q-24 -5 -45 8t-25 37q-5 24 8 44.5t37 25.5q60 13 119 -5.5t101 -65.5z M1670 1209q87 -96 112.5 -222.5t-13.5 -241.5q-9 -27 -34 -40t-52 -4t-40 34t-5 52q28 82 10 172t-80 158q-62 69 -148 95.5t-173 8.5q-28 -6 -52 9.5t-30 43.5t9.5 51.5t43.5 29.5q123 26 244 -11.5t208 -134.5z" />
<glyph unicode="&#xf18b;" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 328 185zM1920 640q0 -201 -129 -351q-145 70 -234.5 218 t-89.5 328v341q194 -32 323.5 -184t129.5 -352z" />
<glyph unicode="&#xf18c;" horiz-adv-x="1792" />
<glyph unicode="&#xf18d;" horiz-adv-x="1792" />
<glyph unicode="&#xf18e;" horiz-adv-x="1792" />
<glyph unicode="&#xf18b;" d="M1133 -34q-171 -94 -368 -94q-196 0 -367 94q138 87 235.5 211t131.5 268q35 -144 132.5 -268t235.5 -211zM638 1394v-485q0 -252 -126.5 -459.5t-330.5 -306.5q-181 215 -181 495q0 187 83.5 349.5t229.5 269.5t325 137zM1536 638q0 -280 -181 -495 q-204 99 -330.5 306.5t-126.5 459.5v485q179 -30 325 -137t229.5 -269.5t83.5 -349.5z" />
<glyph unicode="&#xf18c;" horiz-adv-x="1408" d="M1402 433q-32 -80 -76 -138t-91 -88.5t-99 -46.5t-101.5 -14.5t-96.5 8.5t-86.5 22t-69.5 27.5t-46 22.5l-17 10q-113 -228 -289.5 -359.5t-384.5 -132.5q-19 0 -32 13t-13 32t13 31.5t32 12.5q173 1 322.5 107.5t251.5 294.5q-36 -14 -72 -23t-83 -13t-91 2.5t-93 28.5 t-92 59t-84.5 100t-74.5 146q114 47 214 57t167.5 -7.5t124.5 -56.5t88.5 -77t56.5 -82q53 131 79 291q-7 -1 -18 -2.5t-46.5 -2.5t-69.5 0.5t-81.5 10t-88.5 23t-84 42.5t-75 65t-54.5 94.5t-28.5 127.5q70 28 133.5 36.5t112.5 -1t92 -30t73.5 -50t56 -61t42 -63t27.5 -56 t16 -39.5l4 -16q12 122 12 195q-8 6 -21.5 16t-49 44.5t-63.5 71.5t-54 93t-33 112.5t12 127t70 138.5q73 -25 127.5 -61.5t84.5 -76.5t48 -85t20.5 -89t-0.5 -85.5t-13 -76.5t-19 -62t-17 -42l-7 -15q1 -5 1 -50.5t-1 -71.5q3 7 10 18.5t30.5 43t50.5 58t71 55.5t91.5 44.5 t112 14.5t132.5 -24q-2 -78 -21.5 -141.5t-50 -104.5t-69.5 -71.5t-81.5 -45.5t-84.5 -24t-80 -9.5t-67.5 1t-46.5 4.5l-17 3q-23 -147 -73 -283q6 7 18 18.5t49.5 41t77.5 52.5t99.5 42t117.5 20t129 -23.5t137 -77.5z" />
<glyph unicode="&#xf18d;" horiz-adv-x="1280" d="M1259 283v-66q0 -85 -57.5 -144.5t-138.5 -59.5h-57l-260 -269v269h-529q-81 0 -138.5 59.5t-57.5 144.5v66h1238zM1259 609v-255h-1238v255h1238zM1259 937v-255h-1238v255h1238zM1259 1077v-67h-1238v67q0 84 57.5 143.5t138.5 59.5h846q81 0 138.5 -59.5t57.5 -143.5z " />
<glyph unicode="&#xf18e;" d="M1152 640q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192h-352q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h352v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198 t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf190;" d="M1152 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-352v-192q0 -14 -9 -23t-23 -9q-12 0 -24 10l-319 319q-9 9 -9 23t9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h352q13 0 22.5 -9.5t9.5 -22.5zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198 t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf191;" d="M1024 960v-640q0 -26 -19 -45t-45 -19q-20 0 -37 12l-448 320q-27 19 -27 52t27 52l448 320q17 12 37 12q26 0 45 -19t19 -45zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5z M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="&#xf192;" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5 t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
<glyph unicode="&#xf193;" horiz-adv-x="1664" d="M1023 349l102 -204q-58 -179 -210 -290t-339 -111q-156 0 -288.5 77.5t-210 210t-77.5 288.5q0 181 104.5 330t274.5 211l17 -131q-122 -54 -195 -165.5t-73 -244.5q0 -185 131.5 -316.5t316.5 -131.5q126 0 232.5 65t165 175.5t49.5 236.5zM1571 249l58 -114l-256 -128 q-13 -7 -29 -7q-40 0 -57 35l-239 477h-472q-24 0 -42.5 16.5t-21.5 40.5l-96 779q-2 16 6 42q14 51 57 82.5t97 31.5q66 0 113 -47t47 -113q0 -69 -52 -117.5t-120 -41.5l37 -289h423v-128h-407l16 -128h455q40 0 57 -35l228 -455z" />
<glyph unicode="&#xf194;" d="M1254 899q16 85 -21 132q-52 65 -187 45q-17 -3 -41 -12.5t-57.5 -30.5t-64.5 -48.5t-59.5 -70t-44.5 -91.5q80 7 113.5 -16t26.5 -99q-5 -52 -52 -143q-43 -78 -71 -99q-44 -32 -87 14q-23 24 -37.5 64.5t-19 73t-10 84t-8.5 71.5q-23 129 -34 164q-12 37 -35.5 69 t-50.5 40q-57 16 -127 -25q-54 -32 -136.5 -106t-122.5 -102v-7q16 -8 25.5 -26t21.5 -20q21 -3 54.5 8.5t58 10.5t41.5 -30q11 -18 18.5 -38.5t15 -48t12.5 -40.5q17 -46 53 -187q36 -146 57 -197q42 -99 103 -125q43 -12 85 -1.5t76 31.5q131 77 250 237 q104 139 172.5 292.5t82.5 226.5zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="&#xf195;" horiz-adv-x="1152" d="M1152 704q0 -191 -94.5 -353t-256.5 -256.5t-353 -94.5h-160q-14 0 -23 9t-9 23v611l-215 -66q-3 -1 -9 -1q-10 0 -19 6q-13 10 -13 26v128q0 23 23 31l233 71v93l-215 -66q-3 -1 -9 -1q-10 0 -19 6q-13 10 -13 26v128q0 23 23 31l233 71v250q0 14 9 23t23 9h160 q14 0 23 -9t9 -23v-181l375 116q15 5 28 -5t13 -26v-128q0 -23 -23 -31l-393 -121v-93l375 116q15 5 28 -5t13 -26v-128q0 -23 -23 -31l-393 -121v-487q188 13 318 151t130 328q0 14 9 23t23 9h160q14 0 23 -9t9 -23z" />
<glyph unicode="&#xf196;" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-352v-352q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v352h-352q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h352v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-352h352q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832 q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" />
<glyph unicode="&#xf197;" horiz-adv-x="1792" />
<glyph unicode="&#xf198;" horiz-adv-x="1792" />
<glyph unicode="&#xf199;" horiz-adv-x="1792" />
<glyph unicode="&#xf19a;" horiz-adv-x="1792" />
<glyph unicode="&#xf19b;" horiz-adv-x="1792" />
<glyph unicode="&#xf19c;" horiz-adv-x="1792" />
<glyph unicode="&#xf19d;" horiz-adv-x="1792" />
<glyph unicode="&#xf19e;" horiz-adv-x="1792" />
<glyph unicode="&#xf500;" horiz-adv-x="1792" />
</font>
</defs></svg>

Before

Width:  |  Height:  |  Size: 193 KiB

After

Width:  |  Height:  |  Size: 197 KiB

File diff suppressed because one or more lines are too long

View File

@@ -1,16 +1,113 @@
$( document ).ready(function() {
// Shift nav in mobile when clicking the menu.
$("[data-toggle='wy-nav-top']").click(function() {
$("[data-toggle='wy-nav-shift']").toggleClass("shift");
$("[data-toggle='rst-versions']").toggleClass("shift");
});
// Close menu when you click a link.
$(".wy-menu-vertical .current ul li a").click(function() {
$("[data-toggle='wy-nav-shift']").removeClass("shift");
$("[data-toggle='rst-versions']").toggleClass("shift");
});
$("[data-toggle='rst-current-version']").click(function() {
$("[data-toggle='rst-versions']").toggleClass("shift-up");
});
$("table.docutils:not(.field-list").wrap("<div class='wy-table-responsive'></div>");
// Toggle the 'current' marker on the <li> enclosing `elem`, first clearing
// the marker from any sibling entries (and their nested lists) so at most
// one branch at this level stays expanded.
function toggleCurrent (elem) {
    var item = elem.closest('li');
    // Collect every previously-current entry among siblings, descendants of
    // siblings, and this item's own nested list, then clear them in one pass.
    item.siblings('li.current')
        .add(item.siblings().find('li.current'))
        .add(item.find('> ul li.current'))
        .removeClass('current');
    item.toggleClass('current');
}
// Wire up theme interactions once the DOM is ready.  Click handlers are
// delegated from `document` so they keep working for nav elements that are
// re-rendered after the initial page load.
$(document).ready(function() {
    // Shift nav in mobile when clicking the menu.
    $(document).on('click', "[data-toggle='wy-nav-top']", function() {
        $("[data-toggle='wy-nav-shift']").toggleClass("shift");
        $("[data-toggle='rst-versions']").toggleClass("shift");
    });
    // Nav menu link click operations
    $(document).on('click', ".wy-menu-vertical .current ul li a", function() {
        var target = $(this);
        // Close menu when you click a link.
        $("[data-toggle='wy-nav-shift']").removeClass("shift");
        $("[data-toggle='rst-versions']").toggleClass("shift");
        // Handle dynamic display of l3 and l4 nav lists
        toggleCurrent(target);
        if (typeof(window.SphinxRtdTheme) != 'undefined') {
            window.SphinxRtdTheme.StickyNav.hashChange();
        }
    });
    $(document).on('click', "[data-toggle='rst-current-version']", function() {
        $("[data-toggle='rst-versions']").toggleClass("shift-up");
    });
    // Make tables responsive
    $("table.docutils:not(.field-list)").wrap("<div class='wy-table-responsive'></div>");
    // Add expand links to all parents of nested ul
    $('.wy-menu-vertical ul').siblings('a').each(function () {
        var link = $(this);
        // FIX: `expand` previously lacked `var` and leaked an implicit
        // global, shared across every iteration's enclosing scope.
        var expand = $('<span class="toctree-expand"></span>');
        expand.on('click', function (ev) {
            toggleCurrent(link);
            ev.stopPropagation();
            return false;
        });
        link.prepend(expand);
    });
});
// Sphinx theme state
// Module-pattern singleton exposing StickyNav: keeps the side nav's scroll
// position following the window scroll, and expands the nav tree to match
// the current URL hash on hashchange.
window.SphinxRtdTheme = (function (jquery) {
    var stickyNav = (function () {
        var navBar,             // nav.wy-nav-side:first element (set by init)
            win,                // jquery-wrapped window (set by init)
            winScroll = false,  // a window scroll happened and awaits processing
            linkScroll = false, // scroll was caused by a nav-link click; suppress sync
            winPosition = 0,    // window scrollTop at last processed sync
            enable = function () {
                init();
                reset();
                win.on('hashchange', reset);
                // Set scrolling
                win.on('scroll', function () {
                    if (!linkScroll) {
                        winScroll = true;
                    }
                });
                // Poll a flag instead of doing work in every scroll event,
                // so the nav position is updated at most every 25ms.
                setInterval(function () {
                    if (winScroll) {
                        winScroll = false;
                        var newWinPosition = win.scrollTop(),
                            navPosition = navBar.scrollTop(),
                            newNavPosition = navPosition + (newWinPosition - winPosition);
                        navBar.scrollTop(newNavPosition);
                        winPosition = newWinPosition;
                    }
                }, 25);
            },
            init = function () {
                navBar = jquery('nav.wy-nav-side:first');
                win = jquery(window);
            },
            reset = function () {
                // Get anchor from URL and open up nested nav
                var anchor = encodeURI(window.location.hash);
                if (anchor) {
                    try {
                        var link = $('.wy-menu-vertical')
                            .find('[href="' + anchor + '"]');
                        $('.wy-menu-vertical li.toctree-l1 li.current')
                            .removeClass('current');
                        link.closest('li.toctree-l2').addClass('current');
                        link.closest('li.toctree-l3').addClass('current');
                        link.closest('li.toctree-l4').addClass('current');
                    }
                    catch (err) {
                        console.log("Error expanding nav for anchor", err);
                    }
                }
            },
            hashChange = function () {
                // Called when a nav link triggers a hash change: mute the
                // scroll-sync until that navigation's scroll settles.
                linkScroll = true;
                win.one('hashchange', function () {
                    linkScroll = false;
                });
            };
        jquery(init);
        return {
            enable: enable,
            hashChange: hashChange
        };
    }());
    return {
        StickyNav: stickyNav
    };
}($));

View File

@@ -5,3 +5,5 @@ stylesheet = css/theme.css
[options]
typekit_id = hiw1hhg
analytics_id =
sticky_navigation = False
logo_only =

View File

@@ -1,10 +1,10 @@
{% if READTHEDOCS %}
{# Add rst-badge after rst-versions for small badge style. #}
<div class="rst-versions" data-toggle="rst-versions">
<div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions">
<span class="rst-current-version" data-toggle="rst-current-version">
<span class="icon icon-book"> Read the Docs</span>
v: {{ current_version }}
<span class="icon icon-caret-down"></span>
<span class="fa fa-book"> Read the Docs</span>
v: {{ current_version }}
<span class="fa fa-caret-down"></span>
</span>
<div class="rst-other-versions">
<dl>

View File

@@ -3,19 +3,18 @@
Basic usage
=====================
Spack is implemented as a single command (``spack``) with many
*subcommands*, much like ``git``, ``svn``, ``yum``, or ``apt-get``.
Only a small subset of commands are needed for typical usage.
This section covers a small set of subcommands that should cover most
general use cases for Spack.
The ``spack`` command has many *subcommands*. You'll only need a
small subset of them for typical usage.
Listing available packages
------------------------------
The first thing you will likely want to do with spack is find out what
software is available to install. There are a few relevant commands.
To install software with Spack, you need to know what software is
available. You can see a list of available package names at the
:ref:`package-list` webpage, or using the ``spack list`` command.
.. _spack-list:
``spack list``
~~~~~~~~~~~~~~~~
@@ -26,45 +25,293 @@ Spack can install:
.. command-output:: spack list
The packages are listed by name in alphabetical order. You can also
do wildcard searches using ``*``:
do wildcard searches using ``*``:
.. command-output:: spack list m*
.. command-output:: spack list *util*
.. _spack-info:
``spack info``
~~~~~~~~~~~~~~~~
To get information on a particular package from the full list, run
``spack info <package name>``. For example, for ``mpich`` the output
looks like this:
To get more information on a particular package from `spack list`, use
`spack info`. Just supply the name of a package:
.. command-output:: spack info mpich
This includes basic information about the package: where to download
it, its dependencies, virtual packages it provides (e.g. an MPI
implementation will provide the MPI interface), and a text
description, if one is available. :ref:`Dependencies
<sec-specs>` and :ref:`virtual dependencies
Most of the information is self-explanatory. The *safe versions* are
versions that Spack knows the checksum for, and it will use the
checksum to verify that these versions download without errors or
viruses.
:ref:`Dependencies <sec-specs>` and :ref:`virtual dependencies
<sec-virtual-dependencies>` are described in more detail later.
.. _spack-versions:
``spack versions``
~~~~~~~~~~~~~~~~~~~~~~~~
To see available versions of a package, run ``spack versions``, for
example:
To see *more* available versions of a package, run ``spack versions``.
For example:
.. command-output:: spack versions libelf
Since it has to manage many different software packages, Spack doesn't
place many restrictions on what a package version has to look like.
Packages like ``mpich`` use traditional version numbers like
``3.0.4``. Other packages, like ``libdwarf`` use date-stamp versions
like ``20130729``. Versions can contain numbers, letters, dashes,
underscores, and periods.
There are two sections in the output. *Safe versions* are versions
for which Spack has a checksum on file. It can verify that these
versions are downloaded correctly.
Compiler Configuration
In many cases, Spack can also show you what versions are available out
on the web---these are *remote versions*. Spack gets this information
by scraping it directly from package web pages. Depending on the
package and how its releases are organized, Spack may or may not be
able to find remote versions.
Installing and uninstalling
------------------------------
.. _spack-install:
``spack install``
~~~~~~~~~~~~~~~~~~~~~
``spack install`` will install any package shown by ``spack list``.
For example, to install the latest version of the ``mpileaks``
package, you might type this:
.. code-block:: sh
$ spack install mpileaks
If `mpileaks` depends on other packages, Spack will install the
dependencies first. It then fetches the ``mpileaks`` tarball, expands
it, verifies that it was downloaded without errors, builds it, and
installs it in its own directory under ``$SPACK_ROOT/opt``. You'll see
a number of messages from spack, a lot of build output, and a message
that the package is installed:
.. code-block:: sh
$ spack install mpileaks
==> Installing mpileaks
==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
... build output ...
==> Successfully installed mpileaks.
Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
[+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
The last line, with the ``[+]``, indicates where the package is
installed.
Building a specific version
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Spack can also build *specific versions* of a package. To do this,
just add ``@`` after the package name, followed by a version:
.. code-block:: sh
$ spack install mpich@3.0.4
Any number of versions of the same package can be installed at once
without interfering with each other. This is good for multi-user
sites, as installing a version that one user needs will not disrupt
existing installations for other users.
In addition to different versions, Spack can customize the compiler,
compile-time options (variants), and platform (for cross compiles) of
an installation. Spack is unique in that it can also configure the
*dependencies* a package is built with. For example, two
configurations of the same version of a package, one built with boost
1.39.0, and the other version built with version 1.43.0, can coexist.
This can all be done on the command line using the *spec* syntax.
Spack calls the descriptor used to refer to a particular package
configuration a **spec**. In the commands above, ``mpileaks`` and
``mpileaks@3.0.4``. We'll talk more about how you can use them to
customize an installation in :ref:`sec-specs`.
.. _spack-uninstall:
``spack uninstall``
~~~~~~~~~~~~~~~~~~~~~
To uninstall a package, type ``spack uninstall <package>``. This will
completely remove the directory in which the package was installed.
.. code-block:: sh
spack uninstall mpich
If there are still installed packages that depend on the package to be
uninstalled, spack will refuse to uninstall it. You can override this
behavior with ``spack uninstall -f <package>``, but you risk breaking
other installed packages. In general, it is safer to remove dependent
packages *before* removing their dependencies.
A line like ``spack uninstall mpich`` may be ambiguous, if multiple
``mpich`` configurations are installed. For example, if both
``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer
to either one. Because it cannot determine which one to uninstall,
Spack will ask you to provide a version number to remove the
ambiguity. As an example, ``spack uninstall mpich@3.1`` is
unambiguous in this scenario.
Seeing installed packages
-----------------------------------
We know that ``spack list`` shows you the names of available packages,
but how do you figure out which are installed?
.. _spack-find:
``spack find``
~~~~~~~~~~~~~~~~~~~~~~
``spack find`` shows the *specs* of installed packages. A spec is
like a name, but it has a version, compiler, architecture, and build
options associated with it. In spack, you can have many installations
of the same package with different specs.
Running ``spack find`` with no arguments lists installed packages:
.. code-block:: sh
$ spack find
==> 74 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
boost@1.55.0 libelf@0.8.13 py-nose@1.3.4
bzip2@1.0.6 libffi@3.1 py-numpy@1.9.1
cairo@1.14.0 libmng@2.0.2 py-pygments@2.0.1
callpath@1.0.2 libpng@1.6.16 py-pyparsing@2.0.3
cmake@3.0.2 libtiff@4.0.3 py-pyside@1.2.2
dbus@1.8.6 libtool@2.4.2 py-pytz@2014.10
dbus@1.9.0 libxcb@1.11 py-setuptools@11.3.1
dyninst@8.1.2 libxml2@2.9.2 py-six@1.9.0
fontconfig@2.11.1 libxml2@2.9.2 python@2.7.8
freetype@2.5.3 llvm@3.0 qhull@1.0
gdk-pixbuf@2.31.2 memaxes@0.5 qt@4.8.6
glib@2.42.1 mesa@8.0.5 qt@5.4.0
graphlib@2.0.0 mpich@3.0.4 readline@6.3
gtkplus@2.24.25 mpileaks@1.0 sqlite@3.8.5
harfbuzz@0.9.37 mrnet@4.1.0 stat@2.1.0
hdf5@1.8.13 ncurses@5.9 tcl@8.6.3
icu@54.1 netcdf@4.3.3 tk@src
jpeg@9a openssl@1.0.1h vtk@6.1.0
launchmon@1.0.1 pango@1.36.8 xcb-proto@1.11
lcms@2.6 pixman@0.32.6 xz@5.2.0
libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
-- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
libelf@0.8.10 mpich@3.0.4
Packages are divided into groups according to their architecture and
compiler. Within each group, Spack tries to keep the view simple, and
only shows the version of installed packages.
In some cases, there may be different configurations of the *same*
version of a package installed. For example, there are two
installations of ``libdwarf@20130729`` above. We can look at them
in more detail using ``spack find -d``, and by asking only to show
``libdwarf`` packages:
.. code-block:: sh
$ spack find --deps libdwarf
==> 2 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
^libelf@0.8.12
libdwarf@20130729-b52fac98
^libelf@0.8.13
Now we see that the two instances of ``libdwarf`` depend on
*different* versions of ``libelf``: 0.8.12 and 0.8.13. This view can
become complicated for packages with many dependencies. If you just
want to know whether two packages' dependencies differ, you can use
``spack find -l``:
.. code-block:: sh
$ spack find -l libdwarf
==> 2 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
Now the ``libdwarf`` installs have hashes after their names. These are
hashes over all of the dependencies of each package. If the hashes
are the same, then the packages have the same dependency configuration.
If you want to know the path where each package is installed, you can
use ``spack find -p``:
.. code-block:: sh
$ spack find -p
==> 74 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
...
And, finally, you can restrict your search to a particular package
by supplying its name:
.. code-block:: sh
$ spack find -p libelf
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
package. If you want to find only libelf versions greater than version
0.8.12, you could say:
.. code-block:: sh
$ spack find libelf@0.8.12:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libelf@0.8.12 libelf@0.8.13
Finding just the versions of libdwarf built with a particular version
of libelf would look like this:
.. code-block:: sh
$ spack find -l libdwarf ^libelf@0.8.12
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
The full spec syntax is discussed in detail in :ref:`sec-specs`.
Compiler configuration
-----------------------------------
Spack has the ability to build packages with multiple compilers and
@@ -72,6 +319,8 @@ compiler versions. Spack searches for compilers on your machine
automatically the first time it is run. It does this by inspecting
your path.
.. _spack-compilers:
``spack compilers``
~~~~~~~~~~~~~~~~~~~~~~~
@@ -98,6 +347,8 @@ compilers`` or ``spack compiler list``::
Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
.. _spack-compiler-add:
``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~
@@ -110,15 +361,19 @@ where the compiler is installed. For example::
intel@13.0.079
Or you can run ``spack compiler add`` with no arguments to force
autodetection. This is useful if you do not know where compilers
live, but new compilers have been added to your ``PATH``. For
example, using dotkit, you might do this::
auto-detection. This is useful if you do not know where compilers are
installed, but you know that new compilers have been added to your
``PATH``. For example, using dotkit, you might do this::
$ use gcc-4.9.0
$ module load gcc-4.9.0
$ spack compiler add
==> Added 1 new compiler to /Users/gamblin2/.spackconfig
gcc@4.9.0
This loads the environment module for gcc-4.9.0 to get it into the
``PATH``, and then it adds the compiler to Spack.
.. _spack-compiler-info:
``spack compiler info``
~~~~~~~~~~~~~~~~~~~~~~~
@@ -126,20 +381,23 @@ example, using dotkit, you might do this::
If you want to see specifics on a particular compiler, you can run
``spack compiler info`` on it::
$ spack compiler info intel@12.1.3
intel@12.1.3:
cc = /usr/local/bin/icc-12.1.293
cxx = /usr/local/bin/icpc-12.1.293
f77 = /usr/local/bin/ifort-12.1.293
fc = /usr/local/bin/ifort-12.1.293
$ spack compiler info intel@15
intel@15.0.0:
cc = /usr/local/bin/icc-15.0.090
cxx = /usr/local/bin/icpc-15.0.090
f77 = /usr/local/bin/ifort-15.0.090
fc = /usr/local/bin/ifort-15.0.090
This shows which C, C++, and Fortran compilers were detected by Spack.
Notice also that we didn't have to be too specific about the
version. We just said ``intel@15``, and information about the only
matching Intel compiler was displayed.
Manual configuration
~~~~~~~~~~~~~~~~~~~~~~~
Manual compiler configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If autodetection fails, you can manually conigure a compiler by
If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spackconfig`` file. You can do this by running
``spack config edit``, which will open the file in your ``$EDITOR``.
@@ -153,8 +411,8 @@ Each compiler configuration in the file looks like this::
fc = /usr/local/bin/ifort-15.0.024-beta
...
For compilers, like ``clang``, that do not support Fortran, you can simply
put ``None`` for ``f77`` and ``fc``::
For compilers, like ``clang``, that do not support Fortran, put
``None`` for ``f77`` and ``fc``::
[compiler "clang@3.3svn"]
cc = /usr/bin/clang
@@ -163,180 +421,18 @@ put ``None`` for ``f77`` and ``fc``::
fc = None
Once you save the file, the configured compilers will show up in the
list displayed when you run ``spack compilers``.
Seeing installed packages
-----------------------------------
``spack find``
~~~~~~~~~~~~~~~~~~~~~~
The second thing you're likely to want to do with Spack, and the first
thing users of your system will likely want to do, is to find what
software is already installed and ready to use. You can do that with
``spack find``.
Running ``spack find`` with no arguments will list all the installed
packages:
.. code-block:: sh
$ spack find
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libdwarf@20130207-d9b909
libdwarf@20130729-d9b909
libdwarf@20130729-b52fac
libelf@0.8.11
libelf@0.8.12
libelf@0.8.13
Packages are grouped by architecture, then by the compiler used to
build them, and then by their versions and options. If a package has
dependencies, there will also be a hash at the end of the name
indicating the dependency configuration. Packages with the same hash
have the same dependency configuration. If you want ALL information
about dependencies, as well, then you can supply ``-l`` or ``--long``:
.. code-block:: sh
$ spack find -l
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libdwarf@20130207
^libelf@0.8.12
libdwarf@20130729
^libelf@0.8.12
libdwarf@20130729
^libelf@0.8.13
libelf@0.8.11
libelf@0.8.12
libelf@0.8.13
Now you can see which versions of ``libelf`` each version of
``libdwarf`` was built with.
If you want to know the path where each of these packages is
installed, do ``spack find -p`` or ``--path``:
.. code-block:: sh
$ spack find -p
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libdwarf@20130207-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130207-d9b909
libdwarf@20130729-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-d9b909
libdwarf@20130729-b52fac /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-b52fac
libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
And, finally, you can restrict your search to a particular package
by supplying its name:
.. code-block:: sh
$ spack find -p libelf
== chaos_5_x86_64_ib ===========================================
-- gcc@4.4.7 ---------------------------------------------------
libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
package. The full spec syntax is discussed in detail in
:ref:`sec-specs`.
Installing and uninstalling
------------------------------
``spack install``
~~~~~~~~~~~~~~~~~~~~~
``spack install`` will install any package that appears in the output
of ``spack list``. To install the latest version of a package and all
of its dependencies, simply run ``spack install <package>``:
.. code-block:: sh
spack install mpileaks
Spack will fetch the tarball for ``mpileaks``, expand it, verify that
it was downloaded without errors, build it, and install it in its own
directory under ``$SPACK_HOME/opt``. If the requested package depends
on other packages in order to build, Spack fetches them as well, and
installs them before it installs the requested package. Like the main
package, each dependency is also installed in its own directory.
Spack can also build *specific* configurations of a package. For
example, to install something with a specific version, add ``@`` after
the package name, followed by a version string:
.. code-block:: sh
spack install mpich@3.0.4
Any number of configurations of the same package can be installed at
once without interfering with each other. This is good for multi-user
sites, as installing a version that one user needs will not disrupt
existing installations for other users.
In addition to version configurations, Spack can customize the
compiler, compile-time options (variants), and platform (for cross
compiles) of an installation. Spack is unique in that it can also
configure the *dependencies* a package is built with. For example,
two configurations of the same version of a package, one built with
boost 1.39.0, and the other version built with version 1.43.0, can
coexist.
This can all be done on the command line using special syntax. Spack
calls the descriptor used to refer to a particular package
configuration a **spec**. In the command lines above, both
``mpileaks`` and ``mpileaks@3.0.4`` are specs. To customize
additional properties, simply add more attributes to the spec. Specs
and their syntax are covered in more detail in :ref:`sec-specs`.
``spack uninstall``
~~~~~~~~~~~~~~~~~~~~~
To uninstall a package, type ``spack uninstall <package>``. This will
completely remove the directory in which the package was installed.
.. code-block:: sh
spack uninstall mpich
If there are still installed packages that depend on the package to be
uninstalled, spack will refuse to uninstall. If you know what you're
doing, you can override this with ``spack uninstall -f <package>``.
However, running this risks breaking other installed packages. In
general, it is safer to remove dependent packages *before* removing
their dependencies.
A line like ``spack uninstall mpich`` may be ambiguous, if multiple
``mpich`` configurations are installed. For example, if both
``mpich@3.0.2`` and ``mpich@3.1`` are installed, it could refer to
either one, and Spack cannot determine which one to uninstall. Spack
will ask you to provide a version number to remove the ambiguity. For
example, ``spack uninstall mpich@3.1`` is unambiguous in the above
scenario.
list displayed by ``spack compilers``.
.. _sec-specs:
Specs & Dependencies
Specs & dependencies
-------------------------
We now know that ``spack install`` and ``spack uninstall`` both take a
package name with an optional version specifier. In Spack, that
descriptor is called a *spec*. Spack uses specs to refer to a
particular build configuration (or configurations) of a package.
We know that ``spack install``, ``spack uninstall``, and other
commands take a package name with an optional version specifier. In
Spack, that descriptor is called a *spec*. Spack uses specs to refer
to a particular build configuration (or configurations) of a package.
Specs are more than a package name and a version; you can use them to
specify the compiler, compiler version, architecture, compile options,
and dependency options for a build. In this section, we'll go over
@@ -499,6 +595,11 @@ based on site policies.
Variants
~~~~~~~~~~~~~~~~~~~~~~~
.. Note::
Variants are not yet supported, but will be in the next Spack
release (0.9), due in Q2 2015.
Variants are named options associated with a particular package, and
they can be turned on or off. For example, above, supplying
``+debug`` causes ``mpileaks`` to be built with debug flags. The
@@ -544,6 +645,11 @@ the command line is provided for convenience and legibility.
Architecture specifier
~~~~~~~~~~~~~~~~~~~~~~~
.. Note::
Architecture specifiers are part of specs but are not yet
functional. They will be in Spack version 1.0, due in Q3 2015.
The architecture specifier starts with a ``=`` and also comes after
some package name within a spec. It allows a user to specify a
particular architecture for the package to be built. This is mostly
@@ -627,6 +733,8 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
.. _spack-providers:
``spack providers``
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -646,7 +754,7 @@ versions are now filtered out.
.. _shell-support:
Environment Modules
Environment modules
-------------------------------
.. note::
@@ -678,10 +786,6 @@ For ``csh`` and ``tcsh`` run:
You can put the above code in your ``.bashrc`` or ``.cshrc``, and
Spack's shell support will be available on the command line.
-------------------------------
When you install a package with Spack, it automatically generates an
environment module that lets you add the package to your environment.
@@ -698,6 +802,7 @@ The directories are automatically added to your ``MODULEPATH`` and
``DK_NODE`` environment variables when you enable Spack's `shell
support <shell-support_>`_.
Using Modules & Dotkits
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -710,7 +815,7 @@ of installed packages.
$ module avail
------- /g/g21/gamblin2/src/spack/share/spack/modules/chaos_5_x86_64_ib --------
------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
@@ -845,6 +950,8 @@ if newer, fancier module support is added to Spack at some later date,
you may want to regenerate all the modules to take advantage of these
new features.
.. _spack-module:
``spack module refresh``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -858,9 +965,226 @@ regenerate all module and dotkit files from scratch:
==> Regenerating tcl module files.
==> Regenerating dotkit module files.
.. _extensions:
Extensions & Python support
------------------------------------
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
modules in interpreted languages like `Python
<https://www.python.org>`_ are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.
Spack has support for this type of installation as well. In Spack,
a package that can live inside the prefix of another package is called
an *extension*. Suppose you have Python installed like so:
.. code-block:: sh
$ spack find python
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
python@2.7.8
.. _spack-extensions:
``spack extensions``
~~~~~~~~~~~~~~~~~~~~~~~
You can find extensions for your Python installation like this:
.. code-block:: sh
$ spack extensions python
==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> None activated.
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find -p``:
.. code-block:: sh
$ spack find -p py-numpy
==> 1 installed packages.
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
.. code-block:: sh
$ spack load python
$ python
Python 2.7.8 (default, Feb 17 2015, 01:35:25)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import numpy
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named numpy
>>>
Extensions & Environment Modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are two ways to get ``numpy`` working in Python. The first is
to use :ref:`shell-support`. You can simply ``use`` or ``load`` the
module for the extension, and it will be added to the ``PYTHONPATH``
in your current shell.
For tcl modules:
.. code-block:: sh
$ spack load python
$ spack load py-numpy
or, for dotkit:
.. code-block:: sh
$ spack use python
$ spack use py-numpy
Now ``import numpy`` will succeed for as long as you keep your current
session open.
Activating Extensions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is often desirable to have certain packages *always* available as
part of a Python installation. Spack offers a more permanent solution
for this case. Instead of requiring users to load particular
environment modules, you can *activate* the package within the Python
installation:
.. _spack-activate:
``spack activate``
^^^^^^^^^^^^^^^^^^^^^^^
.. code-block:: sh
$ spack activate py-numpy
==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
with. Spack knows that ``py-numpy`` depends on ``py-nose`` and
``py-setuptools``, so it activated those packages first. Finally,
once all dependencies were activated in the ``python`` installation,
``py-numpy`` was activated as well.
If we run ``spack extensions`` again, we now see the three new
packages listed as activated:
.. code-block:: sh
$ spack extensions python
==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> 3 currently activated:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
Now, when a user runs python, ``numpy`` will be available for import
*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.
Spack accomplishes this by symbolically linking the *entire* prefix of
the ``py-numpy`` into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.
The only limitation of activation is that you can only have a *single*
version of an extension activated at a time. This is because multiple
versions of the same extension would conflict if symbolically linked
into the same prefix. Users who want a different version of a package
can still get it by using environment modules, but they will have to
explicitly load their preferred version.
``spack activate -f``
^^^^^^^^^^^^^^^^^^^^^^^^^
If, for some reason, you want to activate a package *without* its
dependencies, you can use ``spack activate -f``:
.. code-block:: sh
$ spack activate -f py-numpy
==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
.. _spack-deactivate:
``spack deactivate``
^^^^^^^^^^^^^^^^^^^^^^^^^
We've seen how activating an extension can be used to set up a default
version of a Python module. Obviously, you may want to change that at
some point. ``spack deactivate`` is the command for this. There are
several variants:
* ``spack deactivate <extension>`` will deactivate a single
extension. If another activated extension depends on this one,
Spack will warn you and exit with an error.
* ``spack deactivate -f <extension>`` deactivates an extension
regardless of packages that depend on it.
* ``spack deactivate -a <extension>`` deactivates an extension and
all of its dependencies. Use ``-f`` to disregard dependents.
* ``spack deactivate -a <extendee>`` deactivates *all* activated
extensions of a package. For example, to deactivate *all* python
extensions, use::
spack deactivate -a python
Getting Help
-----------------------
.. _spack-help:
``spack help``
~~~~~~~~~~~~~~~~~~~~~~

View File

@@ -0,0 +1,10 @@
.. _command_index:
Command index
=================
This is an alphabetical list of commands with links to the places they
appear in the documentation.
.. hlist::
:columns: 3

View File

@@ -35,7 +35,9 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import sys
import os
import subprocess
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -43,14 +45,16 @@
sys.path.insert(0, os.path.abspath('exts'))
# Add the Spack bin directory to the path so that we can use its output in docs.
os.environ['SPACK_ROOT'] = '../../..'
spack_root = '../../..'
os.environ['SPACK_ROOT'] = spack_root
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
spack_version = subprocess.Popen(
['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.')
# Set an environment variable so that colify will print output like it would to
# a terminal.
os.environ['COLIFY_TTY'] = 'true'
os.environ['COLUMNS'] = '80'
os.environ['LINES'] = '25'
os.environ['COLIFY_SIZE'] = '25x80'
# Enable todo items
todo_include_todos = True
@@ -83,7 +87,7 @@
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
@@ -97,9 +101,9 @@
# built documents.
#
# The short X.Y version.
version = '1.0'
version = '.'.join(spack_version[:2])
# The full version, including alpha/beta/rc tags.
release = '1.0'
release = '.'.join(spack_version[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -145,7 +149,7 @@
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = []
#html_theme_options = [('show_copyright', False)]
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes"]

View File

@@ -50,11 +50,11 @@ as a descriptor for one or more instances of that template. Users
express the configuration they want using a spec, and a package turns
the spec into a complete build.
The obvious difficulty with this design is that users underspecify
The obvious difficulty with this design is that users under-specify
what they want. To build a software package, the package object needs
a *complete* specification. In Spack, if a spec describes only one
instance of a package, then we say it is **concrete**. If a spec
could describes many instances, (i.e. it is underspecified in one way
could describe many instances (i.e. it is under-specified in one way
or another), then we say it is **abstract**.
Spack's job is to take an *abstract* spec from the user, find a
@@ -92,7 +92,7 @@ with a high level view of Spack's directory structure::
Spack is designed so that it could live within a `standard UNIX
directory hierarchy <http://linux.die.net/man/7/hier>`_, so ``lib``,
``var``, and ``opt`` all contain a ``spack`` subdirectory in case
Spack is installed alongside other software. Most of the insteresting
Spack is installed alongside other software. Most of the interesting
parts of Spack live in ``lib/spack``. Files under ``var`` are created
as needed, so there is no ``var`` directory when you initially clone
Spack from the repository.
@@ -123,13 +123,13 @@ Package-related modules
Contains the :class:`Package <spack.package.Package>` class, which
is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle
<pacakge-lifecycle>` and manage the build process.
<package-lifecycle>` and manage the build process.
:mod:`spack.packages`
Contains all of the packages in Spack and methods for managing them.
Functions like :func:`packages.get <spack.packages.get>` and
:func:`class_name_for_package_name
<packages.class_name_for_package_name>` handle mapping packge module
<packages.class_name_for_package_name>` handle mapping package module
names to class names and dynamically instantiating packages by name
from module files.

View File

@@ -1,4 +1,4 @@
Feature Overview
Feature overview
==================
This is a high-level overview of features that make Spack different

View File

@@ -12,20 +12,30 @@ Getting spack is easy. You can clone it from the `github repository
$ git clone https://github.com/scalability-llnl/spack.git
This will create a directory called ``spack``. We'll assume that the
full path to this directory is in some environment called
``SPACK_HOME``. Add ``$SPACK_HOME/bin`` to your path and you're ready
to go:
full path to this directory is in the ``SPACK_ROOT`` environment
variable. Add ``$SPACK_ROOT/bin`` to your path and you're ready to
go:
.. code-block:: sh
$ export PATH=spack/bin:$PATH
$ export PATH=$SPACK_ROOT/bin:$PATH
$ spack install libelf
In general, most of your interactions with Spack will be through the
``spack`` command.
For a richer experience, use Spack's `shell support
<http://scalability-llnl.github.io/spack/basic_usage.html#environment-modules>`_:
.. code-block:: sh
Install
# For bash users
$ . $SPACK_ROOT/share/spack/setup-env.sh
# For tcsh or csh users (note you must set SPACK_ROOT)
$ setenv SPACK_ROOT /path/to/spack
$ source $SPACK_ROOT/share/spack/setup-env.csh
This automatically adds Spack to your ``PATH``.
Installation
--------------------
You don't need to install Spack; it's ready to run as soon as you
@@ -39,6 +49,7 @@ functionality. To install spack in a new directory, simply type:
$ spack bootstrap /my/favorite/prefix
This will install a new spack script in /my/favorite/prefix/bin, which
you can use just like you would the regular spack script. Each copy
of spack installs packages into its own ``$PREFIX/opt`` directory.
This will install a new spack script in ``/my/favorite/prefix/bin``,
which you can use just like you would the regular spack script. Each
copy of spack installs packages into its own ``$PREFIX/opt``
directory.

View File

@@ -18,8 +18,8 @@ configurations can coexist on the same system.
Most importantly, Spack is *simple*. It offers a simple *spec* syntax
so that users can specify versions and configuration options
concisely. Spack is also simple for package authors: package files
are writtin in pure Python, and specs allow package authors to write a
single build script for many different builds of the same package.
are written in pure Python, and specs allow package authors to
maintain a single file for many different builds of the same package.
See the :doc:`features` for examples and highlights.
@@ -46,8 +46,10 @@ Table of Contents
getting_started
basic_usage
packaging_guide
mirrors
site_configuration
developer_guide
command_index
package_list
API Docs <spack>

217
lib/spack/docs/mirrors.rst Normal file
View File

@@ -0,0 +1,217 @@
.. _mirrors:
Mirrors
============================
Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they
can get their files. Spack has support for this with *mirrors*. A
mirror is a URL that points to a directory, either on the local
filesystem or on some server, containing tarballs for all of Spack's
packages.
Here's an example of a mirror's directory structure::
mirror/
cmake/
cmake-2.8.10.2.tar.gz
dyninst/
dyninst-8.1.1.tgz
dyninst-8.1.2.tgz
libdwarf/
libdwarf-20130126.tar.gz
libdwarf-20130207.tar.gz
libdwarf-20130729.tar.gz
libelf/
libelf-0.8.12.tar.gz
libelf-0.8.13.tar.gz
libunwind/
libunwind-1.1.tar.gz
mpich/
mpich-3.0.4.tar.gz
mvapich2/
mvapich2-1.9.tgz
The structure is very simple. There is a top-level directory. The
second level directories are named after packages, and the third level
contains tarballs for each package, named after each package.
.. note::
Archives are **not** named exactly as they were in the package's fetch
URL. They have the form ``<name>-<version>.<extension>``, where
``<name>`` is Spack's name for the package, ``<version>`` is the
version of the tarball, and ``<extension>`` is whatever format the
package's fetch URL contains.
In order to make mirror creation reasonably fast, we copy the
tarball in its original format to the mirror directory, but we do
not standardize on a particular compression algorithm, because this
would potentially require expanding and re-compressing each archive.
.. _spack-mirror:
``spack mirror``
----------------------------
Mirrors are managed with the ``spack mirror`` command. The help for
``spack mirror`` looks like this::
$ spack mirror -h
usage: spack mirror [-h] SUBCOMMAND ...
positional arguments:
SUBCOMMAND
create Create a directory to be used as a spack mirror, and fill
it with package archives.
add Add a mirror to Spack.
remove Remove a mirror by name.
list Print out available mirrors to the console.
optional arguments:
-h, --help show this help message and exit
The ``create`` command actually builds a mirror by fetching all of its
packages from the internet and checksumming them.
The other three commands are for managing mirror configuration. They
control the URL(s) from which Spack downloads its packages.
.. _spack-mirror-create:
``spack mirror create``
----------------------------
You can create a mirror using the ``spack mirror create`` command, assuming
you're on a machine where you can access the internet.
The command will iterate through all of Spack's packages and download
the safe ones into a directory structure like the one above. Here is
what it looks like:
.. code-block:: bash
$ spack mirror create libelf libdwarf
==> Created new mirror in spack-mirror-2014-06-24
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
########################################################## 81.6%
==> Checksum passed for libelf@0.8.13
==> Added libelf@0.8.13
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
###################################################################### 98.6%
==> Checksum passed for libelf@0.8.12
==> Added libelf@0.8.12
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz
###################################################################### 97.3%
==> Checksum passed for libdwarf@20130207
==> Added libdwarf@20130207
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz
######################################################## 78.9%
==> Checksum passed for libdwarf@20130126
==> Added libdwarf@20130126
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz
############################################################# 84.7%
==> Added libdwarf@20130729
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror
==> Added python@2.7.8.
==> Successfully updated mirror in spack-mirror-2015-02-24.
Archive stats:
0 already present
5 added
0 failed to fetch.
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.
Custom package sets
~~~~~~~~~~~~~~~~~~~~~~~
Normally, ``spack mirror create`` downloads all the archives it has
checksums for. If you want to only create a mirror for a subset of
packages, you can do that by supplying a list of package specs on the
command line after ``spack mirror create``. For example, this
command::
$ spack mirror create libelf@0.8.12: boost@1.44:
Will create a mirror for libelf versions greater than or equal to
0.8.12 and boost versions greater than or equal to 1.44.
Mirror files
~~~~~~~~~~~~~~~~~~~~~~~
If you have a *very* large number of packages you want to mirror, you
can supply a file with specs in it, one per line::
$ cat specs.txt
libdwarf
libelf@0.8.12:
boost@1.44:
boost@1.39.0
...
$ spack mirror create -f specs.txt
...
This is useful if there is a specific suite of software managed by
your site.
.. _spack-mirror-add:
``spack mirror add``
----------------------------
Once you have a mirror, you need to let spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's
a file, you can use a file URL like this one::
file:///Users/gamblin2/spack-mirror-2014-06-24
That points to the directory on the local filesystem. If it were on a
web server, you could use a URL like this one:
https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
Spack will use the URL as the root for all of the packages it fetches.
You can tell your Spack installation to use that mirror like this:
.. code-block:: bash
$ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
Each mirror has a name so that you can refer to it again later.
.. _spack-mirror-list:
``spack mirror list``
----------------------------
If you want to see all the mirrors Spack knows about you can run ``spack mirror list``::
$ spack mirror list
local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
.. _spack-mirror-remove:
``spack mirror remove``
----------------------------
And, if you want to remove a mirror, just remove it by name::
$ spack mirror remove local_filesystem
$ spack mirror list
==> No mirrors configured.
Mirror precedence
----------------------------
Adding a mirror really just adds a section in ``~/.spackconfig``::
[mirror "local_filesystem"]
url = file:///Users/gamblin2/spack-mirror-2014-06-24
[mirror "remote_server"]
url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.

File diff suppressed because it is too large Load Diff

View File

@@ -1,208 +1,16 @@
.. _site-configuration:
Site-specific configuration
Site configuration
===================================
.. _mirrors:
Mirrors
----------------------------
Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they
can get their files. Spack has support for this with *mirrors*. A
mirror is a URL that points to a directory, either on the local
filesystem or on some server, containing tarballs for all of Spack's
packages.
Here's an example of a mirror's directory structure::
mirror/
cmake/
cmake-2.8.10.2.tar.gz
dyninst/
DyninstAPI-8.1.1.tgz
DyninstAPI-8.1.2.tgz
libdwarf/
libdwarf-20130126.tar.gz
libdwarf-20130207.tar.gz
libdwarf-20130729.tar.gz
libelf/
libelf-0.8.12.tar.gz
libelf-0.8.13.tar.gz
libunwind/
libunwind-1.1.tar.gz
mpich/
mpich-3.0.4.tar.gz
mvapich2/
mvapich2-1.9.tgz
The structure is very simple. There is a top-level directory. The
second level directories are named after packages, and the third level
contains tarballs for each package, named as they were in the
package's fetch URL.
``spack mirror``
~~~~~~~~~~~~~~~~~~~~~~~
Mirrors are managed with the ``spack mirror`` command. The help for
``spack mirror`` looks like this::
$ spack mirror -h
usage: spack mirror [-h] SUBCOMMAND ...
positional arguments:
SUBCOMMAND
create Create a directory to be used as a spack mirror, and fill
it with package archives.
add Add a mirror to Spack.
remove Remove a mirror by name.
list Print out available mirrors to the console.
optional arguments:
-h, --help show this help message and exit
The ``create`` command actually builds a mirror by fetching all of its
packages from the internet and checksumming them.
The other three commands are for managing mirror configuration. They
control the URL(s) from which Spack downloads its packages.
``spack mirror create``
~~~~~~~~~~~~~~~~~~~~~~~
You can create a mirror using the ``spack mirror create`` command, assuming
you're on a machine where you can access the internet.
The command will iterate through all of Spack's packages and download
the safe ones into a directory structure like the one above. Here is
what it looks like:
.. code-block:: bash
$ spack mirror create libelf libdwarf
==> Created new mirror in spack-mirror-2014-06-24
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
########################################################## 81.6%
==> Checksum passed for libelf@0.8.13
==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.13.tar.gz to mirror
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
###################################################################### 98.6%
==> Checksum passed for libelf@0.8.12
==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.12.tar.gz to mirror
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz
###################################################################### 97.3%
==> Checksum passed for libdwarf@20130207
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130207.tar.gz to mirror
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz
######################################################## 78.9%
==> Checksum passed for libdwarf@20130126
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130126.tar.gz to mirror
==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz
############################################################# 84.7%
==> Checksum passed for libdwarf@20130729
==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.
Custom package sets
^^^^^^^^^^^^^^^^^^^^^^^^
Normally, ``spack mirror create`` downloads all the archives it has
checksums for. If you want to only create a mirror for a subset of
packages, you can do that by supplying a list of package specs on the
command line after ``spack mirror create``. For example, this
command::
$ spack mirror create libelf@0.8.12: boost@1.44:
Will create a mirror for libelf versions greater than or equal to
0.8.12 and boost versions greater than or equal to 1.44.
Mirror files
^^^^^^^^^^^^^^^^^^^^^^^^
If you have a *very* large number of packages you want to mirror, you
can supply a file with specs in it, one per line::
$ cat specs.txt
libdwarf
libelf@0.8.12:
boost@1.44:
boost@1.39.0
...
$ spack mirror create -f specs.txt
...
This is useful if there is a specific suite of software managed by
your site.
``spack mirror add``
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Once you have a mirror, you need to let spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's
a file, you can use a file URL like this one::
file:///Users/gamblin2/spack-mirror-2014-06-24
That points to the directory on the local filesystem. If it were on a
web server, you could use a URL like this one:
https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
Spack will use the URL as the root for all of the packages it fetches.
You can tell your Spack installation to use that mirror like this:
.. code-block:: bash
$ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
Each mirror has a name so that you can refer to it again later.
``spack mirror list``
~~~~~~~~~~~~~~~~~~~~~~~~~~~
If you want to see all the mirrors Spack knows about you can run ``spack mirror list``::
$ spack mirror list
local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
``spack mirror remove``
~~~~~~~~~~~~~~~~~~~~~~~~~~~
And, if you want to remove a mirror, just remove it by name::
$ spack mirror remove local_filesystem
$ spack mirror list
==> No mirrors configured.
Mirror precedence
~~~~~~~~~~~~~~~~~~~~~~~~~
Adding a mirror really just adds a section in ``~/.spackconfig``::
[mirror "local_filesystem"]
url = file:///Users/gamblin2/spack-mirror-2014-06-24
[mirror "remote_server"]
url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.
.. _temp-space:
Temporary space
----------------------------
.. warning:: Temporary space configuration will be moved to configuration files.
The instructions here are old and refer to ``__init__.py``
.. warning:: Temporary space configuration will eventually be moved to
configuration files, but currently these settings are in
``lib/spack/spack/__init__.py``
By default, Spack will try to do all of its building in temporary
space. There are two main reasons for this. First, Spack is designed
@@ -286,7 +94,7 @@ the virtual spec to specs for possible implementations, and
later, so there is no need to fully concretize the spec when returning
it.
The ``DefaultConcretizer`` is intendend to provide sensible defaults
The ``DefaultConcretizer`` is intended to provide sensible defaults
for each policy, but there are certain choices that it can't know
about. For example, one site might prefer ``OpenMPI`` over ``MPICH``,
or another might prefer an old version of some packages. These types
@@ -327,3 +135,53 @@ Set concretizer to *your own* class instead of the default:
concretizer = MyConcretizer()
The next time you run Spack, your changes should take effect.
Profiling
~~~~~~~~~~~~~~~~~~~~~
Spack has some limited built-in support for profiling, and can report
statistics using standard Python timing tools. To use this feature,
supply ``-p`` to Spack on the command line, before any subcommands.
.. _spack-p:
``spack -p``
^^^^^^^^^^^^^^^^^^
``spack -p`` output looks like this:
.. code-block:: sh
$ spack -p graph dyninst
o dyninst
|\
| |\
| o | libdwarf
|/ /
o | libelf
/
o boost
307670 function calls (305943 primitive calls) in 0.127 seconds
Ordered by: internal time
ncalls tottime percall cumtime percall filename:lineno(function)
853 0.021 0.000 0.066 0.000 inspect.py:472(getmodule)
51197 0.011 0.000 0.018 0.000 inspect.py:51(ismodule)
73961 0.010 0.000 0.010 0.000 {isinstance}
1762 0.006 0.000 0.053 0.000 inspect.py:440(getsourcefile)
32075 0.006 0.000 0.006 0.000 {hasattr}
1760 0.004 0.000 0.004 0.000 {posix.stat}
2240 0.004 0.000 0.004 0.000 {posix.lstat}
2602 0.004 0.000 0.011 0.000 inspect.py:398(getfile)
771 0.004 0.000 0.077 0.000 inspect.py:518(findsource)
2656 0.004 0.000 0.004 0.000 {method 'match' of '_sre.SRE_Pattern' objects}
30772 0.003 0.000 0.003 0.000 {method 'get' of 'dict' objects}
...
The bottom of the output shows the top most time consuming functions,
slowest on top. The profiling support is from Python's built-in tool,
`cProfile
<https://docs.python.org/2/library/profile.html#module-cProfile>`_.

426
lib/spack/env/cc vendored
View File

@@ -1,140 +1,332 @@
#!/usr/bin/env python
import sys
if not sys.version_info[:2] >= (2,6):
sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info)
#!/bin/bash
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# Spack compiler wrapper script.
#
# Compiler commands go through this compiler wrapper in Spack builds.
# The compiler wrapper is a thin layer around the standard compilers.
# It enables several key pieces of functionality:
#
# 1. It allows Spack to swap compilers into and out of builds easily.
# 2. It adds several options to the compile line so that spack
# packages can find their dependencies at build time and run time:
# -I arguments for dependency /include directories.
# -L arguments for dependency /lib directories.
# -Wl,-rpath arguments for dependency /lib directories.
#
import os
import re
import subprocess
from contextlib import closing
# This is the list of environment variables that need to be set before
# the script runs. They are set by routines in spack.build_environment
# as part of spack.package.Package.do_install().
parameters="
SPACK_PREFIX
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_COMPILER_SPEC
SPACK_SHORT_SPEC"
# Import spack parameters through the build environment.
spack_lib = os.environ.get("SPACK_LIB")
if not spack_lib:
print "Spack compiler must be run from spack!"
sys.exit(1)
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
# Debug flag is optional; set to true for debug logging:
# SPACK_DEBUG
# Test command is used to unit test the compiler script.
# SPACK_TEST_COMMAND
# Dependencies can be empty for pkgs with no deps:
# SPACK_DEPENDENCIES
# Grab a minimal set of spack packages
sys.path.append(spack_lib)
from spack.compilation import *
from external import argparse
import llnl.util.tty as tty
# die()
# Prints a message and exits with error 1.
function die {
echo "$@"
exit 1
}
spack_prefix = get_env_var("SPACK_PREFIX")
spack_debug = get_env_flag("SPACK_DEBUG")
spack_deps = get_path("SPACK_DEPENDENCIES")
spack_env_path = get_path("SPACK_ENV_PATH")
spack_debug_log_dir = get_env_var("SPACK_DEBUG_LOG_DIR")
spack_spec = get_env_var("SPACK_SPEC")
for param in $parameters; do
if [ -z "${!param}" ]; then
die "Spack compiler must be run from spack! Input $param was missing!"
fi
done
compiler_spec = get_env_var("SPACK_COMPILER_SPEC")
spack_cc = get_env_var("SPACK_CC", required=False)
spack_cxx = get_env_var("SPACK_CXX", required=False)
spack_f77 = get_env_var("SPACK_F77", required=False)
spack_fc = get_env_var("SPACK_FC", required=False)
#
# Figure out the type of compiler, the language, and the mode so that
# the compiler script knows what to do.
#
# Possible languages are C, C++, Fortran 77, and Fortran 90.
# 'command' is set based on the input command to $SPACK_[CC|CXX|F77|F90]
#
# 'mode' is set to one of:
# cc compile
# ld link
# ccld compile & link
# cpp preprocessor
# vcheck version check
#
command=$(basename "$0")
case "$command" in
cc|gcc|c89|c99|clang|xlc)
command="$SPACK_CC"
language="C"
;;
c++|CC|g++|clang++|xlC)
command="$SPACK_CXX"
language="C++"
;;
f77|xlf)
command="$SPACK_F77"
language="Fortran 77"
;;
fc|f90|f95|xlf90)
command="$SPACK_FC"
language="Fortran 90"
;;
cpp)
mode=cpp
;;
ld)
mode=ld
;;
*)
die "Unkown compiler: $command"
;;
esac
# Figure out what type of operation we're doing
command = os.path.basename(sys.argv[0])
# Finish setting up the mode.
if [ -z "$mode" ]; then
mode=ccld
for arg in "$@"; do
if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then
mode=vcheck
break
elif [ "$arg" = -E ]; then
mode=cpp
break
elif [ "$arg" = -c ]; then
mode=cc
break
fi
done
fi
cpp, cc, ccld, ld, version_check = range(5)
# Dump the version and exist if we're in testing mode.
if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
echo "$mode"
exit
fi
if command == 'cpp':
mode = cpp
elif command == 'ld':
mode = ld
elif '-E' in sys.argv:
mode = cpp
elif '-c' in sys.argv:
mode = cc
else:
mode = ccld
# Check that at least one of the real commands was actually selected,
# otherwise we don't know what to execute.
if [ -z "$command" ]; then
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi
# Save original command for debug logging
input_command="$@"
if command in ('cc', 'gcc', 'c89', 'c99', 'clang'):
command = spack_cc
language = "C"
elif command in ('c++', 'CC', 'g++', 'clang++'):
command = spack_cxx
language = "C++"
elif command in ('f77'):
command = spack_f77
language = "Fortran 77"
elif command in ('fc', 'f90', 'f95'):
command = spack_fc
language = "Fortran 90"
elif command in ('ld', 'cpp'):
pass # leave it the same. TODO: what's the right thing?
else:
raise Exception("Unknown compiler: %s" % command)
#
# Now do real parsing of the command line args, trying hard to keep
# non-rpath linker arguments in the proper order w.r.t. other command
# line arguments. This is important for things like groups.
#
includes=()
libraries=()
libs=()
rpaths=()
other_args=()
if command is None:
print "ERROR: Compiler '%s' does not support compiling %s programs." % (
compiler_spec, language)
sys.exit(1)
while [ -n "$1" ]; do
case "$1" in
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
includes+=("$arg")
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
libraries+=("$arg")
;;
-l*)
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
libs+=("$arg")
;;
-Wl,*)
arg="${1#-Wl,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if [[ "$arg" = -rpath=* ]]; then
rpaths+=("${arg#-rpath=}")
elif [[ "$arg" = -rpath ]]; then
shift; arg="$1"
if [[ "$arg" != -Wl,* ]]; then
die "-Wl,-rpath was not followed by -Wl,*"
fi
rpaths+=("${arg#-Wl,}")
else
other_args+=("-Wl,$arg")
fi
;;
-Xlinker,*)
arg="${1#-Xlinker,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if [[ "$arg" = -rpath=* ]]; then
rpaths+=("${arg#-rpath=}")
elif [[ "$arg" = -rpath ]]; then
shift; arg="$1"
if [[ "$arg" != -Xlinker,* ]]; then
die "-Xlinker,-rpath was not followed by -Xlinker,*"
fi
rpaths+=("${arg#-Xlinker,}")
else
other_args+=("-Xlinker,$arg")
fi
;;
*)
other_args+=("$1")
;;
esac
shift
done
version_args = ['-V', '-v', '--version', '-dumpversion']
if any(arg in sys.argv for arg in version_args):
mode = version_check
# Dump parsed values for unit testing if asked for
if [ -n "$SPACK_TEST_COMMAND" ]; then
IFS=$'\n'
case "$SPACK_TEST_COMMAND" in
dump-includes) echo "${includes[*]}";;
dump-libraries) echo "${libraries[*]}";;
dump-libs) echo "${libs[*]}";;
dump-rpaths) echo "${rpaths[*]}";;
dump-other-args) echo "${other_args[*]}";;
dump-all)
echo "INCLUDES:"
echo "${includes[*]}"
echo
echo "LIBRARIES:"
echo "${libraries[*]}"
echo
echo "LIBS:"
echo "${libs[*]}"
echo
echo "RPATHS:"
echo "${rpaths[*]}"
echo
echo "ARGS:"
echo "${other_args[*]}"
;;
*)
echo "ERROR: Unknown test command"
exit 1 ;;
esac
exit
fi
# Parse out the includes, libs, etc. so we can adjust them if need be.
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-I", action='append', default=[], dest='include_path')
parser.add_argument("-L", action='append', default=[], dest='lib_path')
parser.add_argument("-l", action='append', default=[], dest='libs')
# Read spack dependencies from the path environment variable
IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
for dep in "${deps[@]}"; do
if [ -d "$dep/include" ]; then
includes+=("$dep/include")
fi
options, other_args = parser.parse_known_args()
rpaths, other_args = parse_rpaths(other_args)
if [ -d "$dep/lib" ]; then
libraries+=("$dep/lib")
rpaths+=("$dep/lib")
fi
# Add dependencies' include and lib paths to our compiler flags.
def add_if_dir(path_list, directory, index=None):
if os.path.isdir(directory):
if index is None:
path_list.append(directory)
else:
path_list.insert(index, directory)
if [ -d "$dep/lib64" ]; then
libraries+=("$dep/lib64")
rpaths+=("$dep/lib64")
fi
done
for dep_dir in spack_deps:
add_if_dir(options.include_path, os.path.join(dep_dir, "include"))
add_if_dir(options.lib_path, os.path.join(dep_dir, "lib"))
add_if_dir(options.lib_path, os.path.join(dep_dir, "lib64"))
# Include all -L's and prefix/whatever dirs in rpath
for dir in "${libraries[@]}"; do
[[ dir = $SPACK_INSTALL* ]] && rpaths+=("$dir")
done
rpaths+=("$SPACK_PREFIX/lib")
rpaths+=("$SPACK_PREFIX/lib64")
# Add our modified arguments to it.
arguments = ['-I%s' % path for path in options.include_path]
arguments += other_args
arguments += ['-L%s' % path for path in options.lib_path]
arguments += ['-l%s' % path for path in options.libs]
# Put the arguments together
args=()
for dir in "${includes[@]}"; do args+=("-I$dir"); done
args+=("${other_args[@]}")
for dir in "${libraries[@]}"; do args+=("-L$dir"); done
for lib in "${libs[@]}"; do args+=("-l$lib"); done
# Add rpaths to install dir and its dependencies. We add both lib and lib64
# here because we don't know which will be created.
rpaths.extend(options.lib_path)
rpaths.append('%s/lib' % spack_prefix)
rpaths.append('%s/lib64' % spack_prefix)
if mode == ccld:
arguments += ['-Wl,-rpath,%s' % p for p in rpaths]
elif mode == ld:
pairs = [('-rpath', '%s' % p) for p in rpaths]
arguments += [item for sublist in pairs for item in sublist]
if [ "$mode" = ccld ]; then
for dir in "${rpaths[@]}"; do
args+=("-Wl,-rpath")
args+=("-Wl,$dir");
done
elif [ "$mode" = ld ]; then
for dir in "${rpaths[@]}"; do
args+=("-rpath")
args+=("$dir");
done
fi
# Unset some pesky environment variables
for var in ["LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH"]:
if var in os.environ:
os.environ.pop(var)
#
# Unset pesky environment variables that could affect build sanity.
#
unset LD_LIBRARY_PATH
unset LD_RUN_PATH
unset DYLD_LIBRARY_PATH
# Ensure that the delegated command doesn't just call this script again.
remove_paths = ['.'] + spack_env_path
path = [p for p in get_path("PATH") if p not in remove_paths]
os.environ["PATH"] = ":".join(path)
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
#
IFS=':' read -ra env_path <<< "$PATH"
IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
spack_env_dirs+=(".")
PATH=""
for dir in "${env_path[@]}"; do
remove=""
for rm_dir in "${spack_env_dirs[@]}"; do
if [ "$dir" = "$rm_dir" ]; then remove=True; fi
done
if [ -z "$remove" ]; then
if [ -z "$PATH" ]; then
PATH="$dir"
else
PATH="$PATH:$dir"
fi
fi
done
export PATH
full_command = [command] + arguments
full_command=("$command")
full_command+=("${args[@]}")
if spack_debug:
input_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.in.log' % spack_spec)
output_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.out.log' % spack_spec)
with closing(open(input_log, 'a')) as log:
args = [os.path.basename(sys.argv[0])] + sys.argv[1:]
log.write("%s\n" % " ".join(arg.replace(' ', r'\ ') for arg in args))
with closing(open(output_log, 'a')) as log:
log.write("%s\n" % " ".join(full_command))
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = "TRUE" ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
echo "$input_command" >> $input_log
echo "$mode ${full_command[@]}" >> $output_log
fi
rcode = subprocess.call(full_command)
sys.exit(rcode)
exec "${full_command[@]}"

1
lib/spack/env/clang vendored
View File

@@ -1 +0,0 @@
cc

View File

@@ -1 +0,0 @@
cc

1
lib/spack/env/g++ vendored
View File

@@ -1 +0,0 @@
cc

1
lib/spack/env/gcc vendored
View File

@@ -1 +0,0 @@
cc

View File

@@ -1708,6 +1708,21 @@ def _add_action(self, action):
self._positionals._add_action(action)
return action
def get_subparser(self, name):
    """Return the subparser that was registered under ``name``, or None.

    This is an extension to the standard argparse API.
    """
    for action in self._actions:
        # Only subparser actions carry a ``choices`` dict of parsers.
        if not isinstance(action, _SubParsersAction):
            continue
        for choice, subparser in action.choices.items():
            if choice == name:
                return subparser
    return None
def _get_optional_actions(self):
return [action
for action in self._actions

View File

@@ -22,13 +22,16 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
__all__ = ['install', 'expand_user', 'working_dir', 'touch', 'mkdirp',
'join_path', 'ancestor', 'can_access', 'filter_file', 'change_sed_delimiter']
__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
'change_sed_delimiter', 'is_exe', 'force_symlink']
import os
import sys
import re
import shutil
import stat
import errno
import getpass
from contextlib import contextmanager, closing
@@ -62,8 +65,11 @@ def filter_file(regex, repl, *filenames, **kwargs):
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
repl = lambda m: re.sub(
r'\\([0-9])', lambda x: m.group(int(x.group(1))), unescaped)
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
return re.sub(r'\\([1-9])', groupid_to_group, unescaped)
repl = replace_groups_with_groupid
if string:
regex = re.escape(regex)
@@ -127,10 +133,44 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
filter_file(double_quoted, '"%s"' % repl, f)
def set_install_permissions(path):
    """Set appropriate permissions on the installed file.

    Directories get 0o755 (rwxr-xr-x) so they can be entered and
    listed; regular files get 0o644 (rw-r--r--).  Execute bits on
    files are restored separately by copy_mode(), as done in install().

    Note: written as ``0o755`` rather than the legacy ``0755`` literal;
    the 0o form is valid on Python 2.6+ (the project minimum) and is
    required on Python 3.
    """
    if os.path.isdir(path):
        os.chmod(path, 0o755)
    else:
        os.chmod(path, 0o644)
def copy_mode(src, dest):
    """Copy the execute bits from ``src`` onto ``dest``.

    Any user/group/other execute bit that is set on ``src`` is added to
    ``dest``'s existing mode.  Permissions are never removed from dest.
    """
    src_mode = os.stat(src).st_mode
    dest_mode = os.stat(dest).st_mode
    # Bug fix: these tests used bitwise OR (``src_mode | S_IX...``),
    # which is always truthy, so every dest file was unconditionally
    # marked executable.  A bitwise AND tests whether the bit is set.
    if src_mode & stat.S_IXUSR:
        dest_mode |= stat.S_IXUSR
    if src_mode & stat.S_IXGRP:
        dest_mode |= stat.S_IXGRP
    if src_mode & stat.S_IXOTH:
        dest_mode |= stat.S_IXOTH
    os.chmod(dest, dest_mode)
def install(src, dest):
    """Manually install a single file to a particular location.

    Copies ``src`` to ``dest`` with shutil.copy, then normalizes the
    destination's permissions (set_install_permissions) and carries
    over ``src``'s execute bits (copy_mode).
    """
    tty.info("Installing %s to %s" % (src, dest))
    shutil.copy(src, dest)
    # Order matters: set the base 0644 mode first, then re-add any
    # execute bits the source file had.
    set_install_permissions(dest)
    copy_mode(src, dest)
def install_tree(src, dest, **kwargs):
    """Manually install an entire directory tree to a particular location.

    ``kwargs`` are forwarded to shutil.copytree.  After the copy, walks
    the copied tree (dest entries that exist, via follow_nonexisting=False)
    and fixes permissions and execute bits on every copied entry.
    """
    tty.info("Installing %s to %s" % (src, dest))
    shutil.copytree(src, dest, **kwargs)

    for s, d in traverse_tree(src, dest, follow_nonexisting=False):
        set_install_permissions(d)
        copy_mode(s, d)
def is_exe(path):
    """Return True when ``path`` is a regular file we can execute."""
    if not os.path.isfile(path):
        return False
    return os.access(path, os.X_OK)
def expand_user(path):
@@ -152,6 +192,15 @@ def mkdirp(*paths):
raise OSError(errno.EEXIST, "File alredy exists", path)
def force_remove(*paths):
    """Remove files without printing errors.  Like ``rm -f``; does NOT
    remove directories.
    """
    for path in paths:
        try:
            os.remove(path)
        except OSError:
            # Best effort by design: a missing file (or a directory,
            # which os.remove refuses to delete) is silently ignored.
            # ``except OSError:`` (no binding) is valid on both
            # Python 2 and 3; the old ``except OSError, e`` was
            # Python-2-only syntax and never used ``e``.
            pass
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
@@ -169,6 +218,20 @@ def touch(path):
os.utime(path, None)
def touchp(path):
    """Like touch, but creates any parent directories needed for the file."""
    parent = os.path.dirname(path)
    mkdirp(parent)
    touch(path)
def force_symlink(src, dest):
    """Create a symlink at ``dest`` pointing to ``src``, like ``ln -sf``.

    If ``dest`` already exists it is removed and the link recreated.
    Raises OSError if ``dest`` cannot be removed or the retried
    symlink still fails.
    """
    try:
        os.symlink(src, dest)
    except OSError:
        # The common failure is that dest already exists -- replace it.
        # ``except OSError:`` works on Python 2 and 3; the old
        # ``except OSError, e`` was Python-2-only and never used ``e``.
        os.remove(dest)
        os.symlink(src, dest)
def join_path(prefix, *args):
path = str(prefix)
for elt in args:
@@ -187,3 +250,84 @@ def ancestor(dir, n=1):
def can_access(file_name):
    """True if we have read/write access to the file."""
    required = os.R_OK | os.W_OK
    return os.access(file_name, required)
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
    """Traverse two filesystem trees simultaneously.

    Walks the tree under ``source_root`` in pre or post order, yielding
    each file or directory as a ``(source_path, dest_path)`` tuple: the
    path under the source root paired with the corresponding path under
    the dest root.  Only the source side is required to exist.

    e.g., for this tree::

        root/
          a/
            file1
            file2
          b/
            file3

    When called on dest, this yields::

        ('root',         'dest')
        ('root/a',       'dest/a')
        ('root/a/file1', 'dest/a/file1')
        ('root/a/file2', 'dest/a/file2')
        ('root/b',       'dest/b')
        ('root/b/file3', 'dest/b/file3')

    Optional args:

    order=[pre|post] -- Whether to do pre- or post-order traversal.

    ignore=<predicate> -- Predicate indicating which files to ignore.

    follow_nonexisting -- Whether to descend into directories in
                          src that do not exist in dest. Default True.

    follow_links -- Whether to descend into symlinks in src.
    """
    follow_nonexisting = kwargs.get('follow_nonexisting', True)
    # Bug fix: the documented option is 'follow_links', but the old code
    # only read the misspelled key 'follow_link', so the documented
    # spelling silently did nothing.  Accept both, preferring the
    # documented one; both propagate through **kwargs on recursion.
    follow_links = kwargs.get('follow_links', kwargs.get('follow_link', False))

    # Yield in pre or post order?
    order = kwargs.get('order', 'pre')
    if order not in ('pre', 'post'):
        raise ValueError("Order must be 'pre' or 'post'.")

    # Predicate on relative paths under the src root.
    ignore = kwargs.get('ignore', lambda filename: False)

    # Don't descend into ignored directories
    if ignore(rel_path):
        return

    source_path = os.path.join(source_root, rel_path)
    dest_path = os.path.join(dest_root, rel_path)

    # preorder yields directories before children
    if order == 'pre':
        yield (source_path, dest_path)

    for f in os.listdir(source_path):
        source_child = os.path.join(source_path, f)
        dest_child = os.path.join(dest_path, f)
        rel_child = os.path.join(rel_path, f)

        # Treat as a directory (symlinked dirs only when follow_links).
        if os.path.isdir(source_child) and (
            follow_links or not os.path.islink(source_child)):

            # When follow_nonexisting isn't set, don't descend into dirs
            # in source that do not exist in dest
            if follow_nonexisting or os.path.exists(dest_child):
                tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
                for t in tuples:
                    yield t

        # Treat as a file.
        elif not ignore(os.path.join(rel_path, f)):
            yield (source_child, dest_child)

    # postorder yields the directory after its children
    if order == 'post':
        yield (source_path, dest_path)

View File

@@ -68,6 +68,12 @@ def index_by(objects, *funcs):
index1 = index_by(list_of_specs, 'arch', 'compiler')
index2 = index_by(list_of_specs, 'compiler')
You can also index by tuples by passing tuples:
index1 = index_by(list_of_specs, ('arch', 'compiler'))
Keys in the resulting dict will look like ('gcc', 'bgqos_0').
"""
if not funcs:
return objects
@@ -75,6 +81,8 @@ def index_by(objects, *funcs):
f = funcs[0]
if isinstance(f, basestring):
f = lambda x: getattr(x, funcs[0])
elif isinstance(f, tuple):
f = lambda x: tuple(getattr(x, p) for p in funcs[0])
result = {}
for o in objects:
@@ -261,6 +269,59 @@ def in_function(function_name):
del stack
def check_kwargs(kwargs, fun):
    """Helper for making functions with kwargs.  Checks whether the
    kwargs are empty after all of them have been popped off.  If
    they're not, raises a TypeError describing which kwargs are
    invalid.

    Example::

       def foo(self, **kwargs):
           x = kwargs.pop('x', None)
           y = kwargs.pop('y', None)
           z = kwargs.pop('z', None)
           check_kwargs(kwargs, self.foo)

       # This raises a TypeError:
       foo(w='bad kwarg')
    """
    if kwargs:
        # next(iter(kwargs)) grabs an arbitrary leftover key and works
        # on both Python 2.6+ and 3; dict.iterkeys() was Python-2-only.
        raise TypeError(
            "'%s' is an invalid keyword argument for function %s()."
            % (next(iter(kwargs)), fun.__name__))
def match_predicate(*args):
    """Utility function for making string matching predicates.

    Each arg can be a:
      - regex
      - list or tuple of regexes
      - predicate that takes a string.

    This returns a predicate that is true if:
      - any arg regex matches
      - any regex in a list or tuple of regexes matches.
      - any predicate in args matches.
    """
    def match(string):
        for arg in args:
            if isinstance(arg, basestring):
                # A bare string is treated as a regex.
                if re.search(arg, string):
                    return True
            elif isinstance(arg, (list, tuple)):
                # A sequence matches if any contained regex does.
                for pattern in arg:
                    if re.search(pattern, string):
                        return True
            elif callable(arg):
                if arg(string):
                    return True
            else:
                raise ValueError("args to match_predicate must be regex, "
                                 "list of regexes, or callable.")
        return False
    return match
class RequiredAttributeError(ValueError):
    """ValueError subclass used to flag a missing required attribute.

    Carries only a message; behaves exactly like ValueError otherwise.
    """
    def __init__(self, message):
        super(RequiredAttributeError, self).__init__(message)

View File

@@ -0,0 +1,115 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
__all__ = ['LinkTree']
import os
import shutil
from llnl.util.filesystem import *
# Marker file dropped into otherwise-empty destination directories during
# merge() so that unmerge() knows not to remove them.
empty_file_name = '.spack-empty'


class LinkTree(object):
    """Class to create trees of symbolic links from a source directory.

    LinkTree objects are constructed with a source root.  Their
    methods allow you to create and delete trees of symbolic links
    back to the source tree in specific destination directories.
    Trees comprise symlinks only to files; directories are never
    symlinked to, to prevent the source directory from ever being
    modified.
    """
    def __init__(self, source_root):
        # NOTE(review): the '%s' here is never interpolated -- IOError
        # receives two separate args instead of a formatted message;
        # this probably meant ("No such ...: '%s'" % source_root).
        if not os.path.exists(source_root):
            raise IOError("No such file or directory: '%s'", source_root)
        self._root = source_root

    def find_conflict(self, dest_root, **kwargs):
        """Returns the first file in dest that conflicts with src"""
        # Only paths that already exist in dest can conflict.
        kwargs['follow_nonexisting'] = False
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                # A source dir conflicts if dest has a non-directory there.
                if os.path.exists(dest) and not os.path.isdir(dest):
                    return dest
            elif os.path.exists(dest):
                # A source file conflicts with anything already at dest.
                return dest
        return None

    def merge(self, dest_root, **kwargs):
        """Link all files in src into dest, creating directories if necessary."""
        # Pre-order guarantees each directory is created before its contents.
        kwargs['order'] = 'pre'
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                if not os.path.exists(dest):
                    mkdirp(dest)
                    continue

                if not os.path.isdir(dest):
                    raise ValueError("File blocks directory: %s" % dest)

                # mark empty directories so they aren't removed on unmerge.
                if not os.listdir(dest):
                    marker = os.path.join(dest, empty_file_name)
                    touch(marker)

            else:
                # find_conflict() should have been run first, so nothing
                # is expected to occupy this dest path.
                assert(not os.path.exists(dest))
                os.symlink(src, dest)

    def unmerge(self, dest_root, **kwargs):
        """Unlink all files in dest that exist in src.

        Unlinks directories in dest if they are empty.
        """
        # Post-order so children are unlinked before their directory
        # is considered for removal.
        kwargs['order'] = 'post'
        for src, dest in traverse_tree(self._root, dest_root, **kwargs):
            if os.path.isdir(src):
                # Skip non-existing links.
                if not os.path.exists(dest):
                    continue

                if not os.path.isdir(dest):
                    raise ValueError("File blocks directory: %s" % dest)

                # remove directory if it is empty.
                if not os.listdir(dest):
                    shutil.rmtree(dest, ignore_errors=True)

                # remove empty dir marker if present.
                marker = os.path.join(dest, empty_file_name)
                if os.path.exists(marker):
                    os.remove(marker)

            elif os.path.exists(dest):
                if not os.path.islink(dest):
                    raise ValueError("%s is not a link tree!" % dest)
                os.remove(dest)

View File

@@ -25,6 +25,9 @@
import sys
import os
import textwrap
import fcntl
import termios
import struct
from StringIO import StringIO
from llnl.util.tty.color import *
@@ -114,21 +117,46 @@ def get_number(prompt, **kwargs):
return number
def get_yes_or_no(prompt, **kwargs):
    """Prompt the user with a yes/no question; return True or False.

    Keyword arguments:
      default -- True, False, or None (the default).  When True or
                 False, an empty answer selects that value and the
                 prompt shows the default capitalized ([Y/n] or [y/N]).

    Raises:
      ValueError: if ``default`` is anything but True, False, or None.
    """
    default_value = kwargs.get('default', None)

    # Decorate the prompt to advertise which answer is the default.
    if default_value is None:
        prompt += ' [y/n] '
    elif default_value is True:
        prompt += ' [Y/n] '
    elif default_value is False:
        prompt += ' [y/N] '
    else:
        # Message now names this function (was 'get_yes_no()').
        raise ValueError("default for get_yes_or_no() must be True, False, or None.")

    result = None
    while result is None:
        ans = raw_input(prompt).lower()
        if not ans:
            result = default_value
            if result is None:
                # No default to fall back on; ask again.
                print("Please enter yes or no.")
        else:
            # Unrecognized answers leave result as None, so we re-prompt.
            if ans == 'y' or ans == 'yes':
                result = True
            elif ans == 'n' or ans == 'no':
                result = False
    return result
def hline(label=None, **kwargs):
"""Draw an optionally colored or labeled horizontal line.
"""Draw a labeled horizontal line.
Options:
char Char to draw the line with. Default '-'
color Color of the label. Default is no color.
max_width Maximum width of the line. Default is 64 chars.
See tty.color for possible color formats.
"""
char = kwargs.get('char', '-')
color = kwargs.get('color', '')
max_width = kwargs.get('max_width', 64)
char = kwargs.pop('char', '-')
max_width = kwargs.pop('max_width', 64)
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function."
% next(kwargs.iterkeys()))
cols, rows = terminal_size()
rows, cols = terminal_size()
if not cols:
cols = max_width
else:
@@ -136,37 +164,34 @@ def hline(label=None, **kwargs):
cols = min(max_width, cols)
label = str(label)
prefix = char * 2 + " " + label + " "
suffix = (cols - len(prefix)) * char
prefix = char * 2 + " "
suffix = " " + (cols - len(prefix) - clen(label)) * char
out = StringIO()
if color:
prefix = char * 2 + " " + color + cescape(label) + "@. "
cwrite(prefix, stream=out, color=True)
else:
out.write(prefix)
out.write(prefix)
out.write(label)
out.write(suffix)
print out.getvalue()
def terminal_size():
"""Gets the dimensions of the console: cols, rows."""
"""Gets the dimensions of the console: (rows, cols)."""
def ioctl_GWINSZ(fd):
try:
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
except:
return
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
return rc
rc = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not rc:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
rc = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
if not rc:
rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(cr[1]), int(cr[0])
return int(rc[0]), int(rc[1])

View File

@@ -22,16 +22,9 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
# colify
# By Todd Gamblin, tgamblin@llnl.gov
#
# Takes a list of items as input and finds a good columnization of them,
# similar to how gnu ls does. You can pipe output to this script and
# get a tight display for it. This supports both uniform-width and
# variable-width (tighter) columns.
#
# Run colify -h for more information.
#
"""
Routines for printing columnar output. See colify() for more information.
"""
import os
import sys
import fcntl
@@ -40,6 +33,7 @@
from StringIO import StringIO
from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen
class ColumnConfig:
@@ -47,32 +41,52 @@ def __init__(self, cols):
self.cols = cols
self.line_length = 0
self.valid = True
self.widths = [0] * cols
self.widths = [0] * cols # does not include ansi colors
self.cwidths = [0] * cols # includes ansi colors
def __repr__(self):
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
def config_variable_cols(elts, console_cols, padding):
def config_variable_cols(elts, console_width, padding, cols=0):
"""Variable-width column fitting algorithm.
This function determines the most columns that can fit in the
screen width. Unlike uniform fitting, where all columns take
the width of the longest element in the list, each column takes
the width of its own longest element. This packs elements more
efficiently on screen.
If cols is nonzero, force
"""
if cols < 0:
raise ValueError("cols must be non-negative.")
# Get a bound on the most columns we could possibly have.
lengths = [len(elt) for elt in elts]
max_cols = max(1, console_cols / (min(lengths) + padding))
# 'clen' ignores length of ansi color sequences.
lengths = [clen(e) for e in elts]
clengths = [len(e) for e in elts]
max_cols = max(1, console_width / (min(lengths) + padding))
max_cols = min(len(elts), max_cols)
configs = [ColumnConfig(c) for c in xrange(1, max_cols+1)]
for elt, length in enumerate(lengths):
for i, conf in enumerate(configs):
if conf.valid:
col = elt / ((len(elts) + i) / (i + 1))
padded = length
if col < i:
padded += padding
# Range of column counts to try. If forced, use the supplied value.
col_range = [cols] if cols else xrange(1, max_cols+1)
if conf.widths[col] < padded:
conf.line_length += padded - conf.widths[col]
conf.widths[col] = padded
conf.valid = (conf.line_length < console_cols)
# Determine the most columns possible for the console width.
configs = [ColumnConfig(c) for c in col_range]
for i, length in enumerate(lengths):
for conf in configs:
if conf.valid:
col = i / ((len(elts) + conf.cols - 1) / conf.cols)
p = padding if col < (conf.cols - 1) else 0
if conf.widths[col] < (length + p):
conf.line_length += length + p - conf.widths[col]
conf.widths[col] = length + p
conf.cwidths[col] = clengths[i] + p
conf.valid = (conf.line_length < console_width)
try:
config = next(conf for conf in reversed(configs) if conf.valid)
@@ -85,57 +99,107 @@ def config_variable_cols(elts, console_cols, padding):
return config
def config_uniform_cols(elts, console_cols, padding):
max_len = max(len(elt) for elt in elts) + padding
cols = max(1, console_cols / max_len)
cols = min(len(elts), cols)
def config_uniform_cols(elts, console_width, padding, cols=0):
    """Uniform-width column fitting algorithm.

    Determines the longest element in the list, and determines how
    many columns of that width will fit on screen.  Returns a
    corresponding column config.

    Arguments:
      elts          -- list of strings to lay out in columns.
      console_width -- available display width, in characters.
      padding       -- spaces to add between columns.
      cols          -- force this many columns; 0 (the default) means
                       fit as many as the console width allows.

    Raises:
      ValueError: if cols is negative.
    """
    if cols < 0:
        raise ValueError("cols must be non-negative.")

    # 'clen' ignores length of ansi color sequences.
    max_len = max(clen(e) for e in elts) + padding
    # Raw width (color escapes included) is tracked separately so that
    # printed field widths account for the invisible characters.
    max_clen = max(len(e) for e in elts) + padding
    if cols == 0:
        # NOTE: integer (floor) division under Python 2.
        cols = max(1, console_width / max_len)
    cols = min(len(elts), cols)

    # Every column gets the width of the longest element.
    config = ColumnConfig(cols)
    config.widths = [max_len] * cols
    config.cwidths = [max_clen] * cols

    return config
def isatty(ostream):
    """True if *ostream* is a terminal, or if COLIFY_TTY forces it.

    Setting the COLIFY_TTY environment variable to anything other
    than 'false' (case-insensitive) pretends the stream is a tty.
    """
    if os.environ.get('COLIFY_TTY', 'false').lower() != 'false':
        return True
    return ostream.isatty()
def colify(elts, **options):
"""Takes a list of elements as input and finds a good columnization
of them, similar to how gnu ls does. This supports both
uniform-width and variable-width (tighter) columns.
If elts is not a list of strings, each element is first conveted
using str().
Keyword arguments:
output=<stream> A file object to write to. Default is sys.stdout.
indent=<int> Optionally indent all columns by some number of spaces.
padding=<int> Spaces between columns. Default is 2.
width=<int> Width of the output. Default is 80 if tty is not detected.
cols=<int> Force number of columns. Default is to size to terminal,
or single-column if no tty
tty=<bool> Whether to attempt to write to a tty. Default is to
autodetect a tty. Set to False to force single-column output.
method=<string> Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the
same width and fit less data on the screen.
len=<func> Function to use for calculating string length.
Useful for ignoring ansi color. Default is 'len'.
"""
# Get keyword arguments or set defaults
output = options.get("output", sys.stdout)
indent = options.get("indent", 0)
padding = options.get("padding", 2)
tty = options.get('tty', None)
cols = options.pop("cols", 0)
output = options.pop("output", sys.stdout)
indent = options.pop("indent", 0)
padding = options.pop("padding", 2)
tty = options.pop('tty', None)
method = options.pop("method", "variable")
console_cols = options.pop("width", None)
if options:
raise TypeError("'%s' is an invalid keyword argument for this function."
% next(options.iterkeys()))
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
if not elts:
return (0, ())
# environment size is of the form "<rows>x<cols>"
env_size = os.environ.get('COLIFY_SIZE')
if env_size:
try:
r, c = env_size.split('x')
console_rows, console_cols = int(r), int(c)
tty = True
except: pass
# Use only one column if not a tty.
if not tty:
if tty is False or not isatty(output):
for elt in elts:
output.write("%s\n" % elt)
if tty is False or not output.isatty():
cols = 1
maxlen = max(len(str(s)) for s in elts)
return (1, (maxlen,))
console_cols = options.get("cols", None)
# Specify the number of character columns to use.
if not console_cols:
console_cols, console_rows = terminal_size()
console_rows, console_cols = terminal_size()
elif type(console_cols) != int:
raise ValueError("Number of columns must be an int")
console_cols = max(1, console_cols - indent)
method = options.get("method", "variable")
# Choose a method. Variable-width colums vs uniform-width.
if method == "variable":
config = config_variable_cols(elts, console_cols, padding)
config = config_variable_cols(elts, console_cols, padding, cols)
elif method == "uniform":
config = config_uniform_cols(elts, console_cols, padding)
config = config_uniform_cols(elts, console_cols, padding, cols)
else:
raise ValueError("method must be one of: " + allowed_methods)
cols = config.cols
formats = ["%%-%ds" % width for width in config.widths[:-1]]
formats = ["%%-%ds" % width for width in config.cwidths[:-1]]
formats.append("%s") # last column has no trailing space
rows = (len(elts) + cols - 1) / cols
@@ -155,6 +219,25 @@ def colify(elts, **options):
return (config.cols, tuple(config.widths))
def colify_table(table, **options):
    """Colify a 2D array of elements as an aligned table.

    ``table`` is a list of rows; every row is expected to have the
    same number of elements.  Elements are fed to colify() a column
    at a time with the display column count forced to the table's
    width, so that each input row comes out as one output row.

    Raises:
      TypeError:  if table is None.
      ValueError: if table (or its first row) is empty, or if the
                  caller tries to override the 'cols' option.
    """
    if table is None:
        raise TypeError("Can't call colify_table on NoneType")
    elif not table or not table[0]:
        raise ValueError("Table is empty in colify_table!")

    columns = len(table[0])

    # Yield the table's elements one column at a time.
    def transpose():
        for i in xrange(columns):
            for row in table:
                yield row[i]

    if 'cols' in options:
        raise ValueError("Cannot override columns in colify_table.")

    # Force one display column per table column.
    options['cols'] = columns

    colify(transpose(), **options)
def colified(elts, **options):
"""Invokes the colify() function but returns the result as a string
instead of writing it to an output string."""
@@ -162,29 +245,3 @@ def colified(elts, **options):
options['output'] = sio
colify(elts, **options)
return sio.getvalue()
if __name__ == "__main__":
import optparse
cols, rows = terminal_size()
parser = optparse.OptionParser()
parser.add_option("-u", "--uniform", action="store_true", default=False,
help="Use uniformly sized columns instead of variable-size.")
parser.add_option("-p", "--padding", metavar="PADDING", action="store",
type=int, default=2, help="Spaces to add between columns. Default is 2.")
parser.add_option("-i", "--indent", metavar="SPACES", action="store",
type=int, default=0, help="Indent the output by SPACES. Default is 0.")
parser.add_option("-w", "--width", metavar="COLS", action="store",
type=int, default=cols, help="Indent the output by SPACES. Default is 0.")
options, args = parser.parse_args()
method = "variable"
if options.uniform:
method = "uniform"
if sys.stdin.isatty():
parser.print_help()
sys.exit(1)
else:
colify([line.strip() for line in sys.stdin], method=method, **options.__dict__)

View File

@@ -149,6 +149,11 @@ def colorize(string, **kwargs):
return re.sub(color_re, match_to_ansi(color), string)
def clen(string):
    """Length of *string*, with ansi color escape sequences excluded."""
    # Strip every ESC...m escape before measuring.
    visible = re.sub(r'\033[^m]*m', '', string)
    return len(visible)
def cwrite(string, stream=sys.stdout, color=None):
"""Replace all color expressions in string with ANSI control
codes and write the result to the stream. If color is
@@ -172,17 +177,20 @@ def cescape(string):
class ColorStream(object):
def __init__(self, stream, color=None):
self.__class__ = type(stream.__class__.__name__,
(self.__class__, stream.__class__), {})
self.__dict__ = stream.__dict__
self.color = color
self.stream = stream
self._stream = stream
self._color = color
def write(self, string, **kwargs):
if kwargs.get('raw', False):
super(ColorStream, self).write(string)
else:
cwrite(string, self.stream, self.color)
raw = kwargs.get('raw', False)
raw_write = getattr(self._stream, 'write')
color = self._color
if self._color is None:
if raw:
color=True
else:
color = self._stream.isatty()
raw_write(colorize(string, color=color))
def writelines(self, sequence, **kwargs):
raw = kwargs.get('raw', False)

View File

@@ -26,7 +26,7 @@
import tempfile
from llnl.util.filesystem import *
# This lives in $prefix/lib/spac/spack/__file__
# This lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
# The spack script itself
@@ -78,7 +78,7 @@
# Version information
from spack.version import Version
spack_version = Version("0.8")
spack_version = Version("0.8.15")
#
# Executables used by Spack
@@ -138,7 +138,7 @@
# should live. This file is overloaded for spack core vs. for packages.
#
__all__ = ['Package', 'Version', 'when', 'ver']
from spack.package import Package
from spack.package import Package, ExtensionConflictError
from spack.version import Version, ver
from spack.multimethod import when

View File

@@ -65,7 +65,7 @@ def get_mac_sys_type():
if not mac_ver:
return None
return "macosx_{}_{}".format(
return "macosx_%s_%s" % (
Version(mac_ver).up_to(2), py_platform.machine())

View File

@@ -28,6 +28,7 @@
calls you can make from within the install() function.
"""
import os
import sys
import shutil
import multiprocessing
import platform
@@ -48,12 +49,12 @@
# set_build_environment_variables and used to pass parameters to
# Spack's compiler wrappers.
#
SPACK_LIB = 'SPACK_LIB'
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_INSTALL = 'SPACK_INSTALL'
SPACK_DEBUG = 'SPACK_DEBUG'
SPACK_SPEC = 'SPACK_SPEC'
SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
@@ -67,16 +68,16 @@ class MakeExecutable(Executable):
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
everything.
"""
def __init__(self, name, parallel):
def __init__(self, name, jobs):
super(MakeExecutable, self).__init__(name)
self.parallel = parallel
self.jobs = jobs
def __call__(self, *args, **kwargs):
parallel = kwargs.get('parallel', self.parallel)
parallel = kwargs.get('parallel', self.jobs > 1)
disable_parallel = env_flag(SPACK_NO_PARALLEL_MAKE)
if parallel and not disable_parallel:
jobs = "-j%d" % multiprocessing.cpu_count()
if self.jobs > 1 and not disable_parallel:
jobs = "-j%d" % self.jobs
args = (jobs,) + args
super(MakeExecutable, self).__call__(*args, **kwargs)
@@ -108,9 +109,6 @@ def set_compiler_environment_variables(pkg):
def set_build_environment_variables(pkg):
"""This ensures a clean install environment when we build packages.
"""
# This tells the compiler script where to find the Spack installation.
os.environ[SPACK_LIB] = spack.lib_path
# Add spack build environment path with compiler wrappers first in
# the path. We handle case sensitivity conflicts like "CC" and
# "cc" by putting one in the <build_env_path>/case-insensitive
@@ -128,6 +126,9 @@ def set_build_environment_variables(pkg):
# Install prefix
os.environ[SPACK_PREFIX] = pkg.prefix
# Install root prefix
os.environ[SPACK_INSTALL] = spack.install_path
# Remove these vars from the environment during build becaus they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
@@ -140,7 +141,7 @@ def set_build_environment_variables(pkg):
# Working directory for the spack command itself, for debug logs.
if spack.debug:
os.environ[SPACK_DEBUG] = "TRUE"
os.environ[SPACK_SPEC] = str(pkg.spec)
os.environ[SPACK_SHORT_SPEC] = pkg.spec.short_spec
os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir
# Add dependencies to CMAKE_PREFIX_PATH
@@ -162,15 +163,21 @@ def set_module_variables_for_package(pkg):
"""
m = pkg.module
m.make = MakeExecutable('make', pkg.parallel)
m.gmake = MakeExecutable('gmake', pkg.parallel)
# number of jobs spack will to build with.
jobs = multiprocessing.cpu_count()
if not pkg.parallel:
jobs = 1
elif pkg.make_jobs:
jobs = pkg.make_jobs
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable('make', jobs)
m.gmake = MakeExecutable('gmake', jobs)
# easy shortcut to os.environ
m.env = os.environ
# number of jobs spack prefers to build with.
m.make_jobs = multiprocessing.cpu_count()
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
@@ -187,19 +194,102 @@ def set_module_variables_for_package(pkg):
if platform.mac_ver()[0]:
m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
# Emulate some shell commands for convenience
m.pwd = os.getcwd
m.cd = os.chdir
m.mkdir = os.mkdir
m.makedirs = os.makedirs
m.remove = os.remove
m.removedirs = os.removedirs
# Set up CMake rpath
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
m.mkdirp = mkdirp
m.install = install
m.rmtree = shutil.rmtree
m.move = shutil.move
# Emulate some shell commands for convenience
m.pwd = os.getcwd
m.cd = os.chdir
m.mkdir = os.mkdir
m.makedirs = os.makedirs
m.remove = os.remove
m.removedirs = os.removedirs
m.symlink = os.symlink
m.mkdirp = mkdirp
m.install = install
m.install_tree = install_tree
m.rmtree = shutil.rmtree
m.move = shutil.move
# Useful directories within the prefix are encapsulated in
# a Prefix object.
m.prefix = pkg.prefix
def get_rpaths(pkg):
    """Get a list of all the rpaths for a package.

    The package's own lib and lib64 prefixes are always included;
    lib (then lib64) directories of dependencies are appended only
    when they exist on disk.
    """
    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
    for libdir in ('lib', 'lib64'):
        for dep in pkg.spec.traverse(root=False):
            path = getattr(dep.prefix, libdir)
            if os.path.isdir(path):
                rpaths.append(path)
    return rpaths
def setup_package(pkg):
    """Execute all environment setup routines for a package build.

    Runs, in order: compiler environment variables, generic build
    environment variables, and the convenience variables injected
    into the package's module.  Finally lets each dependency adjust
    the environment for the package being built.
    """
    set_compiler_environment_variables(pkg)
    set_build_environment_variables(pkg)
    set_module_variables_for_package(pkg)

    # Allow dependencies to set up environment as well.
    for dep_spec in pkg.spec.traverse(root=False):
        dep_spec.package.setup_dependent_environment(
            pkg.module, dep_spec, pkg.spec)
def fork(pkg, function):
    """Fork a child process to do part of a spack build.

    Arguments:
      pkg      -- package whose environment we should set up in the
                  forked process.
      function -- arg-less function to run in the child process.

    Usage:
        def child_fun():
            # do stuff
        build_env.fork(pkg, child_fun)

    Forked processes are run with the build environment set up by
    spack.build_environment.  This allows package authors to have
    full control over the environment, etc. without affecting
    other builds that might be executed in the same spack call.

    If something goes wrong, the child process is expected to print
    the error and the parent process will exit with error as
    well.  If things go well, the child exits and the parent
    carries on.
    """
    try:
        pid = os.fork()
    except OSError, e:
        raise InstallError("Unable to fork build process: %s" % e)

    if pid == 0:
        # Give the child process the package's build environment.
        setup_package(pkg)

        try:
            # Call the forked function.
            function()

            # Use os._exit here to avoid raising a SystemExit exception,
            # which interferes with unit tests.
            os._exit(0)
        except:
            # Child doesn't raise or return to main spack code.
            # Just runs default exception handler and exits.
            sys.excepthook(*sys.exc_info())
            os._exit(1)
    else:
        # Parent process just waits for the child to complete.  If the
        # child exited badly, assume it already printed an appropriate
        # message.  Just make the parent exit with an error code.
        # NOTE(review): os.waitpid returns an *encoded* wait status, not
        # a plain exit code; any nonzero value (nonzero exit OR death by
        # signal) takes the error path here.
        pid, returncode = os.waitpid(pid, 0)
        if returncode != 0:
            sys.exit(1)

View File

@@ -121,3 +121,18 @@ def elide_list(line_list, max_num=10):
return line_list[:max_num-1] + ['...'] + line_list[-1:]
else:
return line_list
def disambiguate_spec(spec):
    """Resolve *spec* to exactly one installed package spec.

    Exits with an error (tty.die) if the spec matches no installed
    package, or if it matches more than one.
    """
    matching_specs = spack.db.get_installed(spec)

    if len(matching_specs) == 1:
        return matching_specs[0]

    if not matching_specs:
        tty.die("Spec '%s' matches no installed packages." % spec)

    # More than one match: list them all and bail out.
    msg_lines = ["%s matches multiple packages." % spec,
                 "Matching packages:"]
    msg_lines.extend(" " + str(s) for s in matching_specs)
    msg_lines.append("Use a more specific spec.")
    tty.die(*msg_lines)

View File

@@ -0,0 +1,58 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description = "Activate a package extension."
def setup_parser(subparser):
    """Set up command-line arguments for `spack activate`."""
    subparser.add_argument(
        '-f', '--force', action='store_true',
        help="Activate without first activating dependencies.")
    # REMAINDER collects the rest of the command line as the spec.
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
def activate(parser, args):
    """Entry point for the `spack activate` command."""
    # TODO: shouldn't have to concretize here. Fix DAG issues.
    specs = spack.cmd.parse_specs(args.spec, concretize=True)
    if len(specs) != 1:
        tty.die("activate requires one spec. %d given." % len(specs))

    # TODO: remove this hack when DAG info is stored in dir layout.
    # This ensures the ext spec is always normalized properly.
    spack.db.get(specs[0])

    # Resolve to exactly one installed package or die.
    spec = spack.cmd.disambiguate_spec(specs[0])
    if not spec.package.is_extension:
        tty.die("%s is not an extension." % spec.name)

    # Re-activating is an error rather than a silent no-op.
    if spec.package.activated:
        tty.die("Package %s is already activated." % specs[0].short_spec)

    spec.package.do_activate()

View File

@@ -38,7 +38,7 @@
from spack.stage import Stage, FailedDownloadError
from spack.version import *
description ="Checksum available versions of a package to update a package file."
description ="Checksum available versions of a package."
def setup_parser(subparser):
subparser.add_argument(
@@ -56,7 +56,6 @@ def get_checksums(versions, urls, **kwargs):
first_stage_function = kwargs.get('first_stage_function', None)
keep_stage = kwargs.get('keep_stage', False)
tty.msg("Downloading...")
hashes = []
for i, (url, version) in enumerate(zip(urls, versions)):
@@ -85,24 +84,24 @@ def checksum(parser, args):
pkg = spack.db.get(args.package)
# If the user asked for specific versions, use those.
versions = [ver(v) for v in args.versions]
if not all(type(v) == Version for v in versions):
tty.die("Cannot generate checksums for version lists or " +
"version ranges. Use unambiguous versions.")
if not versions:
versions = pkg.fetch_available_versions()
if args.versions:
versions = {}
for v in args.versions:
v = ver(v)
if not isinstance(v, Version):
tty.die("Cannot generate checksums for version lists or " +
"version ranges. Use unambiguous versions.")
versions[v] = pkg.url_for_version(v)
else:
versions = pkg.fetch_remote_versions()
if not versions:
tty.die("Could not fetch any available versions for %s." % pkg.name)
tty.die("Could not fetch any versions for %s." % pkg.name)
versions = list(reversed(sorted(versions)))
urls = [pkg.url_for_version(v) for v in versions]
sorted_versions = sorted(versions, reverse=True)
tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
tty.msg("Found %s versions of %s." % (len(versions), pkg.name),
*spack.cmd.elide_list(
["%-10s%s" % (v,u) for v, u in zip(versions, urls)]))
["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
print
archives_to_fetch = tty.get_number(
"How many would you like to checksum?", default=5, abort='q')
@@ -112,10 +111,12 @@ def checksum(parser, args):
return
version_hashes = get_checksums(
versions[:archives_to_fetch], urls[:archives_to_fetch], keep_stage=args.keep_stage)
sorted_versions[:archives_to_fetch],
[versions[v] for v in sorted_versions[:archives_to_fetch]],
keep_stage=args.keep_stage)
if not version_hashes:
tty.die("Could not fetch any available versions for %s." % pkg.name)
tty.die("Could not fetch any versions for %s." % pkg.name)
version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)

View File

@@ -1,5 +1,5 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
@@ -28,39 +28,19 @@
import spack
import spack.cmd
import spack.stage as stage
description = "Remove staged files for packages"
description = "Remove build stage and source tarball for packages."
def setup_parser(subparser):
subparser.add_argument('-c', "--clean", action="store_true", dest='clean',
help="run make clean in the build directory (default)")
subparser.add_argument('-w', "--work", action="store_true", dest='work',
help="delete the build directory and re-expand it from its archive.")
subparser.add_argument('-d', "--dist", action="store_true", dest='dist',
help="delete the downloaded archive.")
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")
def clean(parser, args):
if not args.packages:
tty.die("spack clean requires at least one package argument")
tty.die("spack clean requires at least one package spec.")
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.db.get(spec)
if args.dist:
package.do_clean_dist()
tty.msg("Cleaned %s" % package.name)
elif args.work:
package.do_clean_work()
tty.msg("Restaged %s" % package.name)
else:
try:
package.do_clean()
except subprocess.CalledProcessError, e:
tty.warn("Warning: 'make clean' didn't work. Consider 'spack clean --work'.")
tty.msg("Made clean for %s" % package.name)
package.do_clean()

View File

@@ -25,13 +25,14 @@
from external import argparse
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack.compilers
import spack.spec
import spack.config
from spack.compilation import get_path
from spack.util.environment import get_path
from spack.spec import CompilerSpec
description = "Manage compilers"
@@ -96,9 +97,12 @@ def compiler_info(args):
def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(), 'name')
for name, compilers in index.items():
tty.hline(name, char='-', color=spack.spec.compiler_color)
colify(reversed(sorted(compilers)), indent=4)
for i, (name, compilers) in enumerate(index.items()):
if i >= 1: print
cname = "%s{%s}" % (spack.spec.compiler_color, name)
tty.hline(colorize(cname), char='-')
colify(reversed(sorted(compilers)))
def compiler(parser, args):

View File

@@ -28,6 +28,7 @@
import re
from contextlib import closing
from external.ordereddict import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -159,32 +160,33 @@ def create(parser, args):
else:
mkdirp(os.path.dirname(pkg_path))
versions = list(reversed(spack.package.find_versions_of_archive(url)))
versions = spack.package.find_versions_of_archive(url)
rkeys = sorted(versions.keys(), reverse=True)
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
archives_to_fetch = 1
if not versions:
# If the fetch failed for some reason, revert to what the user provided
versions = [version]
urls = [url]
else:
urls = [spack.url.substitute_version(url, v) for v in versions]
if len(urls) > 1:
tty.msg("Found %s versions of %s:" % (len(urls), name),
*spack.cmd.elide_list(
["%-10s%s" % (v,u) for v, u in zip(versions, urls)]))
print
archives_to_fetch = tty.get_number(
"Include how many checksums in the package file?",
default=5, abort='q')
versions = { version : url }
elif len(versions) > 1:
tty.msg("Found %s versions of %s:" % (len(versions), name),
*spack.cmd.elide_list(
["%-10s%s" % (v,u) for v, u in versions.iteritems()]))
print
archives_to_fetch = tty.get_number(
"Include how many checksums in the package file?",
default=5, abort='q')
if not archives_to_fetch:
tty.msg("Aborted.")
return
if not archives_to_fetch:
tty.msg("Aborted.")
return
guesser = ConfigureGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions[:archives_to_fetch], urls[:archives_to_fetch],
first_stage_function=guesser, keep_stage=args.keep_stage)
versions.keys()[:archives_to_fetch],
[versions[v] for v in versions.keys()[:archives_to_fetch]],
first_stage_function=guesser,
keep_stage=args.keep_stage)
if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s." % name)

View File

@@ -0,0 +1,104 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
from spack.graph import topological_sort
description = "Deactivate a package extension."
def setup_parser(subparser):
    """Register command-line arguments for ``spack deactivate``."""
    subparser.add_argument(
        '-f', '--force', action='store_true',
        help="Run deactivation even if spec is NOT currently activated.")
    subparser.add_argument(
        '-a', '--all', action='store_true',
        # Fixed typo in help text: "pacakge" -> "package".
        help="Deactivate all extensions of an extendable package, or "
             "deactivate an extension AND its dependencies.")
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        help="spec of package extension to deactivate.")
def deactivate(parser, args):
    """Entry point for ``spack deactivate``.

    Deactivates a single installed extension, or — with ``--all`` — either
    every extension of an extendable package or an extension together with
    its dependency extensions.
    """
    # TODO: shouldn't have to concretize here. Fix DAG issues.
    specs = spack.cmd.parse_specs(args.spec, concretize=True)
    if len(specs) != 1:
        tty.die("deactivate requires one spec. %d given." % len(specs))

    # TODO: remove this hack when DAG info is stored properly.
    # This ensures the ext spec is always normalized properly.
    spack.db.get(specs[0])

    spec = spack.cmd.disambiguate_spec(specs[0])
    pkg = spec.package

    if args.all:
        if pkg.extendable:
            # --all on an extendable package: deactivate every installed
            # extension of it that is currently activated.
            tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
            ext_pkgs = spack.db.installed_extensions_for(spec)
            for ext_pkg in ext_pkgs:
                ext_pkg.spec.normalize()
                if ext_pkg.activated:
                    # force=True: deactivate regardless of dependents.
                    ext_pkg.do_deactivate(force=True)
        elif pkg.is_extension:
            # --all on an extension: deactivate it and every extension it
            # depends on, in topological (dependents-first) order.
            # TODO: store DAG info properly (see above)
            spec.normalize()
            if not args.force and not spec.package.activated:
                tty.die("%s is not activated." % pkg.spec.short_spec)
            tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
            topo_order = topological_sort(spec)
            index = spec.index()
            for name in topo_order:
                espec = index[name]
                epkg = espec.package
                # TODO: store DAG info properly (see above)
                epkg.spec.normalize()
                # Only deactivate specs that extend the same extendee.
                if epkg.extends(pkg.extendee_spec):
                    if epkg.activated or args.force:
                        epkg.do_deactivate(force=args.force)
        else:
            tty.die("spack deactivate --all requires an extendable package or an extension.")
    else:
        # Plain deactivate: spec must name a single activated extension.
        if not pkg.is_extension:
            tty.die("spack deactivate requires an extension.",
                    "Did you mean 'spack deactivate --all'?")
        if not args.force and not spec.package.activated:
            tty.die("Package %s is not activated." % specs[0].short_spec)
        spec.package.do_deactivate(force=args.force)

View File

@@ -0,0 +1,93 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import os
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
from spack.cmd.edit import edit_package
from spack.stage import DIYStage
description = "Do-It-Yourself: build from an existing source directory."
def setup_parser(subparser):
    """Register command-line arguments for ``spack diy``."""
    subparser.add_argument(
        '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
        help="Do not try to install dependencies of requested packages.")
    subparser.add_argument(
        '--keep-prefix', action='store_true',
        help="Don't remove the install prefix if installation fails.")
    subparser.add_argument(
        '--skip-patch', action='store_true',
        help="Skip patching for the DIY build.")
    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        # Fixed typo in help text: "verison" -> "version".
        help="specs to use for install. Must contain package AND version.")
def diy(self, args):
    """Entry point for ``spack diy``: build a package from an existing
    source directory (the current working directory) instead of a
    downloaded tarball.

    NOTE(review): the first parameter is actually the argparse parser —
    spack's command dispatcher passes it positionally — so it is misnamed
    ``self`` here; renaming it to ``parser`` would match sibling commands.
    """
    if not args.spec:
        tty.die("spack diy requires a package spec argument.")

    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) > 1:
        tty.die("spack diy only takes one spec.")

    spec = specs[0]
    if not spack.db.exists(spec.name):
        # Unknown package: offer to create a skeleton package file
        # interactively, then bail out so the user can fill it in.
        tty.warn("No such package: %s" % spec.name)
        create = tty.get_yes_or_no("Create this package?", default=False)
        if not create:
            tty.msg("Exiting without creating.")
            sys.exit(1)
        else:
            tty.msg("Running 'spack edit -f %s'" % spec.name)
            edit_package(spec.name, True)
            return

    # DIY builds install into a version-specific prefix, so the spec must
    # pin exactly one version before concretizing.
    if not spec.version.concrete:
        tty.die("spack diy spec must have a single, concrete version.")

    spec.concretize()
    package = spack.db.get(spec)

    if package.installed:
        tty.error("Already installed in %s" % package.prefix)
        tty.msg("Uninstall or try adding a version suffix for this DIY build.")
        sys.exit(1)

    # Forces the build to run out of the current directory.
    package.stage = DIYStage(os.getcwd())

    # TODO: make this an argument, not a global.
    spack.do_checksum = False

    package.do_install(
        keep_prefix=args.keep_prefix,
        ignore_deps=args.ignore_deps,
        keep_stage=True)   # don't remove source dir for DIY.

View File

@@ -24,7 +24,6 @@
##############################################################################
import os
import string
from contextlib import closing
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, join_path
@@ -54,6 +53,27 @@ def install(self, spec, prefix):
""")
def edit_package(name, force=False):
    """Open the package file for *name* in the configured editor.

    If the file does not exist, ``force=True`` generates a skeleton
    package file from the template first; otherwise the command dies.
    """
    pkg_path = spack.db.filename_for_package_name(name)
    path_exists = os.path.exists(pkg_path)

    if path_exists:
        # Sanity-check the existing path before handing it to the editor.
        if not os.path.isfile(pkg_path):
            tty.die("Something's wrong. '%s' is not a file!" % pkg_path)
        if not os.access(pkg_path, os.R_OK|os.W_OK):
            tty.die("Insufficient permissions on '%s'!" % pkg_path)
    elif not force:
        tty.die("No package '%s'. Use spack create, or supply -f/--force "
                "to edit a new file." % name)
    else:
        # Forced edit of a new package: write a template skeleton first.
        mkdirp(os.path.dirname(pkg_path))
        with open(pkg_path, "w") as skeleton:
            skeleton.write(
                package_template.substitute(
                    name=name, class_name=mod_to_class(name)))

    spack.editor(pkg_path)
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', dest='force', action='store_true',
@@ -80,22 +100,7 @@ def edit(parser, args):
# By default open the directory where packages or commands live.
if not name:
path = spack.packages_path
spack.editor(path)
else:
path = spack.db.filename_for_package_name(name)
edit_package(name, args.force)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
if not os.access(path, os.R_OK|os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
elif not args.force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
"to edit a new file." % name)
else:
mkdirp(os.path.dirname(path))
with closing(open(path, "w")) as pkg_file:
pkg_file.write(
package_template.substitute(name=name, class_name=mod_to_class(name)))
# If everything checks out, go ahead and edit.
spack.editor(path)

View File

@@ -0,0 +1,69 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from external import argparse
import llnl.util.tty as tty
import spack.cmd
import spack.build_environment as build_env
description = "Run a command with the environment for a particular spec's install."
def setup_parser(subparser):
    """Register command-line arguments for ``spack env``."""
    spec_help = "specs of package environment to emulate."
    subparser.add_argument('spec', nargs=argparse.REMAINDER, help=spec_help)
def env(parser, args):
    """Entry point for ``spack env``: run a command inside the build
    environment of a spec, or dump that environment if no command given.
    """
    if not args.spec:
        tty.die("spack env requires a spec.")

    # Specs may have spaces in them, so if they do, require that the
    # caller put a '--' between the spec and the command to be
    # executed. If there is no '--', assume that the spec is the
    # first argument.
    sep = '--'
    if sep in args.spec:
        s = args.spec.index(sep)
        spec = args.spec[:s]
        cmd = args.spec[s+1:]
    else:
        spec = args.spec[0]
        cmd = args.spec[1:]

    specs = spack.cmd.parse_specs(spec, concretize=True)
    if len(specs) > 1:
        tty.die("spack env only takes one spec.")
    spec = specs[0]

    # Mutates os.environ to match the spec's build environment.
    build_env.setup_package(spec.package)

    if not cmd:
        # If no command act like the "env" command and print out env vars.
        for key, val in os.environ.items():
            print "%s=%s" % (key, val)
    else:
        # Otherwise execute the command with the new environment.
        # NOTE: execvp replaces the current process; nothing after this runs.
        os.execvp(cmd[0], cmd)

View File

@@ -0,0 +1,98 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from external import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
import spack
import spack.cmd
import spack.cmd.find
description = "List extensions for package."
def setup_parser(subparser):
    """Register command-line arguments for ``spack extensions``."""
    # All three flags store into the same 'mode' dest, so at most one
    # may be supplied on the command line.
    format_group = subparser.add_mutually_exclusive_group()
    mode_flags = [
        ('-l', '--long', 'long',
         'Show dependency hashes as well as versions.'),
        ('-p', '--paths', 'paths',
         'Show paths to extension install directories'),
        ('-d', '--deps', 'deps',
         'Show full dependency DAG of extensions'),
    ]
    for short_opt, long_opt, mode_const, help_text in mode_flags:
        format_group.add_argument(
            short_opt, long_opt, action='store_const', dest='mode',
            const=mode_const, help=help_text)

    subparser.add_argument(
        'spec', nargs=argparse.REMAINDER,
        help='Spec of package to list extensions for')
def extensions(parser, args):
    """Entry point for ``spack extensions``: list known, installed, and
    activated extensions of an extendable package.
    """
    if not args.spec:
        tty.die("extensions requires a package spec.")

    # Checks: exactly one spec, and it must name an extendable package
    # both before and after disambiguation against installed packages.
    spec = spack.cmd.parse_specs(args.spec)
    if len(spec) > 1:
        tty.die("Can only list extensions for one package.")
    if not spec[0].package.extendable:
        tty.die("%s is not an extendable package." % spec[0].name)

    spec = spack.cmd.disambiguate_spec(spec[0])
    if not spec.package.extendable:
        tty.die("%s does not have extensions." % spec.short_spec)

    # Default display mode when no -l/-p/-d flag was given.
    if not args.mode:
        args.mode = 'short'

    # List package names of extensions
    extensions = spack.db.extensions_for(spec)
    if not extensions:
        tty.msg("%s has no extensions." % spec.cshort_spec)
        return
    tty.msg(spec.cshort_spec)
    tty.msg("%d extensions:" % len(extensions))
    colify(ext.name for ext in extensions)

    # List specs of installed extensions.
    installed = [s.spec for s in spack.db.installed_extensions_for(spec)]
    print
    if not installed:
        tty.msg("None installed.")
        return
    tty.msg("%d installed:" % len(installed))
    spack.cmd.find.display_specs(installed, mode=args.mode)

    # List specs of activated extensions.
    activated = spack.install_layout.extension_map(spec)
    print
    if not activated:
        tty.msg("None activated.")
        return
    tty.msg("%d currently activated:" % len(activated))
    spack.cmd.find.display_specs(activated.values(), mode=args.mode)

View File

@@ -24,13 +24,14 @@
##############################################################################
import sys
import collections
import itertools
from external import argparse
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
from llnl.util.lang import partition_list, index_by
from llnl.util.lang import *
import spack
import spack.spec
@@ -40,17 +41,64 @@
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
'-p', '--paths', action='store_true', dest='paths',
'-l', '--long', action='store_const', dest='mode', const='long',
help='Show dependency hashes as well as versions.')
format_group.add_argument(
'-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to package install directories')
format_group.add_argument(
'-l', '--long', action='store_true', dest='full_specs',
help='Show full-length specs of installed packages')
'-d', '--deps', action='store_const', dest='mode', const='deps',
help='Show full dependency DAG of installed packages')
subparser.add_argument(
'query_specs', nargs=argparse.REMAINDER,
help='optional specs to filter results')
def display_specs(specs, **kwargs):
    """Print a list of specs grouped by (architecture, compiler).

    Keyword args:
        mode: one of 'short' (default), 'long' (adds hashes),
              'paths' (one spec per line with install prefix), or
              'deps' (full dependency tree per spec).
    """
    mode = kwargs.get('mode', 'short')

    # Make a dict with specs keyed by architecture and compiler.
    index = index_by(specs, ('architecture', 'compiler'))

    # Traverse the index and print out each package
    for i, (architecture, compiler) in enumerate(sorted(index)):
        if i > 0: print

        # Colored "arch / compiler" section header.
        header = "%s{%s} / %s{%s}" % (
            spack.spec.architecture_color, architecture,
            spack.spec.compiler_color, compiler)
        tty.hline(colorize(header), char='-')

        specs = index[(architecture,compiler)]
        specs.sort()

        # '$_$@$+' formats name, version, and variants.
        abbreviated = [s.format('$_$@$+', color=True) for s in specs]

        if mode == 'paths':
            # Print one spec per line along with prefix path
            width = max(len(s) for s in abbreviated)
            width += 2
            format = " %-{}s%s".format(width)

            for abbrv, spec in zip(abbreviated, specs):
                print format % (abbrv, spec.prefix)

        elif mode == 'deps':
            # Trailing comma: tree() already ends with a newline.
            for spec in specs:
                print spec.tree(indent=4, format='$_$@$+$#', color=True),

        elif mode in ('short', 'long'):
            # 'long' additionally shows the dependency hash ($#).
            fmt = '$-_$@$+'
            if mode == 'long':
                fmt += '$#'
            colify(s.format(fmt, color=True) for s in specs)

        else:
            raise ValueError(
                "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)
def find(parser, args):
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
@@ -65,39 +113,17 @@ def find(parser, args):
if not query_specs:
return
specs = [s for s in spack.db.installed_package_specs()
if not query_specs or any(s.satisfies(q) for q in query_specs)]
# Get all the specs the user asked for
if not query_specs:
specs = set(spack.db.installed_package_specs())
else:
results = [set(spack.db.get_installed(qs)) for qs in query_specs]
specs = set.union(*results)
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, 'architecture', 'compiler')
if not args.mode:
args.mode = 'short'
# Traverse the index and print out each package
for architecture in index:
tty.hline(architecture, char='=', color=spack.spec.architecture_color)
for compiler in index[architecture]:
tty.hline(compiler, char='-', color=spack.spec.compiler_color)
if sys.stdout.isatty():
tty.msg("%d installed packages." % len(specs))
display_specs(specs, mode=args.mode)
specs = index[architecture][compiler]
specs.sort()
abbreviated = [s.format('$_$@$+$#', color=True) for s in specs]
if args.paths:
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
width += 2
format = " %-{}s%s".format(width)
for abbrv, spec in zip(abbreviated, specs):
print format % (abbrv, spec.prefix)
elif args.full_specs:
for spec in specs:
print spec.tree(indent=4, format='$_$@$+', color=True),
else:
max_len = max([len(s.name) for s in specs])
max_len += 4
for spec in specs:
format = '$-' + str(max_len) + '_$@$+$#'
print " " + spec.format(format, color=True)

View File

@@ -22,9 +22,45 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
from external import argparse
import spack
import spack.cmd
from spack.graph import *
description = "Generate graphs of package dependency relationships."
def setup_parser(subparser):
    """Register command-line arguments for ``spack graph``."""
    # Stash the parser so graph() can print help when no specs are given.
    setup_parser.parser = subparser

    output_format = subparser.add_mutually_exclusive_group()
    output_format.add_argument(
        '--ascii', action='store_true',
        help="Draw graph as ascii to stdout (default).")
    output_format.add_argument(
        '--dot', action='store_true',
        help="Generate graph in dot format and print to stdout.")

    subparser.add_argument(
        '--concretize', action='store_true',
        help="Concretize specs before graphing.")
    subparser.add_argument(
        'specs', nargs=argparse.REMAINDER,
        help="specs of packages to graph.")
description = "Write out inter-package dependencies in dot graph format"
def graph(parser, args):
spack.db.graph_dependencies()
specs = spack.cmd.parse_specs(
args.specs, normalize=True, concretize=args.concretize)
if not specs:
setup_parser.parser.print_help()
return 1
if args.dot: # Dot graph only if asked for.
graph_dot(*specs)
elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug)
for spec in specs[1:]:
print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)

View File

@@ -22,94 +22,18 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import textwrap
from StringIO import StringIO
from llnl.util.tty.colify import *
import spack
import spack.fetch_strategy as fs
description = "Get detailed information on a particular package"
def setup_parser(subparser):
subparser.add_argument('-r', '--rst', action='store_true',
help="List all packages in reStructured text, for docs.")
subparser.add_argument('name', metavar="PACKAGE", nargs='?', help="name of packages to get info on")
subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
def format_doc(pkg, **kwargs):
"""Wrap doc string at 72 characters and format nicely"""
indent = kwargs.get('indent', 0)
if not pkg.__doc__:
return ""
doc = re.sub(r'\s+', ' ', pkg.__doc__)
lines = textwrap.wrap(doc, 72)
results = StringIO()
for line in lines:
results.write((" " * indent) + line + "\n")
return results.getvalue()
def github_url(pkg):
"""Link to a package file on github."""
return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" %
pkg.name)
def rst_table(elts):
"""Print out a RST-style table."""
cols = StringIO()
ncol, widths = colify(elts, output=cols, tty=True)
header = " ".join("=" * (w-1) for w in widths)
return "%s\n%s%s" % (header, cols.getvalue(), header)
def info_rst():
"""Print out information on all packages in restructured text."""
pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
print "Package List"
print "=================="
print "This is a list of things you can install using Spack. It is"
print "automatically generated based on the packages in the latest Spack"
print "release."
print
print "Spack currently has %d mainline packages:" % len(pkgs)
print
print rst_table("`%s`_" % p.name for p in pkgs)
print
print "-----"
# Output some text for each package.
for pkg in pkgs:
print
print ".. _%s:" % pkg.name
print
print pkg.name
print "-" * len(pkg.name)
print "Links"
print " * `Homepage <%s>`__" % pkg.homepage
print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
print
if pkg.versions:
print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
if pkg.dependencies:
print "Dependencies"
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in pkg.dependencies)
print
print "Description"
print format_doc(pkg, indent=2)
print
print "-----"
def info_text(pkg):
def print_text_info(pkg):
"""Print out a plain text description of a package."""
print "Package: ", pkg.name
print "Homepage: ", pkg.homepage
@@ -144,17 +68,11 @@ def info_text(pkg):
print
print "Description:"
if pkg.__doc__:
print format_doc(pkg, indent=4)
print pkg.format_doc(indent=4)
else:
print " None"
def info(parser, args):
if args.rst:
info_rst()
else:
if not args.name:
tty.die("You must supply a package name.")
pkg = spack.db.get(args.name)
info_text(pkg)
pkg = spack.db.get(args.name)
print_text_info(pkg)

View File

@@ -22,9 +22,10 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
@@ -34,6 +35,9 @@ def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
'-j', '--jobs', action='store', type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
subparser.add_argument(
'--keep-prefix', action='store_true', dest='keep_prefix',
help="Don't remove the install prefix if installation fails.")
@@ -43,6 +47,9 @@ def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
'--fake', action='store_true', dest='fake',
help="Fake install. Just remove the prefix and touch a fake file in it.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
@@ -51,12 +58,19 @@ def install(parser, args):
if not args.packages:
tty.die("install requires at least one package argument")
if args.jobs is not None:
if args.jobs <= 0:
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False
spack.do_checksum = False # TODO: remove this global.
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.db.get(spec)
package.do_install(keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps)
package.do_install(
keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps,
make_jobs=args.jobs,
fake=args.fake)

View File

@@ -61,5 +61,4 @@ def match(p, f):
indent=0
if sys.stdout.isatty():
tty.msg("%d packages." % len(sorted_packages))
indent=2
colify(sorted_packages, indent=indent)

View File

@@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
import sys
from external import argparse
import llnl.util.tty as tty
@@ -77,38 +78,30 @@ def location(parser, args):
tty.die("You must supply a spec.")
if len(specs) != 1:
tty.die("Too many specs. Supply only one.")
spec = specs[0]
if args.install_dir:
# install_dir command matches against installed specs.
matching_specs = spack.db.get_installed(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
args = ["%s matches multiple packages." % spec,
"Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
print matching_specs[0].prefix
elif args.package_dir:
# This one just needs the spec name.
print join_path(spack.db.root, spec.name)
spec = spack.cmd.disambiguate_spec(specs[0])
print spec.prefix
else:
# These versions need concretized specs.
spec.concretize()
pkg = spack.db.get(spec)
spec = specs[0]
if args.stage_dir:
print pkg.stage.path
if args.package_dir:
# This one just needs the spec name.
print join_path(spack.db.root, spec.name)
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path
else:
# These versions need concretized specs.
spec.concretize()
pkg = spack.db.get(spec)
if args.stage_dir:
print pkg.stage.path
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path

View File

@@ -41,6 +41,7 @@ def setup_parser(subparser):
def md5(parser, args):
if not args.files:
setup_parser.parser.print_help()
return 1
for f in args.files:
if not os.path.isfile(f):

View File

@@ -0,0 +1,95 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
import cgi
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.tty.colify import *
import spack
description = "Print a list of all packages in reStructuredText."
def github_url(pkg):
    """Return the URL of *pkg*'s package.py file on GitHub."""
    url_template = ("https://github.com/scalability-llnl/spack/blob/master/"
                    "var/spack/packages/%s/package.py")
    return url_template % pkg.name
def rst_table(elts):
    """Return *elts* rendered as an RST simple table (as a string)."""
    body = StringIO()
    ncol, widths = colify(elts, output=body, tty=True)
    # Header/footer rule: one run of '=' per column, sized to colify's
    # column widths (minus the one-space column separator).
    rule = " ".join("=" * (w-1) for w in widths)
    return "%s\n%s%s" % (rule, body.getvalue(), rule)
def print_rst_package_list():
    """Print out information on all packages in restructured text.

    Emits a complete RST document (used to generate the docs' package
    list): a cross-reference anchor, a summary table of all package
    names, then one section per package with links, versions,
    dependencies, and description.
    """
    pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())

    print ".. _package-list:"
    print
    print "Package List"
    print "=================="
    print "This is a list of things you can install using Spack. It is"
    print "automatically generated based on the packages in the latest Spack"
    print "release."
    print
    print "Spack currently has %d mainline packages:" % len(pkgs)
    print
    print rst_table("`%s`_" % p.name for p in pkgs)
    print
    print "-----"

    # Output some text for each package.
    for pkg in pkgs:
        print
        print ".. _%s:" % pkg.name
        print
        print pkg.name
        print "-" * len(pkg.name)
        # cgi.escape HTML-escapes the homepage used as link text.
        print "Links:"
        print "  * `%s <%s>`__" % (cgi.escape(pkg.homepage), pkg.homepage)
        print "  * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
        print
        if pkg.versions:
            print "Versions:"
            print "  " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
        if pkg.dependencies:
            # "mpi" is a virtual dependency with no package page to link to.
            print "Dependencies"
            print "  " + ", ".join("`%s`_" % d if d != "mpi" else d
                                   for d in pkg.dependencies)
            print
        print "Description:"
        print pkg.format_doc(indent=2)
        print
        print "-----"
def package_list(parser, args):
    """Entry point for ``spack package-list``: dump the RST package list."""
    print_rst_package_list()

View File

@@ -31,12 +31,16 @@
import spack
from spack.util.executable import *
description = "Query packages associated with particular git revisions in spack."
description = "Query packages associated with particular git revisions."
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
add_parser = sp.add_parser('add', help=pkg_add.__doc__)
add_parser.add_argument('packages', nargs=argparse.REMAINDER,
help="Names of packages to add to git repo.")
list_parser = sp.add_parser('list', help=pkg_list.__doc__)
list_parser.add_argument('rev', default='HEAD', nargs='?',
help="Revision to list packages for.")
@@ -79,6 +83,16 @@ def list_packages(rev):
return sorted(line[len(relpath):] for line in output.split('\n') if line)
def pkg_add(args):
    """Add a package to the git stage.

    NOTE: this docstring doubles as the subcommand help text — setup_parser
    passes ``pkg_add.__doc__`` as the 'add' subparser's help.
    """
    for pkg_name in args.packages:
        filename = spack.db.filename_for_package_name(pkg_name)
        if not os.path.isfile(filename):
            tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)

        # Stage the package file in the spack packages repo.
        git = get_git()
        git('-C', spack.packages_path, 'add', filename)
def pkg_list(args):
    """List packages associated with a particular spack git revision."""
    # Column-format the package names for the requested revision.
    colify(list_packages(args.rev))
@@ -117,7 +131,8 @@ def pkg_added(args):
def pkg(parser, args):
action = { 'diff' : pkg_diff,
action = { 'add' : pkg_add,
'diff' : pkg_diff,
'list' : pkg_list,
'removed' : pkg_removed,
'added' : pkg_added }

View File

@@ -0,0 +1,46 @@
##############################################################################
# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from external import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
description = "Revert checked out package source code."
def setup_parser(subparser):
    """Register command-line arguments for ``spack restage``."""
    restage_help = "specs of packages to restage"
    subparser.add_argument('packages', nargs=argparse.REMAINDER, help=restage_help)
def restage(parser, args):
    """Entry point for ``spack restage``: revert each package's checked-out
    source code back to a pristine staged state.
    """
    if not args.packages:
        tty.die("spack restage requires at least one package spec.")

    # Concretize so every spec maps to exactly one known package.
    for concrete_spec in spack.cmd.parse_specs(args.packages, concretize=True):
        spack.db.get(concrete_spec).do_restage()

View File

@@ -27,26 +27,33 @@
import llnl.util.tty as tty
import spack.url as url
import spack
import spack.url as url
description = "print out abstract and concrete versions of a spec."
def setup_parser(subparser):
    """Register the spec command's options and positional arguments."""
    subparser.add_argument(
        '-i', '--ids', action='store_true',
        help="show numerical ids for dependencies.")
    subparser.add_argument(
        'specs', nargs=argparse.REMAINDER, help="specs of packages")
def spec(parser, args):
kwargs = { 'ids' : args.ids,
'indent' : 2,
'color' : True }
for spec in spack.cmd.parse_specs(args.specs):
print "Input spec"
print "------------------------------"
print spec.tree(color=True, indent=2)
print spec.tree(**kwargs)
print "Normalized"
print "------------------------------"
spec.normalize()
print spec.tree(color=True, indent=2)
print spec.tree(**kwargs)
print "Concretized"
print "------------------------------"
spec.concretize()
print spec.tree(color=True, indent=2)
print spec.tree(**kwargs)

View File

@@ -36,7 +36,6 @@ def setup_parser(subparser):
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
dir_parser = subparser.add_mutually_exclusive_group()
subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages to stage")
@@ -52,4 +51,3 @@ def stage(parser, args):
for spec in specs:
package = spack.db.get(spec)
package.do_stage()

View File

@@ -65,20 +65,19 @@ def uninstall(parser, args):
" b) use a more specific spec."]
tty.die(*args)
if len(matching_specs) == 0:
if args.force: continue
tty.die("%s does not match any installed packages." % spec)
for s in matching_specs:
try:
# should work if package is known to spack
pkgs.append(spack.db.get(s))
pkgs.append(s.package)
except spack.packages.UnknownPackageError, e:
# The package.py file has gone away -- but still want to uninstall.
spack.Package(s).do_uninstall(force=True)
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order
def num_installed_deps(pkg):

View File

@@ -0,0 +1,58 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
def setup_parser(subparser):
    """Register the urls command's coloring options."""
    subparser.add_argument(
        '-c', '--color', action='store_true',
        help="Color the parsed version and name in the urls shown. "
             "Version will be cyan, name red.")
    subparser.add_argument(
        '-e', '--extrapolation', action='store_true',
        # Fixed: the two adjacent string literals were joined without a
        # separating space, producing "...as well.Additional versions..."
        # in the --help output.
        help="Color the versions used for extrapolation as well. "
             "Additional versions are green, names magenta.")
def urls(parser, args):
    """Print every URL spack knows about, one per line, sorted.

    Collects the class-level ``url`` attribute of each package plus any
    per-version ``url`` overrides, de-duplicated via a set.
    """
    urls = set()
    for pkg in spack.db.all_packages():
        # Class attribute lookup (not instance) so only an explicitly
        # declared url is picked up.
        url = getattr(pkg.__class__, 'url', None)
        if url:
            urls.add(url)

        # Per-version url overrides, if any.
        for params in pkg.versions.values():
            url = params.get('url', None)
            if url:
                urls.add(url)

    for url in sorted(urls):
        if args.color or args.extrapolation:
            # Highlight the parsed name/version substrings in each URL.
            print spack.url.color_url(url, subs=args.extrapolation, errors=True)
        else:
            print url

View File

@@ -24,6 +24,7 @@
##############################################################################
import os
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
description ="List available versions of a package"
@@ -34,4 +35,21 @@ def setup_parser(subparser):
def versions(parser, args):
pkg = spack.db.get(args.package)
colify(reversed(pkg.fetch_available_versions()))
safe_versions = pkg.versions
fetched_versions = pkg.fetch_remote_versions()
remote_versions = set(fetched_versions).difference(safe_versions)
tty.msg("Safe versions (already checksummed):")
colify(sorted(safe_versions, reverse=True), indent=2)
tty.msg("Remote versions (not yet checksummed):")
if not remote_versions:
if not fetched_versions:
print " Found no versions for %s" % pkg.name
tty.debug("Check the list_url and list_depth attribute on the "
"package to help Spack find versions.")
else:
print " Found no unchecksummed versions for %s" % pkg.name
else:
colify(sorted(remote_versions, reverse=True), indent=2)

View File

@@ -1,117 +0,0 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""\
The ``compilation`` module contains utility functions used by the compiler
wrapper script.
.. todo::
Think about moving this into the script to increase compilation
speed.
"""
import os
import sys
def get_env_var(name, required=True):
    """Return the value of environment variable *name*.

    If *required* is true and the variable is unset, print an error and
    exit: these variables are exported by the spack driver, so their
    absence means this script was not launched through spack.
    """
    value = os.environ.get(name)
    if required and value is None:
        print "%s must be run from spack." % os.path.abspath(sys.argv[0])
        sys.exit(1)
    return value
def get_env_flag(name, required=False):
    """Read environment variable *name* and interpret it as a boolean.

    Only the case-insensitive string "true" counts as enabled; an unset
    or empty variable yields False.
    """
    value = get_env_var(name, required)
    return bool(value) and value.lower() == "true"
def get_path(name):
    """Split environment variable *name* into a list of path entries.

    Returns an empty list when the variable is unset or only whitespace.
    """
    raw = os.environ.get(name, "").strip()
    return raw.split(":") if raw else []
def parse_rpaths(arguments):
    """argparse, for all its features, cannot understand most compilers'
    rpath arguments.  This handles '-Wl,', '-Xlinker', and '-R'.

    Returns a (rpaths, other_args) pair: the extracted rpath directories
    and every remaining argument, re-wrapped for the linker where needed,
    in their original order.
    """

    def _next_or_none(stream):
        """Advance *stream*; return None instead of raising when exhausted."""
        try:
            return next(stream)
        except StopIteration:
            # quietly ignore -rpath and -Xlinker without args.
            return None

    other_args = []

    def _linker_args():
        """Yield only the linker-directed arguments, unwrapping '-Wl,'
        groups and '-Xlinker' pairs.  Anything else is appended to
        other_args immediately, which preserves the original ordering
        of non-linker arguments relative to each other."""
        remaining = iter(arguments)
        for current in remaining:
            if current.startswith('-Wl,'):
                for piece in current.replace('-Wl,', '', 1).split(','):
                    yield piece
            elif current == '-Xlinker':
                value = _next_or_none(remaining)
                if value is not None:
                    yield value
            else:
                other_args.append(current)

    # Pull rpaths out of the linker args as we walk them; non-rpath
    # linker args are re-wrapped with '-Wl,' and appended in-line so the
    # original order is preserved.  Order matters for flags like
    # --whole-archive/--no-whole-archive that modify how the linker
    # treats the next few libraries on the command line.
    rpaths = []
    stream = _linker_args()
    for token in stream:
        if token == '-rpath':
            value = _next_or_none(stream)
            if value is not None:
                rpaths.append(value)

        elif token.startswith('-R'):
            value = token.replace('-R', '', 1)
            if not value:
                value = _next_or_none(stream)
                if value is None:
                    break
            if os.path.isdir(value):
                rpaths.append(value)
            else:
                other_args.extend(['-Wl,' + token, '-Wl,' + value])

        else:
            other_args.append('-Wl,' + token)

    return rpaths, other_args

View File

@@ -35,8 +35,8 @@
import spack.spec
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.util.environment import get_path
from spack.version import Version
from spack.compilation import get_path
__all__ = ['Compiler', 'get_compiler_version']
@@ -169,6 +169,10 @@ def _find_matches_in_path(cls, compiler_names, detect_version, *path):
checks = []
for directory in path:
if not (os.path.isdir(directory) and
os.access(directory, os.R_OK | os.X_OK)):
continue
files = os.listdir(directory)
for exe in files:
full_path = join_path(directory, exe)

View File

@@ -40,12 +40,12 @@
from spack.compiler import Compiler
from spack.util.executable import which
from spack.util.naming import mod_to_class
from spack.compilation import get_path
from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
_default_order = ['gcc', 'intel', 'pgi', 'clang']
_default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc']
def _auto_compiler_spec(function):
def converter(cspec_like):

View File

@@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by François Bissey, francois.bissey@canterbury.ac.nz, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
class Xl(Compiler):
    """Compiler wrapper for the IBM XL compiler family (xlc/xlC/xlf)."""

    # Subclasses use possible names of C compiler
    cc_names = ['xlc', 'xlc_r']

    # Subclasses use possible names of C++ compiler
    cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ['xlf', 'xlf_r']

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
                'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']

    @property
    def cxx11_flag(self):
        """Flag enabling C++11 support; dies for versions that lack it."""
        if self.version < ver('13.1'):
            tty.die("Only xlC 13.1 and above have some c++11 support.")
        else:
            return "-qlanglvl=extended0x"

    # Fixed: a classmethod's first parameter receives the class, so it is
    # conventionally named 'cls' (the original misleadingly named it 'self').
    @classmethod
    def default_version(cls, comp):
        """The '-qversion' is the standard option for XL compilers.
        Output looks like this::

            IBM XL C/C++ for Linux, V11.1 (5724-X14)
            Version: 11.01.0000.0000

        or::

            IBM XL Fortran for Linux, V13.1 (5724-X16)
            Version: 13.01.0000.0000

        or::

            IBM XL C/C++ for AIX, V11.1 (5724-X13)
            Version: 11.01.0000.0009

        or::

            IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0
            Version: 09.00.0000.0017
        """
        return get_compiler_version(
            comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')

    @classmethod
    def fc_version(cls, fc):
        """The fortran and C/C++ versions of the XL compiler are always two
        units apart.  By this we mean that the fortran release that goes
        with XL C/C++ 11.1 is 13.1.  Having such a difference in version
        number is confusing spack quite a lot.  Most notably if you keep
        the versions as is the default xl compiler will only have fortran
        and no C/C++.  So we associate the Fortran compiler with the
        version associated to the C/C++ compiler.

        One last stumble.  Version numbers over 10 have at least a .1
        those under 10 a .0.  There is no xlf 9.x or under currently
        available.  BG/P and BG/L can have such a compiler mix, and
        possibly older versions of AIX and linux on power.
        """
        fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
        cver = float(fver) - 2
        if cver < 10:
            # Releases below 10 use a .0 minor version, not .1.
            cver = cver - 0.1
        return str(cver)

    @classmethod
    def f77_version(cls, f77):
        """XL Fortran 77 shares the xlf frontend, so reuse fc_version."""
        return cls.fc_version(f77)

View File

@@ -68,8 +68,9 @@ def concretize_version(self, spec):
# If there are known available versions, return the most recent
# version that satisfies the spec
pkg = spec.package
valid_versions = [v for v in pkg.available_versions
if any(v.satisfies(sv) for sv in spec.versions)]
valid_versions = sorted(
[v for v in pkg.versions
if any(v.satisfies(sv) for sv in spec.versions)])
if valid_versions:
spec.versions = ver([valid_versions[-1]])

View File

@@ -27,9 +27,11 @@
import exceptions
import hashlib
import shutil
import tempfile
from contextlib import closing
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.filesystem import join_path, mkdirp
import spack
@@ -53,6 +55,19 @@ def __init__(self, root):
self.root = root
@property
def hidden_file_paths(self):
"""Return a list of hidden files used by the directory layout.
Paths are relative to the root of an install directory.
If the directory layout uses no hidden files to maintain
state, this should return an empty container, e.g. [] or (,).
"""
raise NotImplementedError()
def all_specs(self):
"""To be implemented by subclasses to traverse all specs for which there is
a directory within the root.
@@ -71,6 +86,42 @@ def make_path_for_spec(self, spec):
raise NotImplementedError()
def extension_map(self, spec):
"""Get a dict of currently installed extension packages for a spec.
Dict maps { name : extension_spec }
Modifying dict does not affect internals of this layout.
"""
raise NotImplementedError()
def check_extension_conflict(self, spec, ext_spec):
"""Ensure that ext_spec can be activated in spec.
If not, raise ExtensionAlreadyInstalledError or
ExtensionConflictError.
"""
raise NotImplementedError()
def check_activated(self, spec, ext_spec):
"""Ensure that ext_spec can be removed from spec.
If not, raise NoSuchExtensionError.
"""
raise NotImplementedError()
def add_extension(self, spec, ext_spec):
"""Add to the list of currently installed extensions."""
raise NotImplementedError()
def remove_extension(self, spec, ext_spec):
"""Remove from the list of currently installed extensions."""
raise NotImplementedError()
def path_for_spec(self, spec):
"""Return an absolute path from the root to a directory for the spec."""
_check_concrete(spec)
@@ -81,12 +132,17 @@ def path_for_spec(self, spec):
def remove_path_for_spec(self, spec):
"""Removes a prefix and any empty parent directories from the root."""
"""Removes a prefix and any empty parent directories from the root.
Raised RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert(path.startswith(self.root))
if os.path.exists(path):
shutil.rmtree(path, True)
try:
shutil.rmtree(path)
except exceptions.OSError, e:
raise RemoveFailedError(spec, path, e)
path = os.path.dirname(path)
while path != self.root:
@@ -134,9 +190,18 @@ def __init__(self, root, **kwargs):
"""Prefix size is number of characters in the SHA-1 prefix to use
to make each hash unique.
"""
spec_file_name = kwargs.get('spec_file_name', '.spec')
spec_file_name = kwargs.get('spec_file_name', '.spec')
extension_file_name = kwargs.get('extension_file_name', '.extensions')
super(SpecHashDirectoryLayout, self).__init__(root)
self.spec_file_name = spec_file_name
self.extension_file_name = extension_file_name
# Cache of already written/read extension maps.
self._extension_maps = {}
@property
def hidden_file_paths(self):
return ('.spec', '.extensions')
def relative_path_for_spec(self, spec):
@@ -157,19 +222,27 @@ def read_spec(self, path):
# Specs from files are assumed normal and concrete
spec = Spec(spec_file.read().replace('\n', ''))
# If we do not have a package on hand for this spec, we know
# it is concrete, and we *assume* that it is normal. This
# prevents us from trying to fetch a non-existing package, and
# allows best effort for commands like spack find.
if not spack.db.exists(spec.name):
spec._normal = True
spec._concrete = True
else:
spec.normalize()
if not spec.concrete:
tty.warn("Spec read from installed package is not concrete:",
path, spec)
if all(spack.db.exists(s.name) for s in spec.traverse()):
copy = spec.copy()
# TODO: It takes a lot of time to normalize every spec on read.
# TODO: Storing graph info with spec files would fix this.
copy.normalize()
if copy.concrete:
return copy # These are specs spack still understands.
# If we get here, either the spec is no longer in spack, or
# something about its dependencies has changed. So we need to
# just assume the read spec is correct. We'll lose graph
# information if we do this, but this is just for best effort
# for commands like uninstall and find. Currently Spack
# doesn't do anything that needs the graph info after install.
# TODO: store specs with full connectivity information, so
# that we don't have to normalize or reconstruct based on
# changing dependencies in the Spack tree.
spec._normal = True
spec._concrete = True
return spec
@@ -207,17 +280,116 @@ def make_path_for_spec(self, spec):
self.write_spec(spec, spec_file_path)
@memoized
def all_specs(self):
if not os.path.isdir(self.root):
return
return []
specs = []
for path in traverse_dirs_at_depth(self.root, 3):
arch, compiler, last_dir = path
spec_file_path = join_path(
self.root, arch, compiler, last_dir, self.spec_file_name)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path)
yield spec
specs.append(spec)
return specs
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file"""
_check_concrete(spec)
return join_path(self.path_for_spec(spec), self.extension_file_name)
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currently
installed for this package."""
_check_concrete(spec)
if not spec in self._extension_maps:
path = self.extension_file_path(spec)
if not os.path.exists(path):
self._extension_maps[spec] = {}
else:
exts = {}
with closing(open(path)) as ext_file:
for line in ext_file:
try:
spec = Spec(line.strip())
exts[spec.name] = spec
except spack.error.SpackError, e:
# TODO: do something better here -- should be
# resilient to corrupt files.
raise InvalidExtensionSpecError(str(e))
self._extension_maps[spec] = exts
return self._extension_maps[spec]
def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API."""
return self._extension_map(spec).copy()
def check_extension_conflict(self, spec, ext_spec):
exts = self._extension_map(spec)
if ext_spec.name in exts:
installed_spec = exts[ext_spec.name]
if ext_spec == installed_spec:
raise ExtensionAlreadyInstalledError(spec, ext_spec)
else:
raise ExtensionConflictError(spec, ext_spec, installed_spec)
def check_activated(self, spec, ext_spec):
exts = self._extension_map(spec)
if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
raise NoSuchExtensionError(spec, ext_spec)
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
# Create a temp file in the same directory as the actual file.
dirname, basename = os.path.split(path)
tmp = tempfile.NamedTemporaryFile(
prefix=basename, dir=dirname, delete=False)
# Write temp file.
with closing(tmp):
for extension in sorted(extensions.values()):
tmp.write("%s\n" % extension)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
def add_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
# Check whether it's already installed or if it's a conflict.
exts = self._extension_map(spec)
self.check_extension_conflict(spec, ext_spec)
# do the actual adding.
exts[ext_spec.name] = ext_spec
self._write_extensions(spec, exts)
def remove_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
# Make sure it's installed before removing.
exts = self._extension_map(spec)
self.check_activated(spec, ext_spec)
# do the actual removing.
del exts[ext_spec.name]
self._write_extensions(spec, exts)
class DirectoryLayoutError(SpackError):
@@ -234,6 +406,15 @@ def __init__(self, installed_spec, new_spec):
% installed_spec, new_spec)
class RemoveFailedError(DirectoryLayoutError):
    """Raised when a DirectoryLayout cannot remove an install prefix."""
    def __init__(self, installed_spec, prefix, error):
        # Fixed: the original wrote "'... %s %s %s' % prefix, a, b" --
        # '%' binds tighter than ',', so only prefix was interpolated
        # (TypeError: not enough arguments) and the trailing values were
        # passed as spurious extra arguments to the base constructor.
        super(RemoveFailedError, self).__init__(
            'Could not remove prefix %s for %s : %s'
            % (prefix, installed_spec.short_spec, error))
        # Keep the underlying OSError for callers that want details.
        self.cause = error
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
def __init__(self, message):
@@ -245,3 +426,34 @@ class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
def __init__(self, path):
    """Report that the install path for a spec already exists.

    Fixed: the message's '%s' placeholder was never interpolated with
    *path*, so the error printed a literal '%s'.
    """
    super(InstallDirectoryAlreadyExistsError, self).__init__(
        "Install path %s already exists!" % path)
class InvalidExtensionSpecError(DirectoryLayoutError):
"""Raised when an extension file has a bad spec in it."""
def __init__(self, message):
super(InvalidExtensionSpecError, self).__init__(message)
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
def __init__(self, spec, ext_spec):
super(ExtensionAlreadyInstalledError, self).__init__(
"%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
class ExtensionConflictError(DirectoryLayoutError):
    """Raised when an extension being added conflicts with a different
    extension spec already installed in the package.

    (Docstring fixed: it was a copy-paste of ExtensionAlreadyInstalledError's
    and misdescribed this error.)
    """
    def __init__(self, spec, ext_spec, conflict):
        super(ExtensionConflictError, self).__init__(
            "%s cannot be installed in %s because it conflicts with %s." % (
                ext_spec.short_spec, spec.short_spec, conflict.short_spec))
class NoSuchExtensionError(DirectoryLayoutError):
"""Raised when an extension isn't there on deactivate."""
def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
"%s cannot be removed from %s because it's not activated."% (
ext_spec.short_spec, spec.short_spec))

View File

@@ -33,6 +33,12 @@ def __init__(self, message, long_message=None):
self.long_message = long_message
def __str__(self):
    """Render the error message, appending the long message when present."""
    msg = self.message
    # long_message carries optional multi-line detail; indent it slightly.
    if self.long_message:
        msg += "\n    %s" % self.long_message
    return msg
class UnsupportedPlatformError(SpackError):
"""Raised by packages when a platform is not supported"""
def __init__(self, message):

View File

@@ -41,11 +41,12 @@
Archive a source directory, e.g. for creating a mirror.
"""
import os
import sys
import re
import shutil
from functools import wraps
import llnl.util.tty as tty
from llnl.util.filesystem import *
import spack
import spack.error
import spack.util.crypto as crypto
@@ -141,13 +142,19 @@ def fetch(self):
tty.msg("Trying to fetch from %s" % self.url)
curl_args = ['-O', # save file to disk
'-f', # fail on >400 errors
'-D', '-', # print out HTML headers
'-L', self.url,]
if sys.stdout.isatty():
curl_args.append('-#') # status bar when using a tty
else:
curl_args.append('-sS') # just errors when not.
# Run curl but grab the mime type from the http headers
headers = spack.curl('-#', # status bar
'-O', # save file to disk
'-f', # fail on >400 errors
'-D', '-', # print out HTML headers
'-L', self.url,
return_output=True, fail_on_error=False)
headers = spack.curl(
*curl_args, return_output=True, fail_on_error=False)
if spack.curl.returncode != 0:
# clean up archive on failure.
@@ -156,9 +163,10 @@ def fetch(self):
if spack.curl.returncode == 22:
# This is a 404. Curl will print the error.
raise FailedDownloadError(url)
raise FailedDownloadError(
self.url, "URL %s was not found!" % self.url)
if spack.curl.returncode == 60:
elif spack.curl.returncode == 60:
# This is a certificate error. Suggest spack -k
raise FailedDownloadError(
self.url,
@@ -168,6 +176,13 @@ def fetch(self):
"can try running spack -k, which will not check SSL certificates."
"Use this at your own risk.")
else:
# This is some other curl error. Curl will print the
# error, but print a spack message too
raise FailedDownloadError(
self.url, "Curl failed with error %d" % spack.curl.returncode)
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
@@ -197,8 +212,26 @@ def expand(self):
"Failed on expand() for URL %s" % self.url)
decompress = decompressor_for(self.archive_file)
# Expand all tarballs in their own directory to contain
# exploding tarballs.
tarball_container = os.path.join(self.stage.path, "spack-expanded-archive")
mkdirp(tarball_container)
os.chdir(tarball_container)
decompress(self.archive_file)
# If the tarball *didn't* explode, move
# the expanded directory up & remove the protector directory.
files = os.listdir(tarball_container)
if len(files) == 1:
expanded_dir = os.path.join(tarball_container, files[0])
if os.path.isdir(expanded_dir):
shutil.move(expanded_dir, self.stage.path)
os.rmdir(tarball_container)
# Set the wd back to the stage when done.
self.stage.chdir()
def archive(self, destination):
"""Just moves this archive to the destination."""
@@ -330,15 +363,10 @@ def __init__(self, **kwargs):
'git', 'tag', 'branch', 'commit', **kwargs)
self._git = None
# For git fetch branches and tags the same way.
if not self.branch:
self.branch = self.tag
@property
def git_version(self):
git = which('git', required=True)
vstring = git('--version', return_output=True).lstrip('git version ')
vstring = self.git('--version', return_output=True).lstrip('git version ')
return Version(vstring)
@@ -348,6 +376,7 @@ def git(self):
self._git = which('git', required=True)
return self._git
@_needs_stage
def fetch(self):
self.stage.chdir()
@@ -389,6 +418,12 @@ def fetch(self):
self.git(*args)
self.stage.chdir_to_source()
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
if self.tag:
self.git('checkout', self.tag)
def archive(self, destination):
super(GitFetchStrategy, self).archive(destination, exclude='.git')
@@ -580,7 +615,7 @@ def for_package_version(pkg, version):
version() in the package description."""
# If it's not a known version, extrapolate one.
if not version in pkg.versions:
url = pkg.url_for_verison(version)
url = pkg.url_for_version(version)
if not url:
raise InvalidArgsError(pkg, version)
return URLFetchStrategy(url)

553
lib/spack/spack/graph.py Normal file
View File

@@ -0,0 +1,553 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Functions for graphing DAGs of dependencies.
This file contains code for graphing DAGs of software packages
(i.e. Spack specs). There are two main functions you probably care
about:
graph_ascii() will output a colored graph of a spec in ascii format,
kind of like the graph git shows with "git log --graph", e.g.::
o mpileaks
|\
| |\
| o | callpath
|/| |
| |\|
| |\ \
| | |\ \
| | | | o adept-utils
| |_|_|/|
|/| | | |
o | | | | mpi
/ / / /
| | o | dyninst
| |/| |
|/|/| |
| | |/
| o | libdwarf
|/ /
o | libelf
/
o boost
graph_dot() will output a graph of a spec (or multiple specs) in dot
format.
Note that ``graph_ascii`` assumes a single spec while ``graph_dot``
can take a number of specs as input.
"""
__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
from heapq import *
from llnl.util.lang import *
from llnl.util.tty.color import *
import spack
from spack.spec import Spec
def topological_sort(spec, **kwargs):
    """Return the node names of *spec*'s DAG in topological order.

    Kahn's algorithm, run on a private copy of the spec so the caller's
    spec is never modified.  Pass reverse=True to order from leaves
    toward roots instead of roots toward leaves.  Raises ValueError if
    the graph contains a cycle.
    """
    reverse = kwargs.get('reverse', False)
    # 'parents' are the edges that must be consumed before a node is
    # ready; 'children' are the edges we consume when it is emitted.
    parents = (lambda s: s.dependencies) if reverse else (lambda s: s.dependents)
    children = (lambda s: s.dependents) if reverse else (lambda s: s.dependencies)

    # Work on a copy: edges are destructively deleted below.
    working = spec.copy()
    nodes = working.index()

    # Seed the heap with every node that has no unconsumed parent edges.
    ready = [name for name, node in nodes.items() if not parents(node)]
    heapify(ready)

    order = []
    while ready:
        current = heappop(ready)
        order.append(current)

        node = nodes[current]
        for child in children(node).values():
            # Consume the edge node -> child; the child becomes ready
            # once its last parent edge is gone.
            del parents(child)[node.name]
            if not parents(child):
                heappush(ready, child.name)

    # Any surviving parent edge means a cycle kept some node unready.
    if any(parents(s) for s in working.traverse()):
        raise ValueError("Spec has cycles!")
    return order
def find(seq, predicate):
    """Return the index of the first element of *seq* for which
    *predicate* is true, or -1 when no element qualifies."""
    for position, element in enumerate(seq):
        if predicate(element):
            return position
    return -1
# Names of different graph line states. We Record previous line
# states so that we can easily determine what to do when connecting.
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
class AsciiGraph(object):
def __init__(self):
    """Set up an ASCII DAG renderer with default appearance settings."""
    # These can be set after initialization or after a call to
    # graph() to change behavior.
    self.node_character = '*'
    self.debug = False
    self.indent = 0

    # These are colors in the order they'll be used for edges.
    # See llnl.util.tty.color for details on color characters.
    self.colors = 'rgbmcyRGBMCY'

    # Internal vars are used in the graph() function and are
    # properly initialized there.
    self._name_to_color = None   # Node name to color
    self._out = None             # Output stream
    self._frontier = None        # frontier
    self._nodes = None           # dict from name -> node
    self._prev_state = None      # State of previous line
    self._prev_index = None      # Index of expansion point of prev line
def _indent(self):
    """Write this graph's configured left indent to the output stream."""
    self._out.write(self.indent * ' ')
def _write_edge(self, string, index, sub=0):
    """Write a colored edge to the output stream.

    ``string`` is wrapped in the color escape assigned to the node
    name found at frontier position ``index`` (entry ``sub`` within
    that frontier slot).
    """
    name = self._frontier[index][sub]
    edge = "@%s{%s}" % (self._name_to_color[name], string)
    self._out.write(edge)
def _connect_deps(self, i, deps, label=None):
"""Connect dependencies to existing edges in the frontier.
``deps`` are to be inserted at position i in the
frontier. This routine determines whether other open edges
should be merged with <deps> (if there are other open edges
pointing to the same place) or whether they should just be
inserted as a completely new open edge.
Open edges that are not fully expanded (i.e. those that point
at multiple places) are left intact.
Parameters:
label -- optional debug label for the connection.
Returns: True if the deps were connected to another edge
(i.e. the frontier did not grow) and False if the deps were
NOT already in the frontier (i.e. they were inserted and the
frontier grew).
"""
if len(deps) == 1 and deps in self._frontier:
j = self._frontier.index(deps)
# convert a right connection into a left connection
if i < j:
self._frontier.pop(j)
self._frontier.insert(i, deps)
return self._connect_deps(j, deps, label)
collapse = True
if self._prev_state == EXPAND_RIGHT:
# Special case where previous line expanded and i is off by 1.
self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
collapse = False
else:
# Previous node also expanded here, so i is off by one.
if self._prev_state == NODE and self._prev_index < i:
i += 1
if i-j > 1:
# We need two lines to connect if distance > 1
self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
collapse = False
self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
return True
elif deps:
self._frontier.insert(i, deps)
return False
def _set_state(self, state, index, label=None):
if state not in states:
raise ValueError("Invalid graph state!")
self._prev_state = state
self._prev_index = index
if self.debug:
self._out.write(" " * 20)
self._out.write("%-20s" % (
str(self._prev_state) if self._prev_state else ''))
self._out.write("%-20s" % (str(label) if label else ''))
self._out.write("%s" % self._frontier)
def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
"""Write part of a backwards edge in the graph.
Writes single- or multi-line backward edges in an ascii graph.
For example, a single line edge::
| | | | o |
| | | |/ / <-- single-line edge connects two nodes.
| | | o |
Or a multi-line edge (requires two calls to back_edge)::
| | | | o |
| |_|_|/ / <-- multi-line edge crosses vertical edges.
|/| | | |
o | | | |
Also handles "pipelined" edges, where the same line contains
parts of multiple edges::
o start
| |_|_|_|/|
|/| | |_|/| <-- this line has parts of 2 edges.
| | |/| | |
o o
Arguments:
prev_ends -- indices in frontier of previous edges that need
to be finished on this line.
end -- end of the current edge on this line.
start -- start index of the current edge.
collapse -- whether the graph will be collapsing (i.e. whether
to slant the end of the line or keep it straight)
label -- optional debug label to print after the line.
"""
def advance(to_pos, edges):
"""Write edges up to <to_pos>."""
for i in range(self._pos, to_pos):
for e in edges():
self._write_edge(*e)
self._pos += 1
flen = len(self._frontier)
self._pos = 0
self._indent()
for p in prev_ends:
advance(p, lambda: [("| ", self._pos)] )
advance(p+1, lambda: [("|/", self._pos)] )
if end >= 0:
advance(end + 1, lambda: [("| ", self._pos)] )
advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
else:
advance(start - 1, lambda: [("| ", self._pos)] )
if start >= 0:
advance(start, lambda: [("|", self._pos), ("/", end)] )
if collapse:
advance(flen, lambda: [(" /", self._pos)] )
else:
advance(flen, lambda: [("| ", self._pos)] )
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
def _node_line(self, index, name):
"""Writes a line with a node at index."""
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._out.write("%s " % self.node_character)
for c in range(index+1, len(self._frontier)):
self._write_edge("| ", c)
self._out.write(" %s" % name)
self._set_state(NODE, index)
self._out.write("\n")
def _collapse_line(self, index):
"""Write a collapsing line after a node was added at index."""
self._indent()
for c in range(index):
self._write_edge("| ", c)
for c in range(index, len(self._frontier)):
self._write_edge(" /", c)
self._set_state(COLLAPSE, index)
self._out.write("\n")
def _merge_right_line(self, index):
"""Edge at index is same as edge to right. Merge directly with '\'"""
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
self._write_edge("\\", index+1)
for c in range(index+1, len(self._frontier)):
self._write_edge("| ", c )
self._set_state(MERGE_RIGHT, index)
self._out.write("\n")
def _expand_right_line(self, index):
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
self._write_edge("\\", index+1)
for c in range(index+2, len(self._frontier)):
self._write_edge(" \\", c)
self._set_state(EXPAND_RIGHT, index)
self._out.write("\n")
def write(self, spec, **kwargs):
"""Write out an ascii graph of the provided spec.
Arguments:
spec -- spec to graph. This only handles one spec at a time.
Optional arguments:
out -- file object to write out to (default is sys.stdout)
color -- whether to write in color. Default is to autodetect
based on output file.
"""
out = kwargs.get('out', None)
if not out:
out = sys.stdout
color = kwargs.get('color', None)
if not color:
color = out.isatty()
self._out = ColorStream(sys.stdout, color=color)
# We'll traverse the spec in topo order as we graph it.
topo_order = topological_sort(spec, reverse=True)
# Work on a copy to be nondestructive
spec = spec.copy()
self._nodes = spec.index()
# Colors associated with each node in the DAG.
# Edges are colored by the node they point to.
self._name_to_color = dict((name, self.colors[i % len(self.colors)])
for i, name in enumerate(topo_order))
# Frontier tracks open edges of the graph as it's written out.
self._frontier = [[spec.name]]
while self._frontier:
# Find an unexpanded part of frontier
i = find(self._frontier, lambda f: len(f) > 1)
if i >= 0:
# Expand frontier until there are enough columns for all children.
# Figure out how many back connections there are and
# sort them so we do them in order
back = []
for d in self._frontier[i]:
b = find(self._frontier[:i], lambda f: f == [d])
if b != -1:
back.append((b, d))
# Do all back connections in sorted order so we can
# pipeline them and save space.
if back:
back.sort()
prev_ends = []
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
if i-b > 1:
self._back_edge_line(prev_ends, b, i, False, 'left-1')
del prev_ends[:]
prev_ends.append(b)
# Check whether we did ALL the deps as back edges,
# in which case we're done.
collapse = not self._frontier[i]
if collapse:
self._frontier.pop(i)
self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2')
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
and self._frontier[i+1][0] in self._frontier[i]):
# We need to connect to the element to the right.
# Keep lines straight by connecting directly and
# avoiding unnecessary expand/contract.
name = self._frontier[i+1][0]
self._frontier[i].remove(name)
self._merge_right_line(i)
else:
# Just allow the expansion here.
name = self._frontier[i].pop(0)
deps = [name]
self._frontier.insert(i, deps)
self._expand_right_line(i)
self._frontier.pop(i)
self._connect_deps(i, deps, "post-expand")
# Handle any remaining back edges to the right
j = i+1
while j < len(self._frontier):
deps = self._frontier.pop(j)
if not self._connect_deps(j, deps, "back-from-right"):
j += 1
else:
# Nothing to expand; add dependencies for a node.
name = topo_order.pop()
node = self._nodes[name]
# Find the named node in the frontier and draw it.
i = find(self._frontier, lambda f: name in f)
self._node_line(i, name)
# Replace node with its dependencies
self._frontier.pop(i)
if node.dependencies:
deps = sorted((d for d in node.dependencies), reverse=True)
self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
self._collapse_line(i)
def graph_ascii(spec, **kwargs):
    """Draw an ascii graph of the given spec's dependency DAG.

    Optional kwargs:

    node   -- character used to mark a node (default 'o')
    out    -- stream to write to (default sys.stdout, via AsciiGraph.write)
    debug  -- whether to print debug info next to each graph line
    indent -- number of spaces to indent the whole graph
    color  -- force color on/off (default: autodetect from the stream)
    """
    # BUG FIX: use pop() like every other option below.  The original used
    # kwargs.get('node', ...), which left 'node' in kwargs so the
    # check_kwargs() call rejected a legitimately supported option.
    node_character = kwargs.pop('node', 'o')
    out            = kwargs.pop('out', None)
    debug          = kwargs.pop('debug', False)
    indent         = kwargs.pop('indent', 0)
    color          = kwargs.pop('color', None)
    check_kwargs(kwargs, graph_ascii)

    graph = AsciiGraph()
    graph.debug = debug
    graph.indent = indent
    graph.node_character = node_character

    graph.write(spec, color=color, out=out)
def graph_dot(*specs, **kwargs):
    """Generate a graph in dot format of all provided specs.

    Print out a dot formatted graph of all the dependencies between
    packages.  Output can be passed to graphviz, e.g.:

        spack graph --dot qt | dot -Tpdf > spack-graph.pdf

    Optional kwargs:

    out -- stream to write the dot source to (default sys.stdout)
    """
    out = kwargs.pop('out', sys.stdout)
    check_kwargs(kwargs, graph_dot)

    out.write('digraph G {\n')
    out.write('  label = "Spack Dependencies"\n')
    out.write('  labelloc = "b"\n')
    out.write('  rankdir = "LR"\n')
    out.write('  ranksep = "5"\n')
    out.write('\n')

    def quote(string):
        return '"%s"' % string

    if not specs:
        # BUG FIX: graph every known package.  Wrap each name in a Spec:
        # the loop below reads spec.name, spec.virtual, and spec.package,
        # which the plain name strings produced previously don't have
        # (the else branch below builds Spec objects, confirming the
        # loop's expectation).
        specs = [Spec(p.name) for p in spack.db.all_packages()]
    else:
        roots = specs
        specs = set()
        for spec in roots:
            specs.update(Spec(s.name) for s in spec.normalized().traverse())

    deps = []
    for spec in specs:
        out.write('  %-30s [label="%s"]\n' % (quote(spec.name), spec.name))

        # Skip virtual specs (we'll find out about them from concrete ones).
        if spec.virtual:
            continue

        # Add edges for each depends_on in the package.  Only the
        # dependency names are needed; the values were unused.
        for dep_name in spec.package.dependencies:
            deps.append((spec.name, dep_name))

        # If the package provides something, add an edge for that.
        for provider in set(s.name for s in spec.package.provided):
            deps.append((provider, spec.name))

    out.write('\n')

    for pair in deps:
        out.write('  "%s" -> "%s"\n' % pair)
    out.write('}\n')

View File

@@ -31,7 +31,9 @@
Currently the following hooks are supported:
* pre_install()
* post_install()
* pre_uninstall()
* post_uninstall()
This can be used to implement support for things like module
@@ -47,8 +49,11 @@
def all_hook_modules():
modules = []
for name in list_modules(spack.hooks_path):
mod_name = __name__ + '.' + name
path = join_path(spack.hooks_path, name) + ".py"
modules.append(imp.load_source('spack.hooks', path))
mod = imp.load_source(mod_name, path)
modules.append(mod)
return modules
@@ -67,5 +72,8 @@ def __call__(self, pkg):
#
# Define some functions that can be called to fire off hooks.
#
post_install = HookRunner('post_install')
pre_install = HookRunner('pre_install')
post_install = HookRunner('post_install')
pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')

View File

@@ -0,0 +1,36 @@
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
def pre_uninstall(pkg):
# Need to do this b/c uninstall does not automatically do it.
# TODO: store full graph info in stored .spec file.
pkg.spec.normalize()
if pkg.is_extension:
if pkg.activated:
pkg.do_deactivate(force=True)

View File

@@ -37,6 +37,7 @@
import spack
import spack.error
import spack.url as url
import spack.fetch_strategy as fs
from spack.spec import Spec
from spack.stage import Stage
@@ -45,14 +46,14 @@
def mirror_archive_filename(spec):
"""Get the path that this spec will live at within a mirror."""
"""Get the name of the spec's archive in the mirror."""
if not spec.version.concrete:
raise ValueError("mirror.path requires spec with concrete version.")
fetcher = spec.package.fetcher
if isinstance(fetcher, fs.URLFetchStrategy):
# If we fetch this version with a URLFetchStrategy, use URL's archive type
ext = extension(fetcher.url)
ext = url.downloaded_file_extension(fetcher.url)
else:
# Otherwise we'll make a .tar.gz ourselves
ext = 'tar.gz'
@@ -60,6 +61,11 @@ def mirror_archive_filename(spec):
return "%s-%s.%s" % (spec.package.name, spec.version, ext)
def mirror_archive_path(spec):
"""Get the relative path to the spec's archive within a mirror."""
return join_path(spec.name, mirror_archive_filename(spec))
def get_matching_versions(specs, **kwargs):
"""Get a spec for EACH known version matching any spec in the list."""
matching = []
@@ -140,12 +146,10 @@ def create(path, specs, **kwargs):
stage = None
try:
# create a subdirectory for the current package@version
subdir = join_path(mirror_root, pkg.name)
archive_path = os.path.abspath(join_path(path, mirror_archive_path(spec)))
subdir = os.path.dirname(archive_path)
mkdirp(subdir)
archive_file = mirror_archive_filename(spec)
archive_path = join_path(subdir, archive_file)
if os.path.exists(archive_path):
tty.msg("Already added %s" % spec.format("$_$@"))
present.append(spec)

View File

@@ -49,6 +49,7 @@
import re
import textwrap
import shutil
from glob import glob
from contextlib import closing
import llnl.util.tty as tty
@@ -123,6 +124,13 @@ def add_path(path_name, directory):
if os.path.isdir(directory):
add_path(var, directory)
# Add python path unless it's an actual python installation
# TODO: is there a better way to do this?
if self.spec.name != 'python':
site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages"))
if site_packages:
add_path('PYTHONPATH', site_packages[0])
# short description is just the package + version
# TODO: maybe packages can optionally provide it.
self.short_description = self.spec.format("$_ $@")

View File

@@ -35,13 +35,17 @@
"""
import os
import re
import time
import inspect
import subprocess
import platform as py_platform
import multiprocessing
from urlparse import urlparse
from urlparse import urlparse, urljoin
import textwrap
from StringIO import StringIO
import llnl.util.tty as tty
from llnl.util.link_tree import LinkTree
from llnl.util.filesystem import *
from llnl.util.lang import *
@@ -49,6 +53,7 @@
import spack.spec
import spack.error
import spack.compilers
import spack.mirror
import spack.hooks
import spack.build_environment as build_env
import spack.url as url
@@ -57,6 +62,7 @@
from spack.stage import Stage
from spack.util.web import get_pages
from spack.util.compression import allowed_archive, extension
from spack.util.executable import ProcessError
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -282,10 +288,9 @@ class SomePackage(Package):
.. code-block:: python
p.do_clean() # runs make clean
p.do_clean_work() # removes the build directory and
p.do_clean() # removes the stage directory entirely
p.do_restage() # removes the build directory and
# re-expands the archive.
p.do_clean_dist() # removes the stage directory entirely
The convention used here is that a do_* function is intended to be called
internally by Spack commands (in spack.cmd). These aren't for package
@@ -316,12 +321,24 @@ class SomePackage(Package):
"""Patches to apply to newly expanded source, if any."""
patches = {}
"""Specs of package this one extends, or None.
Currently, ppackages can extend at most one other package.
"""
extendees = {}
#
# These are default values for instance variables.
#
"""By default we build in parallel. Subclasses can override this."""
parallel = True
"""# jobs to use for parallel make. If set, overrides default of ncpus."""
make_jobs = None
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
def __init__(self, spec):
# this determines how the package should be built.
@@ -333,9 +350,6 @@ def __init__(self, spec):
if '.' in self.name:
self.name = self.name[self.name.rindex('.') + 1:]
# This is set by scraping a web page.
self._available_versions = None
# Sanity check some required variables that could be
# overridden by package authors.
def ensure_has_dict(attr_name):
@@ -368,12 +382,20 @@ def ensure_has_dict(attr_name):
# stage used to build this package.
self._stage = None
# If there's no default URL provided, set this package's url to None
if not hasattr(self, 'url'):
self.url = None
# Init fetch strategy to None
# Init fetch strategy and url to None
self._fetcher = None
self.url = getattr(self.__class__, 'url', None)
# Fix up self.url if this package fetches with a URLFetchStrategy.
# This makes self.url behave sanely.
if self.spec.versions.concrete:
# TODO: this is a really roundabout way of determining the type
# TODO: of fetch to do. figure out a more sane fetch strategy/package
# TODO: init order (right now it's conflated with stage, package, and
# TODO: the tests make assumptions)
f = fs.for_package_version(self, self.version)
if isinstance(f, fs.URLFetchStrategy):
self.url = self.url_for_version(self.spec.version)
# Set a default list URL (place to find available versions)
if not hasattr(self, 'list_url'):
@@ -382,6 +404,13 @@ def ensure_has_dict(attr_name):
if not hasattr(self, 'list_depth'):
self.list_depth = 1
# Set up some internal variables for timing.
self._fetch_time = 0.0
self._total_time = 0.0
if self.is_extension:
spack.db.get(self.extendee_spec)._check_extendable()
@property
def version(self):
@@ -410,7 +439,7 @@ def nearest_url(self, version):
*higher* URL, and if that isn't there raises an error.
"""
version_urls = self.version_urls()
url = self.url
url = getattr(self.__class__, 'url', None)
for v in version_urls:
if v > version and url:
@@ -420,21 +449,15 @@ def nearest_url(self, version):
return url
def has_url(self):
"""Returns whether there is a URL available for this package.
If there isn't, it's probably fetched some other way (version
control, etc.)"""
return self.url or self.version_urls()
# TODO: move this out of here and into some URL extrapolation module?
def url_for_version(self, version):
"""Returns a URL that you can download a new version of this package from."""
if not isinstance(version, Version):
version = Version(version)
if not self.has_url():
raise NoURLError(self.__class__)
cls = self.__class__
if not (hasattr(cls, 'url') or self.version_urls()):
raise NoURLError(cls)
# If we have a specific URL for this version, don't extrapolate.
version_urls = self.version_urls()
@@ -452,12 +475,18 @@ def stage(self):
raise ValueError("Can only get a stage for a concrete package.")
if self._stage is None:
self._stage = Stage(self.fetcher,
mirror_path=self.mirror_path(),
name=self.spec.short_spec)
mp = spack.mirror.mirror_archive_path(self.spec)
self._stage = Stage(
self.fetcher, mirror_path=mp, name=self.spec.short_spec)
return self._stage
@stage.setter
def stage(self, stage):
"""Allow a stage object to be set to override the default."""
self._stage = stage
@property
def fetcher(self):
if not self.spec.versions.concrete:
@@ -474,11 +503,45 @@ def fetcher(self, f):
self._fetcher = f
def mirror_path(self):
"""Get path to this package's archive in a mirror."""
filename = "%s-%s." % (self.name, self.version)
filename += extension(self.url) if self.has_url() else "tar.gz"
return "%s/%s" % (self.name, filename)
@property
def extendee_spec(self):
"""Spec of the extendee of this package, or None if it is not an extension."""
if not self.extendees:
return None
name = next(iter(self.extendees))
if not name in self.spec:
spec, kwargs = self.extendees[name]
return spec
# Need to do this to get the concrete version of the spec
return self.spec[name]
@property
def extendee_args(self):
"""Spec of the extendee of this package, or None if it is not an extension."""
if not self.extendees:
return None
name = next(iter(self.extendees))
return self.extendees[name][1]
@property
def is_extension(self):
return len(self.extendees) > 0
def extends(self, spec):
return (spec.name in self.extendees and
spec.satisfies(self.extendees[spec.name][0]))
@property
def activated(self):
if not self.is_extension:
raise ValueError("is_extension called on package that is not an extension.")
exts = spack.install_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
def preorder_traversal(self, visited=None, **kwargs):
@@ -611,14 +674,26 @@ def do_fetch(self):
if not self.spec.concrete:
raise ValueError("Can only fetch concrete packages.")
start_time = time.time()
if spack.do_checksum and not self.version in self.versions:
raise FetchError(
"Cannot fetch %s safely; there is no checksum on file for version %s."
% (self.name, self.version),
"Add a checksum to the package file, or use --no-checksum to "
"skip this check.")
tty.warn("There is no checksum on file to fetch %s safely."
% self.spec.format('$_$@'))
# Ask the user whether to skip the checksum if we're
# interactive, but just fail if non-interactive.
checksum_msg = "Add a checksum or use --no-checksum to skip this check."
ignore_checksum = False
if sys.stdout.isatty():
ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False)
if ignore_checksum:
tty.msg("Fetching with no checksum.", checksum_msg)
if not ignore_checksum:
raise FetchError(
"Will not fetch %s." % self.spec.format('$_$@'), checksum_msg)
self.stage.fetch()
self._fetch_time = time.time() - start_time
if spack.do_checksum and self.version in self.versions:
self.stage.check()
@@ -650,8 +725,11 @@ def do_patch(self):
# Kick off the stage first.
self.do_stage()
# Package can add its own patch function.
has_patch_fun = hasattr(self, 'patch') and callable(self.patch)
# If there are no patches, note it.
if not self.patches:
if not self.patches and not has_patch_fun:
tty.msg("No patches needed for %s." % self.name)
return
@@ -674,7 +752,7 @@ def do_patch(self):
tty.msg("Already patched %s" % self.name)
return
# Apply all the patches for specs that match this on
# Apply all the patches for specs that match this one
for spec, patch_list in self.patches.items():
if self.spec.satisfies(spec):
for patch in patch_list:
@@ -692,15 +770,33 @@ def do_patch(self):
os.remove(bad_file)
touch(good_file)
if has_patch_fun:
self.patch()
tty.msg("Patched %s" % self.name)
def do_fake_install(self):
"""Make a fake install directory contaiing a 'fake' file in bin."""
mkdirp(self.prefix.bin)
touch(join_path(self.prefix.bin, 'fake'))
mkdirp(self.prefix.lib)
mkdirp(self.prefix.man1)
def do_install(self, **kwargs):
"""This class should call this version of the install method.
Package implementations should override install().
"""
# whether to keep the prefix on failure. Default is to destroy it.
keep_prefix = kwargs.get('keep_prefix', False)
keep_stage = kwargs.get('keep_stage', False)
ignore_deps = kwargs.get('ignore_deps', False)
keep_prefix = kwargs.get('keep_prefix', False)
keep_stage = kwargs.get('keep_stage', False)
ignore_deps = kwargs.get('ignore_deps', False)
fake_install = kwargs.get('fake', False)
skip_patch = kwargs.get('skip_patch', False)
# Override builtin number of make jobs.
self.make_jobs = kwargs.get('make_jobs', None)
if not self.spec.concrete:
raise ValueError("Can only install concrete packages.")
@@ -709,85 +805,99 @@ def do_install(self, **kwargs):
tty.msg("%s is already installed in %s." % (self.name, self.prefix))
return
tty.msg("Installing %s" % self.name)
if not ignore_deps:
self.do_install_dependencies()
self.do_install_dependencies(**kwargs)
self.do_patch()
start_time = time.time()
if not fake_install:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
# Fork a child process to do the build. This allows each
# package authors to have full control over their environment,
# etc. without offecting other builds that might be executed
# in the same spack call.
try:
pid = os.fork()
except OSError, e:
raise InstallError("Unable to fork build process: %s" % e)
# create the install directory. The install layout
# handles this in case so that it can use whatever
# package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec)
if pid == 0:
def cleanup():
if not keep_prefix:
# If anything goes wrong, remove the install prefix
self.remove_prefix()
else:
tty.warn("Keeping install prefix in place despite error.",
"Spack will think this package is installed." +
"Manually remove this directory to fix:",
self.prefix)
def real_work():
try:
tty.msg("Building %s." % self.name)
# create the install directory. The install layout
# handles this in case so that it can use whatever
# package naming scheme it likes.
spack.install_layout.make_path_for_spec(self.spec)
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
# Set up process's build environment before running install.
build_env.set_compiler_environment_variables(self)
build_env.set_build_environment_variables(self)
build_env.set_module_variables_for_package(self)
# Subclasses implement install() to do the real work.
self.install(self.spec, self.prefix)
self.stage.chdir_to_source()
if fake_install:
self.do_fake_install()
else:
# Subclasses implement install() to do the real work.
self.install(self.spec, self.prefix)
# Ensure that something was actually installed.
if not os.listdir(self.prefix):
raise InstallError(
"Install failed for %s. Nothing was installed!"
% self.name)
self._sanity_check_install()
# On successful install, remove the stage.
if not keep_stage:
self.stage.destroy()
tty.msg("Successfully installed %s" % self.name)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg("Successfully installed %s." % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
print_pkg(self.prefix)
# Use os._exit here to avoid raising a SystemExit exception,
# which interferes with unit tests.
os._exit(0)
except ProcessError, e:
# One of the processes returned an error code.
# Suppress detailed stack trace here unless in debug mode
if spack.debug:
raise e
else:
tty.error(e)
# Still need to clean up b/c there was an error.
cleanup()
except:
if not keep_prefix:
# If anything goes wrong, remove the install prefix
self.remove_prefix()
else:
tty.warn("Keeping install prefix in place despite error.",
"Spack will think this package is installed." +
"Manually remove this directory to fix:",
self.prefix)
# Child doesn't raise or return to main spack code.
# Just runs default exception handler and exits.
sys.excepthook(*sys.exc_info())
os._exit(1)
# Parent process just waits for the child to complete. If the
# child exited badly, assume it already printed an appropriate
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
sys.exit(1)
# other exceptions just clean up and raise.
cleanup()
raise
build_env.fork(self, real_work)
# Once everything else is done, run post install hooks
spack.hooks.post_install(self)
def do_install_dependencies(self):
def _sanity_check_install(self):
installed = set(os.listdir(self.prefix))
installed.difference_update(spack.install_layout.hidden_file_paths)
if not installed:
raise InstallError(
"Install failed for %s. Nothing was installed!" % self.name)
def do_install_dependencies(self, **kwargs):
# Pass along paths of dependencies here
for dep in self.spec.dependencies.values():
dep.package.do_install()
dep.package.do_install(**kwargs)
@property
@@ -799,6 +909,32 @@ def module(self):
fromlist=[self.__class__.__name__])
def setup_dependent_environment(self, module, spec, dependent_spec):
"""Called before the install() method of dependents.
Default implementation does nothing, but this can be
overridden by an extendable package to set up the install
environment for its extensions. This is useful if there are
some common steps to installing all extensions for a
certain package.
Some examples:
1. Installing python modules generally requires PYTHONPATH to
point to the lib/pythonX.Y/site-packages directory in the
module's install prefix. This could set that variable.
2. Extensions often need to invoke the 'python' interpreter
from the Python installation being extended. This routine can
put a 'python' Execuable object in the module scope for the
extension package to simplify extension installs.
3. A lot of Qt extensions need QTDIR set. This can be used to do that.
"""
pass
def install(self, spec, prefix):
"""Package implementations override this with their own build configuration."""
raise InstallError("Package %s provides no install method!" % self.name)
@@ -818,6 +954,10 @@ def do_uninstall(self, **kwargs):
"The following installed packages depend on it: %s" %
' '.join(formatted_deps))
# Pre-uninstall hook runs first.
spack.hooks.pre_uninstall(self)
# Uninstalling in Spack only requires removing the prefix.
self.remove_prefix()
tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
@@ -825,95 +965,227 @@ def do_uninstall(self, **kwargs):
spack.hooks.post_uninstall(self)
def do_clean(self):
if self.stage.expanded_archive_path:
self.stage.chdir_to_source()
self.clean()
def _check_extendable(self):
if not self.extendable:
raise ValueError("Package %s is not extendable!" % self.name)
def clean(self):
"""By default just runs make clean. Override if this isn't good."""
# TODO: should we really call make clean, ro just blow away the directory?
make = build_env.MakeExecutable('make', self.parallel)
make('clean')
def _sanity_check_extension(self):
if not self.is_extension:
raise ActivationError("This package is not an extension.")
extendee_package = self.extendee_spec.package
extendee_package._check_extendable()
if not extendee_package.installed:
raise ActivationError("Can only (de)activate extensions for installed packages.")
if not self.installed:
raise ActivationError("Extensions must first be installed.")
if not self.extendee_spec.name in self.extendees:
raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
def do_clean_work(self):
"""By default just blows away the stage directory and re-stages."""
def do_activate(self, **kwargs):
"""Called on an etension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
"""
self._sanity_check_extension()
force = kwargs.get('force', False)
# TODO: get rid of this normalize - DAG handling.
self.spec.normalize()
spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec)
if not force:
for spec in self.spec.traverse(root=False):
if spec.package.extends(self.extendee_spec):
# TODO: fix this normalize() requirement -- revisit DAG handling.
spec.package.spec.normalize()
if not spec.package.activated:
spec.package.do_activate(**kwargs)
self.extendee_spec.package.activate(self, **self.extendee_args)
spack.install_layout.add_extension(self.extendee_spec, self.spec)
tty.msg("Activated extension %s for %s."
% (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
def activate(self, extension, **kwargs):
"""Symlinks all files from the extension into extendee's install dir.
Package authors can override this method to support other
extension mechanisms. Spack internals (commands, hooks, etc.)
should call do_activate() method so that proper checks are
always executed.
"""
def ignore(filename):
return (filename in spack.install_layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
conflict = tree.find_conflict(self.prefix, ignore=ignore)
if conflict:
raise ExtensionConflictError(conflict)
tree.merge(self.prefix, ignore=ignore)
def do_deactivate(self, **kwargs):
    """Called on the extension to invoke extendee's deactivate() method.

    Keyword Arguments:
        force (bool): skip the is-activated and dependent-extension
            checks and unlink unconditionally.  Useful for cleaning up
            after a corrupted activation.
    """
    self._sanity_check_extension()
    force = kwargs.get('force', False)

    # Allow a force deactivate to happen.  This can unlink
    # spurious files if something was corrupted.
    if not force:
        spack.install_layout.check_activated(self.extendee_spec, self.spec)

        # Refuse to deactivate while another activated extension still
        # depends on this one.
        activated = spack.install_layout.extension_map(self.extendee_spec)
        for name, aspec in activated.items():
            if aspec != self.spec and self.spec in aspec:
                raise ActivationError(
                    # BUGFIX: was "beacuse" -- typo in user-facing message.
                    "Cannot deactivate %s because %s is activated and depends on it."
                    % (self.spec.short_spec, aspec.short_spec))

    self.extendee_spec.package.deactivate(self, **self.extendee_args)

    # redundant activation check -- makes SURE the spec is not
    # still activated even if something was wrong above.
    if self.activated:
        spack.install_layout.remove_extension(self.extendee_spec, self.spec)

    tty.msg("Deactivated extension %s for %s."
            % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
def deactivate(self, extension, **kwargs):
    """Unlink the extension's files from this package's install dir.

    Package authors can override this method to support other
    extension mechanisms.  Spack internals (commands, hooks, etc.)
    should call do_deactivate() instead, so that proper checks are
    always executed.
    """
    user_ignore = kwargs.get('ignore', lambda f: False)

    def ignore(filename):
        # Same filter as activate(): layout hidden files + caller's filter.
        return (filename in spack.install_layout.hidden_file_paths
                or user_ignore(filename))

    LinkTree(extension.prefix).unmerge(self.prefix, ignore=ignore)
def do_restage(self):
    """Reverts expanded/checked out source to a pristine state."""
    # Delegates entirely to the stage object; some stage types
    # (e.g. DIYStage below) refuse to restage.
    self.stage.restage()
def do_clean_dist(self):
    """Removes the stage directory where this package was built."""
    # NOTE(review): no body is visible in this excerpt; the docstring
    # implies the stage should be destroyed here -- confirm against
    # the full source.
def do_clean(self):
    """Removes the package's build stage and source tarball."""
    # Nothing to do if the stage was never created.
    if not os.path.exists(self.stage.path):
        return
    self.stage.destroy()
def fetch_available_versions(self):
if not hasattr(self, 'url'):
raise VersionFetchError(self.__class__)
def format_doc(self, **kwargs):
"""Wrap doc string at 72 characters and format nicely"""
indent = kwargs.get('indent', 0)
# If not, then try to fetch using list_url
if not self._available_versions:
try:
self._available_versions = find_versions_of_archive(
self.url,
list_url=self.list_url,
list_depth=self.list_depth)
if not self.__doc__:
return ""
if not self._available_versions:
tty.warn("Found no versions for %s" % self.name,
"Check the list_url and list_depth attribute on the "
+ self.name + " package.",
"Use them to tell Spack where to look for versions.")
except spack.error.NoNetworkConnectionError, e:
tty.die("Package.fetch_available_versions couldn't connect to:",
e.url, e.message)
return self._available_versions
doc = re.sub(r'\s+', ' ', self.__doc__)
lines = textwrap.wrap(doc, 72)
results = StringIO()
for line in lines:
results.write((" " * indent) + line + "\n")
return results.getvalue()
@property
def available_versions(self):
    """All versions known for this package, fetched if necessary."""
    # A package that explicitly declares versions wins outright.
    if self.versions is not None:
        return VersionList(self.versions.keys())

    fetched = self.fetch_available_versions()
    if fetched:
        return fetched
    # Fall back to just the currently selected version.
    return ver([self.version])
def all_urls(self):
    """Every URL this package knows about: the default url (if any)
    plus any per-version url overrides, in declaration order."""
    found = []
    if self.url:
        found.append(self.url)
    for vargs in self.versions.values():
        if 'url' in vargs:
            found.append(vargs['url'])
    return found
def find_versions_of_archive(archive_url, **kwargs):
def fetch_remote_versions(self):
"""Try to find remote versions of this package using the
list_url and any other URLs described in the package file."""
if not self.all_urls:
raise VersionFetchError(self.__class__)
try:
return find_versions_of_archive(
*self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
except spack.error.NoNetworkConnectionError, e:
tty.die("Package.fetch_versions couldn't connect to:",
e.url, e.message)
@property
def rpath(self):
    """Get the rpath this package links with, as a list of paths."""
    # Own lib dirs always come first, whether or not they exist yet.
    paths = [self.prefix.lib, self.prefix.lib64]
    # Then every dependency's lib dirs, but only ones that exist:
    # all lib dirs first, then all lib64 dirs (order preserved).
    for dep in self.spec.traverse(root=False):
        if os.path.isdir(dep.prefix.lib):
            paths.append(dep.prefix.lib)
    for dep in self.spec.traverse(root=False):
        if os.path.isdir(dep.prefix.lib64):
            paths.append(dep.prefix.lib64)
    return paths
@property
def rpath_args(self):
    """Get the rpath args as a string, with -Wl,-rpath= for each element."""
    flags = ["-Wl,-rpath=%s" % p for p in self.rpath]
    return " ".join(flags)
def find_versions_of_archive(*archive_urls, **kwargs):
    """Scrape web pages for versions of the given archive URLs.

    Keyword Arguments:
        list_url:   extra page to scrape, in addition to pages derived
                    from the archive URLs themselves.
        list_depth: how many levels of links to follow (default 1).

    Returns a dict mapping Version -> full download URL.

    NOTE(review): this span contained the old single-url implementation
    interleaved with the new multi-url one; the new version is kept.
    """
    list_url = kwargs.get('list_url', None)
    list_depth = kwargs.get('list_depth', 1)

    # Generate a list of list_urls based on archive urls and any
    # explicitly listed list_url in the package
    list_urls = set()
    if list_url:
        list_urls.add(list_url)
    for aurl in archive_urls:
        list_urls.add(url.find_list_url(aurl))

    # Grab some web pages to scrape.
    page_map = {}
    for lurl in list_urls:
        page_map.update(get_pages(lurl, depth=list_depth))

    # Scrape them for archive URLs
    regexes = []
    for aurl in archive_urls:
        # This creates a regex from the URL with a capture group for
        # the version part of the URL.  The capture group is converted
        # to a generic wildcard, so we can use this to extract things
        # on a page that look like archive URLs.
        url_regex = url.wildcard_version(aurl)

        # We'll be a bit more liberal and just look for the archive
        # part, not the full path.
        regexes.append(os.path.basename(url_regex))

    # Build a dict of Version -> URL from all the matches we find.
    versions = {}
    for page_url, content in page_map.iteritems():
        for regex in regexes:
            versions.update(
                (Version(m.group(1)), urljoin(page_url, m.group(0)))
                for m in re.finditer(regex, content))

    return versions
@@ -930,15 +1202,23 @@ def validate_package_url(url_string):
def print_pkg(message):
"""Outputs a message with a package icon."""
mac_ver = py_platform.mac_ver()[0]
if mac_ver and Version(mac_ver) >= Version('10.7'):
print u"\U0001F4E6" + tty.indent,
else:
from llnl.util.tty.color import cwrite
cwrite('@*g{[+]} ')
from llnl.util.tty.color import cwrite
cwrite('@*g{[+]} ')
print message
def _hms(seconds):
"""Convert time in seconds to hours, minutes, seconds."""
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
parts = []
if h: parts.append("%dh" % h)
if m: parts.append("%dm" % m)
if s: parts.append("%.2fs" % s)
return ' '.join(parts)
class FetchError(spack.error.SpackError):
"""Raised when something goes wrong during fetch."""
def __init__(self, message, long_msg=None):
class VersionFetchError(PackageError):
    """Raised when a version URL cannot automatically be determined."""
    def __init__(self, cls):
        # This span contained both the old and new message strings from
        # a diff; the new (multi-URL) wording is kept.
        super(VersionFetchError, self).__init__(
            "Cannot fetch versions for package %s " % cls.__name__ +
            "because it does not define any URLs to fetch.")
class NoURLError(PackageError):
    """Raised when a URL is needed for a package that defines none."""
    # NOTE(review): the original docstring line was replaced by a diff
    # hunk header in this excerpt; wording above is reconstructed.
    def __init__(self, cls):
        super(NoURLError, self).__init__(
            "Package %s has no version with a URL." % cls.__name__)
class ExtensionError(PackageError): pass  # Base class for extension-related errors.
class ExtensionConflictError(ExtensionError):
    """Raised when activating an extension would clobber an existing file."""
    def __init__(self, path):
        # `path` is the conflicting file found by LinkTree.find_conflict.
        super(ExtensionConflictError, self).__init__(
            "Extension blocked by file: %s" % path)
class ActivationError(ExtensionError):
    """Raised when an extension (de)activation precondition is violated."""
    def __init__(self, msg, long_msg=None):
        super(ActivationError, self).__init__(msg, long_msg)

View File

@@ -30,7 +30,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
from llnl.util.lang import memoized
from llnl.util.lang import *
import spack.error
import spack.spec
@@ -74,8 +74,11 @@ def get(self, spec, **kwargs):
if not spec in self.instances:
package_class = self.get_class_for_package_name(spec.name)
try:
self.instances[spec.copy()] = package_class(spec)
copy = spec.copy()
self.instances[copy] = package_class(copy)
except Exception, e:
if spack.debug:
sys.excepthook(*sys.exc_info())
raise FailedConstructorError(spec.name, e)
return self.instances[spec]
@@ -109,6 +112,24 @@ def providers_for(self, vpkg_spec):
return providers
@_autospec
def extensions_for(self, extendee_spec):
    """Return all known packages that extend the given spec."""
    matches = []
    for pkg in self.all_packages():
        if pkg.extends(extendee_spec):
            matches.append(pkg)
    return matches
@_autospec
def installed_extensions_for(self, extendee_spec):
for s in self.installed_package_specs():
try:
if s.package.extends(extendee_spec):
yield s.package
except UnknownPackageError, e:
# Skip packages we know nothing about
continue
# TODO: add some conditional way to do this instead of
# catching exceptions.
def dirname_for_package_name(self, pkg_name):
"""Get the directory name for a particular package. This is the
directory that contains its package.py file."""
@@ -171,6 +192,7 @@ def all_packages(self):
yield self.get(name)
@memoized
def exists(self, pkg_name):
    """Whether a package with the supplied name exists."""
    pkg_file = self.filename_for_package_name(pkg_name)
    return os.path.exists(pkg_file)
@@ -213,38 +235,6 @@ def get_class_for_package_name(self, pkg_name):
return cls
def graph_dependencies(self, out=sys.stdout):
    """Print out a graph of all the dependencies between package.
    Graph is in dot format."""
    # Graphviz preamble: bottom label, left-to-right layout, wide ranks.
    out.write('digraph G {\n')
    out.write(' label = "Spack Dependencies"\n')
    out.write(' labelloc = "b"\n')
    out.write(' rankdir = "LR"\n')
    out.write(' ranksep = "5"\n')
    out.write('\n')

    def quote(string):
        # Wrap a name in double quotes for use as a dot identifier.
        return '"%s"' % string

    # First pass: emit one node per package, collecting edges for later
    # so all node declarations precede all edges in the output.
    deps = []
    for pkg in self.all_packages():
        out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))

        # Add edges for each depends_on in the package.
        for dep_name, dep in pkg.dependencies.iteritems():
            deps.append((pkg.name, dep_name))

        # If the package provides something, add an edge for that.
        for provider in set(p.name for p in pkg.provided):
            deps.append((provider, pkg.name))

    out.write('\n')

    # Second pass: emit the collected edges, then close the graph.
    for pair in deps:
        out.write(' "%s" -> "%s"\n' % pair)
    out.write('}\n')
class UnknownPackageError(spack.error.SpackError):
"""Raised when we encounter a package spack doesn't have."""
def __init__(self, name):

View File

@@ -68,7 +68,7 @@ class Mpileaks(Package):
spack install mpileaks ^mvapich
spack install mpileaks ^mpich
"""
__all__ = [ 'depends_on', 'provides', 'patch', 'version' ]
__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ]
import re
import inspect
@@ -107,8 +107,9 @@ def depends_on(*specs):
"""Adds a dependencies local variable in the locals of
the calling class, based on args. """
pkg = get_calling_package_name()
clocals = caller_locals()
dependencies = clocals.setdefault('dependencies', {})
dependencies = caller_locals().setdefault('dependencies', {})
for string in specs:
for spec in spack.spec.parse(string):
if pkg == spec.name:
@@ -116,6 +117,34 @@ def depends_on(*specs):
dependencies[spec.name] = spec
def extends(spec, **kwargs):
    """Same as depends_on, but dependency is symlinked into parent prefix.

    This is for Python and other language modules where the module
    needs to be installed into the prefix of the Python installation.
    Spack handles this by installing modules into their own prefix,
    but allowing ONE module version to be symlinked into a parent
    Python install at a time.

    Keyword arguments can be passed to extends() so that extension
    packages can pass parameters to the extendee's extension
    mechanism.
    """
    caller_vars = caller_locals()

    # A package may extend at most one other package.
    extendees = caller_vars.setdefault('extendees', {})
    if extendees:
        raise RelationError("Packages can extend at most one other package.")

    pkg_name = get_calling_package_name()
    extendee = Spec(spec)
    if pkg_name == extendee.name:
        raise CircularReferenceError('extends', pkg_name)

    # An extendee is also an ordinary dependency.
    dependencies = caller_vars.setdefault('dependencies', {})
    dependencies[extendee.name] = extendee
    extendees[extendee.name] = (extendee, kwargs)
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack

View File

@@ -552,6 +552,13 @@ def short_spec(self):
return self.format('$_$@$%@$+$=$#')
@property
def cshort_spec(self):
    """Colorized version of the spec with the dependencies hashed
    instead of completely enumerated."""
    fmt = '$_$@$%@$+$=$#'
    return self.format(fmt, color=True)
@property
def prefix(self):
return Prefix(spack.install_layout.path_for_spec(self))
@@ -712,6 +719,15 @@ def flat_dependencies(self, **kwargs):
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
def index(self):
    """Return a DependencyMap that maps the name of every spec in this
    DAG (including the root) to that spec."""
    dmap = DependencyMap()
    for node in self.traverse():
        dmap[node.name] = node
    return dmap
def flatten(self):
"""Pull all dependencies up to the root (this spec).
Merge constraints for dependencies with the same name, and if they
@@ -858,7 +874,7 @@ def normalize(self, **kwargs):
def normalized(self):
"""Return a normalized copy of this spec without modifying this spec."""
clone = self.copy()
clone.normalized()
clone.normalize()
return clone
@@ -1096,8 +1112,9 @@ def __getitem__(self, name):
def __contains__(self, spec):
"""True if this spec has any dependency that satisfies the supplied
spec."""
"""True if this spec satisfis the provided spec, or if any dependency
does. If the spec has no name, then we parse this one first.
"""
spec = self._autospec(spec)
for s in self.traverse():
if s.satisfies(spec):
@@ -1288,12 +1305,13 @@ def __str__(self):
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation."""
color = kwargs.get('color', False)
depth = kwargs.get('depth', False)
showid = kwargs.get('ids', False)
cover = kwargs.get('cover', 'nodes')
indent = kwargs.get('indent', 0)
format = kwargs.get('format', '$_$@$%@$+$=')
color = kwargs.pop('color', False)
depth = kwargs.pop('depth', False)
showid = kwargs.pop('ids', False)
cover = kwargs.pop('cover', 'nodes')
indent = kwargs.pop('indent', 0)
fmt = kwargs.pop('format', '$_$@$%@$+$=')
check_kwargs(kwargs, self.tree)
out = ""
cur_id = 0
@@ -1310,7 +1328,7 @@ def tree(self, **kwargs):
out += (" " * d)
if d > 0:
out += "^"
out += node.format(format, color=color) + "\n"
out += node.format(fmt, color=color) + "\n"
return out

View File

@@ -257,8 +257,11 @@ def fetch(self):
fetcher.fetch()
break
except spack.error.SpackError, e:
tty.msg("Fetching %s failed." % fetcher)
tty.msg("Fetching from %s failed." % fetcher)
tty.debug(e)
continue
else:
tty.die("All fetchers failed for %s" % self.name)
def check(self):
@@ -306,6 +309,39 @@ def destroy(self):
os.chdir(os.path.dirname(self.path))
class DIYStage(object):
    """Stage-compatible wrapper around an existing source directory.

    Lets any directory on disk act as a Spack stage: fetching,
    checksumming and archive expansion are no-ops, and destroying the
    stage leaves the directory untouched.
    """

    def __init__(self, path):
        # No archive is involved; the directory itself is the stage.
        self.archive_file = None
        self.path = path
        self.source_path = path

    def chdir(self):
        """Change into the stage directory; die if it does not exist."""
        if not os.path.isdir(self.path):
            tty.die("Setup failed: no such directory: " + self.path)
        else:
            os.chdir(self.path)

    def chdir_to_source(self):
        # Source dir and stage dir are the same for DIY.
        self.chdir()

    def fetch(self):
        tty.msg("No need to fetch for DIY.")

    def check(self):
        tty.msg("No checksum needed for DIY.")

    def expand_archive(self):
        tty.msg("Using source directory: %s" % self.source_path)

    def restage(self):
        # A DIY directory has no pristine state to return to.
        tty.die("Cannot restage DIY stage.")

    def destroy(self):
        # No need to destroy DIY stage.
        pass
def _get_mirrors():
"""Get mirrors from spack configuration."""
config = spack.config.get_config()

Some files were not shown because too many files have changed in this diff Show More