Compare commits

..

20 Commits

Author SHA1 Message Date
Matthew Krafczyk
51cbe48cb5 Add first working version of enzo 2017-02-09 16:21:02 -06:00
Matthew Krafczyk
daf50693bf Update openssl package 2017-02-09 16:20:49 -06:00
Matthew Krafczyk
c2375d0ab6 Add completion file for bash 2017-02-08 17:04:39 -06:00
Matthew Krafczyk
2f6027486d Add version of environment modules which does not depend on X windows 2017-02-08 16:28:50 -06:00
Matthew Krafczyk
3c4959f666 No longer automatically install bootstrap packages.
Source spack-env.sh to execute spack bootstrap. Then source
spack-env.sh again to get module functionality.
2017-02-08 16:25:42 -06:00
Matthew Krafczyk
c0ee44bf27 Moved bootstrap to clone, new implementation of bootstrap
spack clone now contains the functionality of the old spack bootstrap.
spack bootstrap now installs needed packages for full functionality. A
list 'needed_specs' contains a list of specs for these packages and can
be expanded when more packages are recognized to be needed.
2017-02-08 16:25:39 -06:00
Matthew Krafczyk
4eb7b241c6 Add code to automatically build and enable module
If the module command doesn't exist in the shell, automatically build a
version which doesn't depend on X windows and create the module
function for the appropriate shell.
2017-02-08 16:24:54 -06:00
Matthew Krafczyk
81424dce5c Merge branch 'workaround/yt-py-pillow' into yt-dev-working-branch 2017-01-17 15:32:30 -06:00
Matthew Krafczyk
65be1a93f1 Merge branch 'update/yt-rockstar' into yt-dev-working-branch 2017-01-17 15:31:49 -06:00
Matthew Krafczyk
e2d4dadf33 Merge branch 'update/dev-yt' into yt-dev-working-branch 2017-01-17 15:31:14 -06:00
Matthew Krafczyk
67bfb782ef Merge branch 'new-package/rockstar' into yt-dev-working-branch 2017-01-17 15:30:59 -06:00
Matthew Krafczyk
40cbe69897 Fix installation of rockstar. 2017-01-17 15:28:57 -06:00
Matthew Krafczyk
fa39273e1f Correct how rockstar prefix is found and written to file 2017-01-16 12:04:52 -05:00
Matthew Krafczyk
ef32f3880f Add rockstar support to py-yt package 2017-01-16 12:04:51 -05:00
Matthew Krafczyk
31f7a01a9d Correct rockstar package installation process
Needed to change the way the necessary prefixes are found as well as
how the completed package is copied.

There should probably be a way to do the copying in a more 'spack' way.
2017-01-16 11:29:37 -05:00
Matthew Krafczyk
d74309a7b7 Correct how rockstar prefix is found and written to file 2017-01-16 11:29:37 -05:00
Matthew Krafczyk
190bd6ca65 Create rockstar package
rockstar is a halo finding algorithm
2017-01-16 11:29:37 -05:00
Matthew Krafczyk
209eb83d0d Add a +devmode variant to yt
When specifying +devmode when installing yt, a link to the source
directory will be used instead of creating an egg. This eases the
development process as changes you make in the yt source directory will
be instantly available when building the package as a diy.
2017-01-16 11:26:26 -05:00
Matthew Krafczyk
f10c3ca55e Add py-pillow explicit dependency 2017-01-16 11:25:45 -05:00
Matthew Krafczyk
dcab47cdc0 Add -d and -j options to diy
Also add -j to the common arguments
2017-01-16 11:16:36 -05:00
3313 changed files with 32849 additions and 172696 deletions

View File

@@ -1,36 +0,0 @@
coverage:
precision: 2
round: nearest
range: 60...90
status:
project:
default: true
llnl:
threshold: 0.5
paths:
- lib/spack/llnl
commands:
threshold: 0.5
paths:
- lib/spack/spack/cmd
build_systems:
threshold: 0.5
paths:
- lib/spack/spack/build_systems
modules:
threshold: 0.5
paths:
- lib/spack/spack/modules
core:
threshold: 0.5
paths:
- "!lib/spack/llnl"
- "!lib/spack/spack/cmd"
ignore:
- lib/spack/spack/test/.*
- lib/spack/env/.*
- lib/spack/docs/.*
- lib/spack/external/.*
comment: off

17
.flake8
View File

@@ -1,8 +1,8 @@
# -*- conf -*- # -*- conf -*-
# flake8 settings for Spack core files. # flake8 settings for Spack.
# #
# These exceptions ar for Spack core files. We're slightly more lenient # Below we describe which flake8 checks Spack ignores and what the
# with packages. See .flake8_packages for that. # rationale is.
# #
# Let people line things up nicely: # Let people line things up nicely:
# - E129: visually indented line with same indent as next logical line # - E129: visually indented line with same indent as next logical line
@@ -11,11 +11,16 @@
# - E272: multiple spaces before keyword # - E272: multiple spaces before keyword
# #
# Let people use terse Python features: # Let people use terse Python features:
# - E731: lambda expressions # - E731 : lambda expressions
# #
# These are required to get the package.py files to test clean: # Spack allows wildcard imports:
# - F403: disable wildcard import
#
# These are required to get the package.py files to test clean.
# - F405: `name` may be undefined, or undefined from star imports: `module`
# - F821: undefined name `name` (needed for cmake, configure, etc.)
# - F999: syntax error in doctest # - F999: syntax error in doctest
# #
[flake8] [flake8]
ignore = E129,E221,E241,E272,E731,F999 ignore = E129,E221,E241,E272,E731,F403,F405,F821,F999
max-line-length = 79 max-line-length = 79

View File

@@ -1,22 +0,0 @@
# -*- conf -*-
# flake8 settings for Spack package files.
#
# This should include all the same exceptions that we use for core files.
#
# In Spack packages, we also allow the single `from spack import *`
# wildcard import and dependencies can set globals for their
# dependents. So we add exceptions for checks related to undefined names.
#
# Note that we also add *per-line* exemptions for certain patters in the
# `spack flake8` command. This is where F403 for `from spack import *`
# is added (beause we *only* allow that wildcard).
#
# See .flake8 for regular exceptions.
#
# Redefinition exceptions:
# - F405: `name` may be undefined, or undefined from star imports: `module`
# - F821: undefined name `name` (needed for cmake, configure, etc.)
#
[flake8]
ignore = E129,E221,E241,E272,E731,F999,F405,F821
max-line-length = 79

7
.gitignore vendored
View File

@@ -8,9 +8,8 @@
*~ *~
.DS_Store .DS_Store
.idea .idea
# Ignore everything in /etc/spack except /etc/spack/defaults /etc/spack/licenses
/etc/spack/* /etc/spack/*.yaml
!/etc/spack/defaults
/etc/spackconfig /etc/spackconfig
/share/spack/dotkit /share/spack/dotkit
/share/spack/modules /share/spack/modules
@@ -23,5 +22,3 @@
.#* .#*
/.cache /.cache
/bin/spackc /bin/spackc
*.in.log
*.out.log

View File

@@ -3,7 +3,6 @@ Adam Moody <moody20@llnl.gov> Adam T. Moody
Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com> Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com>
Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Adolfo Gimenez <alfredo.gimenez@gmail.com> Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Adolfo Gimenez <alfredo.gimenez@gmail.com>
Andrew Williams <williamsa89@cardiff.ac.uk> Andrew Williams <andrew@alshain.org.uk> Andrew Williams <williamsa89@cardiff.ac.uk> Andrew Williams <andrew@alshain.org.uk>
Axel Huebl <a.huebl@hzdr.de> Axel Huebl <axel.huebl@plasma.ninja>
Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@gmail.com> Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@gmail.com>
Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@users.noreply.github.com> Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@users.noreply.github.com>
Benedikt Hegner <hegner@cern.ch> Benedikt Hegner <benedikt.hegner@cern.ch> Benedikt Hegner <hegner@cern.ch> Benedikt Hegner <benedikt.hegner@cern.ch>
@@ -49,7 +48,6 @@ Robert D. French <frenchrd@ornl.gov> Robert D. French
Robert D. French <frenchrd@ornl.gov> Robert.French <frenchrd@ornl.gov> Robert D. French <frenchrd@ornl.gov> Robert.French <frenchrd@ornl.gov>
Robert D. French <frenchrd@ornl.gov> robertdfrench <frenchrd@ornl.gov> Robert D. French <frenchrd@ornl.gov> robertdfrench <frenchrd@ornl.gov>
Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov> Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov>
Sergey Kosukhin <sergey.kosukhin@mpimet.mpg.de> Sergey Kosukhin <skosukhin@gmail.com>
Stephen Herbein <sherbein@udel.edu> Stephen Herbein <stephen272@gmail.com> Stephen Herbein <sherbein@udel.edu> Stephen Herbein <stephen272@gmail.com>
Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov> Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov>
Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov> Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov>

View File

@@ -1,107 +1,40 @@
#============================================================================= #=============================================================================
# Project settings # Project settings
#============================================================================= #=============================================================================
language: python
# Only build master and develop on push; do not build every branch. # Only build master and develop on push; do not build every branch.
branches: branches:
only: only:
- master - master
- develop - develop
- /^releases\/.*$/
#============================================================================= #=============================================================================
# Build matrix # Build matrix
#============================================================================= #=============================================================================
jobs: python:
fast_finish: true - 2.6
- 2.7
env:
- TEST_SUITE=unit
- TEST_SUITE=flake8
- TEST_SUITE=doc
matrix:
# Flake8 and Sphinx no longer support Python 2.6, and one run is enough.
exclude:
- python: 2.6
env: TEST_SUITE=flake8
- python: 2.6
env: TEST_SUITE=doc
# Explicitly include an OS X build with homebrew's python.
# Works around Python issues on Travis for OSX, described here:
# http://blog.fizyk.net.pl/blog/running-python-tests-on-traviss-osx-workers.html
include: include:
- stage: 'flake8 + documentation' - os: osx
python: '2.7' language: generic
os: linux env: TEST_SUITE=unit
language: python
env: TEST_SUITE=flake8
- stage: 'flake8 + documentation'
python: '2.7'
os: linux
language: python
env: TEST_SUITE=doc
- stage: 'unit tests'
python: '2.6'
os: linux
language: python
env: TEST_SUITE=unit
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=unit, COVERAGE=true ]
- python: '3.3'
os: linux
language: python
env: TEST_SUITE=unit
- python: '3.4'
os: linux
language: python
env: TEST_SUITE=unit
- python: '3.5'
os: linux
language: python
env: TEST_SUITE=unit
- python: '3.6'
os: linux
language: python
env: [ TEST_SUITE=unit, COVERAGE=true ]
- stage: 'unit tests - osx'
os: osx
language: generic
env: [ TEST_SUITE=unit, PYTHON_VERSION=2.7, COVERAGE=true ]
# mpich (AutotoolsPackage)
- stage: 'build tests'
python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=mpich' ]
# astyle (MakefilePackage)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=astyle' ]
# tut (WafPackage)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=tut' ]
# py-setuptools (PythonPackage)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=py-setuptools' ]
# perl-dbi (PerlPackage)
# - python: '2.7'
# os: linux
# language: python
# env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=perl-dbi' ]
# openjpeg (CMakePackage + external cmake)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=openjpeg' ]
# r-rcpp (RPackage + external R)
- python: '2.7'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=r-rcpp' ]
# mpich (AutotoolsPackage)
- python: '3.6'
os: linux
language: python
env: [ TEST_SUITE=build, COVERAGE=true, 'SPEC=mpich' ]
stages:
- 'flake8 + documentation'
- 'unit tests'
- 'build tests'
- name: 'unit tests - osx'
if: type IN (cron)
#============================================================================= #=============================================================================
# Environment # Environment
@@ -114,36 +47,23 @@ addons:
apt: apt:
packages: packages:
- gfortran - gfortran
- mercurial
- graphviz - graphviz
- gnupg2 - libyaml-dev
- cmake
- r-base
- r-base-core
- r-base-dev
- perl
- perl-base
cache: pip
# Work around Travis's lack of support for Python on OSX # Work around Travis's lack of support for Python on OSX
before_install: before_install:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions python > /dev/null || brew install python; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions python > /dev/null || brew install python; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then rm /usr/local/include/c++ ; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions gcc > /dev/null || brew install gcc; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions gcc > /dev/null || brew install gcc; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions gnupg2 > /dev/null || brew install gnupg2; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then virtualenv venv; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then virtualenv venv; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source venv/bin/activate; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source venv/bin/activate; fi
# Install various dependencies # Install various dependencies
install: install:
- pip install --upgrade pip - pip install --upgrade coveralls
- pip install --upgrade six
- pip install --upgrade setuptools
- pip install --upgrade codecov
- pip install --upgrade flake8 - pip install --upgrade flake8
- if [[ "$TEST_SUITE" == "doc" ]]; then pip install --upgrade -r lib/spack/docs/requirements.txt; fi - pip install --upgrade sphinx
- pip install --upgrade mercurial
before_script: before_script:
# Need this for the git tests to succeed. # Need this for the git tests to succeed.
@@ -153,15 +73,13 @@ before_script:
# Need this to be able to compute the list of changed files # Need this to be able to compute the list of changed files
- git fetch origin develop:develop - git fetch origin develop:develop
# Set up external dependencies for build tests, because the take too long to compile
- if [[ "$TEST_SUITE" == "build" ]]; then cp share/spack/qa/configuration/packages.yaml etc/spack/packages.yaml; fi
#============================================================================= #=============================================================================
# Building # Building
#============================================================================= #=============================================================================
script: script: share/spack/qa/run-$TEST_SUITE-tests
- share/spack/qa/run-$TEST_SUITE-tests
- if [[ "$COVERAGE" == "true" ]]; then codecov --env PYTHON_VERSION --required --flags "${TEST_SUITE}${TRAVIS_OS_NAME}"; fi after_success:
- if [[ $TEST_SUITE == unit && $TRAVIS_PYTHON_VERSION == 2.7 && $TRAVIS_OS_NAME == "linux" ]]; then coveralls; fi
#============================================================================= #=============================================================================
# Notifications # Notifications

View File

@@ -1,46 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at maintainers@spack.io. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

View File

@@ -1,5 +0,0 @@
# Contributing to Spack
Before contributing to Spack you should read the
[Contribution Guide](https://spack.readthedocs.io/en/latest/contribution_guide.html),
which is maintained as part of Spack's documentation.

574
LICENSE
View File

@@ -1,197 +1,135 @@
GNU LESSER GENERAL PUBLIC LICENSE ########################################################################
Version 2.1, February 1999 GNU LESSER GENERAL PUBLIC LICENSE (Lesser GPL)
Version 2.1, February 1999
########################################################################
Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
Produced at the Lawrence Livermore National Laboratory.
Copyright (C) 1991, 1999 Free Software Foundation, Inc. This file is part of Spack.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
Everyone is permitted to copy and distribute verbatim copies LLNL-CODE-647188
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts For details, see https://github.com/llnl/spack
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License (as
published by the Free Software Foundation) version 2.1, February 1999.
The licenses for most software are designed to take away your This program is distributed in the hope that it will be useful, but
freedom to share and change it. By contrast, the GNU General Public WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
Licenses are intended to guarantee your freedom to share and change MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
free software--to make sure the software is free for all its users. conditions of the GNU Lesser General Public License for more details.
This license, the Lesser General Public License, applies to some You should have received a copy of the GNU Lesser General Public
specially designated software packages--typically libraries--of the License along with this program; if not, write to the Free Software
Free Software Foundation and other authors who decide to use it. You Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use, ########################################################################
not price. Our General Public Licenses are designed to make sure that LLNL NOTICE AND TERMS AND CONDITIONS OF THE GNU LGPL
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid LLNL Preamble Notice
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis A. This notice is required to be provided under LLNL's contract with
or for a fee, you must give the recipients all the rights that we gave the U.S. Department of Energy (DOE). This work was produced at the
you. You must make sure that they, too, receive or can get the source Lawrence Livermore National Laboratory under Contract
code. If you link other code with the library, you must provide No. DE-AC52-07NA27344 with the DOE.
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the B. Neither the United States Government nor Lawrence Livermore
library, and (2) we offer you this license, which gives you legal National Security, LLC nor any of their employees, makes any
permission to copy, distribute and/or modify the library. warranty, express or implied, or assumes any liability or
responsibility for the accuracy, completeness, or usefulness of any
information, apparatus, product, or process disclosed, or
represents that its use would not infringe privately-owned rights.
To protect each distributor, we want to make it very clear that C. Also, reference herein to any specific commercial products,
there is no warranty for the free library. Also, if the library is process, or services by trade name, trademark, manufacturer or
modified by someone else and passed on, the recipients should know otherwise does not necessarily constitute or imply its endorsement,
that what they have is not the original version, so that the original recommendation, or favoring by the United States Government or
author's reputation will not be affected by problems that might be Lawrence Livermore National Security, LLC. The views and opinions
introduced by others. of authors expressed herein do not necessarily state or reflect
those of the United States Government or Lawrence Livermore
National Security, LLC, and shall not be used for advertising or
product endorsement purposes.
Finally, software patents pose a constant threat to the existence of The precise terms and conditions for copying, distribution and
any free program. We wish to make sure that a company cannot modification follows.
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the ########################################################################
ordinary GNU General Public License. This license, the GNU Lesser GNU LESSER GENERAL PUBLIC LICENSE
General Public License, applies to certain designated libraries, and TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using 0. This License Agreement applies to any software library or other
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License"). this Lesser General Public License (also called "this License"). Each
Each licensee is addressed as "you". licensee is addressed as "you".
A "library" means a collection of software functions and/or data A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables. (which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".) included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for "Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation interface definition files, plus the scripts used to control
and installation of the library. compilation and installation of the library. Activities other than
copying, distribution and modification are not covered by this
License; they are outside its scope. The act of running a program
using the Library is not restricted, and output from such a program is
covered only if its contents constitute a work based on the Library
(independent of the use of the Library in a tool for writing
it). Whether that is true depends on what the Library does and what
the program that uses the Library does.
Activities other than copying, distribution and modification are not 1. You may copy and distribute verbatim copies of the Library's
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the warranty; and distribute a copy of this License along with the
Library. Library. You may charge a fee for the physical act of transferring a
copy, and you may at your option offer warranty protection in exchange
for a fee.
You may charge a fee for the physical act of transferring a copy, 2. You may modify your copy or copies of the Library or any portion of
and you may at your option offer warranty protection in exchange for a it, thus forming a work based on the Library, and copy and distribute
fee. such modifications or work under the terms of Section 1 above,
provided that you also meet all of these conditions:
2. You may modify your copy or copies of the Library or any portion a) The modified work must itself be a software library.
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause the files modified to carry prominent notices c) You must cause the whole of the work to be licensed at no charge to
stating that you changed the files and the date of any change. all third parties under the terms of this License.
c) You must cause the whole of the work to be licensed at no d) If a facility in the modified Library refers to a function or a
charge to all third parties under the terms of this License. table of data to be supplied by an application program that uses the
facility, other than as an argument passed when the facility is
invoked, then you must make a good faith effort to ensure that, in the
event an application does not supply such function or table, the
facility still operates, and performs whatever part of its purpose
remains meaningful. (For example, a function in a library to compute
square roots has a purpose that is entirely well-defined independent
of the application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must be
optional: if the application does not supply it, the square root
function must still compute square roots.)
d) If a facility in the modified Library refers to a function or a These requirements apply to the modified work as a whole. If
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library, identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the this License, whose permissions for other licensees extend to the
@@ -208,191 +146,189 @@ with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under a storage or distribution medium does not bring the other work under
the scope of this License. the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public 3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2, that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in that version instead if you wish.) Do not make any other change in
these notices. these notices.
Once this change is made in a given copy, it is irreversible for Once this change is made in a given copy, it is irreversible for that
that copy, so the ordinary GNU General Public License applies to all copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy. subsequent copies and derivative works made from that copy. This
option is useful when you wish to copy part of the code of the Library
into a program that is not a library.
This option is useful when you wish to copy part of the code of 4. You may copy and distribute the Library (or a portion or derivative
the Library into a program that is not a library. of it, under Section 2) in object code or executable form under the
terms of Sections 1 and 2 above provided that you accompany it with
the complete corresponding machine-readable source code, which must
be distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange.
4. You may copy and distribute the Library (or a portion or If distribution of object code is made by offering access to copy from
derivative of it, under Section 2) in object code or executable form a designated place, then offering equivalent access to copy the source
under the terms of Sections 1 and 2 above provided that you accompany code from the same place satisfies the requirement to distribute the
it with the complete corresponding machine-readable source code, which source code, even though third parties are not compelled to copy the
must be distributed under the terms of Sections 1 and 2 above on a source along with the object code.
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy 5. A program that contains no derivative of any portion of the
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a linked with it, is called a "work that uses the Library". Such a work,
work, in isolation, is not a derivative work of the Library, and in isolation, is not a derivative work of the Library, and therefore
therefore falls outside the scope of this License. falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License. library". The executable is therefore covered by this License. Section
Section 6 states terms for distribution of such executables. 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not. derivative work of the Library even though the source code is
Whether this is true is especially significant if the work can be not. Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law. threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data If such an object file uses only numerical parameters, data structure
structure layouts and accessors, and small macros and small inline layouts and accessors, and small macros and small inline functions
functions (ten lines or less in length), then the use of the object (ten lines or less in length), then the use of the object file is
file is unrestricted, regardless of whether it is legally a derivative unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.) Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6. distribute the object code for the work under the terms of Section
Any executables containing that work also fall under Section 6, 6. Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself. whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or 6. As an exception to the Sections above, you may also combine or link
link a "work that uses the Library" with the Library to produce a a "work that uses the Library" with the Library to produce a work
work containing portions of the Library, and distribute that work containing portions of the Library, and distribute that work under
under terms of your choice, provided that the terms permit terms of your choice, provided that the terms permit modification of
modification of the work for the customer's own use and reverse the work for the customer's own use and reverse engineering for
engineering for debugging such modifications. debugging such modifications.
You must give prominent notice with each copy of the work that the You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one directing the user to the copy of this License. Also, you must do one
of these things: of these things:
a) Accompany the work with the complete corresponding a) Accompany the work with the complete corresponding machine-readable
machine-readable source code for the Library including whatever source code for the Library including whatever changes were used in
changes were used in the work (which must be distributed under the work (which must be distributed under Sections 1 and 2 above);
Sections 1 and 2 above); and, if the work is an executable linked and, if the work is an executable linked with the Library, with the
with the Library, with the complete machine-readable "work that complete machine-readable "work that uses the Library", as object code
uses the Library", as object code and/or source code, so that the and/or source code, so that the user can modify the Library and then
user can modify the Library and then relink to produce a modified relink to produce a modified executable containing the modified
executable containing the modified Library. (It is understood Library. (It is understood that the user who changes the contents of
that the user who changes the contents of definitions files in the definitions files in the Library will not necessarily be able to
Library will not necessarily be able to recompile the application recompile the application to use the modified definitions.)
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a Library. A suitable mechanism is one that (1) uses at run time a copy
copy of the library already present on the user's computer system, of the library already present on the user's computer system, rather
rather than copying library functions into the executable, and (2) than copying library functions into the executable, and (2) will
will operate properly with a modified version of the library, if operate properly with a modified version of the library, if the user
the user installs one, as long as the modified version is installs one, as long as the modified version is interface-compatible
interface-compatible with the version that the work was made with. with the version that the work was made with.
c) Accompany the work with a written offer, valid for at c) Accompany the work with a written offer, valid for at least three
least three years, to give the same user the materials years, to give the same user the materials specified in Subsection 6a,
specified in Subsection 6a, above, for a charge no more above, for a charge no more than the cost of performing this
than the cost of performing this distribution. distribution.
d) If distribution of the work is made by offering access to copy d) If distribution of the work is made by offering access to copy from
from a designated place, offer equivalent access to copy the above a designated place, offer equivalent access to copy the above
specified materials from the same place. specified materials from the same place.
e) Verify that the user has already received a copy of these e) Verify that the user has already received a copy of these materials
materials or that you have already sent this user a copy. or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception, reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies which the executable runs, unless that component itself accompanies
the executable. the executable.
It may happen that this requirement contradicts the license It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you use both them and the Library together in an executable that you
distribute. distribute.
7. You may place library facilities that are a work based on the 7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things: permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work a) Accompany the combined library with a copy of the same work based
based on the Library, uncombined with any other library on the Library, uncombined with any other library facilities. This
facilities. This must be distributed under the terms of the must be distributed under the terms of the Sections above.
Sections above.
b) Give prominent notice with the combined library of the fact b) Give prominent notice with the combined library of the fact that
that part of it is a work based on the Library, and explaining part of it is a work based on the Library, and explaining where to
where to find the accompanying uncombined form of the same work. find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute 8. You may not copy, modify, sublicense, link with, or distribute the
the Library except as expressly provided under this License. Any Library except as expressly provided under this License. Any attempt
attempt otherwise to copy, modify, sublicense, link with, or otherwise to copy, modify, sublicense, link with, or distribute the
distribute the Library is void, and will automatically terminate your Library is void, and will automatically terminate your rights under
rights under this License. However, parties who have received copies, this License. However, parties who have received copies, or rights,
or rights, from you under this License will not have their licenses from you under this License will not have their licenses terminated so
terminated so long as such parties remain in full compliance. long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not 9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying all its terms and conditions for copying, distributing or modifying
the Library or works based on it. the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the 10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein. restrictions on the recipients' exercise of the rights granted
You are not responsible for enforcing compliance by third parties with herein. You are not responsible for enforcing compliance by third
this License. parties with this License.
11. If, as a consequence of a court judgment or allegation of patent 11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues), infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library. refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any If any portion of this section is held invalid or unenforceable under
particular circumstance, the balance of the section is intended to apply, any particular circumstance, the balance of the section is intended to
and the section as a whole is intended to apply in other circumstances. apply, and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is integrity of the free software distribution system which is
implemented by public license practices. Many people have made implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing system; it is up to the author/donor to decide if he or she is willing
@@ -402,102 +338,56 @@ impose that choice.
This section is intended to make thoroughly clear what is believed to This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License. be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in 12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add original copyright holder who places the Library under this License
an explicit geographical distribution limitation excluding those countries, may add an explicit geographical distribution limitation excluding
so that distribution is permitted only in or among countries not thus those countries, so that distribution is permitted only in or among
excluded. In such case, this License incorporates the limitation as if countries not thus excluded. In such case, this License incorporates
written in the body of this License. the limitation as if written in the body of this License.
13. The Free Software Foundation may publish revised and/or new 13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time. versions of the Lesser General Public License from time to time. Such
Such new versions will be similar in spirit to the present version, new versions will be similar in spirit to the present version, but may
but may differ in detail to address new problems or concerns. differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and "any later version", you have the option of following the terms and
conditions either of that version or of any later version published by conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by license version number, you may choose any version ever published by
the Free Software Foundation. the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free 14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these, programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing of all derivatives of our free software and of promoting the sharing
and reuse of software generally. and reuse of software generally.
NO WARRANTY NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND,
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL
OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES. DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

32
NOTICE
View File

@@ -1,32 +0,0 @@
########################################################################
LLNL NOTICE AND TERMS AND CONDITIONS OF THE GNU LGPL
########################################################################
LLNL Preamble Notice
A. This notice is required to be provided under LLNL's contract with
the U.S. Department of Energy (DOE). This work was produced at the
Lawrence Livermore National Laboratory under Contract
No. DE-AC52-07NA27344 with the DOE.
B. Neither the United States Government nor Lawrence Livermore
National Security, LLC nor any of their employees, makes any
warranty, express or implied, or assumes any liability or
responsibility for the accuracy, completeness, or usefulness of any
information, apparatus, product, or process disclosed, or
represents that its use would not infringe privately-owned rights.
C. Also, reference herein to any specific commercial products,
process, or services by trade name, trademark, manufacturer or
otherwise does not necessarily constitute or imply its endorsement,
recommendation, or favoring by the United States Government or
Lawrence Livermore National Security, LLC. The views and opinions
of authors expressed herein do not necessarily state or reflect
those of the United States Government or Lawrence Livermore
National Security, LLC, and shall not be used for advertising or
product endorsement purposes.
See the LICENSE file for the precise terms and conditions for copying,
distribution and modification.
########################################################################

View File

@@ -1,30 +1,31 @@
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack ![image](share/spack/logo/spack-logo-text-64.png "Spack")
============
[![Build Status](https://travis-ci.org/spack/spack.svg?branch=develop)](https://travis-ci.org/spack/spack) [![Build Status](https://travis-ci.org/LLNL/spack.svg?branch=develop)](https://travis-ci.org/LLNL/spack)
[![codecov](https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg)](https://codecov.io/gh/spack/spack) [![Coverage Status](https://coveralls.io/repos/github/LLNL/spack/badge.svg?branch=develop)](https://coveralls.io/github/LLNL/spack?branch=develop)
[![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io)
[![Slack](https://spackpm.herokuapp.com/badge.svg)](https://spackpm.herokuapp.com)
Spack is a multi-platform package manager that builds and installs Spack is a package management tool designed to support multiple
multiple versions and configurations of software. It works on Linux, versions and configurations of software on a wide variety of platforms
macOS, and many supercomputers. Spack is non-destructive: installing a and environments. It was designed for large supercomputing centers,
new version of a package does not break existing installations, so many where many users and application teams share common installations of
configurations of the same package can coexist. software on clusters with exotic architectures, using libraries that
do not have a standard ABI. Spack is non-destructive: installing a new
version does not break existing installations, so many configurations
can coexist on the same system.
Spack offers a simple "spec" syntax that allows users to specify versions Most importantly, Spack is simple. It offers a simple spec syntax so
and configuration options. Package files are written in pure Python, and that users can specify versions and configuration options
specs allow package authors to write a single script for many different concisely. Spack is also simple for package authors: package files are
builds of the same package. With Spack, you can build your software written in pure Python, and specs allow package authors to write a
*all* the ways you want to. single build script for many different builds of the same package.
See the See the
[Feature Overview](http://spack.readthedocs.io/en/latest/features.html) [Feature Overview](http://spack.readthedocs.io/en/latest/features.html)
for examples and highlights. for examples and highlights.
To install spack and your first package, make sure you have Python. To install spack and install your first package:
Then:
$ git clone https://github.com/spack/spack.git $ git clone https://github.com/llnl/spack.git
$ cd spack/bin $ cd spack/bin
$ ./spack install libelf $ ./spack install libelf
@@ -34,16 +35,15 @@ Documentation
[**Full documentation**](http://spack.readthedocs.io/) for Spack is [**Full documentation**](http://spack.readthedocs.io/) for Spack is
the first place to look. the first place to look.
Try the We've also got a [**Spack 101 Tutorial**](http://spack.readthedocs.io/en/latest/tutorial_sc16.html),
[**Spack Tutorial**](http://spack.readthedocs.io/en/latest/tutorial.html), so you can learn Spack yourself, or teach users at your own site.
to learn how to use spack, write packages, or deploy packages for users
at your site.
See also: See also:
* [Technical paper](http://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf) and * [Technical paper](http://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf) and
[slides](https://tgamblin.github.io/files/Gamblin-Spack-SC15-Talk.pdf) on Spack's design and implementation. [slides](https://tgamblin.github.io/files/Gamblin-Spack-SC15-Talk.pdf) on Spack's design and implementation.
* [Short presentation](https://tgamblin.github.io/files/Gamblin-Spack-Lightning-Talk-BOF-SC15.pdf) from the *Getting Scientific Software Installed* BOF session at Supercomputing 2015. * [Short presentation](https://tgamblin.github.io/files/Gamblin-Spack-Lightning-Talk-BOF-SC15.pdf) from the *Getting Scientific Software Installed* BOF session at Supercomputing 2015.
Get Involved! Get Involved!
------------------------ ------------------------
@@ -53,44 +53,37 @@ packages to bugfixes, or even new core features.
### Mailing list ### Mailing list
If you are interested in contributing to spack, join the mailing list. If you are interested in contributing to spack, the first step is to
We're using Google Groups for this: join the mailing list. We're using a Google Group for this, and you
can join it here:
* [Spack Google Group](https://groups.google.com/d/forum/spack) * [Spack Google Group](https://groups.google.com/d/forum/spack)
### Slack channel
Spack has a Slack channel where you can chat about all things Spack:
* [Spack on Slack](https://spackpm.slack.com)
[Sign up here](https://spackpm.herokuapp.com) to get an invitation mailed
to you.
### Contributions ### Contributions
Contributing to Spack is relatively easy. Just send us a Contributing to Spack is relatively easy. Just send us a
[pull request](https://help.github.com/articles/using-pull-requests/). [pull request](https://help.github.com/articles/using-pull-requests/).
When you send your request, make ``develop`` the destination branch on the When you send your request, make ``develop`` the destination branch on the
[Spack repository](https://github.com/spack/spack). [Spack repository](https://github.com/LLNL/spack).
Your PR must pass Spack's unit tests and documentation tests, and must be Your PR must pass Spack's unit tests and documentation tests, and must be
[PEP 8](https://www.python.org/dev/peps/pep-0008/) compliant. We enforce [PEP 8](https://www.python.org/dev/peps/pep-0008/) compliant.
these guidelines with [Travis CI](https://travis-ci.org/spack/spack). To We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
run these tests locally, and for helpful tips on git, see our To run these tests locally, and for helpful tips on git, see our
[Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html). [Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html).
Spack uses a rough approximation of the Spack uses a rough approximation of the [Git
[Git Flow](http://nvie.com/posts/a-successful-git-branching-model/) Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the latest contributions, and ``master`` is always tagged and points to the
stable release. latest stable release.
Authors Authors
---------------- ----------------
Many thanks go to Spack's [contributors](https://github.com/spack/spack/graphs/contributors). Many thanks go to Spack's [contributors](https://github.com/llnl/spack/graphs/contributors).
Spack was created by Todd Gamblin, tgamblin@llnl.gov. Spack was originally written by Todd Gamblin, tgamblin@llnl.gov.
### Citing Spack ### Citing Spack
@@ -104,8 +97,6 @@ If you are referencing Spack in a publication, please cite the following paper:
Release Release
---------------- ----------------
Spack is released under an LGPL license. For more details see the Spack is released under an LGPL license. For more details see the
NOTICE and LICENSE files. LICENSE file.
``LLNL-CODE-647188`` ``LLNL-CODE-647188``
![Analytics](https://ga-beacon.appspot.com/UA-101208306-3/welcome-page?pixel)

View File

@@ -1,14 +1,14 @@
#!/bin/bash #!/bin/bash
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -104,27 +104,18 @@ lines=0
while read line && ((lines < 2)) ; do while read line && ((lines < 2)) ; do
if [[ "$line" = '#!'* ]]; then if [[ "$line" = '#!'* ]]; then
interpreter="${line#\#!}" interpreter="${line#\#!}"
elif [[ "$line" = '//!'*node* ]]; then
interpreter="${line#//!}"
elif [[ "$line" = '--!'*lua* ]]; then elif [[ "$line" = '--!'*lua* ]]; then
interpreter="${line#--!}" interpreter="${line#--!}"
fi fi
lines=$((lines+1)) lines=$((lines+1))
done < "$script" done < "$script"
# this is ineeded for scripts with sbang parameter
# like ones in intltool
# #!/<spack-long-path>/perl -w
# this is the interpreter line with all the parameters as a vector
interpreter_v=(${interpreter})
# this is the single interpreter path
interpreter_f="${interpreter_v[0]}"
# Invoke any interpreter found, or raise an error if none was found. # Invoke any interpreter found, or raise an error if none was found.
if [[ -n "$interpreter_f" ]]; then if [[ -n "$interpreter" ]]; then
if [[ "${interpreter_f##*/}" = "perl" ]]; then if [[ "${interpreter##*/}" = "perl" ]]; then
exec $interpreter_v -x "$@" exec $interpreter -x "$@"
else else
exec $interpreter_v "$@" exec $interpreter "$@"
fi fi
else else
echo "error: sbang found no interpreter in $script" echo "error: sbang found no interpreter in $script"

203
bin/spack
View File

@@ -1,14 +1,15 @@
#!/usr/bin/env python #!/usr/bin/env python
# flake8: noqa
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -23,38 +24,27 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
from __future__ import print_function
import os
import sys import sys
if (sys.version_info[0] > 2) or (sys.version_info[:2] < (2, 6)):
if sys.version_info[:2] < (2, 6):
v_info = sys.version_info[:3] v_info = sys.version_info[:3]
sys.exit("Spack requires Python 2.6 or higher." sys.exit("Spack requires Python 2.6 or 2.7. "
"This is Python %d.%d.%d." % v_info) "This is Python %d.%d.%d." % v_info)
import os
import inspect
# Find spack's location and its prefix. # Find spack's location and its prefix.
spack_file = os.path.realpath(os.path.expanduser(__file__)) SPACK_FILE = os.path.realpath(os.path.expanduser(__file__))
spack_prefix = os.path.dirname(os.path.dirname(spack_file)) os.environ["SPACK_FILE"] = SPACK_FILE
SPACK_PREFIX = os.path.dirname(os.path.dirname(SPACK_FILE))
# Allow spack libs to be imported in our scripts # Allow spack libs to be imported in our scripts
spack_lib_path = os.path.join(spack_prefix, "lib", "spack") SPACK_LIB_PATH = os.path.join(SPACK_PREFIX, "lib", "spack")
sys.path.insert(0, spack_lib_path) sys.path.insert(0, SPACK_LIB_PATH)
# Add external libs # Add external libs
spack_external_libs = os.path.join(spack_lib_path, "external") SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
sys.path.insert(0, SPACK_EXTERNAL_LIBS)
if sys.version_info[:2] == (2, 6):
sys.path.insert(0, os.path.join(spack_external_libs, 'py26'))
sys.path.insert(0, spack_external_libs)
# Handle vendoring of YAML specially, as it has two versions.
if sys.version_info[0] == 2:
spack_yaml_libs = os.path.join(spack_external_libs, "yaml/lib")
else:
spack_yaml_libs = os.path.join(spack_external_libs, "yaml/lib3")
sys.path.insert(0, spack_yaml_libs)
# Quick and dirty check to clean orphaned .pyc files left over from # Quick and dirty check to clean orphaned .pyc files left over from
# previous revisions. These files were present in earlier versions of # previous revisions. These files were present in earlier versions of
@@ -62,13 +52,13 @@ sys.path.insert(0, spack_yaml_libs)
# imports. If we leave them, Spack will fail in mysterious ways. # imports. If we leave them, Spack will fail in mysterious ways.
# TODO: more elegant solution for orphaned pyc files. # TODO: more elegant solution for orphaned pyc files.
orphaned_pyc_files = [ orphaned_pyc_files = [
os.path.join(spack_external_libs, 'functools.pyc'), os.path.join(SPACK_EXTERNAL_LIBS, 'functools.pyc'),
os.path.join(spack_external_libs, 'ordereddict.pyc'), os.path.join(SPACK_EXTERNAL_LIBS, 'ordereddict.pyc'),
os.path.join(spack_lib_path, 'spack', 'platforms', 'cray_xc.pyc'), os.path.join(SPACK_LIB_PATH, 'spack', 'platforms', 'cray_xc.pyc'),
os.path.join(spack_lib_path, 'spack', 'cmd', 'package-list.pyc'), os.path.join(SPACK_LIB_PATH, 'spack', 'cmd', 'package-list.pyc'),
os.path.join(spack_lib_path, 'spack', 'cmd', 'test-install.pyc'), os.path.join(SPACK_LIB_PATH, 'spack', 'cmd', 'test-install.pyc'),
os.path.join(spack_lib_path, 'spack', 'cmd', 'url-parse.pyc'), os.path.join(SPACK_LIB_PATH, 'spack', 'cmd', 'url-parse.pyc'),
os.path.join(spack_lib_path, 'spack', 'test', 'yaml.pyc') os.path.join(SPACK_LIB_PATH, 'spack', 'test', 'yaml.pyc')
] ]
for pyc_file in orphaned_pyc_files: for pyc_file in orphaned_pyc_files:
@@ -77,9 +67,146 @@ for pyc_file in orphaned_pyc_files:
try: try:
os.remove(pyc_file) os.remove(pyc_file)
except OSError as e: except OSError as e:
print("WARNING: Spack may fail mysteriously. " print ("WARNING: Spack may fail mysteriously. "
"Couldn't remove orphaned .pyc file: %s" % pyc_file) "Couldn't remove orphaned .pyc file: %s" % pyc_file)
# Once we've set up the system path, run the spack main method # If there is no working directory, use the spack prefix.
import spack.main # noqa try:
sys.exit(spack.main.main()) working_dir = os.getcwd()
except OSError:
os.chdir(SPACK_PREFIX)
working_dir = SPACK_PREFIX
# clean up the scope and start using spack package instead.
del SPACK_FILE, SPACK_PREFIX, SPACK_LIB_PATH
import llnl.util.tty as tty
from llnl.util.tty.color import *
import spack
from spack.error import SpackError
import argparse
# Command parsing
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter,
description="Spack: the Supercomputing PACKage Manager." + colorize("""
spec expressions:
PACKAGE [CONSTRAINTS]
CONSTRAINTS:
@c{@version}
@g{%compiler @compiler_version}
@B{+variant}
@r{-variant} or @r{~variant}
@m{=architecture}
[^DEPENDENCY [CONSTRAINTS] ...]"""))
parser.add_argument('-d', '--debug', action='store_true',
help="Write out debug logs during compile")
parser.add_argument('-D', '--pdb', action='store_true',
help="Run spack under the pdb debugger")
parser.add_argument('-k', '--insecure', action='store_true',
help="Do not check ssl certificates when downloading.")
parser.add_argument('-m', '--mock', action='store_true',
help="Use mock packages instead of real ones.")
parser.add_argument('-p', '--profile', action='store_true',
help="Profile execution using cProfile.")
parser.add_argument('-v', '--verbose', action='store_true',
help="Print additional output during builds")
parser.add_argument('-s', '--stacktrace', action='store_true',
help="Add stacktrace information to all printed statements")
parser.add_argument('-V', '--version', action='version',
version="%s" % spack.spack_version)
# each command module implements a parser() function, to which we pass its
# subparser for setup.
subparsers = parser.add_subparsers(metavar='SUBCOMMAND', dest="command")
import spack.cmd
for cmd in spack.cmd.commands:
module = spack.cmd.get_module(cmd)
cmd_name = cmd.replace('_', '-')
subparser = subparsers.add_parser(cmd_name, help=module.description)
module.setup_parser(subparser)
def _main(args, unknown_args):
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
tty.set_stacktrace(args.stacktrace)
spack.debug = args.debug
if spack.debug:
import spack.util.debug as debug
debug.register_interrupt_handler()
# Run any available pre-run hooks
spack.hooks.pre_run()
spack.spack_working_dir = working_dir
if args.mock:
from spack.repository import RepoPath
spack.repo.swap(RepoPath(spack.mock_packages_path))
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
spack.insecure = True
# Try to load the particular command asked for and run it
command = spack.cmd.get_command(args.command.replace('-', '_'))
# Allow commands to inject an optional argument and get unknown args
# if they want to handle them.
info = dict(inspect.getmembers(command))
varnames = info['__code__'].co_varnames
argcount = info['__code__'].co_argcount
# Actually execute the command
try:
if argcount == 3 and varnames[2] == 'unknown_args':
return_val = command(parser, args, unknown_args)
else:
if unknown_args:
tty.die('unrecognized arguments: %s' % ' '.join(unknown_args))
return_val = command(parser, args)
except SpackError as e:
e.die()
except KeyboardInterrupt:
sys.stderr.write('\n')
tty.die("Keyboard interrupt.")
# Allow commands to return values if they want to exit with some other code.
if return_val is None:
sys.exit(0)
elif isinstance(return_val, int):
sys.exit(return_val)
else:
tty.die("Bad return value from command %s: %s"
% (args.command, return_val))
def main(args):
# Just print help and exit if run with no arguments at all
if len(args) == 1:
parser.print_help()
sys.exit(1)
# actually parse the args.
args, unknown = parser.parse_known_args()
if args.profile:
import cProfile
cProfile.runctx('_main(args, unknown)', globals(), locals(),
sort='time')
elif args.pdb:
import pdb
pdb.runctx('_main(args, unknown)', globals(), locals())
else:
_main(args, unknown)
if __name__ == '__main__':
main(sys.argv)

View File

@@ -1,14 +1,14 @@
#!/bin/sh #!/bin/sh
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as

View File

@@ -1,52 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import pytest
# Hooks to add command line options or set other custom behaviors.
# They must be placed here to be found by pytest. See:
#
# https://docs.pytest.org/en/latest/writing_plugins.html
#
def pytest_addoption(parser):
group = parser.getgroup("Spack specific command line options")
group.addoption(
'--fast', action='store_true', default=False,
help='runs only "fast" unit tests, instead of the whole suite')
def pytest_collection_modifyitems(config, items):
if not config.getoption('--fast'):
# --fast not given, run all the tests
return
slow_tests = ['db', 'network', 'maybeslow']
skip_as_slow = pytest.mark.skip(
reason='skipped slow test [--fast command line option given]'
)
for item in items:
if any(x in item.keywords for x in slow_tests):
item.add_marker(skip_as_slow)

View File

@@ -18,9 +18,6 @@ config:
# You can use $spack here to refer to the root of the spack instance. # You can use $spack here to refer to the root of the spack instance.
install_tree: $spack/opt/spack install_tree: $spack/opt/spack
# Locations where templates should be found
template_dirs:
- $spack/templates
# Locations where different types of modules should be installed. # Locations where different types of modules should be installed.
module_roots: module_roots:
@@ -69,9 +66,3 @@ config:
# If set to true, `spack install` and friends will NOT clean # If set to true, `spack install` and friends will NOT clean
# potentially harmful variables from the build environment. Use wisely. # potentially harmful variables from the build environment. Use wisely.
dirty: false dirty: false
# The default number of jobs to use when running `make` in parallel.
# If set to 4, for example, `spack install` will run `make -j4`.
# If not set, all available cores are used by default.
# build_jobs: 4

View File

@@ -40,7 +40,3 @@ modules:
- PKG_CONFIG_PATH - PKG_CONFIG_PATH
'': '':
- CMAKE_PREFIX_PATH - CMAKE_PREFIX_PATH
lmod:
hierarchy:
- mpi

View File

@@ -17,23 +17,7 @@ packages:
all: all:
compiler: [gcc, intel, pgi, clang, xl, nag] compiler: [gcc, intel, pgi, clang, xl, nag]
providers: providers:
awk: [gawk]
blas: [openblas]
daal: [intel-daal]
elf: [elfutils]
gl: [mesa, opengl]
golang: [gcc]
ipp: [intel-ipp]
java: [jdk]
lapack: [openblas]
mkl: [intel-mkl]
mpe: [mpe2]
mpi: [openmpi, mpich] mpi: [openmpi, mpich]
opencl: [pocl] blas: [openblas]
openfoam: [openfoam-com, openfoam-org, foam-extend] lapack: [openblas]
pil: [py-pillow] pil: [py-pillow]
pkgconfig: [pkgconf, pkg-config]
scalapack: [netlib-scalapack]
szip: [libszip, libaec]
tbb: [intel-tbb]
jpeg: [libjpeg-turbo, libjpeg]

View File

@@ -1,36 +0,0 @@
If you want to report a build error for some package, or a bug in core please use the following template as a guideline:
-----
Summary
### Expected Result
What you expected
### Actual Result
What happened instead
### Steps to reproduce the issue
```console
$ spack <command1> <spec>
$ spack <command2> <spec>
...
```
### Information on your system
This includes:
1. which platform you are using
2. any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.)
-----
We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on our Slack first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.
Other than that, thanks for taking the time to contribute to Spack!

View File

@@ -1,5 +1,5 @@
package_list.html package_list.rst
command_index.rst command_index.rst
spack*.rst spack*.rst
llnl*.rst modules.rst
_build _build

View File

@@ -3,12 +3,13 @@
# You can set these variables from the command line. # You can set these variables from the command line.
SPHINXOPTS = -E SPHINXOPTS = -E
SPHINXBUILD = sphinx-build JOBS ?= $(shell python -c 'import multiprocessing; print multiprocessing.cpu_count()')
SPHINXBUILD = sphinx-build -j $(JOBS)
PAPER = PAPER =
BUILDDIR = _build BUILDDIR = _build
export PYTHONPATH := ../../spack:$(PYTHONPATH) export PYTHONPATH := ../../spack:$(PYTHONPATH)
APIDOC_FILES = spack*.rst llnl*.rst APIDOC_FILES = spack*.rst
# Internal variables. # Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_a4 = -D latex_paper_size=a4
@@ -57,8 +58,7 @@ upload:
git push -f github gh-pages git push -f github gh-pages
apidoc: apidoc:
sphinx-apidoc -f -T -o . ../spack sphinx-apidoc -T -o . $(PYTHONPATH)/spack
sphinx-apidoc -f -T -o . ../llnl
help: help:
@echo "Please use \`make <target>' where <target> is one of" @echo "Please use \`make <target>' where <target> is one of"
@@ -83,7 +83,7 @@ help:
@echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean: clean:
-rm -f command_index.rst -rm -f package_list.rst command_index.rst modules.rst
-rm -rf $(BUILDDIR)/* $(APIDOC_FILES) -rm -rf $(BUILDDIR)/* $(APIDOC_FILES)
html: html:

View File

@@ -24,7 +24,7 @@
<br/> <br/>
Written by Todd Gamblin (<a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>) and Written by Todd Gamblin (<a href="mailto:tgamblin@llnl.gov">tgamblin@llnl.gov</a>) and
many <a href="https://github.com/spack/spack/graphs/contributors">contributors.</a> LLNL-CODE-647188. many contributors. LLNL-CODE-647188.
{%- if last_updated %} {%- if last_updated %}
<br/> <br/>

View File

@@ -808,46 +808,17 @@ Specifying Specs by Hash
^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^
Complicated specs can become cumbersome to enter on the command line, Complicated specs can become cumbersome to enter on the command line,
especially when many of the qualifications are necessary to distinguish especially when many of the qualifications are necessary to
between similar installs. To avoid this, when referencing an existing spec, distinguish between similar installs, for example when using the
``uninstall`` command. To avoid this, when referencing an existing spec,
Spack allows you to reference specs by their hash. We previously Spack allows you to reference specs by their hash. We previously
discussed the spec hash that Spack computes. In place of a spec in any discussed the spec hash that Spack computes. In place of a spec in any
command, substitute ``/<hash>`` where ``<hash>`` is any amount from command, substitute ``/<hash>`` where ``<hash>`` is any amount from
the beginning of a spec hash. the beginning of a spec hash. If the given spec hash is sufficient
to be unique, Spack will replace the reference with the spec to which
it refers. Otherwise, it will prompt for a more qualified hash.
For example, lets say that you accidentally installed two different Note that this will not work to reinstall a depencency uninstalled by
``mvapich2`` installations. If you want to uninstall one of them but don't
know what the difference is, you can run:
.. code-block:: console
$ spack find --long mvapich2
==> 2 installed packages.
-- linux-centos7-x86_64 / gcc@6.3.0 ----------
qmt35td mvapich2@2.2%gcc
er3die3 mvapich2@2.2%gcc
You can then uninstall the latter installation using:
.. code-block:: console
$ spack uninstall /er3die3
Or, if you want to build with a specific installation as a dependency,
you can use:
.. code-block:: console
$ spack install trilinos ^/er3die3
If the given spec hash is sufficiently long as to be unique, Spack will
replace the reference with the spec to which it refers. Otherwise, it will
prompt for a more qualified hash.
Note that this will not work to reinstall a dependency uninstalled by
``spack uninstall --force``. ``spack uninstall --force``.
.. _cmd-spack-providers: .. _cmd-spack-providers:

View File

@@ -1,132 +0,0 @@
.. _binary_caches:
============
Build caches
============
Some sites may encourage users to set up their own test environments
before carrying out central installations, or some users may prefer to set
up these environments on their own motivation. To reduce the load of
recompiling otherwise identical package specs in different installations,
installed packages can be put into build cache tarballs, uploaded to
your Spack mirror and then downloaded and installed by others.
--------------------------
Creating build cache files
--------------------------
A compressed tarball of an installed package is created. Tarballs are created
for all of its link and run dependency packages as well. Compressed tarballs are
signed with gpg, and the signature and tarball are put in a ``.spack`` file. Optionally,
the rpaths (and ids and deps on macOS) can be changed to paths relative to
the Spack install tree before the tarball is created.
Build caches are created via:
.. code-block:: console
$ spack buildcache create spec
---------------------------------------
Finding or installing build cache files
---------------------------------------
To find build caches or install build caches, a Spack mirror must be configured
with:
.. code-block:: console
$ spack mirror add <name> <url>
Build caches are found via:
.. code-block:: console
$ spack buildcache list
Build caches are installed via:
.. code-block:: console
$ spack buildcache install
----------
Relocation
----------
Initial build and later installation do not necessarily happen at the same
location. Spack provides a relocation capability and corrects for RPATHs and
non-relocatable scripts. However, many packages compile paths into binary
artifacts directly. In such cases, the build instructions of this package would
need to be adjusted for better relocatability.
.. _cmd-spack-buildcache:
--------------------
``spack buildcache``
--------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache create``
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Create tarball of installed Spack package and all dependencies.
Tarballs are checksummed and signed if gpg2 is available.
Places them in a directory ``build_cache`` that can be copied to a mirror.
Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches.
============== ========================================================================================================================
Arguments Description
============== ========================================================================================================================
``<specs>`` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches
``-d <path>`` directory in which ``build_cache`` directory is created, defaults to ``.``
``-f`` overwrite ``.spack`` file in ``build_cache`` directory if it exists
``-k <key>`` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used.
``-r`` make paths in binaries relative before creating tarball
``-y`` answer yes to all "create unsigned ``build_cache``" questions
============== ========================================================================================================================
^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache list``
^^^^^^^^^^^^^^^^^^^^^^^^^
Retrieves all specs for build caches available on a Spack mirror.
============== =====================================================================================
Arguments Description
============== =====================================================================================
``<specs>`` list of partial package specs to be matched against specs downloaded for build caches
============== =====================================================================================
E.g. ``spack buildcache list gcc`` will print only commands to install ``gcc`` package(s)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache install``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Retrieves all specs for build caches available on a Spack mirror and installs build caches
with specs matching the specs input.
============== ==============================================================================================
Arguments Description
============== ==============================================================================================
``<specs>`` list of partial package specs or hashes with a leading ``/`` to be installed from build caches
``-f`` remove install directory if it exists before unpacking tarball
``-y`` answer yes to all "do not verify package with gpg" questions
============== ==============================================================================================
^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache keys``
^^^^^^^^^^^^^^^^^^^^^^^^^
List public keys available on Spack mirror.
========= ==============================================
Arguments Description
========= ==============================================
``-i`` trust the downloaded keys, prompting for each
``-y`` answer yes to all; trust all keys downloaded
========= ==============================================

View File

@@ -71,9 +71,9 @@ package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based package version, then Spack will guess the missing component based
on its most-favored packages, and it may guess incorrectly. on its most-favored packages, and it may guess incorrectly.
Each package version and compiler listed in an external should Each package version and compilers listed in an external should
have entries in Spack's packages and compiler configuration, even have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built. though the package and compiler may not every be built.
The packages configuration can tell Spack to use an external location The packages configuration can tell Spack to use an external location
for certain package versions, but it does not restrict Spack to using for certain package versions, but it does not restrict Spack to using

View File

@@ -1,14 +1,14 @@
# flake8: noqa # flake8: noqa
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -49,11 +49,8 @@
# If extensions (or modules to document with autodoc) are in another directory, # If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the # add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here. # documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('exts'))
sys.path.insert(0, os.path.abspath('../external')) sys.path.insert(0, os.path.abspath('../external'))
if sys.version_info[0] < 3:
sys.path.insert(0, os.path.abspath('../external/yaml/lib'))
else:
sys.path.insert(0, os.path.abspath('../external/yaml/lib3'))
sys.path.append(os.path.abspath('..')) sys.path.append(os.path.abspath('..'))
# Add the Spack bin directory to the path so that we can use its output in docs. # Add the Spack bin directory to the path so that we can use its output in docs.
@@ -61,6 +58,11 @@
os.environ['SPACK_ROOT'] = spack_root os.environ['SPACK_ROOT'] = spack_root
os.environ['PATH'] += '%s%s/bin' % (os.pathsep, spack_root) os.environ['PATH'] += '%s%s/bin' % (os.pathsep, spack_root)
# Get the spack version for use in the docs
spack_version = subprocess.Popen(
[spack_root + '/bin/spack', '-V'],
stderr=subprocess.PIPE).communicate()[1].strip().split('.')
# Set an environment variable so that colify will print output like it would to # Set an environment variable so that colify will print output like it would to
# a terminal. # a terminal.
os.environ['COLIFY_SIZE'] = '25x120' os.environ['COLIFY_SIZE'] = '25x120'
@@ -68,10 +70,9 @@
# #
# Generate package list using spack command # Generate package list using spack command
# #
with open('package_list.html', 'w') as plist_file: with open('package_list.rst', 'w') as plist_file:
subprocess.Popen( subprocess.Popen(
[spack_root + '/bin/spack', 'list', '--format=html'], [spack_root + '/bin/spack', 'list', '--format=rst'], stdout=plist_file)
stdout=plist_file)
# #
# Find all the `cmd-spack-*` references and add them to a command index # Find all the `cmd-spack-*` references and add them to a command index
@@ -90,21 +91,10 @@
for cmd in sorted(command_names): for cmd in sorted(command_names):
index.write(' * :ref:`%s`\n' % cmd) index.write(' * :ref:`%s`\n' % cmd)
#
# Run sphinx-apidoc # Run sphinx-apidoc
# sphinx_apidoc(['-T', '-o', '.', '../spack'])
# Remove any previous API docs os.remove('modules.rst')
# Read the Docs doesn't clean up after previous builds
# Without this, the API Docs will never actually update
#
apidoc_args = [
'--force', # Older versions of Sphinx ignore the first argument
'--force', # Overwrite existing files
'--no-toc', # Don't create a table of contents file
'--output-dir=.', # Directory to place all output
]
sphinx_apidoc(apidoc_args + ['../spack'])
sphinx_apidoc(apidoc_args + ['../llnl'])
# #
# Exclude everything in spack.__all__ from indexing. All of these # Exclude everything in spack.__all__ from indexing. All of these
@@ -120,31 +110,17 @@
for line in fileinput.input('spack.rst', inplace=1): for line in fileinput.input('spack.rst', inplace=1):
if handling_spack: if handling_spack:
if not line.startswith(' :noindex:'): if not line.startswith(' :noindex:'):
print(' :noindex: %s' % ' '.join(spack.__all__)) print ' :noindex: %s' % ' '.join(spack.__all__)
handling_spack = False handling_spack = False
if line.startswith('.. automodule::'): if line.startswith('.. automodule::'):
handling_spack = (line == '.. automodule:: spack\n') handling_spack = (line == '.. automodule:: spack\n')
sys.stdout.write(line) print line,
# Enable todo items # Enable todo items
todo_include_todos = True todo_include_todos = True
#
# Disable duplicate cross-reference warnings.
#
from sphinx.domains.python import PythonDomain
class PatchedPythonDomain(PythonDomain):
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
if 'refspecific' in node:
del node['refspecific']
return super(PatchedPythonDomain, self).resolve_xref(
env, fromdocname, builder, typ, target, node, contnode)
def setup(sphinx):
sphinx.override_domain(PatchedPythonDomain)
# -- General configuration ----------------------------------------------------- # -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here. # If your documentation needs a minimal Sphinx version, state it here.
@@ -154,7 +130,6 @@ def setup(sphinx):
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', extensions = ['sphinx.ext.autodoc',
'sphinx.ext.graphviz', 'sphinx.ext.graphviz',
'sphinx.ext.napoleon',
'sphinx.ext.todo', 'sphinx.ext.todo',
'sphinxcontrib.programoutput'] 'sphinxcontrib.programoutput']
@@ -181,16 +156,16 @@ def setup(sphinx):
# General information about the project. # General information about the project.
project = u'Spack' project = u'Spack'
copyright = u'2013-2017, Lawrence Livermore National Laboratory.' copyright = u'2013-2015, Lawrence Livermore National Laboratory.'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
# built documents. # built documents.
# #
# The short X.Y version. # The short X.Y version.
version = str(spack.spack_version.up_to(2)) version = '.'.join(spack_version[:2])
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = str(spack.spack_version.up_to(2)) release = '.'.join(spack_version[:2])
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.
@@ -250,7 +225,7 @@ def setup(sphinx):
# The name of an image file (relative to this directory) to place at the top # The name of an image file (relative to this directory) to place at the top
# of the sidebar. # of the sidebar.
html_logo = '../../../share/spack/logo/spack-logo-white-text.svg' html_logo = '../../../share/spack/logo/spack-logo-white-text-48.png'
# The name of an image file (within the static path) to use as favicon of the # The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32

View File

@@ -14,6 +14,27 @@ see the default settings by looking at
These settings can be overridden in ``etc/spack/config.yaml`` or These settings can be overridden in ``etc/spack/config.yaml`` or
``~/.spack/config.yaml``. See :ref:`configuration-scopes` for details. ``~/.spack/config.yaml``. See :ref:`configuration-scopes` for details.
.. _config-file-variables:
------------------------------
Config file variables
------------------------------
You may notice some variables prefixed with ``$`` in the settings above.
Spack understands several variables that can be used in values of
configuration parameters. They are:
* ``$spack``: path to the prefix of this spack installation
* ``$tempdir``: default system temporary directory (as specified in
Python's `tempfile.tempdir
<https://docs.python.org/2/library/tempfile.html#tempfile.tempdir>`_
variable.
* ``$user``: name of the current user
Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:
``${varname}``.
-------------------- --------------------
``install_tree`` ``install_tree``
-------------------- --------------------
@@ -21,43 +42,6 @@ These settings can be overridden in ``etc/spack/config.yaml`` or
The location where Spack will install packages and their dependencies. The location where Spack will install packages and their dependencies.
Default is ``$spack/opt/spack``. Default is ``$spack/opt/spack``.
---------------------------------------------------
``install_hash_length`` and ``install_path_scheme``
---------------------------------------------------
The default Spack installation path can be very long and can create
problems for scripts with hardcoded shebangs. There are two parameters
to help with that. Firstly, the ``install_hash_length`` parameter can
set the length of the hash in the installation path from 1 to 32. The
default path uses the full 32 characters.
Secondly, it is
also possible to modify the entire installation scheme. By default
Spack uses
``${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}``
where the tokens that are available for use in this directive are the
same as those understood by the ``Spec.format`` method. Using this parameter it
is possible to use a different package layout or reduce the depth of
the installation paths. For example
.. code-block:: yaml
config:
install_path_scheme: '${PACKAGE}/${VERSION}/${HASH:7}'
would install packages into sub-directories using only the package
name, version and a hash length of 7 characters.
When using either parameter to set the hash length it only affects the
representation of the hash in the installation directory. You
should be aware that the smaller the hash length the more likely
naming conflicts will occur. These parameters are independent of those
used to configure module names.
.. warning:: Modifying the installation hash length or path scheme after
packages have been installed will prevent Spack from being
able to find the old installation directories.
-------------------- --------------------
``module_roots`` ``module_roots``
-------------------- --------------------
@@ -78,8 +62,8 @@ See :ref:`modules` for details.
``build_stage`` ``build_stage``
-------------------- --------------------
Spack is designed to run out of a user home directory, and on many Spack is designed to run out of a user home directories, and on many
systems the home directory is a (slow) network filesystem. On most systems, systems the home directory a (slow) network filesystem. On most systems,
building in a temporary filesystem results in faster builds than building building in a temporary filesystem results in faster builds than building
in the home directory. Usually, there is also more space available in in the home directory. Usually, there is also more space available in
the temporary location than in the home directory. So, Spack tries to the temporary location than in the home directory. So, Spack tries to
@@ -101,12 +85,12 @@ See :ref:`config-file-variables` for more on ``$tempdir`` and ``$spack``.
When Spack builds a package, it creates a temporary directory within the When Spack builds a package, it creates a temporary directory within the
``build_stage``, and it creates a symbolic link to that directory in ``build_stage``, and it creates a symbolic link to that directory in
``$spack/var/spack/stage``. This is used to track the stage. ``$spack/var/spack/stage``. This is used totrack the stage.
After a package is successfully installed, Spack deletes the temporary After a package is successfully installed, Spack deletes the temporary
directory it used to build. Unsuccessful builds are not deleted, but you directory it used to build. Unsuccessful builds are not deleted, but you
can manually purge them with :ref:`spack clean --stage can manually purge them with :ref:`spack purge --stage
<cmd-spack-clean>`. <cmd-spack-purge>`.
.. note:: .. note::
@@ -121,8 +105,8 @@ can manually purge them with :ref:`spack clean --stage
Location to cache downloaded tarballs and repositories. By default these Location to cache downloaded tarballs and repositories. By default these
are stored in ``$spack/var/spack/cache``. These are stored indefinitely are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads by default. Can be purged with :ref:`spack purge --downloads
<cmd-spack-clean>`. <cmd-spack-purge>`.
-------------------- --------------------
``misc_cache`` ``misc_cache``
@@ -130,7 +114,7 @@ by default. Can be purged with :ref:`spack clean --downloads
Temporary directory to store long-lived cache files, such as indices of Temporary directory to store long-lived cache files, such as indices of
packages available in repositories. Defaults to ``~/.spack/cache``. Can packages available in repositories. Defaults to ``~/.spack/cache``. Can
be purged with :ref:`spack clean --misc-cache <cmd-spack-clean>`. be purged with :ref:`spack purge --misc-cache <cmd-spack-purge>`.
-------------------- --------------------
``verify_ssl`` ``verify_ssl``
@@ -159,24 +143,7 @@ the way packages build. This includes ``LD_LIBRARY_PATH``, ``CPATH``,
``LIBRARY_PATH``, ``DYLD_LIBRARY_PATH``, and others. ``LIBRARY_PATH``, ``DYLD_LIBRARY_PATH``, and others.
By default, builds are ``clean``, but on some machines, compilers and By default, builds are ``clean``, but on some machines, compilers and
other tools may need custom ``LD_LIBRARY_PATH`` settings to run. You can other tools may need custom ``LD_LIBRARY_PATH`` setings to run. You can
set ``dirty`` to ``true`` to skip the cleaning step and make all builds set ``dirty`` to ``true`` to skip the cleaning step and make all builds
"dirty" by default. Be aware that this will reduce the reproducibility "dirty" by default. Be aware that this will reduce the reproducibility
of builds. of builds.
--------------
``build_jobs``
--------------
Unless overridden in a package or on the command line, Spack builds all
packages in parallel. For a build system that uses Makefiles, this means
running ``make -j<build_jobs>``, where ``build_jobs`` is the number of
threads to use.
The default parallelism is equal to the number of cores on your machine.
If you work on a shared login node or have a strict ulimit, it may be
necessary to set the default to a lower value. By setting ``build_jobs``
to 4, for example, commands like ``spack install`` will run ``make -j4``
instead of hogging every core.
To build all software in serial, set ``build_jobs`` to 1.

View File

@@ -45,27 +45,20 @@ Configuration Scopes
------------------------- -------------------------
Spack pulls configuration data from files in several directories. There Spack pulls configuration data from files in several directories. There
are four configuration scopes. From lowest to highest: are three configuration scopes. From lowest to highest:
#. **defaults**: Stored in ``$(prefix)/etc/spack/defaults/``. These are 1. **defaults**: Stored in ``$(prefix)/etc/spack/defaults/``. These are
the "factory" settings. Users should generally not modify the settings the "factory" settings. Users should generally not modify the settings
here, but should override them in other configuration scopes. The here, but should override them in other configuration scopes. The
defaults here will change from version to version of Spack. defaults here will change from version to version of Spack.
#. **system**: Stored in ``/etc/spack``. These are settings for this 2. **site**: Stored in ``$(prefix)/etc/spack/``. Settings here affect
machine, or for all machines on which this file system is
mounted. The site scope can be used for settings idiosyncratic to a
particular machine, such as the locations of compilers or external
packages. These settings are presumably controlled by someone with
root access on the machine.
#. **site**: Stored in ``$(prefix)/etc/spack/``. Settings here affect
only *this instance* of Spack, and they override defaults. The site only *this instance* of Spack, and they override defaults. The site
scope can can be used for per-project settings (one spack instance per scope can can be used for per-project settings (one spack instance per
project) or for site-wide settings on a multi-user machine (e.g., for project) or for site-wide settings on a multi-user machine (e.g., for
a common spack instance). a common spack instance).
#. **user**: Stored in the home directory: ``~/.spack/``. These settings 3. **user**: Stored in the home directory: ``~/.spack/``. These settings
affect all instances of Spack and take the highest precedence. affect all instances of Spack and take the highest precedence.
Each configuration directory may contain several configuration files, Each configuration directory may contain several configuration files,
@@ -85,25 +78,22 @@ Platform-specific scopes
------------------------- -------------------------
For each scope above, there can *also* be platform-specific settings. For each scope above, there can *also* be platform-specific settings.
For example, on Blue Gene/Q machines, Spack needs to know the location For example, on Blue Gene/Q machines, Spack needs to know the location of
of cross-compilers for the compute nodes. This configuration is in cross-compilers for the compute nodes. This configuration is in
``etc/spack/defaults/bgq/compilers.yaml``. It will take precedence ``etc/spack/defaults/bgq/compilers.yaml``. It will take precedence over
over settings in the ``defaults`` scope, but can still be overridden settings in the ``defaults`` scope, but can still be overridden by
by settings in ``system``, ``system/bgq``, ``site``, ``site/bgq``, settings in ``site``, ``site/bgq``, ``user``, or ``user/bgq``. So, the
``user``, or ``user/bgq``. So, the full scope precedence is: full scope precedence is:
1. ``defaults`` 1. ``defaults``
2. ``defaults/<platform>`` 2. ``defaults/<platform>``
3. ``system`` 3. ``site``
4. ``system/<platform>`` 4. ``site/<platform>``
5. ``site`` 5. ``user``
6. ``site/<platform>`` 6. ``user/<platform>``
7. ``user``
8. ``user/<platform>``
You can get the name to use for ``<platform>`` by running ``spack arch You can get the name to use for ``<platform>`` by running ``spack arch
--platform``. The system config scope has a ``<platform>`` section for --platform``.
sites at which ``/etc`` is mounted on multiple heterogeneous machines.
------------------------- -------------------------
Scope precedence Scope precedence
@@ -261,52 +251,3 @@ The merged configuration would look like this:
- /lustre-scratch/$user - /lustre-scratch/$user
- ~/mystage - ~/mystage
$ _ $ _
.. _config-file-variables:
------------------------------
Config file variables
------------------------------
Spack understands several variables which can be used in config file paths
where ever they appear. There are three sets of these variables, Spack specific
variables, environment variables, and user path variables. Spack specific
variables and environment variables both are indicated by prefixing the variable
name with ``$``. User path variables are indicated at the start of the path with
``~`` or ``~user``. Let's discuss each in turn.
^^^^^^^^^^^^^^^^^^^^^^^^
Spack Specific Variables
^^^^^^^^^^^^^^^^^^^^^^^^
Spack understands several special variables. These are:
* ``$spack``: path to the prefix of this spack installation
* ``$tempdir``: default system temporary directory (as specified in
Python's `tempfile.tempdir
<https://docs.python.org/2/library/tempfile.html#tempfile.tempdir>`_
variable.
* ``$user``: name of the current user
Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:
``${varname}``. Their names are also case insensitive meaning that ``$SPACK``
works just as well as ``$spack``. These special variables are also
substituted first, so any environment variables with the same name will not
be used.
^^^^^^^^^^^^^^^^^^^^^
Environment Variables
^^^^^^^^^^^^^^^^^^^^^
Spack then uses ``os.path.expandvars`` to expand any remaining environment
variables.
^^^^^^^^^^^^^^
User Variables
^^^^^^^^^^^^^^
Spack also uses the ``os.path.expanduser`` function on the path to expand
any user tilde paths such as ``~`` or ``~user``. These tilde paths must appear
at the beginning of the path or ``os.path.expanduser`` will not properly
expand them.

View File

@@ -26,38 +26,21 @@ Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-succe
branching model. The develop branch contains the latest contributions, and branching model. The develop branch contains the latest contributions, and
master is always tagged and points to the latest stable release. Therefore, when master is always tagged and points to the latest stable release. Therefore, when
you send your request, make ``develop`` the destination branch on the you send your request, make ``develop`` the destination branch on the
`Spack repository <https://github.com/spack/spack>`_. `Spack repository <https://github.com/LLNL/spack>`_.
---------------------- ----------------------
Continuous Integration Continuous Integration
---------------------- ----------------------
Spack uses `Travis CI <https://travis-ci.org/spack/spack>`_ for Continuous Integration Spack uses `Travis CI <https://travis-ci.org/LLNL/spack>`_ for Continuous Integration
testing. This means that every time you submit a pull request, a series of tests will testing. This means that every time you submit a pull request, a series of tests will
be run to make sure you didn't accidentally introduce any bugs into Spack. **Your PR be run to make sure you didn't accidentally introduce any bugs into Spack. Your PR
will not be accepted until it passes all of these tests.** While you can certainly wait will not be accepted until it passes all of these tests. While you can certainly wait
for the results of these tests after submitting a PR, we recommend that you run them for the results of these tests after submitting a PR, we recommend that you run them
locally to speed up the review process. locally to speed up the review process.
.. note::
Oftentimes, Travis will fail for reasons other than a problem with your PR.
For example, apt-get, pip, or homebrew will fail to download one of the
dependencies for the test suite, or a transient bug will cause the unit tests
to timeout. If Travis fails, click the "Details" link and click on the test(s)
that is failing. If it doesn't look like it is failing for reasons related to
your PR, you have two options. If you have write permissions for the Spack
repository, you should see a "Restart job" button on the right-hand side. If
not, you can close and reopen your PR to rerun all of the tests. If the same
test keeps failing, there may be a problem with your PR. If you notice that
every recent PR is failing with the same error message, it may be that Travis
is down or one of Spack's dependencies put out a new release that is causing
problems. If this is the case, please file an issue.
If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test
against Python 2.6, 2.7, and 3.3-3.6 on both macOS and Linux. We currently against Python 2.6 and 2.7. We currently perform 3 types of tests:
perform 3 types of tests:
^^^^^^^^^^ ^^^^^^^^^^
Unit Tests Unit Tests
@@ -92,10 +75,7 @@ This allows you to develop iteratively: make a change, test that change, make
another change, test that change, etc. To get a list of all available unit another change, test that change, etc. To get a list of all available unit
tests, run: tests, run:
.. command-output:: spack test --list .. command-output:: spack test --collect-only
A more detailed list of available unit tests can be found by running
``spack test --long-list``.
Unit tests are crucial to making sure bugs aren't introduced into Spack. If you Unit tests are crucial to making sure bugs aren't introduced into Spack. If you
are modifying core Spack libraries or adding new functionality, please consider are modifying core Spack libraries or adding new functionality, please consider
@@ -104,7 +84,7 @@ adding new unit tests or strengthening existing tests.
.. note:: .. note::
There is also a ``run-unit-tests`` script in ``share/spack/qa`` that There is also a ``run-unit-tests`` script in ``share/spack/qa`` that
runs the unit tests. Afterwards, it reports back to Codecov with the runs the unit tests. Afterwards, it reports back to Coverage with the
percentage of Spack that is covered by unit tests. This script is percentage of Spack that is covered by unit tests. This script is
designed for Travis CI. If you want to run the unit tests yourself, we designed for Travis CI. If you want to run the unit tests yourself, we
suggest you use ``spack test``. suggest you use ``spack test``.
@@ -141,7 +121,7 @@ command:
"extends" in ``package.py`` files. "extends" in ``package.py`` files.
More approved flake8 exemptions can be found More approved flake8 exemptions can be found
`here <https://github.com/spack/spack/blob/develop/.flake8>`_. `here <https://github.com/LLNL/spack/blob/develop/.flake8>`_.
If all is well, you'll see something like this: If all is well, you'll see something like this:
@@ -171,22 +151,20 @@ However, if you aren't compliant with PEP 8, flake8 will complain:
Most of the error messages are straightforward, but if you don't understand what Most of the error messages are straightforward, but if you don't understand what
they mean, just ask questions about them when you submit your PR. The line numbers they mean, just ask questions about them when you submit your PR. The line numbers
will change if you add or delete lines, so simply run ``spack flake8`` again will change if you add or delete lines, so simply run ``run-flake8-tests`` again
to update them. to update them.
.. tip:: .. tip::
Try fixing flake8 errors in reverse order. This eliminates the need for Try fixing flake8 errors in reverse order. This eliminates the need for
multiple runs of ``spack flake8`` just to re-compute line numbers and multiple runs of ``flake8`` just to re-compute line numbers and makes it
makes it much easier to fix errors directly off of the Travis output. much easier to fix errors directly off of the Travis output.
.. warning:: .. warning::
Flake8 requires setuptools in order to run. If you installed ``py-flake8`` Flake8 requires setuptools in order to run. If you installed ``py-flake8``
with Spack, make sure to add ``py-setuptools`` to your ``PYTHONPATH``. with Spack, make sure to add ``py-setuptools`` to your ``PYTHONPATH``.
The easiest way to do this is to run ``spack activate py-flake8`` so that Otherwise, you will get an error message like:
all of the dependencies are symlinked to a central location. If you see an
error message like:
.. code-block:: console .. code-block:: console
@@ -195,8 +173,6 @@ to update them.
from pkg_resources import load_entry_point from pkg_resources import load_entry_point
ImportError: No module named pkg_resources ImportError: No module named pkg_resources
that means Flake8 couldn't find setuptools in your ``PYTHONPATH``.
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
Documentation Tests Documentation Tests
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^
@@ -210,7 +186,6 @@ Building the documentation requires several dependencies, all of which can be
installed with Spack: installed with Spack:
* sphinx * sphinx
* sphinxcontrib-programoutput
* graphviz * graphviz
* git * git
* mercurial * mercurial
@@ -218,26 +193,20 @@ installed with Spack:
.. warning:: .. warning::
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_. Sphinx has `several required dependencies <https://github.com/LLNL/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
If you installed ``py-sphinx`` with Spack, make sure to add all of these If you installed ``py-sphinx`` with Spack, make sure to add all of these
dependencies to your ``PYTHONPATH``. The easiest way to do this is to run: dependencies to your ``PYTHONPATH``. The easiest way to do this is to run
``spack activate py-sphinx`` so that all of the dependencies are symlinked
to a central location. If you see an error message like:
.. code-block:: console .. code-block:: console
$ spack activate py-sphinx Traceback (most recent call last):
$ spack activate py-sphinxcontrib-programoutput File: "/usr/bin/flake8", line 5, in <module>
from pkg_resources import load_entry_point
ImportError: No module named pkg_resources
so that all of the dependencies are symlinked to a central location. that means Sphinx couldn't find setuptools in your ``PYTHONPATH``.
If you see an error message like:
.. code-block:: console
Extension error:
Could not import extension sphinxcontrib.programoutput (exception: No module named sphinxcontrib.programoutput)
make: *** [html] Error 1
that means Sphinx couldn't find ``py-sphinxcontrib-programoutput`` in your
``PYTHONPATH``.
Once all of the dependencies are installed, you can try building the documentation: Once all of the dependencies are installed, you can try building the documentation:
@@ -252,11 +221,11 @@ your PR is accepted.
.. note:: .. note::
There is also a ``run-doc-tests`` script in ``share/spack/qa``. The only There is also a ``run-doc-tests`` script in the Quality Assurance directory.
difference between running this script and running ``make`` by hand is that The only difference between running this script and running ``make`` by hand
the script will exit immediately if it encounters an error or warning. This is that the script will exit immediately if it encounters an error or warning.
is necessary for Travis CI. If you made a lot of documentation changes, it is This is necessary for Travis CI. If you made a lot of documentation tests, it
much quicker to run ``make`` by hand so that you can see all of the warnings is much quicker to run ``make`` by hand so that you can see all of the warnings
at once. at once.
If you are editing the documentation, you should obviously be running the If you are editing the documentation, you should obviously be running the
@@ -304,37 +273,6 @@ Documentation changes can result in much more obfuscated warning messages.
If you don't understand what they mean, feel free to ask when you submit If you don't understand what they mean, feel free to ask when you submit
your PR. your PR.
--------
Coverage
--------
Spack uses `Codecov <https://codecov.io/>`_ to generate and report unit test
coverage. This helps us tell what percentage of lines of code in Spack are
covered by unit tests. Although code covered by unit tests can still contain
bugs, it is much less error prone than code that is not covered by unit tests.
Codecov provides `browser extensions <https://github.com/codecov/browser-extension>`_
for Google Chrome, Firefox, and Opera. These extensions integrate with GitHub
and allow you to see coverage line-by-line when viewing the Spack repository.
If you are new to Spack, a great way to get started is to write unit tests to
increase coverage!
Unlike with Travis, Codecov tests are not required to pass in order for your
PR to be merged. If you modify core Spack libraries, we would greatly
appreciate unit tests that cover these changed lines. Otherwise, we have no
way of knowing whether or not your changes introduce a bug. If you make
substantial changes to the core, we may request unit tests to increase coverage.
.. note::
If the only files you modified are package files, we do not care about
coverage on your PR. You may notice that the Codecov tests fail even though
you didn't modify any core files. This means that Spack's overall coverage
has increased since you branched off of develop. This is a good thing!
If you really want to get the Codecov tests to pass, you can rebase off of
the latest develop, but again, this is not required.
------------- -------------
Git Workflows Git Workflows
------------- -------------

View File

@@ -300,46 +300,6 @@ Stage objects
Writing commands Writing commands
---------------- ----------------
Adding a new command to Spack is easy. Simply add a ``<name>.py`` file to
``lib/spack/spack/cmd/``, where ``<name>`` is the name of the subcommand.
At the bare minimum, two functions are required in this file:
^^^^^^^^^^^^^^^^^^
``setup_parser()``
^^^^^^^^^^^^^^^^^^
Unless your command doesn't accept any arguments, a ``setup_parser()``
function is required to define what arguments and flags your command takes.
See the `Argparse documentation <https://docs.python.org/2.7/library/argparse.html>`_
for more details on how to add arguments.
Some commands have a set of subcommands, like ``spack compiler find`` or
``spack module refresh``. You can add subparsers to your parser to handle
this. Check out ``spack edit --command compiler`` for an example of this.
A lot of commands take the same arguments and flags. These arguments should
be defined in ``lib/spack/spack/cmd/common/arguments.py`` so that they don't
need to be redefined in multiple commands.
^^^^^^^^^^^^
``<name>()``
^^^^^^^^^^^^
In order to run your command, Spack searches for a function with the same
name as your command in ``<name>.py``. This is the main method for your
command, and can call other helper methods to handle common tasks.
Remember, before adding a new command, think to yourself whether or not this
new command is actually necessary. Sometimes, the functionality you desire
can be added to an existing command. Also remember to add unit tests for
your command. If it isn't used very frequently, changes to the rest of
Spack can cause your command to break without sufficient unit tests to
prevent this from happening.
Whenever you add/remove/rename a command or flags for an existing command,
make sure to update Spack's `Bash tab completion script
<https://github.com/adamjstewart/spack/blob/develop/share/spack/spack-completion.bash>`_.
---------- ----------
Unit tests Unit tests
---------- ----------
@@ -352,121 +312,14 @@ Unit testing
Developer commands Developer commands
------------------ ------------------
.. _cmd-spack-doc:
^^^^^^^^^^^^^ ^^^^^^^^^^^^^
``spack doc`` ``spack doc``
^^^^^^^^^^^^^ ^^^^^^^^^^^^^
.. _cmd-spack-test:
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
``spack test`` ``spack test``
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
.. _cmd-spack-python:
^^^^^^^^^^^^^^^^
``spack python``
^^^^^^^^^^^^^^^^
``spack python`` is a command that lets you import and debug things as if
you were in a Spack interactive shell. Without any arguments, it is similar
to a normal interactive Python shell, except you can import spack and any
other Spack modules:
.. code-block:: console
$ spack python
Spack version 0.10.0
Python 2.7.13, Linux x86_64
>>> from spack.version import Version
>>> a = Version('1.2.3')
>>> b = Version('1_2_3')
>>> a == b
True
>>> c = Version('1.2.3b')
>>> c > a
True
>>>
You can also run a single command:
.. code-block:: console
$ spack python -c 'import distro; distro.linux_distribution()'
('Fedora', '25', 'Workstation Edition')
or a file:
.. code-block:: console
$ spack python ~/test_fetching.py
just like you would with the normal ``python`` command.
.. _cmd-spack-url:
^^^^^^^^^^^^^
``spack url``
^^^^^^^^^^^^^
A package containing a single URL can be used to download several different
versions of the package. If you've ever wondered how this works, all of the
magic is in :mod:`spack.url`. This module contains methods for extracting
the name and version of a package from its URL. The name is used by
``spack create`` to guess the name of the package. By determining the version
from the URL, Spack can replace it with other versions to determine where to
download them from.
The regular expressions in ``parse_name_offset`` and ``parse_version_offset``
are used to extract the name and version, but they aren't perfect. In order
to debug Spack's URL parsing support, the ``spack url`` command can be used.
"""""""""""""""""""
``spack url parse``
"""""""""""""""""""
If you need to debug a single URL, you can use the following command:
.. command-output:: spack url parse http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz
You'll notice that the name and version of this URL are correctly detected,
and you can even see which regular expressions it was matched to. However,
you'll notice that when it substitutes the version number in, it doesn't
replace the ``2.2`` with ``9.9`` where we would expect ``9.9.9b`` to live.
This particular package may require a ``list_url`` or ``url_for_version``
function.
This command also accepts a ``--spider`` flag. If provided, Spack searches
for other versions of the package and prints the matching URLs.
""""""""""""""""""
``spack url list``
""""""""""""""""""
This command lists every URL in every package in Spack. If given the
``--color`` and ``--extrapolation`` flags, it also colors the part of
the string that it detected to be the name and version. The
``--incorrect-name`` and ``--incorrect-version`` flags can be used to
print URLs that were not being parsed correctly.
"""""""""""""""""""""
``spack url summary``
"""""""""""""""""""""
This command attempts to parse every URL for every package in Spack
and prints a summary of how many of them are being correctly parsed.
It also prints a histogram showing which regular expressions are being
matched and how frequently:
.. command-output:: spack url summary
This command is essential for anyone adding or changing the regular
expressions that parse names and versions. By running this command
before and after the change, you can make sure that your regular
expression fixes more packages than it breaks.
--------- ---------
Profiling Profiling
--------- ---------

View File

@@ -0,0 +1,25 @@
Copyright (c) 2010, 2011, 2012 Sebastian Wiesner <lunaryorn@googlemail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-
"""
sphinxcontrib
~~~~~~~~~~~~~

Contains 3rd party Sphinx extensions.
"""

# Declare ``sphinxcontrib`` as a pkg_resources namespace package so that
# independently installed ``sphinxcontrib.*`` distributions can coexist
# under this single package name.
__import__('pkg_resources').declare_namespace(__name__)

View File

@@ -0,0 +1,263 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2010, 2011, 2012, Sebastian Wiesner <lunaryorn@gmail.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
sphinxcontrib.programoutput
===========================
This extension provides a directive to include the output of commands as
literal block while building the docs.
.. moduleauthor:: Sebastian Wiesner <lunaryorn@gmail.com>
"""
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import sys
import os
import shlex
from subprocess import Popen, PIPE, STDOUT
from collections import defaultdict, namedtuple
from docutils import nodes
from docutils.parsers import rst
from docutils.parsers.rst.directives import flag, unchanged, nonnegative_int
__version__ = '0.9'
class program_output(nodes.Element):
    # Placeholder doctree node emitted by ProgramOutputDirective.  It only
    # carries the command attributes; run_programs() later replaces it with
    # a literal block holding the command's actual output.
    pass
def _slice(value):
parts = [int(v.strip()) for v in value.split(',')]
if len(parts) > 2:
raise ValueError('too many slice parts')
return tuple((parts + [None] * 2)[:2])
class ProgramOutputDirective(rst.Directive):
    """Directive recording a shell command to run at build time.

    Registered under both ``program-output`` and ``command-output``; the
    latter also prints a prompt line before the captured output.
    """
    has_content = False
    final_argument_whitespace = True
    required_arguments = 1
    option_spec = {
        'shell': flag,
        'prompt': flag,
        'nostderr': flag,
        'ellipsis': _slice,
        'extraargs': unchanged,
        'returncode': nonnegative_int,
        'cwd': unchanged,
    }

    def run(self):
        build_env = self.state.document.settings.env
        opts = self.options

        node = program_output()
        node.line = self.lineno
        node['command'] = self.arguments[0]
        # ``command-output`` always shows the prompt; ``program-output``
        # only does so when the ``:prompt:`` flag is present.
        node['show_prompt'] = (True if self.name == 'command-output'
                               else 'prompt' in opts)
        node['hide_standard_error'] = 'nostderr' in opts
        node['extraargs'] = opts.get('extraargs', '')
        # Resolve ``:cwd:`` relative to the documentation source tree.
        _, working_dir = build_env.relfn2path(opts.get('cwd', '/'))
        node['working_directory'] = working_dir
        node['use_shell'] = 'shell' in opts
        node['returncode'] = opts.get('returncode', 0)
        if 'ellipsis' in opts:
            node['strip_lines'] = opts['ellipsis']
        return [node]
# Immutable, hashable value object describing one command invocation; it
# serves as the cache key in ProgramOutputCache, so field order matters.
_Command = namedtuple(
    'Command', 'command shell hide_standard_error working_directory')
class Command(_Command):
    """
    A command to be executed.

    Immutable and hashable so instances can key ProgramOutputCache.
    """

    def __new__(cls, command, shell=False, hide_standard_error=False,
                working_directory='/'):
        # Lists are unhashable; convert to a tuple so the instance can be
        # used as a dict key.
        if isinstance(command, list):
            command = tuple(command)
        # `chdir()` resolves symlinks, so we need to resolve them too for
        # caching to make sure that different symlinks to the same directory
        # don't result in different cache keys. Also normalize paths to make
        # sure that identical paths are also equal as strings.
        working_directory = os.path.normpath(os.path.realpath(
            working_directory))
        return _Command.__new__(cls, command, shell, hide_standard_error,
                                working_directory)

    @classmethod
    def from_program_output_node(cls, node):
        """
        Create a command from a :class:`program_output` node.
        """
        extraargs = node.get('extraargs', '')
        command = (node['command'] + ' ' + extraargs).strip()
        return cls(command, node['use_shell'],
                   node['hide_standard_error'], node['working_directory'])

    def execute(self):
        """
        Execute this command.

        Return the :class:`~subprocess.Popen` object representing the running
        command.
        """
        if self.shell:
            # Python 2 only: a unicode command given to a shell must first
            # be encoded to bytes.  ``unicode`` does not exist on Python 3,
            # but the version check short-circuits before it is evaluated.
            if sys.version_info[0] < 3 and isinstance(self.command, unicode):
                command = self.command.encode(sys.getfilesystemencoding())
            else:
                command = self.command
        else:
            # Without a shell, string commands are tokenized with shlex;
            # tuple commands are handed to Popen unchanged.
            if sys.version_info[0] < 3 and isinstance(self.command, unicode):
                command = shlex.split(self.command.encode(
                    sys.getfilesystemencoding()))
            elif isinstance(self.command, str):
                command = shlex.split(self.command)
            else:
                command = self.command
        return Popen(command, shell=self.shell, stdout=PIPE,
                     stderr=PIPE if self.hide_standard_error else STDOUT,
                     cwd=self.working_directory)

    def get_output(self):
        """
        Get the output of this command.

        Return a tuple ``(returncode, output)``. ``returncode`` is the
        integral return code of the process, ``output`` is the output as
        unicode string, with final trailing spaces and new lines stripped.
        """
        process = self.execute()
        # stderr is either merged into stdout (STDOUT) or captured and
        # dropped (hide_standard_error), so communicate()[0] is everything
        # we keep.
        output = process.communicate()[0].decode(
            sys.getfilesystemencoding(), 'replace').rstrip()
        return process.returncode, output

    def __str__(self):
        # Render tuple commands as a list literal and string commands as a
        # quoted string, mirroring how they were written in the directive.
        if isinstance(self.command, tuple):
            return repr(list(self.command))
        return repr(self.command)
class ProgramOutputCache(defaultdict):
    """
    Execute commands and cache their output.

    This class is a mapping.  Its keys are :class:`Command` objects
    representing command invocations.  Its values are tuples of the form
    ``(returncode, output)``, where ``returncode`` is the integral return
    code of the command, and ``output`` is the output as unicode string.

    The first time a key is retrieved, the command is invoked and its
    result cached; subsequent access to the same key returns the cached
    value.
    """

    def __missing__(self, command):
        """Run ``command`` (a :class:`Command`) on a cache miss and
        memoize its ``(returncode, output)`` result."""
        outcome = command.get_output()
        self[command] = outcome
        return outcome
def run_programs(app, doctree):
    """
    Execute all programs represented by ``program_output`` nodes in
    ``doctree``. Each ``program_output`` node in ``doctree`` is then
    replaced with a node, that represents the output of this program.

    The program output is retrieved from the cache in
    ``app.env.programoutput_cache``.
    """
    if app.config.programoutput_use_ansi:
        # enable ANSI support, if requested by config
        from sphinxcontrib.ansi import ansi_literal_block
        node_class = ansi_literal_block
    else:
        node_class = nodes.literal_block
    cache = app.env.programoutput_cache
    for node in doctree.traverse(program_output):
        command = Command.from_program_output_node(node)
        try:
            # Dict lookup triggers ProgramOutputCache.__missing__, which
            # actually runs the command on the first access.
            returncode, output = cache[command]
        except EnvironmentError as error:
            # Spawning the process failed (e.g. command not found); report
            # a build error in place of the node instead of aborting.
            error_message = 'Command {0} failed: {1}'.format(command, error)
            error_node = doctree.reporter.error(error_message, base_node=node)
            node.replace_self(error_node)
        else:
            # Warn (but keep going) when the exit code differs from the
            # directive's expected ``:returncode:`` (default 0).
            if returncode != node['returncode']:
                app.warn('Unexpected return code {0} from command {1}'.format(
                    returncode, command))
            # replace lines with ..., if ellipsis is specified
            if 'strip_lines' in node:
                lines = output.splitlines()
                start, stop = node['strip_lines']
                lines[start:stop] = ['...']
                output = '\n'.join(lines)
            if node['show_prompt']:
                tmpl = app.config.programoutput_prompt_template
                output = tmpl.format(command=node['command'], output=output,
                                     returncode=returncode)
            new_node = node_class(output, output)
            new_node['language'] = 'text'
            node.replace_self(new_node)
def init_cache(app):
    """Attach a :class:`ProgramOutputCache` to ``app.env`` if absent.

    A cache restored from a pickled environment is left untouched so that
    previously captured command output survives incremental builds.
    """
    if hasattr(app.env, 'programoutput_cache'):
        return
    app.env.programoutput_cache = ProgramOutputCache()
def setup(app):
    """Register the extension with Sphinx.

    Adds the two config values, both directives, and the event hooks that
    create the cache and run the recorded commands.
    """
    app.add_config_value('programoutput_use_ansi', False, 'env')
    app.add_config_value('programoutput_prompt_template',
                         '$ {command}\n{output}', 'env')
    # Both names share one implementation; ``command-output`` merely turns
    # the prompt on (see ProgramOutputDirective.run).
    for directive_name in ('program-output', 'command-output'):
        app.add_directive(directive_name, ProgramOutputDirective)
    # str() keeps the event names native strings on Python 2.
    app.connect(str('builder-inited'), init_cache)
    app.connect(str('doctree-read'), run_programs)

View File

@@ -11,10 +11,9 @@ Prerequisites
Spack has the following minimum requirements, which must be installed Spack has the following minimum requirements, which must be installed
before Spack is run: before Spack is run:
1. Python 2 (2.6 or 2.7) or 3 (3.3 - 3.6) 1. Python 2.6 or 2.7
2. A C/C++ compiler 2. A C/C++ compiler
3. The ``git`` and ``curl`` commands. 3. The ``git`` and ``curl`` commands.
4. If using the ``gpg`` subcommand, ``gnupg2`` is required.
These requirements can be easily installed on most modern Linux systems; These requirements can be easily installed on most modern Linux systems;
on Macintosh, XCode is required. Spack is designed to run on HPC on Macintosh, XCode is required. Spack is designed to run on HPC
@@ -27,11 +26,11 @@ Installation
------------ ------------
Getting Spack is easy. You can clone it from the `github repository Getting Spack is easy. You can clone it from the `github repository
<https://github.com/spack/spack>`_ using this command: <https://github.com/llnl/spack>`_ using this command:
.. code-block:: console .. code-block:: console
$ git clone https://github.com/spack/spack.git $ git clone https://github.com/llnl/spack.git
This will create a directory called ``spack``. This will create a directory called ``spack``.
@@ -52,7 +51,7 @@ For a richer experience, use Spack's shell support:
.. code-block:: console .. code-block:: console
# For bash/zsh users # For bash users
$ export SPACK_ROOT=/path/to/spack $ export SPACK_ROOT=/path/to/spack
$ . $SPACK_ROOT/share/spack/setup-env.sh $ . $SPACK_ROOT/share/spack/setup-env.sh
@@ -60,15 +59,10 @@ For a richer experience, use Spack's shell support:
$ setenv SPACK_ROOT /path/to/spack $ setenv SPACK_ROOT /path/to/spack
$ source $SPACK_ROOT/share/spack/setup-env.csh $ source $SPACK_ROOT/share/spack/setup-env.csh
This automatically adds Spack to your ``PATH`` and allows the ``spack`` This automatically adds Spack to your ``PATH`` and allows the ``spack``
command to be used to execute spack :ref:`commands <shell-support>` and command to :ref:`load environment modules <shell-support>` and execute
:ref:`useful packaging commands <packaging-shell-support>`. :ref:`useful packaging commands <packaging-shell-support>`.
If :ref:`environment-modules or dotkit <InstallEnvironmentModules>` is
installed and available, the ``spack`` command can also load and unload
:ref:`modules <modules>`.
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
Clean Environment Clean Environment
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
@@ -91,20 +85,30 @@ Check Installation
With Spack installed, you should be able to run some basic Spack With Spack installed, you should be able to run some basic Spack
commands. For example: commands. For example:
.. command-output:: spack spec netcdf .. code-block:: console
$ spack spec netcdf
...
netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64
^curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
^hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64
^openmpi@1.10.1%gcc@5.3.0~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-SuSE11-x86_64
^m4@1.4.17%gcc@5.3.0+sigsegv arch=linux-SuSE11-x86_64
^libsigsegv@2.10%gcc@5.3.0 arch=linux-SuSE11-x86_64
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
Optional: Alternate Prefix Optional: Alternate Prefix
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
You may want to run Spack out of a prefix other than the git repository You may want to run Spack out of a prefix other than the git repository
you cloned. The ``spack clone`` command provides this you cloned. The ``spack bootstrap`` command provides this
functionality. To install spack in a new directory, simply type: functionality. To install spack in a new directory, simply type:
.. code-block:: console .. code-block:: console
$ spack clone /my/favorite/prefix $ spack bootstrap /my/favorite/prefix
This will install a new spack script in ``/my/favorite/prefix/bin``, This will install a new spack script in ``/my/favorite/prefix/bin``,
which you can use just like you would the regular spack script. Each which you can use just like you would the regular spack script. Each
@@ -221,7 +225,7 @@ If you want to see specifics on a particular compiler, you can run
f77 = /usr/local/bin/ifort-15.0.090 f77 = /usr/local/bin/ifort-15.0.090
fc = /usr/local/bin/ifort-15.0.090 fc = /usr/local/bin/ifort-15.0.090
modules = [] modules = []
operating_system = centos6 operating system = centos6
... ...
This shows which C, C++, and Fortran compilers were detected by Spack. This shows which C, C++, and Fortran compilers were detected by Spack.
@@ -321,6 +325,19 @@ by adding the following to your ``packages.yaml`` file:
compiler: [gcc@4.9.3] compiler: [gcc@4.9.3]
.. tip::
If you are building your own compiler, some users prefer to have a
Spack instance just for that. For example, create a new Spack in
``~/spack-tools`` and then run ``~/spack-tools/bin/spack install
gcc@4.9.3``. Once the compiler is built, don't build anything
more in that Spack instance; instead, create a new "real" Spack
instance, configure Spack to use the compiler you've just built,
and then build your application software in the new Spack
instance. Following this tip makes it easy to delete all your
Spack packages *except* the compiler.
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^
Compilers Requiring Modules Compilers Requiring Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -398,62 +415,14 @@ provides no Fortran compilers. The user is therefore forced to use a
mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
Fortran. Fortran.
#. You need to make sure that Xcode is installed. Run the following command: #. You need to make sure that command-line tools are installed. To that
end run ``$ xcode-select --install``.
.. code-block:: console #. Run ``$ spack compiler find`` to locate Clang.
$ xcode-select --install
If the Xcode command-line tools are already installed, you will see an
error message:
.. code-block:: none
xcode-select: error: command line tools are already installed, use "Software Update" to install updates
#. For most packages, the Xcode command-line tools are sufficient. However,
some packages like ``qt`` require the full Xcode suite. You can check
to see which you have installed by running:
.. code-block:: console
$ xcode-select -p
If the output is:
.. code-block:: none
/Applications/Xcode.app/Contents/Developer
you already have the full Xcode suite installed. If the output is:
.. code-block:: none
/Library/Developer/CommandLineTools
you only have the command-line tools installed. The full Xcode suite can
be installed through the App Store. Make sure you launch the Xcode
application and accept the license agreement before using Spack.
It may ask you to install additional components. Alternatively, the license
can be accepted through the command line:
.. code-block:: console
$ sudo xcodebuild -license accept
Note: the flag is ``-license``, not ``--license``.
#. Run ``spack compiler find`` to locate Clang.
#. There are different ways to get ``gfortran`` on macOS. For example, you can #. There are different ways to get ``gfortran`` on macOS. For example, you can
install GCC with Spack (``spack install gcc``) or with Homebrew install GCC with Spack (``$ spack install gcc``) or with Homebrew
(``brew install gcc``). (``$ brew install gcc``).
#. The only thing left to do is to edit ``~/.spack/compilers.yaml`` to provide #. The only thing left to do is to edit ``~/.spack/compilers.yaml`` to provide
the path to ``gfortran``: the path to ``gfortran``:
@@ -469,7 +438,7 @@ Fortran.
fc: /path/to/bin/gfortran fc: /path/to/bin/gfortran
If you used Spack to install GCC, you can get the installation prefix by If you used Spack to install GCC, you can get the installation prefix by
``spack location -i gcc`` (this will only work if you have a single version ``$ spack location -i gcc`` (this will only work if you have a single version
of GCC installed). Whereas for Homebrew, GCC is installed in of GCC installed). Whereas for Homebrew, GCC is installed in
``/usr/local/Cellar/gcc/x.y.z``. ``/usr/local/Cellar/gcc/x.y.z``.
@@ -681,7 +650,6 @@ Or it can be set permanently in your ``compilers.yaml``:
fflags: -mismatch fflags: -mismatch
spec: nag@6.1 spec: nag@6.1
--------------- ---------------
System Packages System Packages
--------------- ---------------
@@ -744,22 +712,19 @@ example:
$ curl -O https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz $ curl -O https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz
To tell Spack to use the system-supplied OpenSSL, first determine what The recommended way to tell Spack to use the system-supplied OpenSSL is
version you have: to add the following to ``packages.yaml``. Note that the ``@system``
"version" means "I don't care what version it is, just use what is
there." This is reasonable for OpenSSL, which has a stable API.
.. code-block:: console
$ openssl version
OpenSSL 1.0.2g 1 Mar 2016
Then add the following to ``~/.spack/packages.yaml``:
.. code-block:: yaml .. code-block:: yaml
packages: packages:
openssl: openssl:
paths: paths:
openssl@1.0.2g: /usr openssl@system: /usr
version: [system]
buildable: False buildable: False
@@ -775,7 +740,8 @@ to add the following to ``packages.yaml``:
packages: packages:
netlib-lapack: netlib-lapack:
paths: paths:
netlib-lapack@3.6.1: /usr netlib-lapack@system: /usr
version: [system]
buildable: False buildable: False
all: all:
providers: providers:
@@ -784,9 +750,11 @@ to add the following to ``packages.yaml``:
.. note:: .. note::
Above we pretend that the system-provided BLAS / LAPACK is ``netlib-lapack`` The ``@system`` "version" means "I don't care what version it is,
only because it is the only BLAS / LAPACK provider which uses standard names just use what is there." Above we pretend that the system-provided
for libraries (as opposed to, for example, ``libopenblas.so``). Blas/Lapack is ``netlib-lapack`` only because it is the only BLAS / LAPACK
provider which uses standard names for libraries (as opposed to, for example,
`libopenblas.so`).
Although we specify external package in ``/usr``, Spack is smart enough not Although we specify external package in ``/usr``, Spack is smart enough not
to add ``/usr/lib`` to RPATHs, where it could cause unrelated system to add ``/usr/lib`` to RPATHs, where it could cause unrelated system
@@ -816,7 +784,7 @@ This problem is related to OpenSSL, and in some cases might be solved
by installing a new version of ``git`` and ``openssl``: by installing a new version of ``git`` and ``openssl``:
#. Run ``spack install git`` #. Run ``spack install git``
#. Add the output of ``spack module loads git`` to your ``.bashrc``. #. Add the output of ``spack module loads git`` to your ``.bashrc``.
If this doesn't work, it is also possible to disable checking of SSL If this doesn't work, it is also possible to disable checking of SSL
certificates by using: certificates by using:
@@ -889,10 +857,6 @@ well. They can generally be activated as in the ``curl`` example above;
or some systems might already have an appropriate hand-built or some systems might already have an appropriate hand-built
environment module that may be loaded. Either way works. environment module that may be loaded. Either way works.
If you find that you are missing some of these programs, ``spack`` can
build some of them for you with ``spack bootstrap``. Currently supported
programs are ``environment-modules``.
A few notes on specific programs in this list: A few notes on specific programs in this list:
"""""""""""""""""""""""""" """"""""""""""""""""""""""
@@ -926,37 +890,55 @@ Once ``curl`` has been installed, you can similarly install the others.
Environment Modules Environment Modules
""""""""""""""""""" """""""""""""""""""
In order to use Spack's generated module files, you must have In order to use Spack's generated environment modules, you must have
installed ``environment-modules`` or ``lmod``. The simplest way installed one of *Environment Modules* or *Lmod*. On many Linux
to get the latest version of either of these tools is installing distributions, this can be installed from the vendor's repository. For
it as part of Spack's bootstrap procedure: example: ``yum install environment-modules`` (Fedora/RHEL/CentOS). If
your Linux distribution does not have Environment Modules, you can get it
with Spack:
#. Consider using system tcl (as long as your system has Tcl version 8.0 or later):
#) Identify its location using ``which tclsh``
#) Identify its version using ``echo 'puts $tcl_version;exit 0' | tclsh``
#) Add to ``~/.spack/packages.yaml`` and modify as appropriate:
.. code-block:: yaml
packages:
tcl:
paths:
tcl@8.5: /usr
version: [8.5]
buildable: False
#. Install with:
.. code-block:: console
$ spack install environment-modules
#. Activate with the following script (or apply the updates to your
``.bashrc`` file manually):
.. code-block:: sh
TMP=`tempfile`
echo >$TMP
MODULE_HOME=`spack location --install-dir environment-modules`
MODULE_VERSION=`ls -1 $MODULE_HOME/Modules | head -1`
${MODULE_HOME}/Modules/${MODULE_VERSION}/bin/add.modules <$TMP
cp .bashrc $TMP
echo "MODULE_VERSION=${MODULE_VERSION}" > .bashrc
cat $TMP >>.bashrc
This adds to your ``.bashrc`` (or similar) files, enabling Environment
Modules when you log in. Re-load your .bashrc (or log out and in
again), and then test that the ``module`` command is found with:
.. code-block:: console .. code-block:: console
$ spack bootstrap $ module avail
.. warning::
At the moment ``spack bootstrap`` is only able to install ``environment-modules``.
Extending its capabilities to prefer ``lmod`` where possible is in the roadmap,
and likely to happen before the next release.
Alternatively, on many Linux distributions, you can install a pre-built binary
from the vendor's repository. On Fedora/RHEL/CentOS, for example, this can be
done with the command:
.. code-block:: console
$ yum install environment-modules
Once you have the tool installed and available in your path, you can source
Spack's setup file:
.. code-block:: console
$ source share/spack/setup-env.sh
This activates :ref:`shell support <shell-support>` and makes commands like
``spack load`` available for use.
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
@@ -1014,89 +996,6 @@ written in C/C++/Fortran would need it. A potential workaround is to
load a recent ``binutils`` into your environment and use the ``--dirty`` load a recent ``binutils`` into your environment and use the ``--dirty``
flag. flag.
-----------
GPG Signing
-----------
.. _cmd-spack-gpg:
^^^^^^^^^^^^^
``spack gpg``
^^^^^^^^^^^^^
Spack has support for signing and verifying packages using GPG keys. A
separate keyring is used for Spack, so any keys available in the user's home
directory are not used.
^^^^^^^^^^^^^^^^^^
``spack gpg init``
^^^^^^^^^^^^^^^^^^
When Spack is first installed, its keyring is empty. Keys stored in
:file:`var/spack/gpg` are the default keys for a Spack installation. These
keys may be imported by running ``spack gpg init``. This will import the
default keys into the keyring as trusted keys.
^^^^^^^^^^^^^
Trusting keys
^^^^^^^^^^^^^
Additional keys may be added to the keyring using
``spack gpg trust <keyfile>``. Once a key is trusted, packages signed by the
owner of the key may be installed.
^^^^^^^^^^^^^
Creating keys
^^^^^^^^^^^^^
You may also create your own key so that you may sign your own packages using
``spack gpg create <name> <email>``. By default, the key has no expiration,
but it may be set with the ``--expires <date>`` flag (see the ``gnupg2``
documentation for accepted date formats). It is also recommended to add a
comment as to the use of the key using the ``--comment <comment>`` flag. The
public half of the key can also be exported for sharing with others so that
they may use packages you have signed using the ``--export <keyfile>`` flag.
Secret keys may also be later exported using the
``spack gpg export <location> [<key>...]`` command.
.. note::
Key creation speed
The creation of a new GPG key requires generating a lot of random numbers.
Depending on the entropy produced on your system, the entire process may
take a long time (even a few minutes). To speed it up you may install
tools like ``rngd``, which is usually available as a package in the host OS.
On e.g. an Ubuntu machine you need to give the following commands:
.. code-block:: console
$ sudo apt-get install rng-tools
$ sudo rngd -r /dev/urandom
before generating the keys.
^^^^^^^^^^^^
Listing keys
^^^^^^^^^^^^
In order to list the keys available in the keyring, the
``spack gpg list`` command will list trusted keys with the ``--trusted`` flag
and keys available for signing using ``--signing``. If you would like to
remove keys from your keyring, ``spack gpg untrust <keyid>``. Key IDs can be
email addresses, names, or (best) fingerprints.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Signing and Verifying Packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
In order to sign a package, ``spack gpg sign <file>`` should be used. By
default, the signature will be written to ``<file>.asc``, but that may be
changed by using the ``--output <file>`` flag. If there is only one signing
key available, it will be used, but if there is more than one, the key to use
must be specified using the ``--key <keyid>`` flag. The ``--clearsign`` flag
may also be used to create a signed file which contains the contents, but it
is not recommended. Signed packages may be verified by using
``spack gpg verify <file>``.
.. _cray-support: .. _cray-support:
@@ -1192,13 +1091,10 @@ Here's an example of an external configuration for cray modules:
.. code-block:: yaml .. code-block:: yaml
packages: packages:
mpich: mpi:
modules: modules:
mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich
mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich
all:
providers:
mpi: [mpich]
This tells Spack that for whatever package that depends on mpi, load the This tells Spack that for whatever package that depends on mpi, load the
cray-mpich module into the environment. You can then be able to use whatever cray-mpich module into the environment. You can then be able to use whatever
@@ -1215,7 +1111,7 @@ Here is an example of a full packages.yaml used at NERSC
.. code-block:: yaml .. code-block:: yaml
packages: packages:
mpich: mpi:
modules: modules:
mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich
@@ -1232,8 +1128,6 @@ Here is an example of a full packages.yaml used at NERSC
buildable: False buildable: False
all: all:
compiler: [gcc@5.2.0, intel@16.0.0.109] compiler: [gcc@5.2.0, intel@16.0.0.109]
providers:
mpi: [mpich]
Here we tell spack that whenever we want to build with gcc use version 5.2.0 or Here we tell spack that whenever we want to build with gcc use version 5.2.0 or
if we want to build with intel compilers, use version 16.0.0.109. We add a spec if we want to build with intel compilers, use version 16.0.0.109. We add a spec

View File

@@ -4,13 +4,9 @@
contain the root `toctree` directive. contain the root `toctree` directive.
=================== ===================
Spack Spack Documentation
=================== ===================
.. epigraph::
`These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
Spack is a package management tool designed to support multiple Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers, and environments. It was designed for large supercomputing centers,
@@ -29,12 +25,12 @@ maintain a single file for many different builds of the same package.
See the :doc:`features` for examples and highlights. See the :doc:`features` for examples and highlights.
Get spack from the `github repository Get spack from the `github repository
<https://github.com/spack/spack>`_ and install your first <https://github.com/llnl/spack>`_ and install your first
package: package:
.. code-block:: console .. code-block:: console
$ git clone https://github.com/spack/spack.git $ git clone https://github.com/llnl/spack.git
$ cd spack/bin $ cd spack/bin
$ ./spack install libelf $ ./spack install libelf
@@ -50,8 +46,7 @@ or refer to the full manual below.
getting_started getting_started
basic_usage basic_usage
workflows workflows
tutorial tutorial_sc16
known_issues
.. toctree:: .. toctree::
:maxdepth: 2 :maxdepth: 2
@@ -63,7 +58,6 @@ or refer to the full manual below.
mirrors mirrors
module_file_support module_file_support
repositories repositories
binary_caches
command_index command_index
package_list package_list
@@ -74,8 +68,7 @@ or refer to the full manual below.
contribution_guide contribution_guide
packaging_guide packaging_guide
developer_guide developer_guide
Spack API Docs <spack> API Docs <spack>
LLNL API Docs <llnl>
================== ==================
Indices and tables Indices and tables

View File

@@ -1,90 +0,0 @@
============
Known Issues
============
This is a list of known bugs in Spack. It provides ways of getting around these
problems if you encounter them.
-----------------------------------------------------------------
Default variants are not taken into account during concretization
-----------------------------------------------------------------
**Status:** Expected to be fixed in the next release
Current concretization algorithm does not take into account default values
of variants when adding extra constraints to the spec via CLI. For example
you may encounter the following error when trying to specify which MPI provider
to use:
.. code-block:: console
$ spack install hdf5 ^openmpi
==> Error: hdf5 does not depend on openmpi
although the hdf5 package contains:
.. code-block:: python
variant('mpi', default=True, description='Enable MPI support')
depends_on('mpi', when='+mpi')
A workaround is to explicitly activate the variant related to the dependency:
.. code-block:: console
$ spack install hdf5+mpi ^openmpi
See https://github.com/spack/spack/issues/397 for further details.
---------------------------------------------------
Variants are not properly forwarded to dependencies
---------------------------------------------------
**Status:** Expected to be fixed in the next release
Sometimes, a variant of a package can also affect how its dependencies are
built. For example, in order to build MPI support for a package, it may
require that its dependencies are also built with MPI support. In the
``package.py``, this looks like:
.. code-block:: python
depends_on('hdf5~mpi', when='~mpi')
depends_on('hdf5+mpi', when='+mpi')
Spack handles this situation properly for *immediate* dependencies, and
builds ``hdf5`` with the same variant you used for the package that
depends on it. However, for *indirect* dependencies (dependencies of
dependencies), Spack does not backtrack up the DAG far enough to handle
this. Users commonly run into this situation when trying to build R with
X11 support:
.. code-block:: console
$ spack install r+X
...
==> Error: Invalid spec: 'cairo@1.14.8%gcc@6.2.1+X arch=linux-fedora25-x86_64 ^bzip2@1.0.6%gcc@6.2.1+shared arch=linux-fedora25-x86_64 ^font-util@1.3.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^fontconfig@2.12.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^freetype@2.7.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^gettext@0.19.8.1%gcc@6.2.1+bzip2+curses+git~libunistring+libxml2+tar+xz arch=linux-fedora25-x86_64 ^glib@2.53.1%gcc@6.2.1~libmount arch=linux-fedora25-x86_64 ^inputproto@2.3.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^kbproto@1.0.7%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libffi@3.2.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpng@1.6.29%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libpthread-stubs@0.4%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libx11@1.6.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxau@1.0.8%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxcb@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxdmcp@1.1.2%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxext@1.3.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^libxml2@2.9.4%gcc@6.2.1~python arch=linux-fedora25-x86_64 ^libxrender@0.9.10%gcc@6.2.1 arch=linux-fedora25-x86_64 ^ncurses@6.0%gcc@6.2.1~symlinks arch=linux-fedora25-x86_64 ^openssl@1.0.2k%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pcre@8.40%gcc@6.2.1+utf arch=linux-fedora25-x86_64 ^pixman@0.34.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^pkg-config@0.29.2%gcc@6.2.1+internal_glib arch=linux-fedora25-x86_64 ^python@2.7.13%gcc@6.2.1+shared~tk~ucs4 arch=linux-fedora25-x86_64 ^readline@7.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^renderproto@0.11.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^sqlite@3.18.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^tar^util-macros@1.19.1%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xcb-proto@1.12%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xextproto@7.3.0%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xproto@7.0.31%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xtrans@1.3.5%gcc@6.2.1 arch=linux-fedora25-x86_64 ^xz@5.2.3%gcc@6.2.1 arch=linux-fedora25-x86_64 ^zlib@1.2.11%gcc@6.2.1+pic+shared arch=linux-fedora25-x86_64'.
Package cairo requires variant ~X, but spec asked for +X
A workaround is to explicitly activate the variants of dependencies as well:
.. code-block:: console
$ spack install r+X ^cairo+X ^pango+X
See https://github.com/spack/spack/issues/267 and
https://github.com/spack/spack/issues/2546 for further details.
----------------------------
``spack setup`` doesn't work
----------------------------
**Status:** Work in progress
Spack provides a ``setup`` command that is useful for the development of
software outside of Spack. Unfortunately, this command no longer works.
See https://github.com/spack/spack/issues/2597 and
https://github.com/spack/spack/issues/2662 for details. This is expected
to be fixed by https://github.com/spack/spack/pull/2664.

View File

@@ -162,7 +162,7 @@ your site.
Once you have a mirror, you need to let spack know about it. This is Once you have a mirror, you need to let spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's relatively simple. First, figure out the URL for the mirror. If it's
a directory, you can use a file URL like this one: a file, you can use a file URL like this one:
.. code-block:: none .. code-block:: none
@@ -237,7 +237,7 @@ as other Spack mirrors (so it can be copied anywhere and referenced with a URL
like other mirrors). The mirror is maintained locally (within the Spack like other mirrors). The mirror is maintained locally (within the Spack
installation directory) at :file:`var/spack/cache/`. It is always enabled (and installation directory) at :file:`var/spack/cache/`. It is always enabled (and
is always searched first when attempting to retrieve files for an installation) is always searched first when attempting to retrieve files for an installation)
but can be cleared with :ref:`clean <cmd-spack-clean>`; the cache directory can also but can be cleared with :ref:`purge <cmd-spack-purge>`; the cache directory can also
be deleted manually without issue. be deleted manually without issue.
Caching includes retrieved tarball archives and source control repositories, but Caching includes retrieved tarball archives and source control repositories, but

View File

@@ -1,989 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:cc="http://creativecommons.org/ns#"
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
version="1.1"
width="682"
height="382"
id="svg3341"
inkscape:version="0.48.4 r9939"
sodipodi:docname="module_file_generation.svg">
<metadata
id="metadata3657">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
</cc:Work>
</rdf:RDF>
</metadata>
<sodipodi:namedview
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1"
objecttolerance="10"
gridtolerance="10"
guidetolerance="10"
inkscape:pageopacity="0"
inkscape:pageshadow="2"
inkscape:window-width="1920"
inkscape:window-height="1134"
id="namedview3655"
showgrid="false"
inkscape:zoom="2.0073314"
inkscape:cx="341"
inkscape:cy="191"
inkscape:window-x="1920"
inkscape:window-y="27"
inkscape:window-maximized="1"
inkscape:current-layer="g3345" />
<defs
id="defs3343" />
<g
inkscape:groupmode="layer"
id="layer1"
inkscape:label="Background">
<rect
style="fill:none;stroke:#ff0000;stroke-width:1.40437257;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:8.42623582, 8.42623582;stroke-dashoffset:0"
id="rect3844"
width="442.15341"
height="375.15021"
x="18.745768"
y="3.2206354" />
<rect
style="fill:none;stroke:#0000ff;stroke-width:1.50000000000000000;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:9, 9;stroke-dashoffset:0"
id="rect3844-6"
width="175.74904"
height="179.83459"
x="486.96402"
y="179.3212" />
</g>
<g
id="g3345">
<g
transform="translate(-246,-214) scale(1,1)"
id="g3347">
<rect
fill="#C0C0C0"
stroke="none"
x="297"
y="231"
width="198"
height="104"
opacity="0.2"
id="rect3349" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3351">
<rect
fill="#ffffff"
stroke="none"
x="290"
y="224"
width="198"
height="104"
id="rect3353" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3355">
<path
fill="none"
stroke="#000000"
d="M 290 224 L 488 224 L 488 328 L 290 328 L 290 224 Z Z"
stroke-miterlimit="10"
id="path3357" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3359">
<rect
fill="#ffffff"
stroke="none"
x="280"
y="231"
width="20"
height="10"
id="rect3361" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3363">
<path
fill="none"
stroke="#000000"
d="M 280 231 L 300 231 L 300 241 L 280 241 L 280 231 Z Z"
stroke-miterlimit="10"
id="path3365" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3367">
<rect
fill="#ffffff"
stroke="none"
x="280"
y="251"
width="20"
height="10"
id="rect3369" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3371">
<path
fill="none"
stroke="#000000"
d="M 280 251 L 300 251 L 300 261 L 280 261 L 280 251 Z Z"
stroke-miterlimit="10"
id="path3373" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3375">
<g
id="g3377">
<path
fill="none"
stroke="none"
id="path3379" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="343"
y="243.5"
id="text3381">Configuration files</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3383">
<rect
fill="#C0C0C0"
stroke="none"
x="327"
y="263"
width="98"
height="24"
opacity="0.2"
id="rect3385" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3387">
<rect
fill="#ffffff"
stroke="none"
x="320"
y="256"
width="98"
height="24"
id="rect3389" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3391">
<path
fill="none"
stroke="#000000"
d="M 320 256 L 418 256 L 418 280 L 320 280 L 320 256 Z Z"
stroke-miterlimit="10"
id="path3393" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3395">
<rect
fill="#ffffff"
stroke="none"
x="320"
y="256"
width="98"
height="24"
id="rect3397" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3399">
<path
fill="none"
stroke="#000000"
d="M 320 256 L 418 256 L 418 280 L 320 280 L 320 256 Z Z"
stroke-miterlimit="10"
id="path3401" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3403">
<g
id="g3405"
transform="translate(-2.8259277,-4.138916)">
<path
id="path3407"
d=""
inkscape:connector-curvature="0"
style="fill:none;stroke:none" />
<text
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="328.5"
y="275.5"
id="text3409"
style="font-size:13px;font-style:normal;font-weight:bold;text-decoration:none;fill:#000000;stroke:none;font-family:Arial">modules.yaml</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3411">
<rect
fill="#C0C0C0"
stroke="none"
x="335"
y="295"
width="83"
height="24"
opacity="0.2"
id="rect3413" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3415">
<rect
fill="#ffffff"
stroke="none"
x="328"
y="288"
width="83"
height="24"
id="rect3417" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3419">
<path
fill="none"
stroke="#000000"
d="M 328 288 L 411 288 L 411 312 L 328 312 L 328 288 Z Z"
stroke-miterlimit="10"
id="path3421" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3423">
<rect
fill="#ffffff"
stroke="none"
x="328"
y="288"
width="83"
height="24"
id="rect3425" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3427">
<path
fill="none"
stroke="#000000"
d="M 328 288 L 411 288 L 411 312 L 328 312 L 328 288 Z Z"
stroke-miterlimit="10"
id="path3429" />
</g>
<g
transform="translate(-248.92773,-218.16748)"
id="g3431">
<g
id="g3433">
<path
id="path3435"
d=""
inkscape:connector-curvature="0"
style="fill:none;stroke:none" />
<text
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="336.5"
y="307.5"
id="text3437"
style="font-size:13px;font-style:normal;font-weight:bold;text-decoration:none;fill:#000000;stroke:none;font-family:Arial">config.yaml</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3439">
<rect
fill="#C0C0C0"
stroke="none"
x="489"
y="407"
width="198"
height="72"
opacity="0.2"
id="rect3441" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3443">
<rect
fill="#ffffff"
stroke="none"
x="482"
y="400"
width="198"
height="72"
id="rect3445" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3447">
<path
fill="none"
stroke="#000000"
d="M 482 400 L 680 400 L 680 472 L 482 472 L 482 400 Z Z"
stroke-miterlimit="10"
id="path3449" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3451">
<rect
fill="#ffffff"
stroke="none"
x="472"
y="407"
width="20"
height="10"
id="rect3453" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3455">
<path
fill="none"
stroke="#000000"
d="M 472 407 L 492 407 L 492 417 L 472 417 L 472 407 Z Z"
stroke-miterlimit="10"
id="path3457" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3459">
<rect
fill="#ffffff"
stroke="none"
x="472"
y="427"
width="20"
height="10"
id="rect3461" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3463">
<path
fill="none"
stroke="#000000"
d="M 472 427 L 492 427 L 492 437 L 472 437 L 472 427 Z Z"
stroke-miterlimit="10"
id="path3465" />
</g>
<g
transform="translate(-255.37842,-194.16748)"
id="g3467">
<g
id="g3469">
<path
id="path3471"
d=""
inkscape:connector-curvature="0"
style="fill:none;stroke:none" />
<text
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="528"
y="419.5"
id="text3473"
style="font-size:13px;font-style:normal;font-weight:bold;text-decoration:none;fill:#000000;stroke:none;font-family:Arial">Module subpackage</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3475">
<path
fill="none"
stroke="#000000"
d="M 369 313 L 368 440 L 471 438"
stroke-miterlimit="10"
stroke-dasharray="3"
id="path3477" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3479">
<path
fill="none"
stroke="#000000"
d="M 460.9209632639215 442.4060207072573 L 471 438 L 460.7575176559406 433.98857189624056"
stroke-miterlimit="10"
id="path3481" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3483">
<path
fill="#ffffff"
stroke="none"
d="M 256 448 L 381 448 L 391 458 L 391 496 L 256 496 L 256 448"
id="path3485" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3487">
<path
fill="none"
stroke="#000000"
d="M 256 448 L 381 448 L 391 458 L 391 496 L 256 496 L 256 448 L 256 448"
stroke-miterlimit="10"
id="path3489" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3491">
<path
fill="none"
stroke="#000000"
d="M 381 448 L 381 458 L 391 458 L 381 448"
stroke-miterlimit="10"
id="path3493" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3495">
<g
id="g3497">
<path
fill="none"
stroke="none"
id="path3499" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="normal"
text-decoration="none"
x="261"
y="466"
id="text3501">template directory</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3503">
<g
id="g3505">
<path
fill="none"
stroke="none"
id="path3507" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="normal"
text-decoration="none"
x="261"
y="481"
id="text3509">module file directory</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3511">
<path
fill="none"
stroke="#000000"
d="M 419 439 L 393 448"
stroke-miterlimit="10"
stroke-dasharray="3"
id="path3513" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3515">
<path
d="M 419,267 576.40856,267.26845 576,399"
stroke-miterlimit="10"
id="path3517"
inkscape:connector-curvature="0"
style="fill:none;stroke:#000000;stroke-miterlimit:10;stroke-dasharray:3"
sodipodi:nodetypes="ccc" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3519">
<path
fill="none"
stroke="#000000"
d="M 571.790482243984 388.83732514237585 L 576 399 L 580.209517756016 388.83732514237585"
stroke-miterlimit="10"
id="path3521" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3523">
<path
fill="#ffffff"
stroke="none"
d="M 600 296 L 737 296 L 747 306 L 747 344 L 600 344 L 600 296"
id="path3525" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3527">
<path
fill="none"
stroke="#000000"
d="M 600 296 L 737 296 L 747 306 L 747 344 L 600 344 L 600 296 L 600 296"
stroke-miterlimit="10"
id="path3529" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3531">
<path
fill="none"
stroke="#000000"
d="M 737 296 L 737 306 L 747 306 L 737 296"
stroke-miterlimit="10"
id="path3533" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3535">
<g
id="g3537">
<path
fill="none"
stroke="none"
id="path3539" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="normal"
text-decoration="none"
x="605"
y="314"
id="text3541">content customization</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3543">
<g
id="g3545">
<path
fill="none"
stroke="none"
id="path3547" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="normal"
text-decoration="none"
x="605"
y="329"
id="text3549">layout customization</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3551">
<path
fill="none"
stroke="#000000"
d="M 576 331 L 599 328"
stroke-miterlimit="10"
stroke-dasharray="3"
id="path3553" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3555">
<rect
fill="#C0C0C0"
stroke="none"
x="801"
y="415"
width="99"
height="44"
opacity="0.2"
id="rect3557" />
</g>
<g
transform="translate(-246,-210)"
id="g3559">
<rect
x="794"
y="408"
width="99"
height="44"
id="rect3561"
style="fill:#ffffff;stroke:none" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3563">
<path
fill="none"
stroke="#000000"
d="M 794 408 L 893 408 L 893 452 L 794 452 L 794 408 Z Z"
stroke-miterlimit="10"
id="path3565" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3567">
<rect
fill="#ffffff"
stroke="none"
x="784"
y="415"
width="20"
height="10"
id="rect3569" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3571">
<path
fill="none"
stroke="#000000"
d="M 784 415 L 804 415 L 804 425 L 784 425 L 784 415 Z Z"
stroke-miterlimit="10"
id="path3573" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3575">
<rect
fill="#ffffff"
stroke="none"
x="784"
y="435"
width="20"
height="10"
id="rect3577" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3579">
<path
fill="none"
stroke="#000000"
d="M 784 435 L 804 435 L 804 445 L 784 445 L 784 435 Z Z"
stroke-miterlimit="10"
id="path3581" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3583">
<g
id="g3585"
transform="translate(-6.7685547,5.861084)">
<path
id="path3587"
d=""
inkscape:connector-curvature="0"
style="fill:none;stroke:none" />
<text
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="832"
y="427.5"
id="text3589"
style="font-size:13px;font-style:normal;font-weight:bold;text-decoration:none;fill:#000000;stroke:none;font-family:Arial">Jinja2</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3591">
<path
fill="#C0C0C0"
stroke="none"
d="M 568.9 519 L 596.34 519 L 608.1 530.76 L 608.1 568 L 568.9 568"
opacity="0.2"
id="path3593" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3595">
<path
fill="#ffffff"
stroke="none"
d="M 561.9 512 L 589.34 512 L 601.1 523.76 L 601.1 561 L 561.9 561"
id="path3597" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3599">
<path
fill="none"
stroke="#000000"
d="M 561.9 512 L 589.34 512 L 601.1 523.76 L 601.1 561 L 561.9 561 L 561.9 512"
stroke-miterlimit="10"
id="path3601" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3603">
<path
fill="none"
stroke="#000000"
d="M 589.34 512 L 589.34 523.76 L 601.1 523.76 L 589.34 512"
stroke-miterlimit="10"
id="path3605" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3607">
<g
id="g3609">
<path
fill="none"
stroke="none"
id="path3611" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="551.5"
y="580.5"
id="text3613">Templates</text>
</g>
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3615">
<path
d="M 620,549 836.73155,549.54866 838,453"
stroke-miterlimit="10"
id="path3617"
inkscape:connector-curvature="0"
style="fill:none;stroke:#000000;stroke-miterlimit:10;stroke-dasharray:3"
sodipodi:nodetypes="ccc" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3619">
<path
fill="none"
stroke="#000000"
d="M 842.4139237018308 463.0755782739462 L 838 453 L 833.9966056593214 463.2456251030878"
stroke-miterlimit="10"
id="path3621" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3623">
<path
d="M 680.18289,431.54866 783,431"
stroke-miterlimit="10"
id="path3625"
inkscape:connector-curvature="0"
style="fill:none;stroke:#000000;stroke-miterlimit:10;stroke-dasharray:3"
sodipodi:nodetypes="cc" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3627">
<path
fill="none"
stroke="#000000"
d="M 772.9654737429415 435.5064712133113 L 783 431 L 772.7179620834851 427.0910747917938"
stroke-miterlimit="10"
id="path3629" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3631">
<path
fill="#C0C0C0"
stroke="none"
d="M 862.4 247 L 889.84 247 L 901.6 258.76 L 901.6 296 L 862.4 296"
opacity="0.2"
id="path3633" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3635">
<path
fill="#ffffff"
stroke="none"
d="M 855.4 240 L 882.84 240 L 894.6 251.76000000000002 L 894.6 289 L 855.4 289"
id="path3637" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3639">
<path
fill="none"
stroke="#000000"
d="M 855.4 240 L 882.84 240 L 894.6 251.76000000000002 L 894.6 289 L 855.4 289 L 855.4 240"
stroke-miterlimit="10"
id="path3641" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3643">
<path
fill="none"
stroke="#000000"
d="M 882.84 240 L 882.84 251.76000000000002 L 894.6 251.76000000000002 L 882.84 240"
stroke-miterlimit="10"
id="path3645" />
</g>
<g
transform="translate(-246,-214) scale(1,1)"
id="g3647">
<g
id="g3649">
<path
fill="none"
stroke="none"
id="path3651" />
<text
fill="#000000"
stroke="none"
font-family="Arial"
font-size="13px"
font-style="normal"
font-weight="bold"
text-decoration="none"
x="840.5"
y="308.5"
id="text3653">Module files</text>
</g>
</g>
<g
transform="translate(-261.34866,-222.82727)"
id="g3635-8">
<path
style="fill:#ffffff;stroke:none"
inkscape:connector-curvature="0"
d="m 855.4,240 27.44,0 11.76,11.76 0,37.24 -39.2,0"
id="path3637-9" />
</g>
<g
transform="translate(-261.34866,-222.82727)"
id="g3639-7">
<path
style="fill:none;stroke:#000000;stroke-miterlimit:10"
inkscape:connector-curvature="0"
d="m 855.4,240 27.44,0 11.76,11.76 0,37.24 -39.2,0 0,-49"
stroke-miterlimit="10"
id="path3641-3" />
</g>
<g
transform="translate(-261.34866,-222.82727)"
id="g3643-6">
<path
style="fill:none;stroke:#000000;stroke-miterlimit:10"
inkscape:connector-curvature="0"
d="m 882.84,240 0,11.76 11.76,0 L 882.84,240"
stroke-miterlimit="10"
id="path3645-1" />
</g>
<g
transform="translate(-278.09946,-233.44973)"
id="g3635-84">
<path
style="fill:#ffffff;stroke:none"
inkscape:connector-curvature="0"
d="m 855.4,240 27.44,0 11.76,11.76 0,37.24 -39.2,0"
id="path3637-5" />
</g>
<g
transform="translate(-278.09946,-233.44973)"
id="g3639-0">
<path
style="fill:none;stroke:#000000;stroke-miterlimit:10"
inkscape:connector-curvature="0"
d="m 855.4,240 27.44,0 11.76,11.76 0,37.24 -39.2,0 0,-49"
stroke-miterlimit="10"
id="path3641-36" />
</g>
<g
transform="translate(-278.09946,-233.44973)"
id="g3643-1">
<path
style="fill:none;stroke:#000000;stroke-miterlimit:10"
inkscape:connector-curvature="0"
d="m 882.84,240 0,11.76 11.76,0 L 882.84,240"
stroke-miterlimit="10"
id="path3645-0" />
</g>
<g
transform="translate(-27.431351,-208.3001)"
id="g3375-6" />
<g
id="g4709"
transform="matrix(1,0,0,0.81117898,54.337968,31.640263)">
<g
id="g3623-5"
transform="matrix(0,-1,1,0,121.9107,875.37876)">
<path
sodipodi:nodetypes="cc"
style="fill:none;stroke:#000000;stroke-miterlimit:10;stroke-dasharray:3"
inkscape:connector-curvature="0"
id="path3625-4"
stroke-miterlimit="10"
d="M 680.18289,431.54866 783,431" />
</g>
<g
id="g3627-7"
transform="matrix(0,-1,1,0,121.98695,871.00978)">
<path
id="path3629-6"
stroke-miterlimit="10"
d="M 772.96547,435.50647 783,431 772.71796,427.09107"
inkscape:connector-curvature="0"
style="fill:none;stroke:#000000;stroke-miterlimit:10" />
</g>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 26 KiB

View File

@@ -7,10 +7,16 @@ Modules
The use of module systems to manage user environment in a controlled way The use of module systems to manage user environment in a controlled way
is a common practice at HPC centers that is often embraced also by individual is a common practice at HPC centers that is often embraced also by individual
programmers on their development machines. To support this common practice programmers on their development machines. To support this common practice
Spack integrates with `Environment Modules Spack provides integration with `Environment Modules
<http://modules.sourceforge.net/>`_ , `LMod <http://modules.sourceforge.net/>`_ , `LMod
<http://lmod.readthedocs.io/en/latest/>`_ and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ by <http://lmod.readthedocs.io/en/latest/>`_ and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ by:
providing post-install hooks that generate module files and commands to manipulate them.
* generating module files after a successful installation
* providing commands that can leverage the spec syntax to manipulate modules
In the following you will see how to activate shell support for the commands in Spack
that require it, and discover what benefits this may bring compared to dealing
directly with automatically generated module files.
.. note:: .. note::
@@ -20,58 +26,13 @@ providing post-install hooks that generate module files and commands to manipula
.. _shell-support: .. _shell-support:
---------------------------- -------------
Using module files via Spack
----------------------------
If you have installed a supported module system either manually or through
``spack bootstrap``, you should be able to run either ``module avail`` or
``use -l spack`` to see what module files have been installed. Here is
sample output of those programs, showing lots of installed packages:
.. code-block:: console
$ module avail
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
The names should look familiar, as they resemble the output from ``spack find``.
You *can* use the modules here directly. For example, you could type either of these commands
to load the ``cmake`` module:
.. code-block:: console
$ use cmake-3.7.2-gcc-6.3.0-fowuuby
.. code-block:: console
$ module load cmake-3.7.2-gcc-6.3.0-fowuuby
Neither of these is particularly pretty, easy to remember, or
easy to type. Luckily, Spack has its own interface for using modules and dotkits.
^^^^^^^^^^^^^
Shell support Shell support
^^^^^^^^^^^^^ -------------
To enable additional Spack commands for loading and unloading module files, You can enable shell support by sourcing the appropriate setup file
and to add the correct path to ``MODULEPATH``, you need to source the appropriate in the ``$SPACK_ROOT/share/spack`` directory.
setup file in the ``$SPACK_ROOT/share/spack`` directory. This will activate shell For ``bash`` or ``ksh`` users:
support for the commands that need it. For ``bash``, ``ksh`` or ``zsh`` users:
.. code-block:: console .. code-block:: console
@@ -81,20 +42,73 @@ For ``csh`` and ``tcsh`` instead:
.. code-block:: console .. code-block:: console
$ set SPACK_ROOT ...
$ source $SPACK_ROOT/share/spack/setup-env.csh $ source $SPACK_ROOT/share/spack/setup-env.csh
Note that in the latter case it is necessary to explicitly set ``SPACK_ROOT``
before sourcing the setup file (you will get a meaningful error message
if you don't).
When ``bash`` and ``ksh`` users update their environment with ``setup-env.sh``, it will check for spack-installed environment modules and add the ``module`` command to their environment; This only occurs if the module command is not already available. You can install ``environment-modules`` with ``spack bootstrap`` as described in :ref:`InstallEnvironmentModules`. .. note::
You can put the source line in your ``.bashrc`` or ``.cshrc`` to
have Spack's shell support available on the command line at any login.
Finally, if you want to have Spack's shell support available on the command line at
any login you can put this source line in one of the files that are sourced
at startup (like ``.profile``, ``.bashrc`` or ``.cshrc``). Be aware though
that the startup time may be slightly increased because of that.
----------------------------
Using module files via Spack
----------------------------
If you have shell support enabled you should be able to run either
``module avail`` or ``use -l spack`` to see what module/dotkit files have
been installed. Here is sample output of those programs, showing lots
of installed packages.
.. code-block:: console
$ module avail
------- ~/spack/share/spack/modules/linux-debian7-x86_64 --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
callpath@1.0.1%gcc@4.4.7-5dce4318 mpfr@3.1.2%gcc@4.4.7
dyninst@8.1.2%gcc@4.4.7-b040c20e mpich@3.0.4%gcc@4.4.7
gcc@4.9.1%gcc@4.4.7-93ab98c5 mpich@3.0.4%gcc@4.9.0
gmp@6.0.0a%gcc@4.4.7 mrnet@4.1.0%gcc@4.4.7-72b7881d
graphlib@2.0.0%gcc@4.4.7 netgauge@2.4.6%gcc@4.9.0-27912b7b
launchmon@1.0.1%gcc@4.4.7 stat@2.1.0%gcc@4.4.7-51101207
libNBC@1.1.1%gcc@4.9.0-27912b7b sundials@2.5.0%gcc@4.9.0-27912b7b
libdwarf@20130729%gcc@4.4.7-b52fac98
.. code-block:: console
$ use -l spack
spack ----------
adept-utils@1.0%gcc@4.4.7-5adef8da - adept-utils @1.0
automaded@1.0%gcc@4.4.7-d9691bb0 - automaded @1.0
boost@1.55.0%gcc@4.4.7 - boost @1.55.0
callpath@1.0.1%gcc@4.4.7-5dce4318 - callpath @1.0.1
dyninst@8.1.2%gcc@4.4.7-b040c20e - dyninst @8.1.2
gmp@6.0.0a%gcc@4.4.7 - gmp @6.0.0a
libNBC@1.1.1%gcc@4.9.0-27912b7b - libNBC @1.1.1
libdwarf@20130729%gcc@4.4.7-b52fac98 - libdwarf @20130729
libelf@0.8.13%gcc@4.4.7 - libelf @0.8.13
libelf@0.8.13%intel@15.0.0 - libelf @0.8.13
mpc@1.0.2%gcc@4.4.7-559607f5 - mpc @1.0.2
mpfr@3.1.2%gcc@4.4.7 - mpfr @3.1.2
mpich@3.0.4%gcc@4.4.7 - mpich @3.0.4
mpich@3.0.4%gcc@4.9.0 - mpich @3.0.4
netgauge@2.4.6%gcc@4.9.0-27912b7b - netgauge @2.4.6
sundials@2.5.0%gcc@4.9.0-27912b7b - sundials @2.5.0
The names here should look familiar, they're the same ones from
``spack find``. You *can* use the names here directly. For example,
you could type either of these commands to load the callpath module:
.. code-block:: console
$ use callpath@1.0.1%gcc@4.4.7-5dce4318
.. code-block:: console
$ module load callpath@1.0.1%gcc@4.4.7-5dce4318
.. _cmd-spack-load: .. _cmd-spack-load:
@@ -102,11 +116,12 @@ that the startup time may be slightly increased because of that.
``spack load / unload`` ``spack load / unload``
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
Once you have shell support enabled you can use the same spec syntax Neither of these is particularly pretty, easy to remember, or
you're used to: easy to type. Luckily, Spack has its own interface for using modules
and dotkits. You can use the same spec syntax you're used to:
========================= ========================== ========================= ==========================
Modules Dotkit Environment Modules Dotkit
========================= ========================== ========================= ==========================
``spack load <spec>`` ``spack use <spec>`` ``spack load <spec>`` ``spack use <spec>``
``spack unload <spec>`` ``spack unuse <spec>`` ``spack unload <spec>`` ``spack unuse <spec>``
@@ -195,7 +210,7 @@ Scripts to load modules recursively may be made with the command:
$ spack module loads --dependencies <spec> $ spack module loads --dependencies <spec>
An equivalent alternative using `process substitution <http://tldp.org/LDP/abs/html/process-sub.html>`_ is: An equivalent alternative is:
.. code-block :: console .. code-block :: console
@@ -281,46 +296,43 @@ For example, consider the following on one system:
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64 # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
------------------------- ----------------------------
Module file customization Auto-generating Module Files
------------------------- ----------------------------
Module files are generated by post-install hooks after the successful Module files are generated by post-install hooks after the successful
installation of a package. The table below summarizes the essential installation of a package. The following table summarizes the essential
information associated with the different file formats information associated with the different file formats
that can be generated by Spack: that can be generated by Spack:
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------+
| | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** | | | **Hook name** | **Default root directory** | **Compatible tools** |
+=============================+====================+===============================+==================================+======================+ +=============================+====================+===============================+======================+
| **Dotkit** | ``dotkit`` | share/spack/dotkit | templates/modules/modulefile.dk | DotKit | | **Dotkit** | ``dotkit`` | share/spack/dotkit | DotKit |
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------+
| **TCL - Non-Hierarchical** | ``tcl`` | share/spack/modules | templates/modules/modulefile.tcl | Env. Modules/LMod | | **TCL - Non-Hierarchical** | ``tcl`` | share/spack/modules | Env. Modules/LMod |
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------+
| **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | templates/modules/modulefile.lua | LMod | | **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | LMod |
+-----------------------------+--------------------+-------------------------------+----------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------+
Spack ships with sensible defaults for the generation of module files, but Though Spack ships with sensible defaults for the generation of module files,
you can customize many aspects of it to accommodate package or site specific needs. one can customize many aspects of it to accommodate package or site specific needs.
In general you can override or extend the default behavior by: These customizations are enabled by either:
1. overriding certain callback APIs in the Python packages 1. overriding certain callback APIs in the Python packages
2. writing specific rules in the ``modules.yaml`` configuration file 2. writing specific rules in the ``modules.yaml`` configuration file
3. writing your own templates to override or extend the defaults
The former method let you express changes in the run-time environment The former method fits best cases that are site independent, e.g. injecting variables
that are needed to use the installed software properly, e.g. injecting variables from language interpreters into their extensions. The latter instead permits to
from language interpreters into their extensions. The latter two instead permit to fine tune the content, naming and creation of module files to meet site specific conventions.
fine tune the filesystem layout, content and creation of module files to meet
site specific conventions.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^
Override API calls in ``package.py`` ``Package`` file API
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^
There are two methods that you can override in any ``package.py`` to affect the There are two methods that can be overridden in any ``package.py`` to affect the
content of the module files generated by Spack. The first one: content of generated module files. The first one is:
.. code-block:: python .. code-block:: python
@@ -328,8 +340,8 @@ content of the module files generated by Spack. The first one:
"""Set up the compile and runtime environments for a package.""" """Set up the compile and runtime environments for a package."""
pass pass
can alter the content of the module file associated with the same package where it is overridden. and can alter the content of *the same package where it is overridden*
The second method: by adding actions to ``run_env``. The second method is:
.. code-block:: python .. code-block:: python
@@ -337,13 +349,12 @@ The second method:
"""Set up the environment of packages that depend on this one""" """Set up the environment of packages that depend on this one"""
pass pass
can instead inject run-time environment modifications in the module files of packages and has similar effects on module file of dependees. Even in this case
that depend on it. In both cases you need to fill ``run_env`` with the desired ``run_env`` must be filled with the desired list of environment modifications.
list of environment modifications.
.. note:: .. note::
The ``r`` package and callback APIs The ``r`` package and callback APIs
An example in which it is crucial to override both methods A typical example in which overriding both methods prove to be useful
is given by the ``r`` package. This package installs libraries and headers is given by the ``r`` package. This package installs libraries and headers
in non-standard locations and it is possible to prepend the appropriate directory in non-standard locations and it is possible to prepend the appropriate directory
to the corresponding environment variables: to the corresponding environment variables:
@@ -364,36 +375,37 @@ list of environment modifications.
it appropriately in the override of the second method: it appropriately in the override of the second method:
.. literalinclude:: ../../../var/spack/repos/builtin/packages/r/package.py .. literalinclude:: ../../../var/spack/repos/builtin/packages/r/package.py
:pyobject: R.setup_dependent_environment :lines: 128-129,146-151
.. _modules-yaml: .. _modules-yaml:
^^^^^^^^^^^^^^^^^^^^^^^^^^ ---------------------------------
Write a configuration file Configuration in ``modules.yaml``
^^^^^^^^^^^^^^^^^^^^^^^^^^ ---------------------------------
The configuration files that control module generation behavior The name of the configuration file that controls module generation behavior
are named ``modules.yaml``. The default configuration: is ``modules.yaml``. The default configuration:
.. literalinclude:: ../../../etc/spack/defaults/modules.yaml .. literalinclude:: ../../../etc/spack/defaults/modules.yaml
:language: yaml :language: yaml
activates the hooks to generate ``tcl`` and ``dotkit`` module files and inspects activates generation for ``tcl`` and ``dotkit`` module files and inspects
the installation folder of each package for the presence of a set of subdirectories the installation folder of each package for the presence of a set of subdirectories
(``bin``, ``man``, ``share/man``, etc.). If any is found its full path is prepended (``bin``, ``man``, ``share/man``, etc.). If any is found its full path is prepended
to the environment variables listed below the folder name. to the environment variables listed below the folder name.
"""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activate other hooks Activation of other systems
"""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^
Any other module file generator shipped with Spack can be activated adding it to the Any other module file generator shipped with Spack can be activated adding it to the
list under the ``enable`` key in the module file. Currently the only generator that list under the ``enable`` key in the module file. Currently the only generator that
is not active by default is ``lmod``, which produces hierarchical lua module files. is not activated by default is ``lmod``, which produces hierarchical lua module files.
For each module system that can be enabled a finer configuration is possible.
Each module system can then be configured separately. In fact, you should list configuration Directives that are aimed at driving the generation of a particular type of module files
options that affect a particular type of module files under a top level key corresponding should be listed under a top level key that corresponds to the generator being
to the generator being customized: customized:
.. code-block:: yaml .. code-block:: yaml
@@ -409,21 +421,24 @@ to the generator being customized:
lmod: lmod:
# contains lmod specific customizations # contains lmod specific customizations
In general, the configuration options that you can use in ``modules.yaml`` will All these module sections allow for both:
either change the layout of the module files on the filesystem, or they will affect
their content. For the latter point it is possible to use anonymous specs 1. global directives that usually affect the whole layout of modules or the naming scheme
to fine tune the set of packages on which the modifications should be applied. 2. directives that affect only a set of packages and modify their content
For the latter point in particular it is possible to use anonymous specs
to select an appropriate set of packages on which the modifications should be applied.
.. _anonymous_specs: .. _anonymous_specs:
"""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Selection by anonymous specs Selection by anonymous specs
"""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
In the configuration file you can use *anonymous specs* (i.e. specs The procedure to select packages using anonymous specs is a natural
that **are not required to have a root package** and are thus used just extension of using them to install packages, the only difference being
to express constraints) to apply certain modifications on a selected set that specs in this case **are not required to have a root package**.
of the installed software. For instance, in the snippet below: Consider for instance this snippet:
.. code-block:: yaml .. code-block:: yaml
@@ -452,7 +467,8 @@ of the installed software. For instance, in the snippet below:
unset: unset:
- FOOBAR - FOOBAR
you are instructing Spack to set the environment variable ``BAR=bar`` for every module, During module file generation, the configuration above will instruct
Spack to set the environment variable ``BAR=bar`` for every module,
unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``. unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
In addition in any spec that satisfies ``zlib`` the value ``foo`` will be In addition in any spec that satisfies ``zlib`` the value ``foo`` will be
prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8`` prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
@@ -464,15 +480,15 @@ the variable ``FOOBAR`` will be unset.
first, no matter where they appear in the configuration file. All the other first, no matter where they appear in the configuration file. All the other
spec constraints are instead evaluated top to bottom. spec constraints are instead evaluated top to bottom.
"""""""""""""""""""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Blacklist or whitelist specific module files Blacklist or whitelist the generation of specific module files
"""""""""""""""""""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can use anonymous specs also to prevent module files from being written or Anonymous specs are also used to prevent module files from being written or
to force them to be written. Consider the case where you want to hide from users to force them to be written. A common case for that at HPC centers is to hide
all the boilerplate software that you had to build in order to bootstrap a new from users all of the software that needs to be built with system compilers.
compiler. Suppose for instance that ``gcc@4.4.7`` is the compiler provided by Suppose for instance to have ``gcc@4.4.7`` provided by your system. Then
your system. If you write a configuration file like: with a configuration file like this one:
.. code-block:: yaml .. code-block:: yaml
@@ -481,13 +497,13 @@ your system. If you write a configuration file like:
whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
you will prevent the generation of module files for any package that you will skip the generation of module files for any package that
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc`` is compiled with ``gcc@4.4.7``, with the exception of any ``gcc``
or any ``llvm`` installation. or any ``llvm`` installation.
""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^
Customize the naming scheme Customize the naming scheme
""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^
The names of environment modules generated by spack are not always easy to The names of environment modules generated by spack are not always easy to
fully comprehend due to the long hash in the name. There are two module fully comprehend due to the long hash in the name. There are two module
@@ -535,9 +551,7 @@ most likely via the ``+blas`` variant specification.
tcl: tcl:
naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}' naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
all: all:
conflict: conflict: ['${PACKAGE}', 'intel/14.0.1']
- '${PACKAGE}'
- 'intel/14.0.1'
will create module files that will conflict with ``intel/14.0.1`` and with the will create module files that will conflict with ``intel/14.0.1`` and with the
base directory of the same module, effectively preventing the possibility to base directory of the same module, effectively preventing the possibility to
@@ -549,9 +563,9 @@ most likely via the ``+blas`` variant specification.
.. note:: .. note::
LMod hierarchical module files LMod hierarchical module files
When ``lmod`` is activated Spack will generate a set of hierarchical lua module When ``lmod`` is activated Spack will generate a set of hierarchical lua module
files that are understood by LMod. The hierarchy will always contain the files that are understood by LMod. The generated hierarchy always contains the
two layers ``Core`` / ``Compiler`` but can be further extended to three layers ``Core`` / ``Compiler`` / ``MPI`` but can be further extended to
any of the virtual dependencies present in Spack. A case that could be useful in any other virtual dependency present in Spack. A case that could be useful in
practice is for instance: practice is for instance:
.. code-block:: yaml .. code-block:: yaml
@@ -560,14 +574,11 @@ most likely via the ``+blas`` variant specification.
enable: enable:
- lmod - lmod
lmod: lmod:
core_compilers: core_compilers: ['gcc@4.8']
- 'gcc@4.8' hierarchical_scheme: ['lapack']
hierarchy:
- 'mpi'
- 'lapack'
that will generate a hierarchy in which the ``lapack`` and ``mpi`` layer can be switched that will generate a hierarchy in which the ``lapack`` layer is treated as the ``mpi``
independently. This allows a site to build the same libraries or applications against different one. This allows a site to build the same libraries or applications against different
implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the
other. other.
@@ -576,14 +587,15 @@ most likely via the ``+blas`` variant specification.
For hierarchies that are deeper than three layers ``lmod spider`` may have some issues. For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_. See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.
"""""""""""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Filter out environment modifications Filter out environment modifications
"""""""""""""""""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Modifications to certain environment variables in module files are there by Modifications to certain environment variables in module files are generated by
default, for instance because they are generated by prefix inspections. default, for instance by prefix inspections in the default configuration file.
If you want to prevent modifications to some environment variables, you can There are cases though where some of these modifications are unwanted.
do so by using the environment blacklist: Suppose you need to avoid having ``CPATH`` and ``LIBRARY_PATH``
modified by your ``dotkit`` modules:
.. code-block:: yaml .. code-block:: yaml
@@ -598,11 +610,11 @@ The configuration above will generate dotkit module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH`` and environment module modifications to either ``CPATH`` or ``LIBRARY_PATH`` and environment module
files that instead will contain these modifications. files that instead will contain these modifications.
""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^
Autoload dependencies Autoload dependencies
""""""""""""""""""""" ^^^^^^^^^^^^^^^^^^^^^
In some cases it can be useful to have module files that automatically load In some cases it can be useful to have module files directly autoload
their dependencies. This may be the case for Python extensions, if not their dependencies. This may be the case for Python extensions, if not
activated using ``spack activate``: activated using ``spack activate``:
@@ -614,9 +626,8 @@ activated using ``spack activate``:
autoload: 'direct' autoload: 'direct'
The configuration file above will produce module files that will The configuration file above will produce module files that will
load their direct dependencies if the package installed depends on ``python``. automatically load their direct dependencies. The allowed values for the
The allowed values for the ``autoload`` statement are either ``none``, ``autoload`` statement are either ``none``, ``direct`` or ``all``.
``direct`` or ``all``.
.. note:: .. note::
TCL prerequisites TCL prerequisites

View File

@@ -1,12 +0,0 @@
.. _package-list:
============
Package List
============
This is a list of things you can install using Spack. It is
automatically generated based on the packages in the latest Spack
release.
.. raw:: html
:file: package_list.html

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +0,0 @@
# These dependencies should be installed using pip in order
# to build the documentation.
sphinx
sphinxcontrib-programoutput

View File

@@ -1,13 +1,13 @@
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -40,7 +40,7 @@
from spack import * from spack import *
class Mpileaks(Package): class Mpileaks(AutotoolsPackage):
"""FIXME: Put a proper description of your package here.""" """FIXME: Put a proper description of your package here."""
# FIXME: Add a proper url for your package's homepage here. # FIXME: Add a proper url for your package's homepage here.
@@ -50,9 +50,14 @@ class Mpileaks(Package):
version('1.0', '8838c574b39202a57d7c2d68692718aa') version('1.0', '8838c574b39202a57d7c2d68692718aa')
# FIXME: Add dependencies if required. # FIXME: Add dependencies if required.
# depends_on('m4', type='build')
# depends_on('autoconf', type='build')
# depends_on('automake', type='build')
# depends_on('libtool', type='build')
# depends_on('foo') # depends_on('foo')
def install(self, spec, prefix): def configure_args(self):
# FIXME: Unknown build system # FIXME: Add arguments other than --prefix
make() # FIXME: If not needed delete the function
make('install') args = []
return args

View File

@@ -1,13 +1,13 @@
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -25,18 +25,24 @@
from spack import * from spack import *
class Mpileaks(Package): class Mpileaks(AutotoolsPackage):
"""Tool to detect and report MPI objects like MPI_Requests and """Tool to detect and report MPI objects like MPI_Requests and
MPI_Datatypes.""" MPI_Datatypes."""
homepage = "https://github.com/hpc/mpileaks" homepage = "https://github.com/hpc/mpileaks"
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" # NOQA url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
version('1.0', '8838c574b39202a57d7c2d68692718aa') version('1.0', '8838c574b39202a57d7c2d68692718aa')
# FIXME: Add dependencies if required. # FIXME: Add dependencies if required.
# depends_on('m4', type='build')
# depends_on('autoconf', type='build')
# depends_on('automake', type='build')
# depends_on('libtool', type='build')
# depends_on('foo') # depends_on('foo')
def install(self, spec, prefix): def configure_args(self):
# FIXME: Unknown build system # FIXME: Add arguments other than --prefix
make() # FIXME: If not needed delete the function
make('install') args = []
return args

View File

@@ -1,13 +1,13 @@
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -25,7 +25,7 @@
from spack import * from spack import *
class Mpileaks(Package): class Mpileaks(AutotoolsPackage):
"""Tool to detect and report MPI objects like MPI_Requests and """Tool to detect and report MPI objects like MPI_Requests and
MPI_Datatypes.""" MPI_Datatypes."""
@@ -38,7 +38,8 @@ class Mpileaks(Package):
depends_on('adept-utils') depends_on('adept-utils')
depends_on('callpath') depends_on('callpath')
def install(self, spec, prefix): def configure_args(self):
# FIXME: Unknown build system # FIXME: Add arguments other than --prefix
make() # FIXME: If not needed delete the function
make('install') args = []
return args

View File

@@ -1,13 +1,13 @@
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -25,10 +25,9 @@
from spack import * from spack import *
class Mpileaks(Package): class Mpileaks(AutotoolsPackage):
"""Tool to detect and report MPI objects like MPI_Requests and """Tool to detect and report MPI objects like MPI_Requests and
MPI_Datatypes.""" MPI_Datatypes."""
homepage = "https://github.com/hpc/mpileaks" homepage = "https://github.com/hpc/mpileaks"
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
@@ -38,7 +37,7 @@ class Mpileaks(Package):
depends_on('adept-utils') depends_on('adept-utils')
depends_on('callpath') depends_on('callpath')
def install(self, spec, prefix): def configure_args(self):
configure() args = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
make() '--with-callpath=%s' % self.spec['callpath'].prefix]
make('install') return args

View File

@@ -1,13 +1,13 @@
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -25,7 +25,7 @@
from spack import * from spack import *
class Mpileaks(Package): class Mpileaks(AutotoolsPackage):
"""Tool to detect and report MPI objects like MPI_Requests and """Tool to detect and report MPI objects like MPI_Requests and
MPI_Datatypes.""" MPI_Datatypes."""
@@ -34,13 +34,17 @@ class Mpileaks(Package):
version('1.0', '8838c574b39202a57d7c2d68692718aa') version('1.0', '8838c574b39202a57d7c2d68692718aa')
variant('stackstart', default=0, description='Specify the number of stack frames to truncate.')
depends_on('mpi') depends_on('mpi')
depends_on('adept-utils') depends_on('adept-utils')
depends_on('callpath') depends_on('callpath')
def install(self, spec, prefix): def configure_args(self):
configure('--with-adept-utils=%s' % self.spec['adept-utils'].prefix, args = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
'--with-callpath=%s' % self.spec['callpath'].prefix, '--with-callpath=%s' % self.spec['callpath'].prefix]
'--prefix=%s' % self.spec.prefix) stackstart = int(self.spec.variants['stackstart'].value)
make() if stackstart:
make('install') args.extend(['--with-stack-start-c=%s' % stackstart,
'--with-stack-start-fortran=%s' % stackstart])
return args

View File

@@ -1,53 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mpileaks(Package):
    """Tool to detect and report MPI objects like MPI_Requests and
    MPI_Datatypes."""

    homepage = "https://github.com/hpc/mpileaks"
    url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"

    version('1.0', '8838c574b39202a57d7c2d68692718aa')

    variant('stackstart', values=int, default=0, description='Specify the number of stack frames to truncate.')

    depends_on('mpi')
    depends_on('adept-utils')
    depends_on('callpath')

    def install(self, spec, prefix):
        """Configure, build and install mpileaks into ``prefix``."""
        # Depth at which stack-frame truncation starts; 0 leaves it disabled.
        frame_cutoff = int(self.spec.variants['stackstart'].value)

        options = [
            '--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
            '--with-callpath=%s' % self.spec['callpath'].prefix,
            '--prefix=%s' % self.spec.prefix,
        ]
        # Forward the truncation flags only when a nonzero depth was requested.
        if frame_cutoff:
            options += ['--with-stack-start-c=%s' % frame_cutoff,
                        '--with-stack-start-fortran=%s' % frame_cutoff]

        configure(*options)
        make()
        make('install')

View File

@@ -1,46 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mpileaks(AutotoolsPackage):
    """Tool to detect and report leaked MPI objects like MPI_Requests and
    MPI_Datatypes."""

    # NOTE(review): the base class was spelled ``AutoToolsPackage``; Spack's
    # build-system class is ``AutotoolsPackage`` (lowercase 't'), so the
    # original would raise NameError at import time.

    homepage = "https://github.com/hpc/mpileaks"
    url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"

    version('1.0', '8838c574b39202a57d7c2d68692718aa')

    depends_on("mpi")
    depends_on("adept-utils")
    depends_on("callpath")

    def install(self, spec, prefix):
        """Configure, build and install, pointing configure at the
        dependency prefixes resolved by Spack."""
        configure("--prefix=" + prefix,
                  "--with-adept-utils=" + spec['adept-utils'].prefix,
                  "--with-callpath=" + spec['callpath'].prefix)
        make()
        make("install")

View File

@@ -1,51 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mpileaks(AutotoolsPackage):
    """Tool to detect and report leaked MPI objects like MPI_Requests and
    MPI_Datatypes."""

    # NOTE(review): base class renamed from the misspelled ``AutoToolsPackage``
    # to Spack's actual ``AutotoolsPackage``.

    homepage = "https://github.com/hpc/mpileaks"
    url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"

    version('1.0', '8838c574b39202a57d7c2d68692718aa')

    variant("stackstart", values=int, default=0,
            description="Specify the number of stack frames to truncate")

    depends_on("mpi")
    depends_on("adept-utils")
    depends_on("callpath")

    def configure_args(self):
        """Extra ./configure arguments (``--prefix`` is added by the base class)."""
        stackstart = int(self.spec.variants['stackstart'].value)
        # was: bare ``spec[...]`` — ``spec`` is not defined inside
        # configure_args (it is an install() parameter), which raised a
        # NameError; use ``self.spec`` instead.
        args = ["--with-adept-utils=" + self.spec['adept-utils'].prefix,
                "--with-callpath=" + self.spec['callpath'].prefix]
        if stackstart:
            args.extend(['--with-stack-start-c=%s' % stackstart,
                         '--with-stack-start-fortran=%s' % stackstart])
        return args

View File

@@ -1,60 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
# them, you can save this file and test your package like this:
#
# spack install callpath
#
# You can edit this file again by typing:
#
# spack edit callpath
#
# See the Spack documentation for more information on packaging.
# If you submit this package back to Spack as a pull request,
# please first remove this boilerplate and all FIXME comments.
#
from spack import *
class Callpath(CMakePackage):
    """FIXME: Put a proper description of your package here."""

    # FIXME: Add a proper url for your package's homepage here.
    homepage = "http://www.example.com"
    url = "https://github.com/llnl/callpath/archive/v1.0.1.tar.gz"

    version('1.0.3', 'c89089b3f1c1ba47b09b8508a574294a')

    # FIXME: Add dependencies if required.
    # depends_on('foo')

    def cmake_args(self):
        # FIXME: Add arguments other than
        # FIXME: CMAKE_INSTALL_PREFIX and CMAKE_BUILD_TYPE
        # FIXME: If not needed delete this function
        return []

View File

@@ -1,42 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Callpath(CMakePackage):
    """Library for representing callpaths consistently in
    distributed-memory performance tools."""

    homepage = "https://github.com/llnl/callpath"
    url = "https://github.com/llnl/callpath/archive/v1.0.3.tar.gz"

    version('1.0.3', 'c89089b3f1c1ba47b09b8508a574294a')

    # elf is declared as a link-time-only dependency.
    depends_on("elf", type="link")
    depends_on("libdwarf")
    depends_on("dyninst")
    depends_on("adept-utils")
    depends_on("mpi")
    # CMake 2.8+ is needed only to build, not at link or run time.
    depends_on("cmake@2.8:", type="build")

View File

@@ -1,52 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Callpath(CMakePackage):
    """Library for representing callpaths consistently in
    distributed-memory performance tools."""

    homepage = "https://github.com/llnl/callpath"
    url = "https://github.com/llnl/callpath/archive/v1.0.3.tar.gz"

    version('1.0.3', 'c89089b3f1c1ba47b09b8508a574294a')

    depends_on("elf", type="link")
    depends_on("libdwarf")
    depends_on("dyninst")
    depends_on("adept-utils")
    depends_on("mpi")
    depends_on("cmake@2.8:", type="build")

    def cmake_args(self):
        """Extra CMake arguments (install prefix and build type are added
        by the CMakePackage base class)."""
        args = ["-DCALLPATH_WALKER=dyninst"]
        if self.spec.satisfies("^dyninst@9.3.0:"):
            # was: ``std.flag = ...`` — attribute assignment on an undefined
            # name ``std`` (AttributeError/NameError); the local read below
            # expects ``std_flag``.  Also the flags string had an unbalanced
            # quote ("...='{0}' -fpermissive'").
            std_flag = self.compiler.cxx_flag
            args.append("-DCMAKE_CXX_FLAGS='{0} -fpermissive'".format(
                std_flag))
        return args

View File

@@ -1,45 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bowtie(MakefilePackage):
    """FIXME: Put a proper description of your package here."""

    # FIXME: Add a proper url for your package's homepage here.
    homepage = "http://www.example.com"
    url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"

    version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')

    # FIXME: Add dependencies if required.
    # depends_on('foo')

    def edit(self, spec, prefix):
        # FIXME: Edit the Makefile if necessary
        # FIXME: If not needed delete this function
        # makefile = FileFilter('Makefile')
        # makefile.filter('CC = .*', 'CC = cc')
        pass

View File

@@ -1,46 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bowtie(MakefilePackage):
    """Bowtie is an ultrafast, memory efficient short read aligner
    for short DNA sequences (reads) from next-gen sequencers."""

    homepage = "https://sourceforge.net/projects/bowtie-bio/"
    url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"

    version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')

    variant("tbb", default=False, description="Use Intel thread building block")

    # TBB is only pulled in when the variant requests it.
    depends_on("tbb", when="+tbb")

    def edit(self, spec, prefix):
        # FIXME: Edit the Makefile if necessary
        # FIXME: If not needed delete this function
        # makefile = FileFilter('Makefile')
        # makefile.filter('CC = .*', 'CC = cc')
        pass

View File

@@ -1,44 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bowtie(MakefilePackage):
    """Bowtie is an ultrafast, memory efficient short read aligner
    for short DNA sequences (reads) from next-gen sequencers."""

    homepage = "https://sourceforge.net/projects/bowtie-bio/"
    url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"

    version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')

    variant("tbb", default=False, description="Use Intel thread building block")

    depends_on("tbb", when="+tbb")

    def edit(self, spec, prefix):
        """Point the Makefile's compiler variables at Spack's wrappers."""
        makefile = FileFilter("Makefile")
        # was: 'CC= .*' — missing the space before '=' that the CXX pattern
        # below has, so a Makefile line written as 'CC = ...' would never be
        # matched and the C compiler would not be substituted.
        # TODO confirm against the bowtie 1.2.1.1 Makefile's exact spelling.
        makefile.filter('CC = .*', 'CC = ' + env['CC'])
        makefile.filter('CXX = .*', 'CXX = ' + env['CXX'])

View File

@@ -1,53 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Bowtie(MakefilePackage):
    """Bowtie is an ultrafast, memory efficient short read aligner
    for short DNA sequences (reads) from next-gen sequencers."""

    homepage = "https://sourceforge.net/projects/bowtie-bio/"
    url = "https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip"

    # MD5 checksum of the 1.2.1.1 source archive.
    version('1.2.1.1', 'ec06265730c5f587cd58bcfef6697ddf')

    # Optional Intel Threading Building Blocks support; off by default.
    variant("tbb", default=False, description="Use Intel thread building block")
    depends_on("tbb", when="+tbb")

    def edit(self, spec, prefix):
        """Rewrite the Makefile so it uses Spack's compiler wrappers."""
        makefile = FileFilter("Makefile")
        # Fix: the pattern was 'CC= .*' (no space before '='), which does
        # not match the Makefile's 'CC = ...' assignment, so the C compiler
        # was never substituted. Use the same form as the CXX filter below.
        makefile.filter('CC = .*', 'CC = ' + env['CC'])
        makefile.filter('CXX = .*', 'CXX = ' + env['CXX'])

    def build(self, spec, prefix):
        """Build bowtie, with or without TBB threading support."""
        if "+tbb" in spec:
            make()
        else:
            # NO_TBB=1 selects bowtie's pthreads code path instead of TBB.
            make("NO_TBB=1")

    def install(self, spec, prefix):
        """Install bowtie's binaries into the Spack prefix."""
        make('prefix={0}'.format(self.prefix), 'install')

View File

@@ -1,60 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
# them, you can save this file and test your package like this:
#
# spack install py-pandas
#
# You can edit this file again by typing:
#
# spack edit py-pandas
#
# See the Spack documentation for more information on packaging.
# If you submit this package back to Spack as a pull request,
# please first remove this boilerplate and all FIXME comments.
#
from spack import *
class PyPandas(PythonPackage):
    """pandas: fast, flexible, and expressive data structures for
    practical, real-world data analysis in Python."""

    # Fix: this file still contained the raw `spack create` boilerplate —
    # a FIXME description, the example.com homepage, and all required
    # dependencies commented out. Fill in the real metadata.
    homepage = "http://pandas.pydata.org/"
    url = "https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz"

    # MD5 checksum of the PyPI source tarball.
    version('0.19.0', 'bc9bb7188e510b5d44fbdd249698a2c3')

    # setuptools is needed to run pandas' setup.py at build time.
    depends_on('py-setuptools', type='build')
    # Hard runtime requirements declared by pandas itself.
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-dateutil', type=('build', 'run'))
    depends_on('py-pytz', type=('build', 'run'))

    def build_args(self, spec, prefix):
        """Extra arguments for `setup.py build`.

        pandas needs nothing beyond the defaults that PythonPackage
        already supplies, so return an empty list.
        """
        args = []
        return args

View File

@@ -1,51 +0,0 @@
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPandas(PythonPackage):
    """pandas is a Python package providing fast, flexible, and expressive
    data structures designed to make working with relational or
    labeled data both easy and intuitive. It aims to be the
    fundamental high-level building block for doing practical, real
    world data analysis in Python. Additionally, it has the broader
    goal of becoming the most powerful and flexible open source data
    analysis / manipulation tool available in any language.
    """

    homepage = "http://pandas.pydata.org/"
    url = "https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz"

    # Newest release first; hashes are MD5 checksums of the PyPI tarballs.
    version('0.19.0', 'bc9bb7188e510b5d44fbdd249698a2c3')
    version('0.18.0', 'f143762cd7a59815e348adf4308d2cf6')
    version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8')
    version('0.16.0', 'bfe311f05dc0c351f8955fbd1e296e73')

    # Build-only tooling.
    depends_on('py-setuptools', type='build')
    depends_on('py-cython', type='build')

    # Needed both to build and to run pandas.
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-dateutil', type=('build', 'run'))
    depends_on('py-pytz', type=('build', 'run'))
    depends_on('py-numexpr', type=('build', 'run'))
    depends_on('py-bottleneck', type=('build', 'run'))

View File

@@ -1,512 +0,0 @@
.. _advanced-packaging-tutorial:
============================
Advanced Topics in Packaging
============================
Spack tries to automatically configure packages with information from
dependencies such that all you need to do is to list the dependencies
(i.e. with the ``depends_on`` directive) and the build system (for example
by deriving from :code:`CmakePackage`).
However, there are many special cases. Often you need to retrieve details
about dependencies to set package-specific configuration options, or to
define package-specific environment variables used by the package's build
system. This tutorial covers how to retrieve build information from
dependencies, and how you can automatically provide important information to
dependents in your package.
----------------------
Setup for the tutorial
----------------------
The simplest way to follow along with this tutorial is to use our Docker image,
which comes with Spack and various packages pre-installed:
.. code-block:: console
$ docker pull alalazo/spack:advanced_packaging_tutorial
$ docker run --rm -h advanced-packaging-tutorial -it alalazo/spack:advanced_packaging_tutorial
root@advanced-packaging-tutorial:/#
root@advanced-packaging-tutorial:/# spack find
==> 20 installed packages.
-- linux-ubuntu16.04-x86_64 / gcc@5.4.0 -------------------------
arpack-ng@3.5.0 hdf5@1.10.1 libpciaccess@0.13.5 libtool@2.4.6 m4@1.4.18 ncurses@6.0 openblas@0.2.20 openssl@1.0.2k superlu@5.2.1 xz@5.2.3
cmake@3.9.4 hwloc@1.11.8 libsigsegv@2.11 libxml2@2.9.4 mpich@3.2 netlib-lapack@3.6.1 openmpi@3.0.0 pkg-config@0.29.2 util-macros@1.19.1 zlib@1.2.11
If you already started the image, you can set the ``EDITOR`` environment
variable to your preferred editor (``vi``, ``emacs``, and ``nano`` are included in the image)
and move directly to :ref:`adv_pkg_tutorial_start`.
If you choose not to use the Docker image, you can clone the Spack repository
and build the necessary bits yourself:
.. code-block:: console
$ git clone https://github.com/spack/spack.git
Cloning into 'spack'...
remote: Counting objects: 92731, done.
remote: Compressing objects: 100% (1108/1108), done.
remote: Total 92731 (delta 1964), reused 4186 (delta 1637), pack-reused 87932
Receiving objects: 100% (92731/92731), 33.31 MiB | 64.00 KiB/s, done.
Resolving deltas: 100% (43557/43557), done.
Checking connectivity... done.
$ cd spack
$ git checkout tutorials/advanced_packaging
Branch tutorials/advanced_packaging set up to track remote branch tutorials/advanced_packaging from origin.
Switched to a new branch 'tutorials/advanced_packaging'
At this point you can install the software that will be used
during the rest of the tutorial (the output of the commands is omitted
for the sake of brevity):
.. code-block:: console
$ spack install openblas
$ spack install netlib-lapack
$ spack install mpich
$ spack install openmpi
$ spack install --only=dependencies armadillo ^openblas
$ spack install --only=dependencies netcdf
$ spack install --only=dependencies elpa
Now, you are ready to set your preferred ``EDITOR`` and continue with
the rest of the tutorial.
.. _adv_pkg_tutorial_start:
------------------------------
Retrieving library information
------------------------------
Although Spack attempts to help packages locate their dependency libraries
automatically (e.g. by setting PKG_CONFIG_PATH and CMAKE_PREFIX_PATH), a
package may have unique configuration options that are required to locate
libraries. When a package needs information about dependency libraries, the
general approach in Spack is to query the dependencies for the locations of
their libraries and set configuration options accordingly. By default most
Spack packages know how to automatically locate their libraries. This section
covers how to retrieve library information from dependencies and how to locate
libraries when the default logic doesn't work.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Accessing dependency libraries
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you need to access the libraries of a dependency, you can do so
via the ``libs`` property of the spec, for example in the ``arpack-ng``
package:
.. code-block:: python
def install(self, spec, prefix):
lapack_libs = spec['lapack'].libs.joined(';')
blas_libs = spec['blas'].libs.joined(';')
cmake(*[
'-DLAPACK_LIBRARIES={0}'.format(lapack_libs),
'-DBLAS_LIBRARIES={0}'.format(blas_libs)
], '.')
Note that ``arpack-ng`` is querying virtual dependencies, which Spack
automatically resolves to the installed implementation (e.g. ``openblas``
for ``blas``).
We've started work on a package for ``armadillo``. You should open it,
read through the comment that starts with ``# TUTORIAL:`` and complete
the ``cmake_args`` section:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack edit armadillo
If you followed the instructions in the package, when you are finished your
``cmake_args`` method should look like:
.. code-block:: python
def cmake_args(self):
spec = self.spec
return [
# ARPACK support
'-DARPACK_LIBRARY={0}'.format(spec['arpack-ng'].libs.joined(";")),
# BLAS support
'-DBLAS_LIBRARY={0}'.format(spec['blas'].libs.joined(";")),
# LAPACK support
'-DLAPACK_LIBRARY={0}'.format(spec['lapack'].libs.joined(";")),
# SuperLU support
'-DSuperLU_INCLUDE_DIR={0}'.format(spec['superlu'].prefix.include),
'-DSuperLU_LIBRARY={0}'.format(spec['superlu'].libs.joined(";")),
# HDF5 support
'-DDETECT_HDF5={0}'.format('ON' if '+hdf5' in spec else 'OFF')
]
As you can see, getting the list of libraries that your dependencies provide
is as easy as accessing their ``libs`` attribute. Furthermore, the interface
remains the same whether you are querying regular or virtual dependencies.
At this point you can complete the installation of ``armadillo`` using ``openblas``
as a LAPACK provider:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack install armadillo ^openblas
==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
...
==> superlu is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/superlu-5.2.1-q2mbtw2wo4kpzis2e2n227ip2fquxrno
==> Installing armadillo
==> Using cached archive: /usr/local/var/spack/cache/armadillo/armadillo-8.100.1.tar.xz
==> Staging archive: /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4/armadillo-8.100.1.tar.xz
==> Created stage in /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
==> Applied patch undef_linux.patch
==> Building armadillo [CMakePackage]
==> Executing phase: 'cmake'
==> Executing phase: 'build'
==> Executing phase: 'install'
==> Successfully installed armadillo
Fetch: 0.01s. Build: 3.96s. Total: 3.98s.
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
Hopefully the installation went fine and the code we added expanded to the right list
of semicolon separated libraries (you are encouraged to open ``armadillo``'s
build logs to double check).
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Providing libraries to dependents
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack provides a default implementation for ``libs`` which often works
out of the box. A user can write a package definition without having to
implement a ``libs`` property and dependents can retrieve its libraries
as shown in the above section. However, the default implementation assumes that
libraries follow the naming scheme ``lib<package name>.so`` (or e.g.
``lib<package name>.a`` for static libraries). Packages which don't
follow this naming scheme must implement this function themselves, e.g.
``opencv``:
.. code-block:: python
@property
def libs(self):
shared = "+shared" in self.spec
return find_libraries(
"libopencv_*", root=self.prefix, shared=shared, recurse=True
)
This issue is common for packages which implement an interface (i.e.
virtual package providers in Spack). If we try to build another version of
``armadillo`` tied to ``netlib-lapack`` we'll notice that this time the
installation won't complete:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack install armadillo ^netlib-lapack
==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
...
==> openmpi is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f
==> Installing arpack-ng
==> Using cached archive: /usr/local/var/spack/cache/arpack-ng/arpack-ng-3.5.0.tar.gz
==> Already staged arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un in /usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un
==> No patches needed for arpack-ng
==> Building arpack-ng [Package]
==> Executing phase: 'install'
==> Error: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
RuntimeError: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
/usr/local/var/spack/repos/builtin/packages/arpack-ng/package.py:105, in install:
5 options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
6
7 # Make sure we use Spack's blas/lapack:
>> 8 lapack_libs = spec['lapack'].libs.joined(';')
9 blas_libs = spec['blas'].libs.joined(';')
10
11 options.extend([
See build log for details:
/usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un/arpack-ng-3.5.0/spack-build.out
Unlike ``openblas`` which provides a library named ``libopenblas.so``,
``netlib-lapack`` provides ``liblapack.so``, so it needs to implement
customized library search logic. Let's edit it:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack edit netlib-lapack
and follow the instructions in the ``# TUTORIAL:`` comment as before.
What we need to implement is:
.. code-block:: python
@property
def lapack_libs(self):
shared = True if '+shared' in self.spec else False
return find_libraries(
'liblapack', root=self.prefix, shared=shared, recurse=True
)
i.e. a property that returns the correct list of libraries for the LAPACK interface.
We use the name ``lapack_libs`` rather than ``libs`` because
``netlib-lapack`` can also provide ``blas``, and when it does it is provided
as a separate library file. Using this name ensures that when
dependents ask for ``lapack`` libraries, ``netlib-lapack`` will retrieve only
the libraries associated with the ``lapack`` interface. Now we can finally
install ``armadillo ^netlib-lapack``:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack install armadillo ^netlib-lapack
...
==> Building armadillo [CMakePackage]
==> Executing phase: 'cmake'
==> Executing phase: 'build'
==> Executing phase: 'install'
==> Successfully installed armadillo
Fetch: 0.01s. Build: 3.75s. Total: 3.76s.
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/armadillo-8.100.1-sxmpu5an4dshnhickh6ykchyfda7jpyn
Since each implementation of a virtual package is responsible for locating the
libraries associated with the interfaces it provides, dependents do not need
to include special-case logic for different implementations and for example
need only ask for :code:`spec['blas'].libs`.
---------------------------------------
Modifying a package's build environment
---------------------------------------
Spack sets up several environment variables like PATH by default to aid in
building a package, but many packages make use of environment variables which
convey specific information about their dependencies, for example MPICC. This
section covers how to update your Spack packages so that package-specific
environment variables are defined at build-time.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Set environment variables in dependent packages at build-time
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dependencies can set environment variables that are required when their
dependents build. For example, when a package depends on a python extension
like py-numpy, Spack's ``python`` package will add it to ``PYTHONPATH``
so it is available at build time; this is required because the default setup
that spack does is not sufficient for python to import modules.
To provide environment setup for a dependent, a package can implement the
:py:func:`setup_dependent_environment <spack.package.PackageBase.setup_dependent_environment>`
function. This function takes as a parameter a :py:class:`EnvironmentModifications <spack.environment.EnvironmentModifications>`
object which includes convenience methods to update the environment. For
example an MPI implementation can set ``MPICC`` for packages that depend on it:
.. code-block:: python
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
In this case packages which depend on ``mpi`` will have ``MPICC`` defined in
their environment when they build. This section is focused on modifying the
build-time environment represented by ``spack_env``, but it's worth noting that
modifications to ``run_env`` are included in Spack's automatically-generated
module files.
We can practice by editing the ``mpich`` package to set the ``MPICC``
environment variable in the build-time environment of dependent packages.
.. code-block:: console
root@advanced-packaging-tutorial:/# spack edit mpich
Once you're finished the method should look like this:
.. code-block:: python
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpic++'))
spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
spack_env.set('MPICH_CC', spack_cc)
spack_env.set('MPICH_CXX', spack_cxx)
spack_env.set('MPICH_F77', spack_f77)
spack_env.set('MPICH_F90', spack_fc)
spack_env.set('MPICH_FC', spack_fc)
At this point we can, for instance, install ``netlib-scalapack``:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack install netlib-scalapack ^mpich
...
==> Created stage in /usr/local/var/spack/stage/netlib-scalapack-2.0.2-km7tsbgoyyywonyejkjoojskhc5knz3z
==> No patches needed for netlib-scalapack
==> Building netlib-scalapack [CMakePackage]
==> Executing phase: 'cmake'
==> Executing phase: 'build'
==> Executing phase: 'install'
==> Successfully installed netlib-scalapack
Fetch: 0.01s. Build: 3m 59.86s. Total: 3m 59.87s.
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-scalapack-2.0.2-km7tsbgoyyywonyejkjoojskhc5knz3z
and double check the environment logs to verify that every variable was
set to the correct value.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Set environment variables in your own package
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Packages can modify their own build-time environment by implementing the
:py:func:`setup_environment <spack.package.PackageBase.setup_environment>` function.
For ``qt`` this looks like:
.. code-block:: python
def setup_environment(self, spack_env, run_env):
spack_env.set('MAKEFLAGS', '-j{0}'.format(make_jobs))
run_env.set('QTDIR', self.prefix)
When ``qt`` builds, ``MAKEFLAGS`` will be defined in the environment.
To contrast with ``qt``'s :py:func:`setup_dependent_environment <spack.package.PackageBase.setup_dependent_environment>`
function:
.. code-block:: python
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
spack_env.set('QTDIR', self.prefix)
Let's see how it works by completing the ``elpa`` package:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack edit elpa
In the end your method should look like:
.. code-block:: python
def setup_environment(self, spack_env, run_env):
spec = self.spec
spack_env.set('CC', spec['mpi'].mpicc)
spack_env.set('FC', spec['mpi'].mpifc)
spack_env.set('CXX', spec['mpi'].mpicxx)
spack_env.set('SCALAPACK_LDFLAGS', spec['scalapack'].libs.joined())
spack_env.append_flags('LDFLAGS', spec['lapack'].libs.search_flags)
spack_env.append_flags('LIBS', spec['lapack'].libs.link_flags)
At this point it's possible to proceed with the installation of ``elpa``.
----------------------
Other Packaging Topics
----------------------
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Attach attributes to other packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Build tools usually also provide a set of executables that can be used
when another package is being installed. Spack gives the opportunity
to monkey-patch dependent modules and attach attributes to them. This
helps make the packager experience as similar as possible to what would
have been the manual installation of the same package.
An example here is the ``automake`` package, which overrides
:py:func:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`:
.. code-block:: python
def setup_dependent_package(self, module, dependent_spec):
# Automake is very likely to be a build dependency,
# so we add the tools it provides to the dependent module
executables = ['aclocal', 'automake']
for name in executables:
setattr(module, name, self._make_executable(name))
so that every other package that depends on it can use directly ``aclocal``
and ``automake`` with the usual function call syntax of :py:class:`Executable <spack.util.executable.Executable>`:
.. code-block:: python
aclocal('--force')
^^^^^^^^^^^^^^^^^^^^^^^
Extra query parameters
^^^^^^^^^^^^^^^^^^^^^^^
An advanced feature of the Spec's build-interface protocol is the support
for extra parameters after the subscript key. In fact, any of the keys used in the query
can be followed by a comma separated list of extra parameters which can be
inspected by the package receiving the request to fine-tune a response.
Let's look at an example and try to install ``netcdf``:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack install netcdf
==> libsigsegv is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/libsigsegv-2.11-fypapcprssrj3nstp6njprskeyynsgaz
==> m4 is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/m4-1.4.18-r5envx3kqctwwflhd4qax4ahqtt6x43a
...
==> Error: AttributeError: 'list' object has no attribute 'search_flags'
AttributeError: AttributeError: 'list' object has no attribute 'search_flags'
/usr/local/var/spack/repos/builtin/packages/netcdf/package.py:207, in configure_args:
50 # used instead.
51 hdf5_hl = self.spec['hdf5:hl']
52 CPPFLAGS.append(hdf5_hl.headers.cpp_flags)
>> 53 LDFLAGS.append(hdf5_hl.libs.search_flags)
54
55 if '+parallel-netcdf' in self.spec:
56 config_args.append('--enable-pnetcdf')
See build log for details:
/usr/local/var/spack/stage/netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj/netcdf-4.4.1.1/spack-build.out
We can see from the error that ``netcdf`` needs to know how to link the *high-level interface*
of ``hdf5``, and thus passes the extra parameter ``hl`` after the request to retrieve it.
Clearly the implementation in the ``hdf5`` package is not complete, and we need to fix it:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack edit hdf5
If you followed the instructions correctly, the code added to the
``lib`` property should be similar to:
.. code-block:: python
:emphasize-lines: 1
query_parameters = self.spec.last_query.extra_parameters
key = tuple(sorted(query_parameters))
libraries = query2libraries[key]
shared = '+shared' in self.spec
return find_libraries(
libraries, root=self.prefix, shared=shared, recurse=True
)
where we highlighted the line retrieving the extra parameters. Now we can successfully
complete the installation of ``netcdf``:
.. code-block:: console
root@advanced-packaging-tutorial:/# spack install netcdf
==> libsigsegv is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/libsigsegv-2.11-fypapcprssrj3nstp6njprskeyynsgaz
==> m4 is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/m4-1.4.18-r5envx3kqctwwflhd4qax4ahqtt6x43a
...
==> Installing netcdf
==> Using cached archive: /usr/local/var/spack/cache/netcdf/netcdf-4.4.1.1.tar.gz
==> Already staged netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj in /usr/local/var/spack/stage/netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj
==> Already patched netcdf
==> Building netcdf [AutotoolsPackage]
==> Executing phase: 'autoreconf'
==> Executing phase: 'configure'
==> Executing phase: 'build'
==> Executing phase: 'install'
==> Successfully installed netcdf
Fetch: 0.01s. Build: 24.61s. Total: 24.62s.
[+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netcdf-4.4.1.1-gk2xxhbqijnrdwicawawcll4t3c7dvoj

File diff suppressed because it is too large Load Diff

View File

@@ -1,795 +0,0 @@
.. _build-systems-tutorial:
==============================
Spack Package Build Systems
==============================
You may begin to notice after writing a couple of package template files a
pattern emerge for some packages. For example, you may find yourself writing
an :code:`install()` method that invokes: :code:`configure`, :code:`cmake`,
:code:`make`, :code:`make install`. You may also find yourself writing
:code:`"prefix=" + prefix` as an argument to :code:`configure` or :code:`cmake`.
Rather than having you repeat these lines for all packages, Spack has
classes that can take care of these patterns. In addition,
these package files allow for finer grained control of these build systems.
In this section, we will describe each build system and give examples on
how these can be manipulated to install a package.
-----------------------
Package Class Hierarchy
-----------------------
.. graphviz::
digraph G {
node [
shape = "record"
]
edge [
arrowhead = "empty"
]
PackageBase -> Package [dir=back]
PackageBase -> MakefilePackage [dir=back]
PackageBase -> AutotoolsPackage [dir=back]
PackageBase -> CMakePackage [dir=back]
PackageBase -> PythonPackage [dir=back]
}
The above diagram gives a high level view of the class hierarchy and how each
package relates. Each subclass inherits from the :code:`PackageBase`
super class. The bulk of the work is done in this super class which includes
fetching, extracting to a staging directory and installing. Each subclass
then adds additional build-system-specific functionality. In the following
sections, we will go over examples of how to utilize each subclass and to see
how powerful these abstractions are when packaging.
-----------------
Package
-----------------
We've already seen examples of a :code:`Package` class in our walkthrough for writing
package files, so we won't be spending much time with them here. Briefly,
the Package class allows for arbitrary control over the build process, whereas
subclasses rely on certain patterns (e.g. :code:`configure` :code:`make`
:code:`make install`) to be useful. :code:`Package` classes are particularly useful
for packages that have a non-conventional way of being built since the packager
can utilize some of Spack's helper functions to customize the building and
installing of a package.
-------------------
Autotools
-------------------
As we have seen earlier, packages using :code:`Autotools` use :code:`configure`,
:code:`make` and :code:`make install` commands to execute the build and
install process. In our :code:`Package` class, your typical build incantation will
consist of the following:
.. code-block:: python
def install(self, spec, prefix):
configure("--prefix=" + prefix)
make()
make("install")
You'll see that this looks similar to what we wrote in our packaging tutorial.
The :code:`Autotools` subclass aims to simplify writing package files and provides
convenience methods to manipulate each of the different phases for a :code:`Autotools`
build system.
:code:`Autotools` packages consist of four phases:
1. :code:`autoreconf()`
2. :code:`configure()`
3. :code:`build()`
4. :code:`install()`
Each of these phases has sensible defaults. Let's take a quick look at some of
the internals of the :code:`Autotools` class:
.. code-block:: console
$ spack edit --build-system autotools
This will open the :code:`AutotoolsPackage` file in your text editor.
.. note::
The examples showing code for these classes is abridged to avoid having
long examples. We only show what is relevant to the packager.
.. literalinclude:: ../../../lib/spack/spack/build_systems/autotools.py
:language: python
:emphasize-lines: 42,45,62
:lines: 40-95,259-267
:linenos:
Important to note are the highlighted lines. These properties allow the
packager to set what build targets and install targets they want for their
package. If, for example, we wanted to add as our build target :code:`foo`
then we can append to our :code:`build_targets` property:
.. code-block:: python
build_targets = ["foo"]
Which is similar to invoking make in our Package
.. code-block:: python
make("foo")
This is useful if we have packages that ignore environment variables and need
a command-line argument.
Another thing to take note of is in the :code:`configure()` method.
Here we see that the :code:`prefix` argument is already included since it is a
common pattern amongst packages using :code:`Autotools`. We then only have to
override :code:`configure_args()`, which will then return its output
to :code:`configure()`. Then, :code:`configure()` will append the common
arguments.
Packagers also have the option to run :code:`autoreconf` in case a package
needs to update the build system and generate a new :code:`configure`. Though,
for the most part this will be unnecessary.
Let's look at the :code:`mpileaks` package.py file that we worked on earlier:
.. code-block:: console
$ spack edit mpileaks
Notice that mpileaks is a :code:`Package` class but uses the :code:`Autotools`
build system. Although this package is acceptable let's make this into an
:code:`AutotoolsPackage` class and simplify it further.
.. literalinclude:: tutorial/examples/Autotools/0.package.py
:language: python
:emphasize-lines: 28
:linenos:
We first inherit from the :code:`AutotoolsPackage` class.
Although we could keep the :code:`install()` method, most of it can be handled
by the :code:`AutotoolsPackage` base class. In fact, the only thing that needs
to be overridden is :code:`configure_args()`.
.. literalinclude:: tutorial/examples/Autotools/1.package.py
:language: python
:emphasize-lines: 42,43
:linenos:
Since Spack takes care of setting the prefix for us we can exclude that as
an argument to :code:`configure`. Our packages look simpler, and the packager
does not need to worry about whether they have properly included :code:`configure`
and :code:`make`.
This version of the :code:`mpileaks` package installs the same as the previous,
but the :code:`AutotoolsPackage` class lets us do it with a cleaner looking
package file.
-----------------
Makefile
-----------------
Packages that utilize :code:`Make` or a :code:`Makefile` usually require you
to edit a :code:`Makefile` to set up platform and compiler specific variables.
These packages are handled by the :code:`MakefilePackage` subclass which provides
convenience methods to help write these types of packages.
A :code:`MakefilePackage` class has three phases that can be overridden. These include:
1. :code:`edit()`
2. :code:`build()`
3. :code:`install()`
Packagers then have the ability to control how a :code:`Makefile` is edited, and
what targets to include for the build phase or install phase.
Let's also take a look inside the :code:`MakefilePackage` class:
.. code-block:: console
$ spack edit --build-system makefile
Take note of the following:
.. literalinclude:: ../../../lib/spack/spack/build_systems/makefile.py
:language: python
:lines: 33-79,89-107
:emphasize-lines: 48,54,61
:linenos:
Similar to :code:`Autotools`, :code:`MakefilePackage` class has properties
that can be set by the packager. We can also override the different
methods highlighted.
Let's try to recreate the Bowtie_ package:
.. _Bowtie: http://bowtie-bio.sourceforge.net/index.shtml
.. code-block:: console
$ spack create -f https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip
==> This looks like a URL for bowtie
==> Found 1 version of bowtie:
1.2.1.1 https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip
==> How many would you like to checksum? (default is 1, q to abort) 1
==> Downloading...
==> Fetching https://downloads.sourceforge.net/project/bowtie-bio/bowtie/1.2.1.1/bowtie-1.2.1.1-src.zip
######################################################################## 100.0%
==> Checksummed 1 version of bowtie
==> This package looks like it uses the makefile build system
==> Created template for bowtie package
==> Created package file: /Users/mamelara/spack/var/spack/repos/builtin/packages/bowtie/package.py
Once the fetching is completed, Spack will open up your text editor in the
usual fashion and create a template of a :code:`MakefilePackage` package.py.
.. literalinclude:: tutorial/examples/Makefile/0.package.py
:language: python
:linenos:
Spack was successfully able to detect that :code:`Bowtie` uses :code:`Make`.
Let's add in the rest of our details for our package:
.. literalinclude:: tutorial/examples/Makefile/1.package.py
:language: python
:emphasize-lines: 29,30,32,33,37,39
:linenos:
As we mentioned earlier, most packages using a :code:`Makefile` have hard-coded
variables that must be edited. These variables are fine if you happen to not
care about setup or types of compilers used but Spack is designed to work with
any compiler. The :code:`MakefilePackage` subclass makes it easy to edit
these :code:`Makefiles` by having an :code:`edit()` method that
can be overridden.
Let's take a look at the default :code:`Makefile` that :code:`Bowtie` provides.
If we look inside, we see that :code:`CC` and :code:`CXX` point to our GNU
compiler:
.. code-block:: console
$ spack stage bowtie
.. note::
As usual make sure you have shell support activated with spack:
:code:`source /path/to/spack_root/spack/share/spack/setup-env.sh`
.. code-block:: console
$ spack cd -s bowtie
$ cd bowtie-1.2
$ vim Makefile
.. code-block:: make
CPP = g++ -w
CXX = $(CPP)
CC = gcc
LIBS = $(LDFLAGS) -lz
HEADERS = $(wildcard *.h)
To fix this, we need to use the :code:`edit()` method to write our custom
:code:`Makefile`.
.. literalinclude:: tutorial/examples/Makefile/2.package.py
:language: python
:emphasize-lines: 42,43,44
:linenos:
Here we use a :code:`FileFilter` object to edit our :code:`Makefile`. It takes
in a regular expression and then replaces :code:`CC` and :code:`CXX` to whatever
Spack sets :code:`CC` and :code:`CXX` environment variables to. This allows us to
build :code:`Bowtie` with whatever compiler we specify through Spack's
:code:`spec` syntax.
Let's change the build and install phases of our package:
.. literalinclude:: tutorial/examples/Makefile/3.package.py
:language: python
:emphasize-lines: 46, 52
:linenos:
Here we demonstrate another strategy that we can use to manipulate our package.
We can provide command-line arguments to :code:`make()`. Since :code:`Bowtie`
can use :code:`tbb`, we can either add :code:`NO_TBB=1` as an argument to prevent
:code:`tbb` support, or we can just invoke :code:`make` with no arguments.
:code:`Bowtie` requires our :code:`install_target` to provide a path to
the install directory. We can do this by providing :code:`prefix=` as a command
line argument to :code:`make()`.
Let's look at a couple of other examples and go through them:
.. code-block:: console
$ spack edit cbench
Some packages allow environment variables to be set and will honor them.
Packages that use :code:`?=` for assignment in their :code:`Makefile`
can be set using environment variables. In our :code:`cbench` example we
set two environment variables in our :code:`edit()` method:
.. code-block:: python
def edit(self, spec, prefix):
# The location of the Cbench source tree
env['CBENCHHOME'] = self.stage.source_path
# The location that will contain all your tests and your results
env['CBENCHTEST'] = prefix
# ... more code
As you may have noticed, we didn't really write anything to the :code:`Makefile`
but rather we set environment variables that will override variables set in
the :code:`Makefile`.
Some packages include a configuration file that sets certain compiler variables,
platform specific variables, and the location of dependencies or libraries.
If the file is simple and only requires a couple of changes, we can overwrite
those entries with our :code:`FileFilter` object. If the configuration involves
complex changes, we can write a new configuration file from scratch.
Let's look at an example of this in the :code:`elk` package:
.. code-block:: console
$ spack edit elk
.. code-block:: python
def edit(self, spec, prefix):
# Dictionary of configuration options
config = {
'MAKE': 'make',
'AR': 'ar'
}
# Compiler-specific flags
flags = ''
if self.compiler.name == 'intel':
flags = '-O3 -ip -unroll -no-prec-div'
elif self.compiler.name == 'gcc':
flags = '-O3 -ffast-math -funroll-loops'
elif self.compiler.name == 'pgi':
flags = '-O3 -lpthread'
elif self.compiler.name == 'g95':
flags = '-O3 -fno-second-underscore'
elif self.compiler.name == 'nag':
flags = '-O4 -kind=byte -dusty -dcfuns'
elif self.compiler.name == 'xl':
flags = '-O3'
config['F90_OPTS'] = flags
config['F77_OPTS'] = flags
# BLAS/LAPACK support
# Note: BLAS/LAPACK must be compiled with OpenMP support
# if the +openmp variant is chosen
blas = 'blas.a'
lapack = 'lapack.a'
if '+blas' in spec:
blas = spec['blas'].libs.joined()
if '+lapack' in spec:
lapack = spec['lapack'].libs.joined()
# lapack must come before blas
config['LIB_LPK'] = ' '.join([lapack, blas])
# FFT support
if '+fft' in spec:
config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib,
'libfftw3.so')
config['SRC_FFT'] = 'zfftifc_fftw.f90'
else:
config['LIB_FFT'] = 'fftlib.a'
config['SRC_FFT'] = 'zfftifc.f90'
# MPI support
if '+mpi' in spec:
config['F90'] = spec['mpi'].mpifc
config['F77'] = spec['mpi'].mpif77
else:
config['F90'] = spack_fc
config['F77'] = spack_f77
config['SRC_MPI'] = 'mpi_stub.f90'
# OpenMP support
if '+openmp' in spec:
config['F90_OPTS'] += ' ' + self.compiler.openmp_flag
config['F77_OPTS'] += ' ' + self.compiler.openmp_flag
else:
config['SRC_OMP'] = 'omp_stub.f90'
# Libxc support
if '+libxc' in spec:
config['LIB_libxc'] = ' '.join([
join_path(spec['libxc'].prefix.lib, 'libxcf90.so'),
join_path(spec['libxc'].prefix.lib, 'libxc.so')
])
config['SRC_libxc'] = ' '.join([
'libxc_funcs.f90',
'libxc.f90',
'libxcifc.f90'
])
else:
config['SRC_libxc'] = 'libxcifc_stub.f90'
# Write configuration options to include file
with open('make.inc', 'w') as inc:
for key in config:
inc.write('{0} = {1}\n'.format(key, config[key]))
:code:`config` is just a dictionary that we can add key-value pairs to. By the
end of the :code:`edit()` method we write the contents of our dictionary to
:code:`make.inc`.
---------------
CMake
---------------
CMake_ is another common build system that has been gaining popularity. It works
in a similar manner to :code:`Autotools` but with differences in variable names,
the number of configuration options available, and the handling of shared libraries.
Typical build incantations look like this:
.. _CMake: https://cmake.org
.. code-block:: python
def install(self, spec, prefix):
cmake("-DCMAKE_INSTALL_PREFIX:PATH=/path/to/install_dir ..")
make()
make("install")
As you can see from the example above, it's very similar to invoking
:code:`configure` and :code:`make` in an :code:`Autotools` build system. However,
the variable names and options differ. Most options in CMake are prefixed
with a :code:`'-D'` flag to indicate a configuration setting.
In the :code:`CMakePackage` class we can override the following phases:
1. :code:`cmake()`
2. :code:`build()`
3. :code:`install()`
The :code:`CMakePackage` class also provides sensible defaults so we only need to
override :code:`cmake_args()`.
Let's look at these defaults in the :code:`CMakePackage` class:
.. code-block:: console
$ spack edit --build-system cmake
And go into a bit of detail on the highlighted sections:
.. literalinclude:: ../../../lib/spack/spack/build_systems/cmake.py
:language: python
:lines: 37-92, 94-155, 174-211
:emphasize-lines: 57,68,86,94,96,99,100,101,102,111,117,135,136
:linenos:
Some :code:`CMake` packages use different generators. Spack is able to support
Unix-Makefile_ generators as well as Ninja_ generators.
.. _Unix-Makefile: https://cmake.org/cmake/help/v3.4/generator/Unix%20Makefiles.html
.. _Ninja: https://cmake.org/cmake/help/v3.4/generator/Ninja.html
Default generator is :code:`Unix Makefile`.
Next we setup the build type. In :code:`CMake` you can specify the build type
that you want. Options include:
1. empty
2. Debug
3. Release
4. RelWithDebInfo
5. MinSizeRel
With these options you can specify whether you want your executable to have
the debug version only, release version or the release with debug information.
Release executables tend to be more optimized than Debug. In Spack, we set
the default as RelWithDebInfo unless otherwise specified through a variant.
Spack then automatically sets up the :code:`-DCMAKE_INSTALL_PREFIX` path,
appends the build type (RelWithDebInfo default), and then specifies a verbose
:code:`Makefile`.
Next we add the :code:`rpaths` to :code:`-DCMAKE_INSTALL_RPATH:STRING`.
Finally we add to :code:`-DCMAKE_PREFIX_PATH:STRING` the locations of all our
dependencies so that :code:`CMake` can find them.
In the end our :code:`cmake` line will look like this (example is :code:`xrootd`):
.. code-block:: console
$ cmake $HOME/spack/var/spack/stage/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk/xrootd-4.6.0 -G Unix Makefiles -DCMAKE_INSTALL_PREFIX:PATH=$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk -DCMAKE_BUILD_TYPE:STRING=RelWithDebInfo -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DCMAKE_FIND_FRAMEWORK:STRING=LAST -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE -DCMAKE_INSTALL_RPATH:STRING=$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk/lib:$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/xrootd-4.6.0-4ydm74kbrp4xmcgda5upn33co5pwddyk/lib64 -DCMAKE_PREFIX_PATH:STRING=$HOME/spack/opt/spack/darwin-sierra-x86_64/clang-9.0.0-apple/cmake-3.9.4-hally3vnbzydiwl3skxcxcbzsscaasx5
Saves a lot of typing, doesn't it?
Let's try to recreate callpath_:
.. _callpath: https://github.com/LLNL/callpath.git
.. code-block:: console
$ spack create -f https://github.com/llnl/callpath/archive/v1.0.3.tar.gz
==> This looks like a URL for callpath
==> Found 4 versions of callpath:
1.0.3 https://github.com/LLNL/callpath/archive/v1.0.3.tar.gz
1.0.2 https://github.com/LLNL/callpath/archive/v1.0.2.tar.gz
1.0.1 https://github.com/LLNL/callpath/archive/v1.0.1.tar.gz
1.0 https://github.com/LLNL/callpath/archive/v1.0.tar.gz
==> How many would you like to checksum? (default is 1, q to abort) 1
==> Downloading...
==> Fetching https://github.com/LLNL/callpath/archive/v1.0.3.tar.gz
######################################################################## 100.0%
==> Checksummed 1 version of callpath
==> This package looks like it uses the cmake build system
==> Created template for callpath package
==> Created package file: /Users/mamelara/spack/var/spack/repos/builtin/packages/callpath/package.py
which then produces the following template:
.. literalinclude:: tutorial/examples/Cmake/0.package.py
:language: python
:linenos:
Again we fill in the details:
.. literalinclude:: tutorial/examples/Cmake/1.package.py
:language: python
:linenos:
:emphasize-lines: 28,32,33,37,38,39,40,41,42
As mentioned earlier, Spack will use sensible defaults to prevent repeated code
and to make writing :code:`CMake` package files simpler.
In callpath, we want to add options to :code:`CALLPATH_WALKER` as well as add
compiler flags. We add the following options like so:
.. literalinclude:: tutorial/examples/Cmake/2.package.py
:language: python
:linenos:
:emphasize-lines: 45,49,50
Now we can control our build options using :code:`cmake_args()`. If defaults are
sufficient enough for the package, we can leave this method out.
:code:`CMakePackage` classes allow for control of other features in the
build system. For example, you can specify the path to the "out of source"
build directory and also point to the root of the :code:`CMakeLists.txt` file if it
is placed in a non-standard location.
A good example of a package that has its :code:`CMakeLists.txt` file located at a
different location is found in :code:`spades`.
.. code-block:: console
   $ spack edit spades
.. code-block:: python
root_cmakelists_dir = "src"
Here :code:`root_cmakelists_dir` will tell Spack where to find the location
of :code:`CMakeLists.txt`. In this example, it is located a directory level below in
the :code:`src` directory.
Some :code:`CMake` packages also require the :code:`install` phase to be
overridden. For example, let's take a look at :code:`sniffles`.
.. code-block:: console
$ spack edit sniffles
In the :code:`install()` method, we have to manually install our targets
so we override the :code:`install()` method to do it for us:
.. code-block:: python
# the build process doesn't actually install anything, do it by hand
def install(self, spec, prefix):
mkdir(prefix.bin)
src = "bin/sniffles-core-{0}".format(spec.version.dotted)
binaries = ['sniffles', 'sniffles-debug']
for b in binaries:
install(join_path(src, b), join_path(prefix.bin, b))
--------------
PythonPackage
--------------
Python extensions and modules are built differently from source than most
applications. Python uses a :code:`setup.py` script to install Python modules.
The script consists of a call to :code:`setup()` which provides the information
required to build a module to Distutils. If you're familiar with pip or
easy_install, setup.py does the same thing.
These modules are usually installed using the following line:
.. code-block:: console
$ python setup.py install
There are also a list of commands and phases that you can call. To see the full
list you can run:
.. code-block:: console
$ python setup.py --help-commands
Standard commands:
build build everything needed to install
build_py "build" pure Python modules (copy to build directory)
build_ext build C/C++ extensions (compile/link to build directory)
build_clib build C/C++ libraries used by Python extensions
build_scripts "build" scripts (copy and fixup #! line)
clean (no description available)
install install everything from build directory
install_lib install all Python modules (extensions and pure Python)
install_headers install C/C++ header files
install_scripts install scripts (Python or otherwise)
install_data install data files
sdist create a source distribution (tarball, zip file, etc.)
register register the distribution with the Python package index
bdist create a built (binary) distribution
bdist_dumb create a "dumb" built distribution
bdist_rpm create an RPM distribution
bdist_wininst create an executable installer for MS Windows
upload upload binary package to PyPI
check perform some checks on the package
To see the defaults that Spack has for each of these methods, we will take a look
at the :code:`PythonPackage` class:
.. code-block:: console
$ spack edit --build-system python
We see the following:
.. literalinclude:: ../../../lib/spack/spack/build_systems/python.py
:language: python
:lines: 35, 161-364
:linenos:
Each of these methods have sensible defaults or they can be overridden.
We can write package files for Python packages using the :code:`Package` class,
but the class brings with it a lot of methods that are useless for Python packages.
Instead, Spack has a :code:`PythonPackage` subclass that allows packagers
of Python modules to be able to invoke :code:`setup.py` and use :code:`Distutils`,
which is much more familiar to a typical python user.
We will write a package file for Pandas_:
.. _pandas: https://pandas.pydata.org
.. code-block:: console
$ spack create -f https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz
==> This looks like a URL for pandas
==> Warning: Spack was unable to fetch url list due to a certificate verification problem. You can try running spack -k, which will not check SSL certificates. Use this at your own risk.
==> Found 1 version of pandas:
0.19.0 https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz
==> How many would you like to checksum? (default is 1, q to abort) 1
==> Downloading...
==> Fetching https://pypi.io/packages/source/p/pandas/pandas-0.19.0.tar.gz
######################################################################## 100.0%
==> Checksummed 1 version of pandas
==> This package looks like it uses the python build system
==> Changing package name from pandas to py-pandas
==> Created template for py-pandas package
==> Created package file: /Users/mamelara/spack/var/spack/repos/builtin/packages/py-pandas/package.py
And we are left with the following template:
.. literalinclude:: tutorial/examples/PyPackage/0.package.py
:language: python
:linenos:
As you can see this is not any different than any package template that we have
written. We have the choice of providing build options or using the sensible
defaults.
Luckily for us, there is no need to provide build args.
Next we need to find the dependencies of a package. Dependencies are usually
listed in :code:`setup.py`. You can find the dependencies by searching for
:code:`install_requires` keyword in that file. Here it is for :code:`Pandas`:
.. code-block:: python
# ... code
if sys.version_info[0] >= 3:
setuptools_kwargs = {
'zip_safe': False,
'install_requires': ['python-dateutil >= 2',
'pytz >= 2011k',
'numpy >= %s' % min_numpy_ver],
'setup_requires': ['numpy >= %s' % min_numpy_ver],
}
if not _have_setuptools:
sys.exit("need setuptools/distribute for Py3k"
"\n$ pip install distribute")
# ... more code
You can find a more comprehensive list at the Pandas documentation_.
.. _documentation: https://pandas.pydata.org/pandas-docs/stable/install.html
By reading the documentation and :code:`setup.py` we found that :code:`Pandas`
depends on :code:`python-dateutil`, :code:`pytz`, and :code:`numpy`, :code:`numexpr`,
and finally :code:`bottleneck`.
Here is the completed :code:`Pandas` script:
.. literalinclude:: tutorial/examples/PyPackage/1.package.py
:language: python
:linenos:
It is quite important to declare all the dependencies of a Python package.
Spack can "activate" Python packages to prevent the user from having to
load each dependency module explicitly. If a dependency is missed, Spack will
be unable to properly activate the package and it will cause an issue. To
learn more about extensions go to :ref:`cmd-spack-extensions`.
From this example, you can see that building Python modules is made easy
through the :code:`PythonPackage` class.
-------------------
Other Build Systems
-------------------
Although we won't get in depth with any of the other build systems that Spack
supports, it is worth mentioning that Spack does provide subclasses
for the following build systems:
1. :code:`IntelPackage`
2. :code:`SconsPackage`
3. :code:`WafPackage`
4. :code:`RPackage`
5. :code:`PerlPackage`
6. :code:`QMake`
Each of these classes have their own abstractions to help assist in writing
package files. For whatever doesn't fit nicely into the other build-systems,
you can use the :code:`Package` class.
Hopefully by now you can see how we aim to make packaging simple and
robust through these classes. If you want to learn more about these build
systems, check out :ref:`installation_procedure` in the Packaging Guide.

View File

@@ -1,843 +0,0 @@
.. _configs-tutorial:
======================
Configuration Tutorial
======================
This tutorial will guide you through various configuration options
that allow you to customize Spack's behavior with respect to
software installation. We will first cover the configuration file
hierarchy. Then, we will cover configuration options for compilers,
focusing on how it can be used to extend Spack's compiler auto-detection.
Next, we will cover the packages configuration file, focusing on
how it can be used to override default build options as well as
specify external package installations to use. Finally, we will
briefly touch on the config configuration file, which manages more
high-level Spack configuration options.
For all of these features we will demonstrate how we build up a full
configuration file. For some we will then demonstrate how the
configuration affects the install command, and for others we will use
the ``spack spec`` command to demonstrate how the configuration
changes have affected Spack's concretization algorithm. The provided
output is all from a server running Ubuntu version 16.04.
.. _configs-tutorial-scopes:
--------------------
Configuration Scopes
--------------------
Depending on your use case, you may want to provide configuration
settings common to everyone on your team, or you may want to set
default behaviors specific to a single user account. Spack provides
4 configuration *scopes* to handle this customization. These scopes,
in order of decreasing priority, are:
====================== ==================================
Scope Directory
====================== ==================================
User configurations ``~/.spack``
Project configurations ``$SPACK_ROOT/etc/spack``
System configurations ``/etc/spack``
Default configurations ``$SPACK_ROOT/etc/spack/defaults``
====================== ==================================
Spack's default configuration settings reside in
``$SPACK_ROOT/etc/spack/defaults``. These are useful for reference,
but should never be directly edited. To override these settings,
create new configuration files in any of the higher-priority
configuration scopes.
A particular cluster may have multiple Spack installations associated
with different projects. To provide settings common to all Spack
installations, put your configuration files in ``/etc/spack``.
To provide settings specific to a particular Spack installation,
you can use the ``$SPACK_ROOT/etc/spack`` directory.
For settings specific to a particular user, you will want to add
configuration files to the ``~/.spack`` directory. When Spack first
checked for compilers on your system, you may have noticed that it
placed your compiler configuration in this directory.
Some facilities manage multiple platforms from a single shared
filesystem. In order to handle this, each of the configuration
scopes listed above has two *sub-scopes*: platform-specific and
platform-independent. For example, compiler settings can be stored
in ``compilers.yaml`` configuration files in the following locations:
- ``~/.spack/<platform>/compilers.yaml``
- ``~/.spack/compilers.yaml``
- ``$SPACK_ROOT/etc/spack/<platform>/compilers.yaml``
- ``$SPACK_ROOT/etc/spack/compilers.yaml``
- ``/etc/spack/<platform>/compilers.yaml``
- ``/etc/spack/compilers.yaml``
- ``$SPACK_ROOT/etc/defaults/<platform>/compilers.yaml``
- ``$SPACK_ROOT/etc/defaults/compilers.yaml``
These files are listed in decreasing order of precedence, so files in
``~/.spack/<platform>`` will override settings in ``~/.spack``.
Spack configurations are YAML dictionaries. Every configuration file
begins with a top-level dictionary that tells Spack which
configuration set it modifies. When Spack checks its configuration,
the configuration scopes are updated as dictionaries in increasing
order of precedence, allowing higher precedence files to override
lower. YAML dictionaries use a colon ":" to specify key-value
pairs. Spack extends YAML syntax slightly to allow a double-colon
"::" to specify a key-value pair. When a double-colon is used to
specify a key-value pair, instead of adding that section Spack
replaces what was in that section with the new value. For example, a
user compilers configuration file as follows:
.. code-block:: yaml
compilers::
- compiler:
environment: {}
extra_rpaths: []
flags: {}
modules: []
operating_system: ubuntu16.04
paths:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
spec: gcc@5.4.0
target: x86_64
ensures that no other compilers are used, as the user configuration
scope is the last scope searched and the ``compilers::`` line replaces
all previous configuration files information. If the same
configuration file had a single colon instead of the double colon, it
would add the gcc version 5.4.0 compiler to whatever other compilers
were listed in other configuration files.
.. _configs-tutorial-compilers:
----------------------
Compiler Configuration
----------------------
For most tasks, we can use Spack with the compilers auto-detected the
first time Spack runs on a system. As we discussed in the basic
installation section, we can also tell Spack where compilers are
located using the ``spack compiler add`` command. However, in some
circumstances we want even more fine-grained control over the
compilers available. This section will teach you how to exercise that
control using the compilers configuration file.
We will start by opening the compilers configuration file
.. code-block:: console
$ spack config edit compilers
.. code-block:: yaml
compilers:
- compiler:
environment: {}
extra_rpaths: []
flags: {}
modules: []
operating_system: ubuntu16.04
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++
f77: null
fc: null
spec: clang@3.8.0-2ubuntu4
target: x86_64
- compiler:
environment: {}
extra_rpaths: []
flags: {}
modules: []
operating_system: ubuntu16.04
paths:
cc: /usr/bin/gcc
cxx: /usr/bin/g++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
spec: gcc@5.4.0
target: x86_64
This specifies one version of the gcc compiler and one version of the
clang compiler with no flang compiler. Now suppose we have a code that
we want to compile with the clang compiler for C/C++ code, but with
gfortran for Fortran components. We can do this by adding another entry
to the ``compilers.yaml`` file.
.. code-block:: yaml
- compiler:
environment: {}
extra_rpaths: []
flags: {}
modules: []
operating_system: ubuntu16.04
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
spec: clang@3.8.0-gfortran
target: x86_64
Let's talk about the sections of this compiler entry that we've changed.
The biggest change we've made is to the ``paths`` section. This lists
the paths to the compilers to use for each language/specification.
In this case, we point to the clang compiler for C/C++ and the gfortran
compiler for both specifications of Fortran. We've also changed the
``spec`` entry for this compiler. The ``spec`` entry is effectively the
name of the compiler for Spack. It consists of a name and a version
number, separated by the ``@`` sigil. The name must be one of the supported
compiler names in Spack (gcc, intel, pgi, xl, xl_r, clang, nag, cce).
The version number can be an arbitrary string of alphanumeric characters,
as well as ``-``, ``.``, and ``_``. The ``target`` and ``operating_system``
sections we leave unchanged. These sections specify when Spack can use
different compilers, and are primarily useful for configuration files that
will be used across multiple systems.
We can verify that our new compiler works by invoking it now:
.. code-block:: console
$ spack install zlib %clang@3.8.0-gfortran
...
This new compiler also works on Fortran codes:
.. code-block:: console
$ spack install cfitsio %clang@3.8.0-gfortran
...
^^^^^^^^^^^^^^
Compiler Flags
^^^^^^^^^^^^^^
Some compilers may require specific compiler flags to work properly in
a particular computing environment. Spack provides configuration
options for setting compiler flags every time a specific compiler is
invoked. These flags become part of the package spec and therefore of
the build provenance. As on the command line, the flags are set
through the implicit build variables ``cflags``, ``cxxflags``, ``cppflags``,
``fflags``, ``ldflags``, and ``ldlibs``.
Let's open our compilers configuration file again and add a compiler flag.
.. code-block:: yaml
- compiler:
environment: {}
extra_rpaths: []
flags:
cppflags: -g
modules: []
operating_system: ubuntu16.04
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++
f77: /usr/bin/gfortran
fc: /usr/bin/gfortran
spec: clang@3.8.0-gfortran
target: x86_64
We can test this out using the ``spack spec`` command to show how the
spec is concretized.
.. code-block:: console
$ spack spec cfitsio %clang@3.8.0-gfortran
Input spec
--------------------------------
cfitsio%clang@3.8.0-gfortran
Normalized
--------------------------------
cfitsio%clang@3.8.0-gfortran
Concretized
--------------------------------
cfitsio@3.410%clang@3.8.0-gfortran cppflags="-g" +bzip2+shared arch=linux-ubuntu16.04-x86_64
^bzip2@1.0.6%clang@3.8.0-gfortran cppflags="-g" +shared arch=linux-ubuntu16.04-x86_64
We can see that "cppflags=-g" has been added to every node in the DAG.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Advanced Compiler Configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are three fields of the compiler configuration entry that we
have not talked about yet.
The ``modules`` field of the compiler is used primarily on Cray systems,
but can be useful on any system that has compilers that are only
useful when a particular module is loaded. Any modules in the
``modules`` field of the compiler configuration will be loaded as part
of the build environment for packages using that compiler.
The ``extra_rpaths`` field of the compiler configuration is used for
compilers that do not rpath all of their dependencies by
default. Since compilers are generally installed externally to Spack,
Spack is unable to manage compiler dependencies and enforce
rpath usage. This can lead to packages not finding link dependencies
imposed by the compiler properly. For compilers that impose link
dependencies on the resulting executables that are not rpath'ed into
the executable automatically, the ``extra_rpaths`` field of the compiler
configuration tells Spack which dependencies to rpath into every
executable created by that compiler. The executables will then be able
to find the link dependencies imposed by the compiler.
The ``environment`` field of the compiler configuration is used for
compilers that require environment variables to be set during build
time. For example, if your Intel compiler suite requires the
``INTEL_LICENSE_FILE`` environment variable to point to the proper
license server, you can set this in ``compilers.yaml``.
-------------------------------
Configuring Package Preferences
-------------------------------
Package preferences in Spack are managed through the ``packages.yaml``
configuration file. First, we will look at the default
``packages.yaml`` file.
.. code-block:: console
$ spack config --scope defaults edit packages
.. literalinclude:: ../../../etc/spack/defaults/packages.yaml
:language: yaml
This sets the default preferences for compilers and for providers of
virtual packages. To illustrate how this works, suppose we want to
change the preferences to prefer the clang compiler and to prefer
mpich over openmpi. Currently, we prefer gcc and openmpi
.. code-block:: console
$ spack spec hdf5
Input spec
--------------------------------
hdf5
Normalized
--------------------------------
hdf5
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%gcc@5.4.0+cxx~debug+fortran+mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^openmpi@3.0.0%gcc@5.4.0~cuda fabrics= ~java schedulers= ~sqlite3~thread_multiple+vt arch=linux-ubuntu16.04-x86_64
^hwloc@1.11.7%gcc@5.4.0~cuda+libxml2~pci arch=linux-ubuntu16.04-x86_64
^libxml2@2.9.4%gcc@5.4.0~python arch=linux-ubuntu16.04-x86_64
^pkg-config@0.29.2%gcc@5.4.0+internal_glib arch=linux-ubuntu16.04-x86_64
^xz@5.2.3%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64
^zlib@1.2.11%gcc@5.4.0+pic+shared arch=linux-ubuntu16.04-x86_64
Now we will open the packages configuration file and update our
preferences.
.. code-block:: console
$ spack config edit packages
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
Because of the configuration scoping we discussed earlier, this
overrides the default settings just for these two items.
.. code-block:: console
$ spack spec hdf5
Input spec
--------------------------------
hdf5
Normalized
--------------------------------
hdf5
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^mpich@3.2%clang@3.8.0-2ubuntu4 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64
^zlib@1.2.11%clang@3.8.0-2ubuntu4+pic+shared arch=linux-ubuntu16.04-x86_64
^^^^^^^^^^^^^^^^^^^
Variant Preferences
^^^^^^^^^^^^^^^^^^^
The packages configuration file can also set variant preferences for
package variants. For example, let's change our preferences to build all
packages without shared libraries. We will accomplish this by turning
off the ``shared`` variant on all packages that have one.
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
variants: ~shared
We can check the effect of this change with ``spack spec hdf5`` again.
.. code-block:: console
$ spack spec hdf5
Input spec
--------------------------------
hdf5
Normalized
--------------------------------
hdf5
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^mpich@3.2%clang@3.8.0-2ubuntu4 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64
^zlib@1.2.11%clang@3.8.0-2ubuntu4+pic~shared arch=linux-ubuntu16.04-x86_64
So far we have only made global changes to the package preferences. As
we've seen throughout this tutorial, hdf5 builds with MPI enabled by
default in Spack. If we were working on a project that would routinely
need serial hdf5, that might get annoying quickly, having to type
``hdf5~mpi`` all the time. Instead, we'll update our preferences for
hdf5.
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
variants: ~shared
hdf5:
variants: ~mpi
Now hdf5 will concretize without an MPI dependency by default.
.. code-block:: console
$ spack spec hdf5
Input spec
--------------------------------
hdf5
Normalized
--------------------------------
hdf5
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^zlib@1.2.11%clang@3.8.0-2ubuntu4+pic~shared arch=linux-ubuntu16.04-x86_64
In general, every attribute that we can set for all packages we can
set separately for an individual package.
^^^^^^^^^^^^^^^^^
External Packages
^^^^^^^^^^^^^^^^^
The packages configuration file also controls when Spack will build
against an externally installed package. On these systems we have a
pre-installed zlib.
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
variants: ~shared
hdf5:
variants: ~mpi
zlib:
paths:
zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
Here, we've told Spack that zlib 1.2.8 is installed on our system.
We've also told it the installation prefix where zlib can be found.
We don't know exactly which variants it was built with, but that's
okay.
.. code-block:: console
$ spack spec hdf5
Input spec
--------------------------------
hdf5
Normalized
--------------------------------
hdf5
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%gcc@5.4.0~cxx~debug~fortran~hl~mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^zlib@1.2.8%gcc@5.4.0+optimize+pic~shared arch=linux-ubuntu16.04-x86_64
You'll notice that Spack is now using the external zlib installation,
but the compiler used to build zlib is now overriding our compiler
preference of clang. If we explicitly specify clang:
.. code-block:: console
$ spack spec hdf5 %clang
Input spec
--------------------------------
hdf5%clang
Normalized
--------------------------------
hdf5%clang
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4~cxx~debug~fortran~hl~mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^zlib@1.2.11%clang@3.8.0-2ubuntu4+optimize+pic~shared arch=linux-ubuntu16.04-x86_64
Spack concretizes to both hdf5 and zlib being built with clang.
This has a side-effect of rebuilding zlib. If we want to force
Spack to use the system zlib, we have two choices. We can either
specify it on the command line, or we can tell Spack that it's
not allowed to build its own zlib. We'll go with the latter.
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
variants: ~shared
hdf5:
variants: ~mpi
zlib:
paths:
zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
buildable: False
Now Spack will be forced to choose the external zlib.
.. code-block:: console
$ spack spec hdf5 %clang
Input spec
--------------------------------
hdf5%clang
Normalized
--------------------------------
hdf5%clang
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4~cxx~debug~fortran~hl~mpi+pic+shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^zlib@1.2.8%gcc@5.4.0+optimize+pic~shared arch=linux-ubuntu16.04-x86_64
This gets slightly more complicated with virtual dependencies. Suppose
we don't want to build our own MPI, but we now want a parallel version
of hdf5? Well, fortunately we have mpich installed on these systems.
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
variants: ~shared
hdf5:
variants: ~mpi
zlib:
paths:
zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
buildable: False
mpich:
paths:
mpich@3.2%gcc@5.4.0 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64: /usr
buildable: False
If we concretize ``hdf5+mpi`` with this configuration file, we will just
build with an alternate MPI implementation.
.. code-block:: console
$ spack spec hdf5 %clang +mpi
Input spec
--------------------------------
hdf5%clang+mpi
Normalized
--------------------------------
hdf5%clang+mpi
^mpi
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4~cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^openmpi@3.0.0%clang@3.8.0-2ubuntu4~cuda fabrics=verbs ~java schedulers= ~sqlite3~thread_multiple+vt arch=linux-ubuntu16.04-x86_64
^hwloc@1.11.8%clang@3.8.0-2ubuntu4~cuda+libxml2+pci arch=linux-ubuntu16.04-x86_64
^libpciaccess@0.13.5%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
^libtool@2.4.6%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
^m4@1.4.18%clang@3.8.0-2ubuntu4 patches=3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00 +sigsegv arch=linux-ubuntu16.04-x86_64
^libsigsegv@2.11%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
^pkg-config@0.29.2%clang@3.8.0-2ubuntu4+internal_glib arch=linux-ubuntu16.04-x86_64
^util-macros@1.19.1%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
^libxml2@2.9.4%clang@3.8.0-2ubuntu4~python arch=linux-ubuntu16.04-x86_64
^xz@5.2.3%clang@3.8.0-2ubuntu4 arch=linux-ubuntu16.04-x86_64
^zlib@1.2.8%gcc@5.4.0+optimize+pic+shared arch=linux-ubuntu16.04-x86_64
We have only expressed a preference for mpich over other MPI
implementations, and Spack will happily build with one we haven't
forbidden it from using. We could resolve this by requesting
``hdf5%clang+mpi^mpich`` explicitly, or we can configure Spack not to
use any other MPI implementation. Since we're focused on
configurations here and the former can get tedious, we'll need to
modify our ``packages.yaml`` file again.
While we're at it, we can configure hdf5 to build with MPI by default
again.
.. code-block:: yaml
packages:
all:
compiler: [clang, gcc, intel, pgi, xl, nag]
providers:
mpi: [mpich, openmpi]
variants: ~shared
zlib:
paths:
zlib@1.2.8%gcc@5.4.0 arch=linux-ubuntu16.04-x86_64: /usr
buildable: False
mpich:
paths:
mpich@3.2%gcc@5.4.0 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64: /usr
buildable: False
openmpi:
buildable: False
mvapich2:
buildable: False
intel-mpi:
buildable: False
spectrum-mpi:
buildable: False
intel-parallel-studio:
buildable: False
Now that we have configured Spack not to build any of the possible
providers for MPI we can try again.
.. code-block:: console
$ spack spec hdf5 %clang
Input spec
--------------------------------
hdf5%clang
Normalized
--------------------------------
hdf5%clang
^mpi
^zlib@1.1.2:
Concretized
--------------------------------
hdf5@1.10.1%clang@3.8.0-2ubuntu4+cxx~debug~fortran~hl+mpi+pic~shared~szip~threadsafe arch=linux-ubuntu16.04-x86_64
^mpich@3.2%gcc@5.4.0 device=ch3 +hydra netmod=tcp +pmi+romio~verbs arch=linux-ubuntu16.04-x86_64
^zlib@1.2.8%gcc@5.4.0+pic+shared arch=linux-ubuntu16.04-x86_64
By configuring most of our package preferences in ``packages.yaml``,
we can cut down on the amount of work we need to do when specifying
a spec on the command line. In addition to compiler and variant
preferences, we can specify version preferences as well. Anything
that you can specify on the command line can be specified in
``packages.yaml`` with the exact same spec syntax.
.. warning::
Make sure to delete or move the ``packages.yaml`` you have been
editing up to this point. Otherwise, it will change the hashes
of your packages, leading to differences in the output of later
tutorial sections.
-----------------
High-level Config
-----------------
In addition to compiler and package settings, Spack allows customization
of several high-level settings. These settings are stored in the generic
``config.yaml`` configuration file. You can see the default settings by
running:
.. code-block:: console
$ spack config --scope defaults edit config
.. literalinclude:: ../../../etc/spack/defaults/config.yaml
:language: yaml
As you can see, many of the directories Spack uses can be customized.
For example, you can tell Spack to install packages to a prefix
outside of the ``$SPACK_ROOT`` hierarchy. Module files can be
written to a central location if you are using multiple Spack
instances. If you have a fast scratch filesystem, you can run builds
from this filesystem with the following ``config.yaml``:
.. code-block:: yaml
config:
build_stage:
- /scratch/$user
On systems with compilers that absolutely *require* environment variables
like ``LD_LIBRARY_PATH``, it is possible to prevent Spack from cleaning
the build environment with the ``dirty`` setting:
.. code-block:: yaml
config:
dirty: true
However, this is strongly discouraged, as it can pull unwanted libraries
into the build.
One last setting that may be of interest to many users is the ability
to customize the parallelism of Spack builds. By default, Spack
installs all packages in parallel with the number of jobs equal to the
number of cores on the node. For example, on a node with 36 cores,
this will look like:
.. code-block:: console
$ spack install --verbose zlib
==> Installing zlib
==> Using cached archive: ~/spack/var/spack/cache/zlib/zlib-1.2.11.tar.gz
==> Staging archive: ~/spack/var/spack/stage/zlib-1.2.11-5nus6knzumx4ik2yl44jxtgtsl7d54xb/zlib-1.2.11.tar.gz
==> Created stage in ~/spack/var/spack/stage/zlib-1.2.11-5nus6knzumx4ik2yl44jxtgtsl7d54xb
==> No patches needed for zlib
==> Building zlib [Package]
==> Executing phase: 'install'
==> './configure' '--prefix=~/spack/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/zlib-1.2.11-5nus6knzumx4ik2yl44jxtgtsl7d54xb'
Checking for shared library support...
Building shared library libz.so.1.2.11 with ~/spack/lib/spack/env/gcc/gcc.
Checking for size_t... Yes.
Checking for off64_t... Yes.
Checking for fseeko... Yes.
Checking for strerror... Yes.
Checking for unistd.h... Yes.
Checking for stdarg.h... Yes.
Checking whether to use vs[n]printf() or s[n]printf()... using vs[n]printf().
Checking for vsnprintf() in stdio.h... Yes.
Checking for return value of vsnprintf()... Yes.
Checking for attribute(visibility) support... Yes.
==> 'make' '-j36'
...
==> 'make' '-j36' 'install'
...
As you can see, we are building with all 36 cores on the node. If you are
on a shared login node, this can slow down the system for other users. If
you have a strict ulimit or restriction on the number of available licenses,
you may not be able to build at all with this many cores. On nodes with 64+
cores, you may not see a significant speedup of the build anyway. To limit
the number of cores our build uses, set ``build_jobs`` like so:
.. code-block:: yaml
config:
build_jobs: 4
If we uninstall and reinstall zlib, we see that it now uses only 4 cores:
.. code-block:: console
$ spack install -v zlib
==> Installing zlib
==> Using cached archive: ~/spack/var/spack/cache/zlib/zlib-1.2.11.tar.gz
==> Staging archive: ~/spack/var/spack/stage/zlib-1.2.11-ezuwp4pa52e75v6iweawzwymmf4ahxxn/zlib-1.2.11.tar.gz
==> Created stage in ~/spack/var/spack/stage/zlib-1.2.11-ezuwp4pa52e75v6iweawzwymmf4ahxxn
==> No patches needed for zlib
==> Building zlib [Package]
==> Executing phase: 'install'
==> './configure' '--prefix=~/spack/opt/spack/linux-ubuntu16.04-x86_64/gcc-7.2.0/zlib-1.2.11-ezuwp4pa52e75v6iweawzwymmf4ahxxn'
Checking for shared library support...
Building shared library libz.so.1.2.11 with ~/spack/lib/spack/env/gcc/gcc.
Checking for size_t... Yes.
Checking for off64_t... Yes.
Checking for fseeko... Yes.
Checking for strerror... Yes.
Checking for unistd.h... Yes.
Checking for stdarg.h... Yes.
Checking whether to use vs[n]printf() or s[n]printf()... using vs[n]printf().
Checking for vsnprintf() in stdio.h... Yes.
Checking for return value of vsnprintf()... Yes.
Checking for attribute(visibility) support... Yes.
==> 'make' '-j4'
...
==> 'make' '-j4' 'install'
...
Obviously, if you want to build everything in serial for whatever reason,
you would set ``build_jobs`` to 1.

File diff suppressed because it is too large Load Diff

View File

@@ -4,9 +4,9 @@
Tutorial: Spack 101 Tutorial: Spack 101
============================= =============================
This is a full day introduction to Spack with lectures and live demos. It This is a 3-hour introduction to Spack with lectures and live demos. It
was presented as a tutorial at `Supercomputing 2017 was presented as a tutorial at `Supercomputing 2016
<http://sc17.supercomputing.org>`_. You can use these materials to teach <http://sc16.supercomputing.org>`_. You can use these materials to teach
a course on Spack at your own site, or you can just skip ahead and read a course on Spack at your own site, or you can just skip ahead and read
the live demo scripts to see how Spack is used in practice. the live demo scripts to see how Spack is used in practice.
@@ -15,18 +15,19 @@ the live demo scripts to see how Spack is used in practice.
.. rubric:: Slides .. rubric:: Slides
.. figure:: tutorial/sc16-tutorial-slide-preview.png .. figure:: tutorial/sc16-tutorial-slide-preview.png
:target: http://spack.io/slides/Spack-SC17-Tutorial.pdf :target: http://llnl.github.io/spack/files/Spack-SC16-Tutorial.pdf
:height: 72px :height: 72px
:align: left :align: left
:alt: Slide Preview :alt: Slide Preview
`Download Slides <http://spack.io/slides/Spack-SC17-Tutorial.pdf>`_. `Download Slides <http://llnl.github.io/spack/files/Spack-SC16-Tutorial.pdf>`_.
**Full citation:** Todd Gamblin, Massimiliano Culpo, Gregory Becker, Matt **Full citation:** Todd Gamblin, Massimiliano Culpo, Gregory Becker, Matt
Legendre, Greg Lee, Elizabeth Fischer, and Benedikt Hegner. Legendre, Greg Lee, Elizabeth Fischer, and Benedikt Hegner.
`Managing HPC Software Complexity with Spack `Managing HPC Software Complexity with Spack
<http://sc17.supercomputing.org/presentation/?id=tut151&sess=sess233>`_. <http://sc16.supercomputing.org/presentation/?id=tut166&sess=sess209>`_.
Tutorial presented at Supercomputing 2017. November 13, 2017, Denver, CO, USA. Tutorial presented at Supercomputing 2016. November 13, 2016, Salt Lake
City, UT, USA.
.. _sc16-live-demos: .. _sc16-live-demos:
@@ -36,18 +37,12 @@ These scripts will take you step-by-step through basic Spack tasks. They
correspond to sections in the slides above. correspond to sections in the slides above.
1. :ref:`basics-tutorial` 1. :ref:`basics-tutorial`
2. :ref:`configs-tutorial` 2. :ref:`packaging-tutorial`
3. :ref:`packaging-tutorial` 3. :ref:`modules-tutorial`
4. :ref:`build-systems-tutorial`
5. :ref:`advanced-packaging-tutorial`
6. :ref:`modules-tutorial`
Full contents: Full contents:
.. toctree:: .. toctree::
tutorial_basics tutorial_sc16_spack_basics
tutorial_configuration tutorial_sc16_packaging
tutorial_packaging tutorial_sc16_modules
tutorial_buildsystems
tutorial_advanced_packaging
tutorial_modules

View File

@@ -0,0 +1,982 @@
.. _modules-tutorial:
=============================
Module Configuration Tutorial
=============================
This tutorial will guide you through the customization of both
content and naming of module files generated by Spack.
Starting from the default Spack settings you will add an increasing
number of directives to the ``modules.yaml`` configuration file to
satisfy a number of constraints that mimic those that you may encounter
in a typical production environment at HPC sites.
Even though the focus will be for the most part on customizing
TCL non-hierarchical module files, everything
you'll see applies also to other kinds of module files generated by Spack.
The generation of Lua hierarchical
module files will be addressed at the end of the tutorial,
and you'll see that with minor modifications
to an existing ``modules.yaml`` written for TCL
non-hierarchical modules you'll get almost
for free the possibility to try a hierarchical layout.
Let's start!
.. _module_file_tutorial_prerequisites:
-------------
Prerequisites
-------------
Before proceeding further ensure:
- you have LMod or Environment Modules available
- have :ref:`shell support <shell-support>` activated in Spack
If you need to install Lmod or Environment Modules, you can refer
to the documentation :ref:`here <InstallEnvironmentModules>`.
^^^^^^^^^^^^^^^^^^
Add a new compiler
^^^^^^^^^^^^^^^^^^
Spack automatically scans the environment to search for available
compilers on first use. On an Ubuntu 14.04 machine, a fresh clone will show
something like this:
.. code-block:: console
$ uname -a
Linux nuvolari 4.4.0-45-generic #66~14.04.1-Ubuntu SMP Wed Oct 19 15:05:38 UTC 2016 x86_64 x86_64 x86_64 GNU/Linux
$ spack compilers
==> Available compilers
-- gcc ----------------------------------------------------------
gcc@4.8
For the purpose of building a limited set of packages with some features
that will help showcase the capabilities of module customization,
the first thing we need to do is build a new compiler:
.. code-block:: console
$ spack install gcc@6.2.0
# ...
# Wait a long time
# ...
Then we can use shell support for modules to add it to the list of known compilers:
.. code-block:: console
# The name of the generated module may vary
$ module load gcc-6.2.0-gcc-4.8-twd5nqg
$ spack compiler add
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
gcc@6.2.0
$ spack compilers
==> Available compilers
-- gcc ----------------------------------------------------------
gcc@6.2.0 gcc@4.8
Note that the final 7-digit hash at the end of the generated module may vary depending
on architecture or package version.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Build software that will be used in the tutorial
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Next you should install a few packages that will be used in the tutorial:
.. code-block:: console
$ spack install netlib-scalapack ^openmpi ^openblas
# ...
The packages you need to install are:
- ``netlib-scalapack ^openmpi ^openblas``
- ``netlib-scalapack ^mpich ^openblas``
- ``netlib-scalapack ^openmpi ^netlib-lapack``
- ``netlib-scalapack ^mpich ^netlib-lapack``
- ``py-scipy ^openblas``
In the end your environment should look something like:
.. code-block:: console
$ module avail
------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
binutils-2.27-gcc-4.8-dz3xevw libpciaccess-0.13.4-gcc-6.2.0-eo2siet lzo-2.09-gcc-6.2.0-jcngz72 netlib-scalapack-2.0.2-gcc-6.2.0-wnimqhw python-2.7.12-gcc-6.2.0-qu7rc5p
bzip2-1.0.6-gcc-6.2.0-csoc2mq libsigsegv-2.10-gcc-4.8-avb6azw m4-1.4.17-gcc-4.8-iggewke netlib-scalapack-2.0.2-gcc-6.2.0-wojunhq sqlite-3.8.5-gcc-6.2.0-td3zfe7
cmake-3.5.2-gcc-6.2.0-6poypqg libsigsegv-2.10-gcc-6.2.0-g3qpmbi m4-1.4.17-gcc-6.2.0-lhgqa6s nettle-3.2-gcc-6.2.0-djdthlh tcl-8.6.5-gcc-4.8-atddxu7
curl-7.50.3-gcc-6.2.0-2ffacqm libtool-2.4.6-gcc-6.2.0-kiepac6 mpc-1.0.3-gcc-4.8-lylv7lk openblas-0.2.19-gcc-6.2.0-js33umc util-macros-1.19.0-gcc-6.2.0-uoukuqk
expat-2.2.0-gcc-6.2.0-bxqnjar libxml2-2.9.4-gcc-6.2.0-3k4ykbe mpfr-3.1.4-gcc-4.8-bldfx3w openmpi-2.0.1-gcc-6.2.0-s3qbtby xz-5.2.2-gcc-6.2.0-t5lk6in
gcc-6.2.0-gcc-4.8-twd5nqg lmod-6.4.5-gcc-4.8-7v7bh7b mpich-3.2-gcc-6.2.0-5n5xoep openssl-1.0.2j-gcc-6.2.0-hibnfda zlib-1.2.8-gcc-4.8-bds4ies
gmp-6.1.1-gcc-4.8-uq52e2n lua-5.3.2-gcc-4.8-xozf2hx ncurses-6.0-gcc-4.8-u62fit4 pkg-config-0.29.1-gcc-6.2.0-rslsgcs zlib-1.2.8-gcc-6.2.0-asydrba
gmp-6.1.1-gcc-6.2.0-3cfh3hi lua-luafilesystem-1_6_3-gcc-4.8-sbzejlz ncurses-6.0-gcc-6.2.0-7tb426s py-nose-1.3.7-gcc-6.2.0-4gl5c42
hwloc-1.11.4-gcc-6.2.0-3ostwel lua-luaposix-33.4.0-gcc-4.8-xf7y2p5 netlib-lapack-3.6.1-gcc-6.2.0-mirer2l py-numpy-1.11.1-gcc-6.2.0-i3rpk4e
isl-0.14-gcc-4.8-cq73t5m lz4-131-gcc-6.2.0-cagoem4 netlib-scalapack-2.0.2-gcc-6.2.0-6bqlxqy py-scipy-0.18.1-gcc-6.2.0-e6uljfi
libarchive-3.2.1-gcc-6.2.0-2b54aos lzma-4.32.7-gcc-6.2.0-sfmeynw netlib-scalapack-2.0.2-gcc-6.2.0-hpqb3dp py-setuptools-25.2.0-gcc-6.2.0-hkqauaa
------------------------------------------------
Filter unwanted modifications to the environment
------------------------------------------------
The non-hierarchical TCL module files that have been generated so far
follow the default rules for module generation, which are given
:ref:`here <modules-yaml>` in the reference part of the manual. Taking a
look at the ``gcc`` module you'll see something like:
.. code-block:: console
$ module show gcc-6.2.0-gcc-4.8-twd5nqg
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc-6.2.0-gcc-4.8-twd5nqg:
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
whatis("gcc @6.2.0 ")
prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
prepend_path("LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
prepend_path("CPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/include")
help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
Fortran, and Java.
]])
As expected, a few environment variables representing paths will be modified
by the modules according to the default prefix inspection rules.
Consider now the case that your site has decided that e.g. ``CPATH`` and
``LIBRARY_PATH`` modifications should not be present in module files. What you can
do to abide by the rules is to create a configuration file ``~/.spack/modules.yaml``
with the following content:
.. code-block:: yaml
modules:
tcl:
all:
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
Next you should regenerate all the module files:
.. code-block:: console
$ spack module refresh --module-type tcl
==> You are about to regenerate tcl module files for:
-- linux-Ubuntu14-x86_64 / gcc@4.8 ------------------------------
dz3xevw binutils@2.27 uq52e2n gmp@6.1.1 avb6azw libsigsegv@2.10 xozf2hx lua@5.3.2 xf7y2p5 lua-luaposix@33.4.0 lylv7lk mpc@1.0.3 u62fit4 ncurses@6.0 bds4ies zlib@1.2.8
twd5nqg gcc@6.2.0 cq73t5m isl@0.14 7v7bh7b lmod@6.4.5 sbzejlz lua-luafilesystem@1_6_3 iggewke m4@1.4.17 bldfx3w mpfr@3.1.4 atddxu7 tcl@8.6.5
...
==> Do you want to proceed ? [y/n]
y
==> Regenerating tcl module files
If you take a look now at the module for ``gcc`` you'll see that the unwanted
paths have disappeared:
.. code-block:: console
$ module show gcc-6.2.0-gcc-4.8-twd5nqg
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc-6.2.0-gcc-4.8-twd5nqg:
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
whatis("gcc @6.2.0 ")
prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
Fortran, and Java.
]])
----------------------------------------------
Prevent some module files from being generated
----------------------------------------------
Another common request at many sites is to avoid exposing software that
is only needed as an intermediate step when building a newer stack.
Let's try to prevent the generation of
module files for anything that is compiled with ``gcc@4.8`` (the OS provided compiler).
To do this you should add a ``blacklist`` keyword to the configuration file:
.. code-block:: yaml
:emphasize-lines: 3,4
modules:
tcl:
blacklist:
- '%gcc@4.8'
all:
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
and regenerate the module files:
.. code-block:: console
$ spack module refresh --module-type tcl --delete-tree
==> You are about to regenerate tcl module files for:
-- linux-Ubuntu14-x86_64 / gcc@4.8 ------------------------------
dz3xevw binutils@2.27 uq52e2n gmp@6.1.1 avb6azw libsigsegv@2.10 xozf2hx lua@5.3.2 xf7y2p5 lua-luaposix@33.4.0 lylv7lk mpc@1.0.3 u62fit4 ncurses@6.0 bds4ies zlib@1.2.8
twd5nqg gcc@6.2.0 cq73t5m isl@0.14 7v7bh7b lmod@6.4.5 sbzejlz lua-luafilesystem@1_6_3 iggewke m4@1.4.17 bldfx3w mpfr@3.1.4 atddxu7 tcl@8.6.5
-- linux-Ubuntu14-x86_64 / gcc@6.2.0 ----------------------------
csoc2mq bzip2@1.0.6 2b54aos libarchive@3.2.1 sfmeynw lzma@4.32.7 wnimqhw netlib-scalapack@2.0.2 s3qbtby openmpi@2.0.1 hkqauaa py-setuptools@25.2.0
6poypqg cmake@3.5.2 eo2siet libpciaccess@0.13.4 jcngz72 lzo@2.09 6bqlxqy netlib-scalapack@2.0.2 hibnfda openssl@1.0.2j qu7rc5p python@2.7.12
2ffacqm curl@7.50.3 g3qpmbi libsigsegv@2.10 lhgqa6s m4@1.4.17 wojunhq netlib-scalapack@2.0.2 rslsgcs pkg-config@0.29.1 td3zfe7 sqlite@3.8.5
bxqnjar expat@2.2.0 kiepac6 libtool@2.4.6 5n5xoep mpich@3.2 hpqb3dp netlib-scalapack@2.0.2 4gl5c42 py-nose@1.3.7 uoukuqk util-macros@1.19.0
3cfh3hi gmp@6.1.1 3k4ykbe libxml2@2.9.4 7tb426s ncurses@6.0 djdthlh nettle@3.2 i3rpk4e py-numpy@1.11.1 t5lk6in xz@5.2.2
3ostwel hwloc@1.11.4 cagoem4 lz4@131 mirer2l netlib-lapack@3.6.1 js33umc openblas@0.2.19 e6uljfi py-scipy@0.18.1 asydrba zlib@1.2.8
==> Do you want to proceed ? [y/n]
y
$ module avail
------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
bzip2-1.0.6-gcc-6.2.0-csoc2mq libsigsegv-2.10-gcc-6.2.0-g3qpmbi ncurses-6.0-gcc-6.2.0-7tb426s openmpi-2.0.1-gcc-6.2.0-s3qbtby sqlite-3.8.5-gcc-6.2.0-td3zfe7
cmake-3.5.2-gcc-6.2.0-6poypqg libtool-2.4.6-gcc-6.2.0-kiepac6 netlib-lapack-3.6.1-gcc-6.2.0-mirer2l openssl-1.0.2j-gcc-6.2.0-hibnfda util-macros-1.19.0-gcc-6.2.0-uoukuqk
curl-7.50.3-gcc-6.2.0-2ffacqm libxml2-2.9.4-gcc-6.2.0-3k4ykbe netlib-scalapack-2.0.2-gcc-6.2.0-6bqlxqy pkg-config-0.29.1-gcc-6.2.0-rslsgcs xz-5.2.2-gcc-6.2.0-t5lk6in
expat-2.2.0-gcc-6.2.0-bxqnjar lz4-131-gcc-6.2.0-cagoem4 netlib-scalapack-2.0.2-gcc-6.2.0-hpqb3dp py-nose-1.3.7-gcc-6.2.0-4gl5c42 zlib-1.2.8-gcc-6.2.0-asydrba
gmp-6.1.1-gcc-6.2.0-3cfh3hi lzma-4.32.7-gcc-6.2.0-sfmeynw netlib-scalapack-2.0.2-gcc-6.2.0-wnimqhw py-numpy-1.11.1-gcc-6.2.0-i3rpk4e
hwloc-1.11.4-gcc-6.2.0-3ostwel lzo-2.09-gcc-6.2.0-jcngz72 netlib-scalapack-2.0.2-gcc-6.2.0-wojunhq py-scipy-0.18.1-gcc-6.2.0-e6uljfi
libarchive-3.2.1-gcc-6.2.0-2b54aos m4-1.4.17-gcc-6.2.0-lhgqa6s nettle-3.2-gcc-6.2.0-djdthlh py-setuptools-25.2.0-gcc-6.2.0-hkqauaa
libpciaccess-0.13.4-gcc-6.2.0-eo2siet mpich-3.2-gcc-6.2.0-5n5xoep openblas-0.2.19-gcc-6.2.0-js33umc python-2.7.12-gcc-6.2.0-qu7rc5p
This time it is convenient to pass the option ``--delete-tree`` to the command that
regenerates the module files to instruct it to delete the existing tree and regenerate
a new one instead of overwriting the files in the existing directory.
If you pay careful attention you'll see though that we went too far in blacklisting modules:
the module for ``gcc@6.2.0`` disappeared as it was bootstrapped with ``gcc@4.8``. To specify
exceptions to the blacklist rules you can use ``whitelist``:
.. code-block:: yaml
:emphasize-lines: 3,4
modules:
tcl:
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
``whitelist`` rules always have precedence over ``blacklist`` rules. If you regenerate the modules again:
.. code-block:: console
$ spack module refresh --module-type tcl -y
you'll see that now the module for ``gcc@6.2.0`` has reappeared:
.. code-block:: console
$ module avail gcc-6.2.0-gcc-4.8-twd5nqg
------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
gcc-6.2.0-gcc-4.8-twd5nqg
-------------------------
Change module file naming
-------------------------
The next step in making module files more user-friendly is to
improve their naming scheme.
To reduce the length of the hash or remove it altogether you can
use the ``hash_length`` keyword in the configuration file:
.. TODO: give reasons to remove hashes if they are not evident enough?
.. code-block:: yaml
:emphasize-lines: 3
modules:
tcl:
hash_length: 0
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
If you try to regenerate the module files now you will get an error:
.. code-block:: console
$ spack module refresh --module-type tcl --delete-tree -y
==> Error: Name clashes detected in module files:
file : ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/netlib-scalapack-2.0.2-gcc-6.2.0
spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
==> Error: Operation aborted
.. note::
We try to check for errors upfront!
In Spack we check for errors upfront whenever possible, so don't worry about your module files:
as a name clash was detected nothing has been changed on disk.
The problem here is that without
the hashes the four different flavors of ``netlib-scalapack`` map to the same module file
name. We have the possibility to add suffixes to differentiate them:
.. code-block:: yaml
:emphasize-lines: 9-11,14-17
modules:
tcl:
hash_length: 0
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
suffixes:
'^openblas': openblas
'^netlib-lapack': netlib
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
netlib-scalapack:
suffixes:
'^openmpi': openmpi
'^mpich': mpich
As you can see, it is possible to specify rules that apply only to a
restricted set of packages using :ref:`anonymous specs <anonymous_specs>`.
Regenerating module files now we obtain:
.. code-block:: console
$ spack module refresh --module-type tcl --delete-tree -y
==> Regenerating tcl module files
$ module avail
------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
bzip2-1.0.6-gcc-6.2.0 libpciaccess-0.13.4-gcc-6.2.0 mpich-3.2-gcc-6.2.0 openblas-0.2.19-gcc-6.2.0 python-2.7.12-gcc-6.2.0
cmake-3.5.2-gcc-6.2.0 libsigsegv-2.10-gcc-6.2.0 ncurses-6.0-gcc-6.2.0 openmpi-2.0.1-gcc-6.2.0 sqlite-3.8.5-gcc-6.2.0
curl-7.50.3-gcc-6.2.0 libtool-2.4.6-gcc-6.2.0 netlib-lapack-3.6.1-gcc-6.2.0 openssl-1.0.2j-gcc-6.2.0 util-macros-1.19.0-gcc-6.2.0
expat-2.2.0-gcc-6.2.0 libxml2-2.9.4-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-netlib-mpich pkg-config-0.29.1-gcc-6.2.0 xz-5.2.2-gcc-6.2.0
gcc-6.2.0-gcc-4.8 lz4-131-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-netlib-openmpi py-nose-1.3.7-gcc-6.2.0 zlib-1.2.8-gcc-6.2.0
gmp-6.1.1-gcc-6.2.0 lzma-4.32.7-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-openblas-mpich py-numpy-1.11.1-gcc-6.2.0-openblas
hwloc-1.11.4-gcc-6.2.0 lzo-2.09-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-openblas-openmpi py-scipy-0.18.1-gcc-6.2.0-openblas
libarchive-3.2.1-gcc-6.2.0 m4-1.4.17-gcc-6.2.0 nettle-3.2-gcc-6.2.0 py-setuptools-25.2.0-gcc-6.2.0
Finally we can set a ``naming_scheme`` to prevent users from loading
modules that refer to different flavors of the same library/application:
.. code-block:: yaml
:emphasize-lines: 4,10,11
modules:
tcl:
hash_length: 0
naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
conflict:
- '${PACKAGE}'
suffixes:
'^openblas': openblas
'^netlib-lapack': netlib
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
netlib-scalapack:
suffixes:
'^openmpi': openmpi
'^mpich': mpich
The final result should look like:
.. code-block:: console
$ module avail
------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
bzip2/1.0.6-gcc-6.2.0 libpciaccess/0.13.4-gcc-6.2.0 mpich/3.2-gcc-6.2.0 openblas/0.2.19-gcc-6.2.0 python/2.7.12-gcc-6.2.0
cmake/3.5.2-gcc-6.2.0 libsigsegv/2.10-gcc-6.2.0 ncurses/6.0-gcc-6.2.0 openmpi/2.0.1-gcc-6.2.0 sqlite/3.8.5-gcc-6.2.0
curl/7.50.3-gcc-6.2.0 libtool/2.4.6-gcc-6.2.0 netlib-lapack/3.6.1-gcc-6.2.0 openssl/1.0.2j-gcc-6.2.0 util-macros/1.19.0-gcc-6.2.0
expat/2.2.0-gcc-6.2.0 libxml2/2.9.4-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-netlib-mpich pkg-config/0.29.1-gcc-6.2.0 xz/5.2.2-gcc-6.2.0
gcc/6.2.0-gcc-4.8 lz4/131-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-netlib-openmpi py-nose/1.3.7-gcc-6.2.0 zlib/1.2.8-gcc-6.2.0
gmp/6.1.1-gcc-6.2.0 lzma/4.32.7-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-openblas-mpich py-numpy/1.11.1-gcc-6.2.0-openblas
hwloc/1.11.4-gcc-6.2.0 lzo/2.09-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-openblas-openmpi (D) py-scipy/0.18.1-gcc-6.2.0-openblas
libarchive/3.2.1-gcc-6.2.0 m4/1.4.17-gcc-6.2.0 nettle/3.2-gcc-6.2.0 py-setuptools/25.2.0-gcc-6.2.0
.. note::
TCL specific directive
The directives ``naming_scheme`` and ``conflict`` are TCL specific and do not apply
to the ``dotkit`` or ``lmod`` sections in the configuration file.
------------------------------------
Add custom environment modifications
------------------------------------
At many sites it is customary to set an environment variable in a
package's module file that points to the folder in which the package
is installed. You can achieve this with Spack by adding an
``environment`` directive to the configuration file:
.. code-block:: yaml
:emphasize-lines: 17-19
modules:
tcl:
hash_length: 0
naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
conflict:
- '${PACKAGE}'
suffixes:
'^openblas': openblas
'^netlib-lapack': netlib
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
environment:
set:
'${PACKAGE}_ROOT': '${PREFIX}'
netlib-scalapack:
suffixes:
'^openmpi': openmpi
'^mpich': mpich
There are many variable tokens available to use in the ``environment``
and ``naming_scheme`` directives, such as ``${PACKAGE}``,
``${VERSION}``, etc. (see the :meth:`~spack.spec.Spec.format` API
documentation for the complete list).
Regenerating the module files should result in something like:
.. code-block:: console
:emphasize-lines: 14
$ spack module refresh -y --module-type tcl
==> Regenerating tcl module files
$ module show gcc
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc/6.2.0-gcc-4.8:
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
whatis("gcc @6.2.0 ")
prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
setenv("GCC_ROOT","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u")
conflict("gcc")
help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
Fortran, and Java.
]])
As you see the ``gcc`` module has the environment variable ``GCC_ROOT`` set.
Sometimes it's also useful to apply environment modifications selectively and target
only certain packages. You can, for instance, set the common variables ``CC``, ``CXX``,
etc. in the ``gcc`` module file and apply other custom modifications to the
``openmpi`` modules as follows:
.. code-block:: yaml
:emphasize-lines: 20-32
modules:
tcl:
hash_length: 0
naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
conflict:
- '${PACKAGE}'
suffixes:
'^openblas': openblas
'^netlib-lapack': netlib
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
environment:
set:
'${PACKAGE}_ROOT': '${PREFIX}'
gcc:
environment:
set:
CC: gcc
CXX: g++
FC: gfortran
F90: gfortran
F77: gfortran
openmpi:
environment:
set:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
netlib-scalapack:
suffixes:
'^openmpi': openmpi
'^mpich': mpich
This time we will be more selective and regenerate only the ``gcc`` and
``openmpi`` module files:
.. code-block:: console
$ spack module refresh -y --module-type tcl gcc
==> Regenerating tcl module files
$ spack module refresh -y --module-type tcl openmpi
==> Regenerating tcl module files
$ module show gcc
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc/6.2.0-gcc-4.8:
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
whatis("gcc @6.2.0 ")
prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
setenv("GCC_ROOT","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u")
setenv("CC","gcc")
setenv("CXX","g++")
setenv("F90","gfortran")
setenv("FC","gfortran")
setenv("F77","gfortran")
conflict("gcc")
help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
Fortran, and Java.
]])
$ module show openmpi
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
~/spack/share/spack/modules/linux-Ubuntu14-x86_64/openmpi/2.0.1-gcc-6.2.0:
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
whatis("openmpi @2.0.1 ")
prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/bin")
prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/")
prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/lib")
prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/lib/pkgconfig")
prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/share/man")
setenv("SLURM_MPI_TYPE","pmi2")
setenv("OMPI_MCA_BTL_OPENIB_WARN_DEFAULT_GID_PREFIX","0")
setenv("OPENMPI_ROOT","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w")
conflict("openmpi")
help([[The Open MPI Project is an open source Message Passing Interface
implementation that is developed and maintained by a consortium of
academic, research, and industry partners. Open MPI is therefore able to
combine the expertise, technologies, and resources from all across the
High Performance Computing community in order to build the best MPI
library available. Open MPI offers advantages for system and software
vendors, application developers and computer science researchers.
]])
---------------------
Autoload dependencies
---------------------
Spack can also generate module files that contain code to load the
dependencies automatically. You can, for instance, generate python
modules that load their dependencies by adding the ``autoload``
directive and assigning it the value ``direct``:
.. code-block:: yaml
:emphasize-lines: 37,38
modules:
tcl:
hash_length: 0
naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
conflict:
- '${PACKAGE}'
suffixes:
'^openblas': openblas
'^netlib-lapack': netlib
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
environment:
set:
'${PACKAGE}_ROOT': '${PREFIX}'
gcc:
environment:
set:
CC: gcc
CXX: g++
FC: gfortran
F90: gfortran
F77: gfortran
openmpi:
environment:
set:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
netlib-scalapack:
suffixes:
'^openmpi': openmpi
'^mpich': mpich
^python:
autoload: 'direct'
and regenerating the module files for every package that depends on ``python``:
.. code-block:: console
$ spack module refresh -y --module-type tcl ^python
==> Regenerating tcl module files
Now the ``py-scipy`` module will be:
.. code-block:: tcl
#%Module1.0
## Module file created by spack (https://github.com/LLNL/spack) on 2016-11-02 20:53:21.283547
##
## py-scipy@0.18.1%gcc@6.2.0 arch=linux-Ubuntu14-x86_64-e6uljfi
##
module-whatis "py-scipy @0.18.1"
proc ModulesHelp { } {
puts stderr "SciPy (pronounced "Sigh Pie") is a Scientific Library for Python. It"
puts stderr "provides many user-friendly and efficient numerical routines such as"
puts stderr "routines for numerical integration and optimization."
}
if ![ is-loaded python/2.7.12-gcc-6.2.0 ] {
puts stderr "Autoloading python/2.7.12-gcc-6.2.0"
module load python/2.7.12-gcc-6.2.0
}
if ![ is-loaded openblas/0.2.19-gcc-6.2.0 ] {
puts stderr "Autoloading openblas/0.2.19-gcc-6.2.0"
module load openblas/0.2.19-gcc-6.2.0
}
if ![ is-loaded py-numpy/1.11.1-gcc-6.2.0-openblas ] {
puts stderr "Autoloading py-numpy/1.11.1-gcc-6.2.0-openblas"
module load py-numpy/1.11.1-gcc-6.2.0-openblas
}
prepend-path CMAKE_PREFIX_PATH "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh/"
prepend-path LD_LIBRARY_PATH "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh/lib"
prepend-path PYTHONPATH "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh/lib/python2.7/site-packages"
setenv PY_SCIPY_ROOT "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh"
conflict py-scipy
and will contain code to autoload all the dependencies:
.. code-block:: console
$ module load py-scipy
Autoloading python/2.7.12-gcc-6.2.0
Autoloading openblas/0.2.19-gcc-6.2.0
Autoloading py-numpy/1.11.1-gcc-6.2.0-openblas
-----------------------------
Lua hierarchical module files
-----------------------------
In the final part of this tutorial you will modify ``modules.yaml`` to generate
Lua hierarchical module files. You will see that most of the directives used before
are also valid in the ``lmod`` context.
^^^^^^^^^^^^^^^^^
Core/Compiler/MPI
^^^^^^^^^^^^^^^^^
.. warning::
Only LMod supports Lua hierarchical module files
For this part of the tutorial you need to be using LMod to
manage your environment.
The most common hierarchy is the so called ``Core/Compiler/MPI``. To have an idea
how a hierarchy is organized you may refer to the
`Lmod guide <https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/user-guide/module-hierarchy>`_.
Since ``lmod`` is not enabled by default, you need to add it to the list of
enabled module file generators. The other things you need to do are:
- change the ``tcl`` tag to ``lmod``
- remove ``tcl`` specific directives (``naming_scheme`` and ``conflict``)
- set which compilers are considered ``core``
- remove the ``mpi`` related suffixes (as they will be substituted by hierarchies)
After modifications the configuration file will be:
.. code-block:: yaml
:emphasize-lines: 2-6
modules:
enable::
- lmod
lmod:
core_compilers:
- 'gcc@4.8'
hash_length: 0
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
all:
suffixes:
'^openblas': openblas
'^netlib-lapack': netlib
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
environment:
set:
'${PACKAGE}_ROOT': '${PREFIX}'
gcc:
environment:
set:
CC: gcc
CXX: g++
FC: gfortran
F90: gfortran
F77: gfortran
openmpi:
environment:
set:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
.. note::
The double colon
The double colon after ``enable`` is intentional and it serves the
purpose of overriding the default list of enabled generators so
that only ``lmod`` will be active (see :ref:`the reference
manual <config-overrides>` for a more detailed explanation of
config scopes).
The directive ``core_compilers`` accepts a list of compilers: everything built
using these compilers will create a module in the ``Core`` part of the hierarchy. It is
common practice to put the OS provided compilers in the list and only build common utilities
and other compilers in ``Core``.
If you regenerate the module files
.. code-block:: console
$ spack module refresh --module-type lmod --delete-tree -y
and update ``MODULEPATH`` to point to the ``Core`` folder, and
list the available modules, you'll see:
.. code-block:: console
$ module unuse ~/spack/share/spack/modules/linux-Ubuntu14-x86_64
$ module use ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core
$ module avail
----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
gcc/6.2.0
The only module visible now is ``gcc``. Loading it will make
visible the ``Compiler`` part of the software stack that was built with ``gcc/6.2.0``:
.. code-block:: console
$ module load gcc
$ module avail
-------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/gcc/6.2.0 ---------------------------------------------------------------------
binutils/2.27 curl/7.50.3 hwloc/1.11.4 libtool/2.4.6 lzo/2.09 netlib-lapack/3.6.1 openssl/1.0.2j py-scipy/0.18.1-openblas util-macros/1.19.0
bison/3.0.4 expat/2.2.0 libarchive/3.2.1 libxml2/2.9.4 m4/1.4.17 nettle/3.2 pkg-config/0.29.1 py-setuptools/25.2.0 xz/5.2.2
bzip2/1.0.6 flex/2.6.0 libpciaccess/0.13.4 lz4/131 mpich/3.2 openblas/0.2.19 py-nose/1.3.7 python/2.7.12 zlib/1.2.8
cmake/3.6.1 gmp/6.1.1 libsigsegv/2.10 lzma/4.32.7 ncurses/6.0 openmpi/2.0.1 py-numpy/1.11.1-openblas sqlite/3.8.5
----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
gcc/6.2.0 (L)
The same holds true for the ``MPI`` part of the stack, that you can enable by loading
either ``mpich`` or ``openmpi``. The nice features of LMod will become evident
once you try switching between different stacks:
.. code-block:: console
$ module load mpich
$ module avail
----------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/mpich/3.2-5n5xoep/gcc/6.2.0 ------------------------------------------------------------
netlib-scalapack/2.0.2-netlib netlib-scalapack/2.0.2-openblas (D)
-------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/gcc/6.2.0 ---------------------------------------------------------------------
binutils/2.27 curl/7.50.3 hwloc/1.11.4 libtool/2.4.6 lzo/2.09 netlib-lapack/3.6.1 openssl/1.0.2j py-scipy/0.18.1-openblas util-macros/1.19.0
bison/3.0.4 expat/2.2.0 libarchive/3.2.1 libxml2/2.9.4 m4/1.4.17 nettle/3.2 pkg-config/0.29.1 py-setuptools/25.2.0 xz/5.2.2
bzip2/1.0.6 flex/2.6.0 libpciaccess/0.13.4 lz4/131 mpich/3.2 (L) openblas/0.2.19 py-nose/1.3.7 python/2.7.12 zlib/1.2.8
cmake/3.6.1 gmp/6.1.1 libsigsegv/2.10 lzma/4.32.7 ncurses/6.0 openmpi/2.0.1 py-numpy/1.11.1-openblas sqlite/3.8.5
----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
gcc/6.2.0 (L)
$ module load openblas netlib-scalapack/2.0.2-openblas
$ module list
Currently Loaded Modules:
1) gcc/6.2.0 2) mpich/3.2 3) openblas/0.2.19 4) netlib-scalapack/2.0.2-openblas
$ module load openmpi
Lmod is automatically replacing "mpich/3.2" with "openmpi/2.0.1"
Due to MODULEPATH changes the following have been reloaded:
1) netlib-scalapack/2.0.2-openblas
This layout is already a great improvement over the usual non-hierarchical layout,
but it still has an asymmetry: ``LAPACK`` providers are semantically the same as ``MPI``
providers, but they are still not part of the hierarchy. We'll see a possible solution
next.
.. Activate lmod and turn the previous modifications into lmod:
Add core compilers
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Extend the hierarchy to other virtual providers
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. warning::
This is an experimental feature
Having a hierarchy deeper than ``Core``/``Compiler``/``MPI`` is an experimental
feature, still not fully supported by ``module spider``,
see `here <https://github.com/TACC/Lmod/issues/114>`_. Furthermore its use
with hierarchies more complex than ``Core``/``Compiler``/``MPI``/``LAPACK``
has not been thoroughly tested in production environments.
Spack permits you to generate Lua hierarchical module files where users
can add an arbitrary list of virtual providers to the triplet
``Core``/``Compiler``/``MPI``. A configuration file like:
.. code-block:: yaml
:emphasize-lines: 7,8
modules:
enable::
- lmod
lmod:
core_compilers:
- 'gcc@4.8'
hierarchical_scheme:
- lapack
hash_length: 0
whitelist:
- gcc
blacklist:
- '%gcc@4.8'
- readline
all:
filter:
environment_blacklist: ['CPATH', 'LIBRARY_PATH']
environment:
set:
'${PACKAGE}_ROOT': '${PREFIX}'
gcc:
environment:
set:
CC: gcc
CXX: g++
FC: gfortran
F90: gfortran
F77: gfortran
openmpi:
environment:
set:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
will add ``lapack`` providers to the mix. After the usual regeneration of module files:
.. code-block:: console
$ module purge
$ spack module refresh --module-type lmod --delete-tree -y
==> Regenerating lmod module files
you will have something like:
.. code-block:: console
$ module load gcc
$ module load openblas
$ module load openmpi
$ module avail
--------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/openblas/0.2.19-js33umc/openmpi/2.0.1-s3qbtby/gcc/6.2.0 ----------------------------------------------
netlib-scalapack/2.0.2
-------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/openblas/0.2.19-js33umc/gcc/6.2.0 ---------------------------------------------------------
py-numpy/1.11.1 py-scipy/0.18.1
-------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/gcc/6.2.0 ---------------------------------------------------------------------
binutils/2.27 curl/7.50.3 hwloc/1.11.4 libtool/2.4.6 lzo/2.09 netlib-lapack/3.6.1 openssl/1.0.2j python/2.7.12 zlib/1.2.8
bison/3.0.4 expat/2.2.0 libarchive/3.2.1 libxml2/2.9.4 m4/1.4.17 nettle/3.2 pkg-config/0.29.1 sqlite/3.8.5
bzip2/1.0.6 flex/2.6.0 libpciaccess/0.13.4 lz4/131 mpich/3.2 openblas/0.2.19 (L) py-nose/1.3.7 util-macros/1.19.0
cmake/3.6.1 gmp/6.1.1 libsigsegv/2.10 lzma/4.32.7 ncurses/6.0 openmpi/2.0.1 (L) py-setuptools/25.2.0 xz/5.2.2
----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
gcc/6.2.0 (L)
Now both the ``MPI`` and the ``LAPACK`` providers are handled by LMod as hierarchies:
.. code-block:: console
$ module load py-numpy netlib-scalapack
$ module load mpich
Lmod is automatically replacing "openmpi/2.0.1" with "mpich/3.2"
Due to MODULEPATH changes the following have been reloaded:
1) netlib-scalapack/2.0.2
$ module load netlib-lapack
Lmod is automatically replacing "openblas/0.2.19" with "netlib-lapack/3.6.1"
Inactive Modules:
1) py-numpy
Due to MODULEPATH changes the following have been reloaded:
1) netlib-scalapack/2.0.2
making the use of tags to differentiate them unnecessary.
Note that because we only compiled ``py-numpy`` with ``openblas`` the module
is made inactive when we switch the ``LAPACK`` provider. The user
environment will now be consistent by design!

View File

@@ -43,7 +43,7 @@ A few things before we get started:
Creating the Package File Creating the Package File
------------------------- -------------------------
Spack comes with a handy command to create a new package: ``spack create``. Spack comes with a handy command to create a new package: ``spack create``
This command is given the location of a package's source code, downloads This command is given the location of a package's source code, downloads
the code, and sets up some basic packaging infrastructure for you. The the code, and sets up some basic packaging infrastructure for you. The
@@ -52,20 +52,12 @@ we run ``spack create`` on it:
.. code-block:: console .. code-block:: console
$ spack create -t generic -f https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz $ spack create -f https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
==> This looks like a URL for mpileaks ==> This looks like a URL for mpileaks version 1.0
==> Found 1 version of mpileaks: ==> Creating template for package mpileaks
1.0 https://github.com/LLNL/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
==> How many would you like to checksum? (default is 1, q to abort) 1
==> Downloading... ==> Downloading...
==> Fetching https://github.com/LLNL/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz ==> Fetching https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
############################################################################# 100.0% ###################################################################################### 100.0%
==> Checksummed 1 version of mpileaks
==> Using specified package template: 'generic'
==> Created template for mpileaks package
==> Created package file: $SPACK_ROOT/var/spack/repos/builtin/packages/mpileaks/package.py
And Spack should spawn a text editor with this file: And Spack should spawn a text editor with this file:
@@ -200,27 +192,29 @@ Now when we try to install this package a lot more happens:
.. code-block:: console .. code-block:: console
$ spack install mpileaks $ spack install mpileaks
...
==> libdwarf is already installed in SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/libdwarf-20160507-er4jrjynul6uba7wiu5tasuj35roxw6m
==> dyninst is already installed in SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/dyninst-9.3.2-t7mau34jv3e76mpspdzhf2p2a6k7qubg
==> callpath is already installed in SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/callpath-1.0.4-ikbbkvfmsfmqzo624nvvrbooovf7egoc
==> Installing mpileaks ==> Installing mpileaks
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz ==> openmpi is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz
==> Already staged mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 in SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 ==> callpath is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube
==> No patches needed for mpileaks ==> adept-utils is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz
==> Building mpileaks [Package] ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
==> Executing phase: 'install' ==> Already staged mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk
==> Error: ProcessError: Command exited with status 2: ==> Already patched mpileaks
'make' '-j36' ==> Building mpileaks [AutotoolsPackage]
==> Executing phase : 'autoreconf'
1 error found in build log: ==> Executing phase : 'configure'
1 ==> Executing phase: 'install' ==> Error: ProcessError: Command exited with status 1:
2 ==> 'make' '-j36' './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk'
>> 3 make: *** No targets specified and no makefile found. Stop. /usr/workspace/wsa/legendre/spack/lib/spack/spack/build_systems/autotools.py:150, in configure:
145 def configure(self, spec, prefix):
146 """Runs configure with the arguments specified in `configure_args`
147 and an appropriately set prefix
148 """
149 options = ['--prefix={0}'.format(prefix)] + self.configure_args()
>> 150 inspect.getmodule(self).configure(*options)
See build log for details: See build log for details:
SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7/mpileaks-1.0/spack-build.out /tmp/legendre/spack-stage/spack-stage-7V5yyk/mpileaks-1.0/spack-build.out
Note that this command may take a while to run and produce more output if Note that this command may take a while to run and produce more output if
you don't have an MPI already installed or configured in Spack. you don't have an MPI already installed or configured in Spack.
@@ -234,79 +228,44 @@ Debugging Package Builds
------------------------ ------------------------
Our ``mpileaks`` package is still not building. It may be obvious to Our ``mpileaks`` package is still not building. It may be obvious to
many of you that we never ran the configure script. Let's add a many of you that we're still missing the configure options. But let's
call to ``configure()`` to the top of the install routine. The resulting pretend we're not all intelligent developers and use this opportunity
package.py is in ``$SPACK_ROOT/lib/spack/docs/tutorial/examples/3.package.py``: to spend some time debugging. We have a few options that can tell us about
.. literalinclude:: tutorial/examples/3.package.py
:lines: 25-
:language: python
If we re-run we still get errors:
.. code-block:: console
==> Installing mpileaks
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
==> Already staged mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 in SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7
==> No patches needed for mpileaks
==> Building mpileaks [Package]
==> Executing phase: 'install'
==> Error: ProcessError: Command exited with status 1:
'./configure'
1 error found in build log:
[ ... ]
21 checking whether SPACK_ROOT/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
22 checking whether we are using the GNU C++ compiler... yes
23 checking whether SPACK_ROOT/lib/spack/env/gcc/g++ accepts -g... yes
24 checking dependency style of SPACK_ROOT/lib/spack/env/gcc/g++... gcc3
25 checking for SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc... SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc
26 Checking whether SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc responds to '-showme:compile'... yes
>> 27 configure: error: unable to locate adept-utils installation
See build log for details:
SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7/mpileaks-1.0/spack-build.out
Again, the problem may be obvious. But let's pretend we're not
all intelligent developers and use this opportunity to spend some
time debugging. We have a few options that can tell us about
what's going wrong: what's going wrong:
As per the error message, Spack has given us a ``spack-build.out`` debug log: As per the error message, Spack has given us a ``spack-build.out`` debug log:
.. code-block:: console .. code-block:: console
==> Executing phase: 'install' ==> './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk'
==> './configure'
checking metadata... no checking metadata... no
checking installation directory variables... yes checking installation directory variables... yes
checking for a BSD-compatible install... /usr/bin/install -c checking for a BSD-compatible install... /usr/bin/install -c
checking whether build environment is sane... yes checking whether build environment is sane... yes
checking for a thread-safe mkdir -p... /bin/mkdir -p checking for a thread-safe mkdir -p... /usr/bin/mkdir -p
checking for gawk... gawk checking for gawk... gawk
checking whether make sets $(MAKE)... yes checking whether make sets $(MAKE)... yes
checking for gcc... SPACK_ROOT/lib/spack/env/gcc/gcc checking for gcc... /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc
checking for C compiler default output file name... a.out checking for C compiler default output file name... a.out
checking whether the C compiler works... yes checking whether the C compiler works... yes
checking whether we are cross compiling... no checking whether we are cross compiling... no
checking for suffix of executables... checking for suffix of executables...
checking for suffix of object files... o checking for suffix of object files... o
checking whether we are using the GNU C compiler... yes checking whether we are using the GNU C compiler... yes
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc accepts -g... yes checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc accepts -g... yes
checking for SPACK_ROOT/lib/spack/env/gcc/gcc option to accept ISO C89... none needed checking for /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc option to accept ISO C89... none needed
checking for style of include used by make... GNU checking for style of include used by make... GNU
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/gcc... gcc3 checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc... gcc3
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
checking whether we are using the GNU C++ compiler... yes checking whether we are using the GNU C++ compiler... yes
checking whether SPACK_ROOT/lib/spack/env/gcc/g++ accepts -g... yes checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++ accepts -g... yes
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/g++... gcc3 checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++... gcc3
checking for SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc... SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc checking for /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc... /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc
Checking whether SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc responds to '-showme:compile'... yes Checking whether /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc responds to '-showme:compile'... yes
configure: error: unable to locate adept-utils installation configure: error: unable to locate ``adept-utils`` installation
This gives us the output from the build, and mpileaks isn't This gives us the output from the build, and it's fairly obvious that
finding its ``adept-utils`` package. Spack has mpileaks isn't finding its ``adept-utils`` package. Spack has
automatically added the include and library directories of automatically added the include and library directories of
``adept-utils`` to the compiler's search path, but some packages like ``adept-utils`` to the compiler's search path, but some packages like
mpileaks can sometimes be picky and still want things spelled out on mpileaks can sometimes be picky and still want things spelled out on
@@ -333,26 +292,26 @@ From here we can manually re-run the build:
checking installation directory variables... yes checking installation directory variables... yes
checking for a BSD-compatible install... /usr/bin/install -c checking for a BSD-compatible install... /usr/bin/install -c
checking whether build environment is sane... yes checking whether build environment is sane... yes
checking for a thread-safe mkdir -p... /bin/mkdir -p checking for a thread-safe mkdir -p... /usr/bin/mkdir -p
checking for gawk... gawk checking for gawk... gawk
checking whether make sets $(MAKE)... yes checking whether make sets $(MAKE)... yes
checking for gcc... SPACK_ROOT/lib/spack/env/gcc/gcc checking for gcc... /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc
checking for C compiler default output file name... a.out checking for C compiler default output file name... a.out
checking whether the C compiler works... yes checking whether the C compiler works... yes
checking whether we are cross compiling... no checking whether we are cross compiling... no
checking for suffix of executables... checking for suffix of executables...
checking for suffix of object files... o checking for suffix of object files... o
checking whether we are using the GNU C compiler... yes checking whether we are using the GNU C compiler... yes
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc accepts -g... yes checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc accepts -g... yes
checking for SPACK_ROOT/lib/spack/env/gcc/gcc option to accept ISO C89... none needed checking for /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc option to accept ISO C89... none needed
checking for style of include used by make... GNU checking for style of include used by make... GNU
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/gcc... gcc3 checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc... gcc3
checking whether SPACK_ROOT/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
checking whether we are using the GNU C++ compiler... yes checking whether we are using the GNU C++ compiler... yes
checking whether SPACK_ROOT/lib/spack/env/gcc/g++ accepts -g... yes checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++ accepts -g... yes
checking dependency style of SPACK_ROOT/lib/spack/env/gcc/g++... gcc3 checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++... gcc3
checking for SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc... SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc checking for /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc... /usr/workspace/wsa /legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc
Checking whether SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f/bin/mpicc responds to '-showme:compile'... yes Checking whether /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc responds to '-showme:compile'... yes
configure: error: unable to locate adept-utils installation configure: error: unable to locate adept-utils installation
We're seeing the same error, but now we're in a shell where we can run We're seeing the same error, but now we're in a shell where we can run
@@ -369,9 +328,9 @@ Specifying Configure Arguments
Let's add the configure arguments to the mpileaks' ``package.py``. This Let's add the configure arguments to the mpileaks' ``package.py``. This
version can be found in version can be found in
``$SPACK_ROOT/lib/spack/docs/tutorial/examples/4.package.py``: ``$SPACK_ROOT/lib/spack/docs/tutorial/examples/3.package.py``:
.. literalinclude:: tutorial/examples/4.package.py .. literalinclude:: tutorial/examples/3.package.py
:lines: 25- :lines: 25-
:language: python :language: python
@@ -380,35 +339,37 @@ This is all we need for working mpileaks! If we install now we'll see:
.. code-block:: console .. code-block:: console
$ spack install mpileaks $ spack install mpileaks
... spack install mpileaks
==> Installing mpileaks ==> Installing mpileaks
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz ==> openmpi is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz
==> Staging archive: SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7/mpileaks-1.0.tar.gz ==> callpath is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube
==> Created stage in SPACK_ROOT/var/spack/stage/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 ==> adept-utils is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz
==> No patches needed for mpileaks ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
==> Building mpileaks [Package] ==> Already staged mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk
==> Executing phase: 'install' ==> Already patched mpileaks
==> Building mpileaks [AutotoolsPackage]
==> Executing phase : 'autoreconf'
==> Executing phase : 'configure'
==> Executing phase : 'build'
==> Executing phase : 'install'
==> Successfully installed mpileaks ==> Successfully installed mpileaks
Fetch: 0.00s. Build: 9.01s. Total: 9.01s. Fetch: 0.00s. Build: 14.08s. Total: 14.08s.
[+] SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/mpileaks-1.0-lfgf53rns5mswq25rxckzgvmjc6ywam7 [+] /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk
We took a few shortcuts for this package that are worth highlighting.
There are some special circumstances in this package that are worth highlighting. Spack automatically detected that mpileaks was an Autotools-based package
Normally spack would have automatically detected that mpileaks was an when we ran ``spack create``. If this had been a CMake-based package we
Autotools-based package when we ran ``spack create`` and made it an ``AutoToolsPackage`` class (except we added the ``-t generic`` option to skip this). Instead of would have been filling in a ``cmake_args`` function instead of
a full install routine we would have just written: ``configure_args``. If Spack hadn't been able to detect the build
system, we'd be filling in a generic install method that would manually
be calling build commands, such as is found in the ``zlib`` package:
.. code-block:: python .. code-block:: python
def configure_args(self): def install(self, spec, prefix):
args = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix, configure('--prefix={0}'.format(prefix))
'--with-callpath=%s' % self.spec['callpath'].prefix] make()
return args make('install')
Similarly, if this had been a CMake-based package we
would have been filling in a ``cmake_args`` function instead of
``configure_args``. There are similar default package types for
many build environments.
-------- --------
Variants Variants
@@ -420,9 +381,9 @@ that it walks. Let's add a variant to allow users to set this when they
build in Spack. build in Spack.
To do this, we'll add a variant to our package, as per the following (see To do this, we'll add a variant to our package, as per the following (see
``$SPACK_ROOT/lib/spack/docs/tutorial/examples/5.package.py``): ``$SPACK_ROOT/lib/spack/docs/tutorial/examples/4.package.py``):
.. literalinclude:: tutorial/examples/5.package.py .. literalinclude:: tutorial/examples/4.package.py
:lines: 25- :lines: 25-
:language: python :language: python
@@ -433,15 +394,18 @@ configure line (output truncated for length):
.. code-block:: console .. code-block:: console
$ spack install --verbose mpileaks stackstart=4 $ spack install --verbose mpileaks stackstart=4
...
==> Installing mpileaks ==> Installing mpileaks
==> Using cached archive: SPACK_ROOT/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz ==> openmpi is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz
==> Staging archive: SPACK_ROOT/var/spack/stage/mpileaks-1.0-gxxi4fp57b4j6xalra5t65hyx5rj25t7/mpileaks-1.0.tar.gz ==> callpath is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube
==> Created stage in SPACK_ROOT/var/spack/stage/mpileaks-1.0-gxxi4fp57b4j6xalra5t65hyx5rj25t7 ==> adept-utils is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz
==> No patches needed for mpileaks ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
==> Building mpileaks [Package] ==> Staging archive: /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-otqo2opkhan5ksujt6tpmdftydrieig7/mpileaks-1.0.tar.gz
==> Executing phase: 'install' ==> Created stage in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-otqo2opkhan5ksujt6tpmdftydrieig7
==> './configure' '--with-adept-utils=SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/adept-utils-1.0.1-pm3gffhrnwsdtqthtvsfvs2tny4r65wb' '--with-callpath=SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/callpath-1.0.4-ikbbkvfmsfmqzo624nvvrbooovf7egoc' '--prefix=SPACK_ROOT/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/mpileaks-1.0-gxxi4fp57b4j6xalra5t65hyx5rj25t7' '--with-stack-start-c=4' '--with-stack-start-fortran=4' ==> Ran patch() for mpileaks
==> Building mpileaks [AutotoolsPackage]
==> Executing phase : 'autoreconf'
==> Executing phase : 'configure'
==> './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-otqo2opkhan5ksujt6tpmdftydrieig7' '--with-adept-utils=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz' '--with-callpath=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube' '--with-stack-start-c=4' '--with-stack-start-fortran=4'
--------------- ---------------
The Spec Object The Spec Object

File diff suppressed because it is too large Load Diff

View File

@@ -33,12 +33,24 @@ possible realization of a particular package, out of combinatorially
many other realizations. For example, here is a concrete spec many other realizations. For example, here is a concrete spec
instantiated from ``curl``: instantiated from ``curl``:
.. command-output:: spack spec curl .. code-block:: console
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
Spack's core concretization algorithm generates concrete specs by Spack's core concretization algorithm generates concrete specs by
instantiating packages from its repo, based on a set of "hints", instantiating packages from its repo, based on a set of "hints",
including user input and the ``packages.yaml`` file. This algorithm including user input and the ``packages.yaml`` file. This algorithm
may be accessed at any time with the ``spack spec`` command. may be accessed at any time with the ``spack spec`` command. For
example:
.. code-block:: console
$ spack spec curl
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
Every time Spack installs a package, that installation corresponds to Every time Spack installs a package, that installation corresponds to
a concrete spec. Only a vanishingly small fraction of possible a concrete spec. Only a vanishingly small fraction of possible
@@ -56,7 +68,7 @@ variant, compiler, etc. For example, the following set is consistent:
.. code-block:: console .. code-block:: console
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
^openssl@1.0.2k%gcc@5.3.0 arch=linux-SuSE11-x86_64 ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
@@ -65,7 +77,7 @@ The following set is not consistent:
.. code-block:: console .. code-block:: console
curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
^openssl@1.0.2k%gcc@5.3.0 arch=linux-SuSE11-x86_64 ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
zlib@1.2.7%gcc@5.3.0 arch=linux-SuSE11-x86_64 zlib@1.2.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
@@ -170,7 +182,7 @@ of usage:
.. code-block:: sh .. code-block:: sh
#!/bin/bash #!/bin/sh
compilers=( compilers=(
%gcc %gcc
@@ -355,7 +367,7 @@ Transitive Dependencies
In the script above, each ``spack module loads`` command generates a In the script above, each ``spack module loads`` command generates a
*single* ``module load`` line. Transitive dependencies do not usually *single* ``module load`` line. Transitive dependencies do not usually
need to be loaded, only modules the user needs in ``$PATH``. This is need to be loaded, only modules the user needs in ``$PATH``. This is
because Spack builds binaries with RPATH. Spack's RPATH policy has because Spack builds binaries with RPATH. Spack's RPATH policy has
some nice features: some nice features:
@@ -476,11 +488,10 @@ if the view is built with hardlinks.
.. FIXME: reference the relocation work of Hegner and Gartung (PR #1013) .. FIXME: reference the relocation work of Hegner and Gartung (PR #1013)
.. _cmd-spack-view:
"""""""""""""" """"""""""""""""""""""
``spack view`` Using Filesystem Views
"""""""""""""" """"""""""""""""""""""
A filesystem view is created, and packages are linked in, by the ``spack A filesystem view is created, and packages are linked in, by the ``spack
view`` command's ``symlink`` and ``hardlink`` sub-commands. The view`` command's ``symlink`` and ``hardlink`` sub-commands. The
@@ -1028,232 +1039,6 @@ or filesystem views. However, it has some drawbacks:
integrate Spack explicitly in their workflow. Not all users are integrate Spack explicitly in their workflow. Not all users are
willing to do this. willing to do this.
------------------------
Using Spack on Travis-CI
------------------------
Spack can be deployed as a provider for userland software in
`Travis-CI <https://travis-ci.org>`_.
A starting-point for a ``.travis.yml`` file can look as follows.
It uses `caching <https://docs.travis-ci.com/user/caching/>`_ for
already built environments, so make sure to clean the Travis cache if
you run into problems.
The main points that are implemented below:
#. Travis is detected as having up to 34 cores available, but only 2
are actually allocated for the user. We limit the parallelism of
the spack builds in the config.
(The Travis yaml parser is a bit buggy on the echo command.)
#. Builds over 10 minutes need to be prefixed with ``travis_wait``.
Alternatively, generate output once with ``spack install -v``.
#. Travis builds are non-interactive. This prevents using bash
aliases and functions for modules. We fix that by sourcing
``/etc/profile`` first (or running everything in a subshell with
``bash -l -c '...'``).
.. code-block:: yaml
language: cpp
sudo: false
dist: trusty
cache:
apt: true
directories:
- $HOME/.cache
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.9
- environment-modules
env:
global:
- SPACK_ROOT: $HOME/.cache/spack
- PATH: $PATH:$HOME/.cache/spack/bin
before_install:
- export CXX=g++-4.9
- export CC=gcc-4.9
- export FC=gfortran-4.9
- export CXXFLAGS="-std=c++11"
install:
- if ! which spack >/dev/null; then
mkdir -p $SPACK_ROOT &&
git clone --depth 50 https://github.com/spack/spack.git $SPACK_ROOT &&
echo -e "config:""\n build_jobs:"" 2" > $SPACK_ROOT/etc/spack/config.yaml;
fi
- travis_wait spack install cmake@3.7.2~openssl~ncurses
- travis_wait spack install boost@1.62.0~graph~iostream~locale~log~wave
- spack clean -a
- source /etc/profile &&
source $SPACK_ROOT/share/spack/setup-env.sh
- spack load cmake
- spack load boost
script:
- mkdir -p $HOME/build
- cd $HOME/build
- cmake $TRAVIS_BUILD_DIR
- make -j 2
- make test
.. _workflow_create_docker_image:
-----------------------------------
Using Spack to Create Docker Images
-----------------------------------
Spack can be the ideal tool to set up images for Docker (and Singularity).
An example ``Dockerfile`` is given below, downloading the latest spack
version.
The following functionality is prepared:
#. Base image: the example starts from a minimal ubuntu.
#. Installing as root: docker images are usually set up as root.
Since some autotools scripts might complain about this being unsafe, we set
``FORCE_UNSAFE_CONFIGURE=1`` to avoid configure errors.
#. Pre-install the spack dependencies, including modules from the packages.
This avoids needing to build those from scratch via ``spack bootstrap``.
Package installs are followed by a clean-up of the system package index,
to avoid outdated information and to save space.
#. Install spack in ``/usr/local``.
Add ``setup-env.sh`` to profile scripts, so commands in *login* shells
can use the whole spack functionality, including modules.
#. Install an example package (``tar``).
As with system package managers above, ``spack install`` commands should be
concatenated with a ``&& spack clean -a`` in order to keep image sizes small.
#. Add a startup hook to an *interactive login shell* so spack modules will be
usable.
In order to build and run the image, execute:
.. code-block:: bash
docker build -t spack .
docker run -it spack
.. code-block:: docker
FROM ubuntu:16.04
MAINTAINER Your Name <someone@example.com>
# general environment for docker
ENV DEBIAN_FRONTEND=noninteractive \
SPACK_ROOT=/usr/local \
FORCE_UNSAFE_CONFIGURE=1
# install minimal spack dependencies
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
autoconf \
build-essential \
ca-certificates \
coreutils \
curl \
environment-modules \
git \
python \
unzip \
vim \
&& rm -rf /var/lib/apt/lists/*
# load spack environment on login
RUN echo "source $SPACK_ROOT/share/spack/setup-env.sh" \
> /etc/profile.d/spack.sh
# spack settings
# note: if you wish to change default settings, add files alongside
# the Dockerfile with your desired settings. Then uncomment this line
#COPY packages.yaml modules.yaml $SPACK_ROOT/etc/spack/
# install spack
RUN curl -s -L https://api.github.com/repos/spack/spack/tarball \
| tar xzC $SPACK_ROOT --strip 1
# note: at this point one could also run ``spack bootstrap`` to avoid
# parts of the long apt-get install list above
# install software
RUN spack install tar \
&& spack clean -a
# need the modules already during image build?
#RUN /bin/bash -l -c ' \
# spack load tar \
# && which tar'
# image run hook: the -l will make sure /etc/profile environments are loaded
CMD /bin/bash -l
^^^^^^^^^^^^^^
Best Practices
^^^^^^^^^^^^^^
"""
MPI
"""
Due to the dependency on Fortran for OpenMPI, which is the spack default
implementation, consider adding ``gfortran`` to the ``apt-get install`` list.
Recent versions of OpenMPI will require you to pass ``--allow-run-as-root``
to your ``mpirun`` calls if started as root user inside Docker.
For execution on HPC clusters, it can be helpful to import the docker
image into Singularity in order to start a program with an *external*
MPI. Otherwise, also add ``openssh-server`` to the ``apt-get install`` list.
""""
CUDA
""""
Starting from CUDA 9.0, Nvidia provides minimal CUDA images based on
Ubuntu.
Please see `their instructions <https://hub.docker.com/r/nvidia/cuda/>`_.
Avoid double-installing CUDA by adding, e.g.
.. code-block:: yaml
packages:
cuda:
paths:
cuda@9.0.176%gcc@5.4.0 arch=linux-ubuntu16-x86_64: /usr/local/cuda
buildable: False
to your ``packages.yaml``.
Then ``COPY`` in that file into the image as in the example above.
Users will either need ``nvidia-docker`` or e.g. Singularity to *execute*
device kernels.
"""""""""""
Singularity
"""""""""""
Importing and running the image created above into
`Singularity <http://singularity.lbl.gov/>`_ works like a charm.
Just use the `docker bootstrapping mechanism <http://singularity.lbl.gov/quickstart#bootstrap-recipes>`_:
.. code-block:: none
Bootstrap: docker
From: registry/user/image:tag
%runscript
exec /bin/bash -l
------------------ ------------------
Upstream Bug Fixes Upstream Bug Fixes
------------------ ------------------
@@ -1270,7 +1055,7 @@ Buggy New Version
Sometimes, the old version of a package works fine, but a new version Sometimes, the old version of a package works fine, but a new version
is buggy. For example, it was once found that `Adios did not build is buggy. For example, it was once found that `Adios did not build
with hdf5@1.10 <https://github.com/spack/spack/issues/1683>`_. If the with hdf5@1.10 <https://github.com/LLNL/spack/issues/1683>`_. If the
old version of ``hdf5`` will work with ``adios``, the suggested old version of ``hdf5`` will work with ``adios``, the suggested
procedure is: procedure is:
@@ -1280,7 +1065,7 @@ procedure is:
.. code-block:: python .. code-block:: python
# Adios does not build with HDF5 1.10 # Adios does not build with HDF5 1.10
# See: https://github.com/spack/spack/issues/1683 # See: https://github.com/LLNL/spack/issues/1683
depends_on('hdf5@:1.9') depends_on('hdf5@:1.9')
#. Determine whether the problem is with ``hdf5`` or ``adios``, and #. Determine whether the problem is with ``hdf5`` or ``adios``, and
@@ -1293,7 +1078,7 @@ procedure is:
.. code-block:: python .. code-block:: python
# Adios up to v1.10.0 does not build with HDF5 1.10 # Adios up to v1.10.0 does not build with HDF5 1.10
# See: https://github.com/spack/spack/issues/1683 # See: https://github.com/LLNL/spack/issues/1683
depends_on('hdf5@:1.9', when='@:1.10.0') depends_on('hdf5@:1.9', when='@:1.10.0')
depends_on('hdf5', when='@1.10.1:') depends_on('hdf5', when='@1.10.1:')

27
lib/spack/env/cc vendored
View File

@@ -1,14 +1,14 @@
#!/bin/bash #!/bin/bash
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -45,7 +45,6 @@ parameters=(
SPACK_PREFIX SPACK_PREFIX
SPACK_ENV_PATH SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG SPACK_CXX_RPATH_ARG
@@ -59,7 +58,7 @@ parameters=(
# The default compiler flags are passed from these variables: # The default compiler flags are passed from these variables:
# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS, # SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
# SPACK_LDFLAGS, SPACK_LDLIBS # SPACK_LDFLAGS, SPACK_LDLIBS
# Debug env var is optional; set to "TRUE" for debug logging: # Debug env var is optional; set to true for debug logging:
# SPACK_DEBUG # SPACK_DEBUG
# Test command is used to unit test the compiler script. # Test command is used to unit test the compiler script.
# SPACK_TEST_COMMAND # SPACK_TEST_COMMAND
@@ -99,25 +98,25 @@ case "$command" in
cpp) cpp)
mode=cpp mode=cpp
;; ;;
cc|c89|c99|gcc|clang|icc|pgcc|xlc|xlc_r) cc|c89|c99|gcc|clang|icc|pgcc|xlc)
command="$SPACK_CC" command="$SPACK_CC"
language="C" language="C"
comp="CC" comp="CC"
lang_flags=C lang_flags=C
;; ;;
c++|CC|g++|clang++|icpc|pgc++|xlc++|xlc++_r) c++|CC|g++|clang++|icpc|pgc++|xlc++)
command="$SPACK_CXX" command="$SPACK_CXX"
language="C++" language="C++"
comp="CXX" comp="CXX"
lang_flags=CXX lang_flags=CXX
;; ;;
ftn|f90|fc|f95|gfortran|flang|ifort|pgfortran|xlf90|xlf90_r|nagfor) ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC" command="$SPACK_FC"
language="Fortran 90" language="Fortran 90"
comp="FC" comp="FC"
lang_flags=F lang_flags=F
;; ;;
f77|gfortran|flang|ifort|pgfortran|xlf|xlf_r|nagfor|ftn) f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77" command="$SPACK_F77"
language="Fortran 77" language="Fortran 77"
comp="F77" comp="F77"
@@ -134,7 +133,7 @@ esac
# If any of the arguments below are present, then the mode is vcheck. # If any of the arguments below are present, then the mode is vcheck.
# In vcheck mode, nothing is added in terms of extra search paths or # In vcheck mode, nothing is added in terms of extra search paths or
# libraries. # libraries.
if [[ -z $mode ]] || [[ $mode == ld ]]; then if [[ -z $mode ]]; then
for arg in "$@"; do for arg in "$@"; do
if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
mode=vcheck mode=vcheck
@@ -219,7 +218,7 @@ fi
add_rpaths=true add_rpaths=true
if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
for arg in "$@"; do for arg in "$@"; do
if [[ ($arg == -r && $mode == ld) || ($arg == -r && $mode == ccld) || ($arg == -Wl,-r && $mode == ccld) ]]; then if [[ ($arg == -r && $mode == ld) || ($arg == -Wl,-r && $mode == ccld) ]]; then
add_rpaths=false add_rpaths=false
break break
fi fi
@@ -329,10 +328,8 @@ IFS=':' read -ra extra_rpaths <<< "$SPACK_COMPILER_EXTRA_RPATHS"
for extra_rpath in "${extra_rpaths[@]}"; do for extra_rpath in "${extra_rpaths[@]}"; do
if [[ $mode == ccld ]]; then if [[ $mode == ccld ]]; then
$add_rpaths && args=("$rpath$extra_rpath" "${args[@]}") $add_rpaths && args=("$rpath$extra_rpath" "${args[@]}")
args=("-L$extra_rpath" "${args[@]}")
elif [[ $mode == ld ]]; then elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$extra_rpath" "${args[@]}") $add_rpaths && args=("-rpath" "$extra_rpath" "${args[@]}")
args=("-L$extra_rpath" "${args[@]}")
fi fi
done done
@@ -356,8 +353,8 @@ fi
# Write the input and output commands to debug logs if it's asked for. # Write the input and output commands to debug logs if it's asked for.
# #
if [[ $SPACK_DEBUG == TRUE ]]; then if [[ $SPACK_DEBUG == TRUE ]]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log" input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log" output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
echo "[$mode] $command $input_command" >> "$input_log" echo "[$mode] $command $input_command" >> "$input_log"
echo "[$mode] ${full_command[@]}" >> "$output_log" echo "[$mode] ${full_command[@]}" >> "$output_log"
fi fi

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1 +0,0 @@
../cc

View File

@@ -1,13 +1,13 @@
############################################################################## ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory. # Produced at the Lawrence Livermore National Laboratory.
# #
# This file is part of Spack. # This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188 # LLNL-CODE-647188
# #
# For details, see https://github.com/spack/spack # For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL. # Please also see the LICENSE file for our notice and the LGPL.
# #
# This program is free software; you can redistribute it and/or modify # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as # it under the terms of the GNU Lesser General Public License (as
@@ -22,113 +22,28 @@
# License along with this program; if not, write to the Free Software # License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
"""This module contains the following external, potentially separately """
licensed, packages that are included in Spack: This module contains external, potentially separately licensed,
packages that are included in spack.
argparse
-------- So far:
argparse: We include our own version to be Python 2.6 compatible.
* Homepage: https://pypi.python.org/pypi/argparse
* Usage: We include our own version to be Python 2.6 compatible. distro: Provides a more stable linux distribution detection.
* Version: 1.4.0
* Note: This package has been slightly modified to improve functools: Used for implementation of total_ordering.
error message formatting. See the following commit if the
vendored copy ever needs to be updated again: jsonschema: An implementation of JSON Schema for Python.
https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418
ordereddict: We include our own version to be Python 2.6 compatible.
ctest_log_parser
---------------- py: Needed by pytest. Library with cross-python path,
ini-parsing, io, code, and log facilities.
* Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
* Usage: Functions to parse build logs and extract error messages. pyqver2: External script to query required python version of
* Version: Unversioned python source code. Used for ensuring 2.6 compatibility.
* Note: This is a homemade port of Kitware's CTest build handler.
pytest: Testing framework used by Spack.
distro
------ yaml: Used for config files.
* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable linux distribution detection.
* Version: 1.0.4 (last version supporting Python 2.6)
functools
---------
* Homepage: https://github.com/python/cpython/blob/2.7/Lib/functools.py
* Usage: Used for implementation of total_ordering.
* Version: Unversioned
* Note: This is the functools.total_ordering implementation
from Python 2.7 backported so we can run on Python 2.6.
jinja2
------
* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
* Version: 2.10
jsonschema
----------
* Homepage: https://pypi.python.org/pypi/jsonschema
* Usage: An implementation of JSON Schema for Python.
* Version: 2.4.0 (last version before functools32 dependency was added)
* Note: functools32 doesn't support Python 2.6 or 3.0, so jsonschema
cannot be upgraded any further
markupsafe
----------
* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
* Version: 1.0
orderddict
----------
* Homepage: https://pypi.org/project/ordereddict/
* Usage: A drop-in substitute for Py2.7's new collections.OrderedDict
that works in Python 2.4-2.6.
* Version: 1.1
py
--
* Homepage: https://pypi.python.org/pypi/py
* Usage: Needed by pytest. Library with cross-python path,
ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
pyqver
------
* Homepage: https://github.com/ghewgill/pyqver
* Usage: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
* Version: Unversioned
pytest
------
* Homepage: https://pypi.python.org/pypi/pytest
* Usage: Testing framework used by Spack.
* Version: 3.2.5 (last version supporting Python 2.6)
* Note: This package has been slightly modified to improve
Python 2.6 compatibility. See the following commit if the
vendored copy ever needs to be updated again:
https://github.com/spack/spack/pull/6801/commits/ff513c39f2c67ff615de5cbc581dd69a8ec96526
pyyaml
------
* Homepage: https://pypi.python.org/pypi/PyYAML
* Usage: Used for config files.
* Version: 3.12
six
---
* Homepage: https://pypi.python.org/pypi/six
* Usage: Python 2 and 3 compatibility utilities.
* Version: 1.11.0
""" """

141
lib/spack/external/_pytest/AUTHORS vendored Normal file
View File

@@ -0,0 +1,141 @@
Holger Krekel, holger at merlinux eu
merlinux GmbH, Germany, office at merlinux eu
Contributors include::
Abdeali JK
Abhijeet Kasurde
Ahn Ki-Wook
Alexei Kozlenok
Anatoly Bubenkoff
Andreas Zeidler
Andrzej Ostrowski
Andy Freeland
Anthon van der Neut
Antony Lee
Armin Rigo
Aron Curzon
Aviv Palivoda
Ben Webb
Benjamin Peterson
Bernard Pratz
Bob Ippolito
Brian Dorsey
Brian Okken
Brianna Laugher
Bruno Oliveira
Cal Leeming
Carl Friedrich Bolz
Charles Cloud
Charnjit SiNGH (CCSJ)
Chris Lamb
Christian Boelsen
Christian Theunert
Christian Tismer
Christopher Gilling
Daniel Grana
Daniel Hahler
Daniel Nuri
Daniel Wandschneider
Danielle Jenkins
Dave Hunt
David Díaz-Barquero
David Mohr
David Vierra
Diego Russo
Dmitry Dygalo
Duncan Betts
Edison Gustavo Muenz
Edoardo Batini
Eduardo Schettino
Elizaveta Shashkova
Endre Galaczi
Eric Hunsberger
Eric Siegerman
Erik M. Bray
Feng Ma
Florian Bruhin
Floris Bruynooghe
Gabriel Reis
Georgy Dyuldin
Graham Horler
Greg Price
Grig Gheorghiu
Grigorii Eremeev (budulianin)
Guido Wesdorp
Harald Armin Massa
Ian Bicking
Jaap Broekhuizen
Jan Balster
Janne Vanhala
Jason R. Coombs
Javier Domingo Cansino
Javier Romero
John Towler
Jon Sonesen
Jordan Guymon
Joshua Bronson
Jurko Gospodnetić
Justyna Janczyszyn
Kale Kundert
Katarzyna Jachim
Kevin Cox
Lee Kamentsky
Lev Maximov
Lukas Bednar
Luke Murphy
Maciek Fijalkowski
Maho
Marc Schlaich
Marcin Bachry
Mark Abramowitz
Markus Unterwaditzer
Martijn Faassen
Martin K. Scherer
Martin Prusse
Mathieu Clabaut
Matt Bachmann
Matt Williams
Matthias Hafner
mbyt
Michael Aquilina
Michael Birtwell
Michael Droettboom
Michael Seifert
Mike Lundy
Ned Batchelder
Neven Mundar
Nicolas Delaby
Oleg Pidsadnyi
Oliver Bestwalter
Omar Kohl
Pieter Mulder
Piotr Banaszkiewicz
Punyashloka Biswal
Quentin Pradet
Ralf Schmitt
Raphael Pierzina
Raquel Alegre
Roberto Polli
Romain Dorgueil
Roman Bolshakov
Ronny Pfannschmidt
Ross Lawley
Russel Winder
Ryan Wooden
Samuele Pedroni
Simon Gomizelj
Stefan Farmbauer
Stefan Zimmermann
Stefano Taschini
Steffen Allner
Stephan Obermann
Tareq Alayan
Ted Xiao
Thomas Grainger
Tom Viner
Trevor Bekolay
Tyler Goodlet
Vasily Kuznetsov
Wouter van Ackooy
Xuecong Liao

21
lib/spack/external/_pytest/LICENSE vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2004-2016 Holger Krekel and others
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

102
lib/spack/external/_pytest/README.rst vendored Normal file
View File

@@ -0,0 +1,102 @@
.. image:: http://docs.pytest.org/en/latest/_static/pytest1.png
:target: http://docs.pytest.org
:align: center
:alt: pytest
------
.. image:: https://img.shields.io/pypi/v/pytest.svg
:target: https://pypi.python.org/pypi/pytest
.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
:target: https://pypi.python.org/pypi/pytest
.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
:target: https://coveralls.io/r/pytest-dev/pytest
.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
:target: https://travis-ci.org/pytest-dev/pytest
.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
:target: https://ci.appveyor.com/project/pytestbot/pytest
The ``pytest`` framework makes it easy to write small tests, yet
scales to support complex functional testing for applications and libraries.
An example of a simple test:
.. code-block:: python
# content of test_sample.py
def inc(x):
return x + 1
def test_answer():
assert inc(3) == 5
To execute it::
$ pytest
============================= test session starts =============================
collected 1 items
test_sample.py F
================================== FAILURES ===================================
_________________________________ test_answer _________________________________
def test_answer():
> assert inc(3) == 5
E assert 4 == 5
E + where 4 = inc(3)
test_sample.py:5: AssertionError
========================== 1 failed in 0.04 seconds ===========================
Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples.
Features
--------
- Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
- `Auto-discovery
<http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_
of test modules and functions;
- `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for
managing small or parametrized long-lived test resources;
- Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial),
`nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box;
- Python2.6+, Python3.3+, PyPy-2.3, Jython-2.5 (untested);
- Rich plugin architecture, with over 150+ `external plugins <http://docs.pytest.org/en/latest/plugins.html#installing-external-plugins-searching>`_ and thriving community;
Documentation
-------------
For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.
Bugs/Requests
-------------
Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
Changelog
---------
Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version.
License
-------
Copyright Holger Krekel and others, 2004-2016.
Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE

View File

@@ -1,8 +1,2 @@
__all__ = ['__version__'] #
__version__ = '3.0.5'
try:
from ._version import version as __version__
except ImportError:
# broken installation, we don't even try
# unknown only works because we do poor mans version compare
__version__ = 'unknown'

View File

@@ -57,29 +57,26 @@
which should throw a KeyError: 'COMPLINE' (which is properly set by the which should throw a KeyError: 'COMPLINE' (which is properly set by the
global argcomplete script). global argcomplete script).
""" """
from __future__ import absolute_import, division, print_function
import sys import sys
import os import os
from glob import glob from glob import glob
class FastFilesCompleter: class FastFilesCompleter:
'Fast file completer class' 'Fast file completer class'
def __init__(self, directories=True): def __init__(self, directories=True):
self.directories = directories self.directories = directories
def __call__(self, prefix, **kwargs): def __call__(self, prefix, **kwargs):
"""only called on non option completions""" """only called on non option completions"""
if os.path.sep in prefix[1:]: if os.path.sep in prefix[1:]: #
prefix_dir = len(os.path.dirname(prefix) + os.path.sep) prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
else: else:
prefix_dir = 0 prefix_dir = 0
completion = [] completion = []
globbed = [] globbed = []
if '*' not in prefix and '?' not in prefix: if '*' not in prefix and '?' not in prefix:
# we are on unix, otherwise no bash if prefix[-1] == os.path.sep: # we are on unix, otherwise no bash
if not prefix or prefix[-1] == os.path.sep:
globbed.extend(glob(prefix + '.*')) globbed.extend(glob(prefix + '.*'))
prefix += '*' prefix += '*'
globbed.extend(glob(prefix)) globbed.extend(glob(prefix))
@@ -99,8 +96,7 @@ def __call__(self, prefix, **kwargs):
filescompleter = FastFilesCompleter() filescompleter = FastFilesCompleter()
def try_argcomplete(parser): def try_argcomplete(parser):
argcomplete.autocomplete(parser, always_complete_options=False) argcomplete.autocomplete(parser)
else: else:
def try_argcomplete(parser): def try_argcomplete(parser): pass
pass
filescompleter = None filescompleter = None

View File

@@ -1,5 +1,4 @@
""" python inspection/code generation API """ """ python inspection/code generation API """
from __future__ import absolute_import, division, print_function
from .code import Code # noqa from .code import Code # noqa
from .code import ExceptionInfo # noqa from .code import ExceptionInfo # noqa
from .code import Frame # noqa from .code import Frame # noqa

View File

@@ -2,10 +2,8 @@
# CHANGES: # CHANGES:
# - some_str is replaced, trying to create unicode strings # - some_str is replaced, trying to create unicode strings
# #
from __future__ import absolute_import, division, print_function
import types import types
def format_exception_only(etype, value): def format_exception_only(etype, value):
"""Format the exception part of a traceback. """Format the exception part of a traceback.
@@ -31,7 +29,7 @@ def format_exception_only(etype, value):
# would throw another exception and mask the original problem. # would throw another exception and mask the original problem.
if (isinstance(etype, BaseException) or if (isinstance(etype, BaseException) or
isinstance(etype, types.InstanceType) or isinstance(etype, types.InstanceType) or
etype is None or type(etype) is str): etype is None or type(etype) is str):
return [_format_final_exc_line(etype, value)] return [_format_final_exc_line(etype, value)]
stype = etype.__name__ stype = etype.__name__
@@ -63,7 +61,6 @@ def format_exception_only(etype, value):
lines.append(_format_final_exc_line(stype, value)) lines.append(_format_final_exc_line(stype, value))
return lines return lines
def _format_final_exc_line(etype, value): def _format_final_exc_line(etype, value):
"""Return a list of a single line -- normal case for format_exception_only""" """Return a list of a single line -- normal case for format_exception_only"""
valuestr = _some_str(value) valuestr = _some_str(value)
@@ -73,7 +70,6 @@ def _format_final_exc_line(etype, value):
line = "%s: %s\n" % (etype, valuestr) line = "%s: %s\n" % (etype, valuestr)
return line return line
def _some_str(value): def _some_str(value):
try: try:
return unicode(value) return unicode(value)

View File

@@ -1,16 +1,14 @@
from __future__ import absolute_import, division, print_function
import sys import sys
from inspect import CO_VARARGS, CO_VARKEYWORDS from inspect import CO_VARARGS, CO_VARKEYWORDS
import re import re
from weakref import ref from weakref import ref
from _pytest.compat import _PY2, _PY3, PY35, safe_str
import py import py
builtin_repr = repr builtin_repr = repr
reprlib = py.builtin._tryimport('repr', 'reprlib') reprlib = py.builtin._tryimport('repr', 'reprlib')
if _PY3: if sys.version_info[0] >= 3:
from traceback import format_exception_only from traceback import format_exception_only
else: else:
from ._py2traceback import format_exception_only from ._py2traceback import format_exception_only
@@ -18,7 +16,6 @@
class Code(object): class Code(object):
""" wrapper around Python code objects """ """ wrapper around Python code objects """
def __init__(self, rawcode): def __init__(self, rawcode):
if not hasattr(rawcode, "co_filename"): if not hasattr(rawcode, "co_filename"):
rawcode = getrawcode(rawcode) rawcode = getrawcode(rawcode)
@@ -27,7 +24,7 @@ def __init__(self, rawcode):
self.firstlineno = rawcode.co_firstlineno - 1 self.firstlineno = rawcode.co_firstlineno - 1
self.name = rawcode.co_name self.name = rawcode.co_name
except AttributeError: except AttributeError:
raise TypeError("not a code object: %r" % (rawcode,)) raise TypeError("not a code object: %r" %(rawcode,))
self.raw = rawcode self.raw = rawcode
def __eq__(self, other): def __eq__(self, other):
@@ -83,7 +80,6 @@ def getargs(self, var=False):
argcount += raw.co_flags & CO_VARKEYWORDS argcount += raw.co_flags & CO_VARKEYWORDS
return raw.co_varnames[:argcount] return raw.co_varnames[:argcount]
class Frame(object): class Frame(object):
"""Wrapper around a Python frame holding f_locals and f_globals """Wrapper around a Python frame holding f_locals and f_globals
in which expressions can be evaluated.""" in which expressions can be evaluated."""
@@ -121,7 +117,7 @@ def exec_(self, code, **vars):
""" """
f_locals = self.f_locals.copy() f_locals = self.f_locals.copy()
f_locals.update(vars) f_locals.update(vars)
py.builtin.exec_(code, self.f_globals, f_locals) py.builtin.exec_(code, self.f_globals, f_locals )
def repr(self, object): def repr(self, object):
""" return a 'safe' (non-recursive, one-line) string repr for 'object' """ return a 'safe' (non-recursive, one-line) string repr for 'object'
@@ -145,7 +141,6 @@ def getargs(self, var=False):
pass # this can occur when using Psyco pass # this can occur when using Psyco
return retval return retval
class TracebackEntry(object): class TracebackEntry(object):
""" a single entry in a traceback """ """ a single entry in a traceback """
@@ -171,7 +166,7 @@ def relline(self):
return self.lineno - self.frame.code.firstlineno return self.lineno - self.frame.code.firstlineno
def __repr__(self): def __repr__(self):
return "<TracebackEntry %s:%d>" % (self.frame.code.path, self.lineno + 1) return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
@property @property
def statement(self): def statement(self):
@@ -250,21 +245,19 @@ def __str__(self):
line = str(self.statement).lstrip() line = str(self.statement).lstrip()
except KeyboardInterrupt: except KeyboardInterrupt:
raise raise
except: # noqa except:
line = "???" line = "???"
return " File %r:%d in %s\n %s\n" % (fn, self.lineno + 1, name, line) return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
def name(self): def name(self):
return self.frame.code.raw.co_name return self.frame.code.raw.co_name
name = property(name, None, None, "co_name of underlaying code") name = property(name, None, None, "co_name of underlaying code")
class Traceback(list): class Traceback(list):
""" Traceback objects encapsulate and offer higher level """ Traceback objects encapsulate and offer higher level
access to Traceback entries. access to Traceback entries.
""" """
Entry = TracebackEntry Entry = TracebackEntry
def __init__(self, tb, excinfo=None): def __init__(self, tb, excinfo=None):
""" initialize from given python traceback object and ExceptionInfo """ """ initialize from given python traceback object and ExceptionInfo """
self._excinfo = excinfo self._excinfo = excinfo
@@ -294,7 +287,7 @@ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
(excludepath is None or not hasattr(codepath, 'relto') or (excludepath is None or not hasattr(codepath, 'relto') or
not codepath.relto(excludepath)) and not codepath.relto(excludepath)) and
(lineno is None or x.lineno == lineno) and (lineno is None or x.lineno == lineno) and
(firstlineno is None or x.frame.code.firstlineno == firstlineno)): (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
return Traceback(x._rawentry, self._excinfo) return Traceback(x._rawentry, self._excinfo)
return self return self
@@ -320,7 +313,7 @@ def getcrashentry(self):
""" return last non-hidden traceback entry that lead """ return last non-hidden traceback entry that lead
to the exception of a traceback. to the exception of a traceback.
""" """
for i in range(-1, -len(self) - 1, -1): for i in range(-1, -len(self)-1, -1):
entry = self[i] entry = self[i]
if not entry.ishidden(): if not entry.ishidden():
return entry return entry
@@ -335,33 +328,30 @@ def recursionindex(self):
# id for the code.raw is needed to work around # id for the code.raw is needed to work around
# the strange metaprogramming in the decorator lib from pypi # the strange metaprogramming in the decorator lib from pypi
# which generates code objects that have hash/value equality # which generates code objects that have hash/value equality
# XXX needs a test #XXX needs a test
key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
# print "checking for recursion at", key #print "checking for recursion at", key
values = cache.setdefault(key, []) l = cache.setdefault(key, [])
if values: if l:
f = entry.frame f = entry.frame
loc = f.f_locals loc = f.f_locals
for otherloc in values: for otherloc in l:
if f.is_true(f.eval(co_equal, if f.is_true(f.eval(co_equal,
__recursioncache_locals_1=loc, __recursioncache_locals_1=loc,
__recursioncache_locals_2=otherloc)): __recursioncache_locals_2=otherloc)):
return i return i
values.append(entry.frame.f_locals) l.append(entry.frame.f_locals)
return None return None
co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2', co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
'?', 'eval') '?', 'eval')
class ExceptionInfo(object): class ExceptionInfo(object):
""" wraps sys.exc_info() objects and offers """ wraps sys.exc_info() objects and offers
help for navigating the traceback. help for navigating the traceback.
""" """
_striptext = '' _striptext = ''
_assert_start_repr = "AssertionError(u\'assert " if _PY2 else "AssertionError(\'assert "
def __init__(self, tup=None, exprinfo=None): def __init__(self, tup=None, exprinfo=None):
import _pytest._code import _pytest._code
if tup is None: if tup is None:
@@ -369,8 +359,8 @@ def __init__(self, tup=None, exprinfo=None):
if exprinfo is None and isinstance(tup[1], AssertionError): if exprinfo is None and isinstance(tup[1], AssertionError):
exprinfo = getattr(tup[1], 'msg', None) exprinfo = getattr(tup[1], 'msg', None)
if exprinfo is None: if exprinfo is None:
exprinfo = py.io.saferepr(tup[1]) exprinfo = py._builtin._totext(tup[1])
if exprinfo and exprinfo.startswith(self._assert_start_repr): if exprinfo and exprinfo.startswith('assert '):
self._striptext = 'AssertionError: ' self._striptext = 'AssertionError: '
self._excinfo = tup self._excinfo = tup
#: the exception class #: the exception class
@@ -411,10 +401,10 @@ def _getreprcrash(self):
exconly = self.exconly(tryshort=True) exconly = self.exconly(tryshort=True)
entry = self.traceback.getcrashentry() entry = self.traceback.getcrashentry()
path, lineno = entry.frame.code.raw.co_filename, entry.lineno path, lineno = entry.frame.code.raw.co_filename, entry.lineno
return ReprFileLocation(path, lineno + 1, exconly) return ReprFileLocation(path, lineno+1, exconly)
def getrepr(self, showlocals=False, style="long", def getrepr(self, showlocals=False, style="long",
abspath=False, tbfilter=True, funcargs=False): abspath=False, tbfilter=True, funcargs=False):
""" return str()able representation of this exception info. """ return str()able representation of this exception info.
showlocals: show locals per traceback entry showlocals: show locals per traceback entry
style: long|short|no|native traceback style style: long|short|no|native traceback style
@@ -431,7 +421,7 @@ def getrepr(self, showlocals=False, style="long",
)), self._getreprcrash()) )), self._getreprcrash())
fmt = FormattedExcinfo(showlocals=showlocals, style=style, fmt = FormattedExcinfo(showlocals=showlocals, style=style,
abspath=abspath, tbfilter=tbfilter, funcargs=funcargs) abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
return fmt.repr_excinfo(self) return fmt.repr_excinfo(self)
def __str__(self): def __str__(self):
@@ -475,15 +465,15 @@ def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True,
def _getindent(self, source): def _getindent(self, source):
# figure out indent for given source # figure out indent for given source
try: try:
s = str(source.getstatement(len(source) - 1)) s = str(source.getstatement(len(source)-1))
except KeyboardInterrupt: except KeyboardInterrupt:
raise raise
except: # noqa except:
try: try:
s = str(source[-1]) s = str(source[-1])
except KeyboardInterrupt: except KeyboardInterrupt:
raise raise
except: # noqa except:
return 0 return 0
return 4 + (len(s) - len(s.lstrip())) return 4 + (len(s) - len(s.lstrip()))
@@ -519,7 +509,7 @@ def get_source(self, source, line_index=-1, excinfo=None, short=False):
for line in source.lines[:line_index]: for line in source.lines[:line_index]:
lines.append(space_prefix + line) lines.append(space_prefix + line)
lines.append(self.flow_marker + " " + source.lines[line_index]) lines.append(self.flow_marker + " " + source.lines[line_index])
for line in source.lines[line_index + 1:]: for line in source.lines[line_index+1:]:
lines.append(space_prefix + line) lines.append(space_prefix + line)
if excinfo is not None: if excinfo is not None:
indent = 4 if short else self._getindent(source) indent = 4 if short else self._getindent(source)
@@ -552,10 +542,10 @@ def repr_locals(self, locals):
# _repr() function, which is only reprlib.Repr in # _repr() function, which is only reprlib.Repr in
# disguise, so is very configurable. # disguise, so is very configurable.
str_repr = self._saferepr(value) str_repr = self._saferepr(value)
# if len(str_repr) < 70 or not isinstance(value, #if len(str_repr) < 70 or not isinstance(value,
# (list, tuple, dict)): # (list, tuple, dict)):
lines.append("%-10s = %s" % (name, str_repr)) lines.append("%-10s = %s" %(name, str_repr))
# else: #else:
# self._line("%-10s =\\" % (name,)) # self._line("%-10s =\\" % (name,))
# # XXX # # XXX
# py.std.pprint.pprint(value, stream=self.excinfowriter) # py.std.pprint.pprint(value, stream=self.excinfowriter)
@@ -581,14 +571,14 @@ def repr_traceback_entry(self, entry, excinfo=None):
s = self.get_source(source, line_index, excinfo, short=short) s = self.get_source(source, line_index, excinfo, short=short)
lines.extend(s) lines.extend(s)
if short: if short:
message = "in %s" % (entry.name) message = "in %s" %(entry.name)
else: else:
message = excinfo and excinfo.typename or "" message = excinfo and excinfo.typename or ""
path = self._makepath(entry.path) path = self._makepath(entry.path)
filelocrepr = ReprFileLocation(path, entry.lineno + 1, message) filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
localsrepr = None localsrepr = None
if not short: if not short:
localsrepr = self.repr_locals(entry.locals) localsrepr = self.repr_locals(entry.locals)
return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style) return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
if excinfo: if excinfo:
lines.extend(self.get_exconly(excinfo, indent=4)) lines.extend(self.get_exconly(excinfo, indent=4))
@@ -608,54 +598,24 @@ def repr_traceback(self, excinfo):
traceback = excinfo.traceback traceback = excinfo.traceback
if self.tbfilter: if self.tbfilter:
traceback = traceback.filter() traceback = traceback.filter()
recursionindex = None
if is_recursion_error(excinfo): if is_recursion_error(excinfo):
traceback, extraline = self._truncate_recursive_traceback(traceback) recursionindex = traceback.recursionindex()
else:
extraline = None
last = traceback[-1] last = traceback[-1]
entries = [] entries = []
extraline = None
for index, entry in enumerate(traceback): for index, entry in enumerate(traceback):
einfo = (last == entry) and excinfo or None einfo = (last == entry) and excinfo or None
reprentry = self.repr_traceback_entry(entry, einfo) reprentry = self.repr_traceback_entry(entry, einfo)
entries.append(reprentry) entries.append(reprentry)
if index == recursionindex:
extraline = "!!! Recursion detected (same locals & position)"
break
return ReprTraceback(entries, extraline, style=self.style) return ReprTraceback(entries, extraline, style=self.style)
def _truncate_recursive_traceback(self, traceback):
"""
Truncate the given recursive traceback trying to find the starting point
of the recursion.
The detection is done by going through each traceback entry and finding the
point in which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``.
Handle the situation where the recursion process might raise an exception (for example
comparing numpy arrays using equality raises a TypeError), in which case we do our best to
warn the user of the error and show a limited traceback.
"""
try:
recursionindex = traceback.recursionindex()
except Exception as e:
max_frames = 10
extraline = (
'!!! Recursion error detected, but an error occurred locating the origin of recursion.\n'
' The following exception happened when comparing locals in the stack frame:\n'
' {exc_type}: {exc_msg}\n'
' Displaying first and last {max_frames} stack frames out of {total}.'
).format(exc_type=type(e).__name__, exc_msg=safe_str(e), max_frames=max_frames, total=len(traceback))
traceback = traceback[:max_frames] + traceback[-max_frames:]
else:
if recursionindex is not None:
extraline = "!!! Recursion detected (same locals & position)"
traceback = traceback[:recursionindex + 1]
else:
extraline = None
return traceback, extraline
def repr_excinfo(self, excinfo): def repr_excinfo(self, excinfo):
if _PY2: if sys.version_info[0] < 3:
reprtraceback = self.repr_traceback(excinfo) reprtraceback = self.repr_traceback(excinfo)
reprcrash = excinfo._getreprcrash() reprcrash = excinfo._getreprcrash()
@@ -679,7 +639,7 @@ def repr_excinfo(self, excinfo):
e = e.__cause__ e = e.__cause__
excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
descr = 'The above exception was the direct cause of the following exception:' descr = 'The above exception was the direct cause of the following exception:'
elif (e.__context__ is not None and not e.__suppress_context__): elif e.__context__ is not None:
e = e.__context__ e = e.__context__
excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
descr = 'During handling of the above exception, another exception occurred:' descr = 'During handling of the above exception, another exception occurred:'
@@ -692,7 +652,7 @@ def repr_excinfo(self, excinfo):
class TerminalRepr(object): class TerminalRepr(object):
def __str__(self): def __str__(self):
s = self.__unicode__() s = self.__unicode__()
if _PY2: if sys.version_info[0] < 3:
s = s.encode('utf-8') s = s.encode('utf-8')
return s return s
@@ -705,7 +665,7 @@ def __unicode__(self):
return io.getvalue().strip() return io.getvalue().strip()
def __repr__(self): def __repr__(self):
return "<%s instance at %0x>" % (self.__class__, id(self)) return "<%s instance at %0x>" %(self.__class__, id(self))
class ExceptionRepr(TerminalRepr): class ExceptionRepr(TerminalRepr):
@@ -749,7 +709,6 @@ def toterminal(self, tw):
self.reprtraceback.toterminal(tw) self.reprtraceback.toterminal(tw)
super(ReprExceptionInfo, self).toterminal(tw) super(ReprExceptionInfo, self).toterminal(tw)
class ReprTraceback(TerminalRepr): class ReprTraceback(TerminalRepr):
entrysep = "_ " entrysep = "_ "
@@ -765,7 +724,7 @@ def toterminal(self, tw):
tw.line("") tw.line("")
entry.toterminal(tw) entry.toterminal(tw)
if i < len(self.reprentries) - 1: if i < len(self.reprentries) - 1:
next_entry = self.reprentries[i + 1] next_entry = self.reprentries[i+1]
if entry.style == "long" or \ if entry.style == "long" or \
entry.style == "short" and next_entry.style == "long": entry.style == "short" and next_entry.style == "long":
tw.sep(self.entrysep) tw.sep(self.entrysep)
@@ -773,14 +732,12 @@ def toterminal(self, tw):
if self.extraline: if self.extraline:
tw.line(self.extraline) tw.line(self.extraline)
class ReprTracebackNative(ReprTraceback): class ReprTracebackNative(ReprTraceback):
def __init__(self, tblines): def __init__(self, tblines):
self.style = "native" self.style = "native"
self.reprentries = [ReprEntryNative(tblines)] self.reprentries = [ReprEntryNative(tblines)]
self.extraline = None self.extraline = None
class ReprEntryNative(TerminalRepr): class ReprEntryNative(TerminalRepr):
style = "native" style = "native"
@@ -790,7 +747,6 @@ def __init__(self, tblines):
def toterminal(self, tw): def toterminal(self, tw):
tw.write("".join(self.lines)) tw.write("".join(self.lines))
class ReprEntry(TerminalRepr): class ReprEntry(TerminalRepr):
localssep = "_ " localssep = "_ "
@@ -807,7 +763,7 @@ def toterminal(self, tw):
for line in self.lines: for line in self.lines:
red = line.startswith("E ") red = line.startswith("E ")
tw.line(line, bold=True, red=red) tw.line(line, bold=True, red=red)
# tw.line("") #tw.line("")
return return
if self.reprfuncargs: if self.reprfuncargs:
self.reprfuncargs.toterminal(tw) self.reprfuncargs.toterminal(tw)
@@ -815,7 +771,7 @@ def toterminal(self, tw):
red = line.startswith("E ") red = line.startswith("E ")
tw.line(line, bold=True, red=red) tw.line(line, bold=True, red=red)
if self.reprlocals: if self.reprlocals:
# tw.sep(self.localssep, "Locals") #tw.sep(self.localssep, "Locals")
tw.line("") tw.line("")
self.reprlocals.toterminal(tw) self.reprlocals.toterminal(tw)
if self.reprfileloc: if self.reprfileloc:
@@ -828,7 +784,6 @@ def __str__(self):
self.reprlocals, self.reprlocals,
self.reprfileloc) self.reprfileloc)
class ReprFileLocation(TerminalRepr): class ReprFileLocation(TerminalRepr):
def __init__(self, path, lineno, message): def __init__(self, path, lineno, message):
self.path = str(path) self.path = str(path)
@@ -845,7 +800,6 @@ def toterminal(self, tw):
tw.write(self.path, bold=True, red=True) tw.write(self.path, bold=True, red=True)
tw.line(":%s: %s" % (self.lineno, msg)) tw.line(":%s: %s" % (self.lineno, msg))
class ReprLocals(TerminalRepr): class ReprLocals(TerminalRepr):
def __init__(self, lines): def __init__(self, lines):
self.lines = lines self.lines = lines
@@ -854,7 +808,6 @@ def toterminal(self, tw):
for line in self.lines: for line in self.lines:
tw.line(line) tw.line(line)
class ReprFuncArgs(TerminalRepr): class ReprFuncArgs(TerminalRepr):
def __init__(self, args): def __init__(self, args):
self.args = args self.args = args
@@ -863,11 +816,11 @@ def toterminal(self, tw):
if self.args: if self.args:
linesofar = "" linesofar = ""
for name, value in self.args: for name, value in self.args:
ns = "%s = %s" % (safe_str(name), safe_str(value)) ns = "%s = %s" %(name, value)
if len(ns) + len(linesofar) + 2 > tw.fullwidth: if len(ns) + len(linesofar) + 2 > tw.fullwidth:
if linesofar: if linesofar:
tw.line(linesofar) tw.line(linesofar)
linesofar = ns linesofar = ns
else: else:
if linesofar: if linesofar:
linesofar += ", " + ns linesofar += ", " + ns
@@ -895,7 +848,7 @@ def getrawcode(obj, trycall=True):
return obj return obj
if PY35: # RecursionError introduced in 3.5 if sys.version_info[:2] >= (3, 5): # RecursionError introduced in 3.5
def is_recursion_error(excinfo): def is_recursion_error(excinfo):
return excinfo.errisinstance(RecursionError) # noqa return excinfo.errisinstance(RecursionError) # noqa
else: else:

View File

@@ -1,9 +1,8 @@
from __future__ import absolute_import, division, generators, print_function from __future__ import generators
from bisect import bisect_right from bisect import bisect_right
import sys import sys
import inspect import inspect, tokenize
import tokenize
import py import py
cpy_compile = compile cpy_compile = compile
@@ -20,7 +19,6 @@ class Source(object):
possibly deindenting it. possibly deindenting it.
""" """
_compilecounter = 0 _compilecounter = 0
def __init__(self, *parts, **kwargs): def __init__(self, *parts, **kwargs):
self.lines = lines = [] self.lines = lines = []
de = kwargs.get('deindent', True) de = kwargs.get('deindent', True)
@@ -75,7 +73,7 @@ def strip(self):
start, end = 0, len(self) start, end = 0, len(self)
while start < end and not self.lines[start].strip(): while start < end and not self.lines[start].strip():
start += 1 start += 1
while end > start and not self.lines[end - 1].strip(): while end > start and not self.lines[end-1].strip():
end -= 1 end -= 1
source = Source() source = Source()
source.lines[:] = self.lines[start:end] source.lines[:] = self.lines[start:end]
@@ -88,8 +86,8 @@ def putaround(self, before='', after='', indent=' ' * 4):
before = Source(before) before = Source(before)
after = Source(after) after = Source(after)
newsource = Source() newsource = Source()
lines = [(indent + line) for line in self.lines] lines = [ (indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines newsource.lines = before.lines + lines + after.lines
return newsource return newsource
def indent(self, indent=' ' * 4): def indent(self, indent=' ' * 4):
@@ -97,7 +95,7 @@ def indent(self, indent=' ' * 4):
all lines indented by the given indent-string. all lines indented by the given indent-string.
""" """
newsource = Source() newsource = Source()
newsource.lines = [(indent + line) for line in self.lines] newsource.lines = [(indent+line) for line in self.lines]
return newsource return newsource
def getstatement(self, lineno, assertion=False): def getstatement(self, lineno, assertion=False):
@@ -136,8 +134,7 @@ def isparseable(self, deindent=True):
try: try:
import parser import parser
except ImportError: except ImportError:
def syntax_checker(x): syntax_checker = lambda x: compile(x, 'asd', 'exec')
return compile(x, 'asd', 'exec')
else: else:
syntax_checker = parser.suite syntax_checker = parser.suite
@@ -146,8 +143,8 @@ def syntax_checker(x):
else: else:
source = str(self) source = str(self)
try: try:
# compile(source+'\n', "x", "exec") #compile(source+'\n', "x", "exec")
syntax_checker(source + '\n') syntax_checker(source+'\n')
except KeyboardInterrupt: except KeyboardInterrupt:
raise raise
except Exception: except Exception:
@@ -167,8 +164,8 @@ def compile(self, filename=None, mode='exec',
""" """
if not filename or py.path.local(filename).check(file=0): if not filename or py.path.local(filename).check(file=0):
if _genframe is None: if _genframe is None:
_genframe = sys._getframe(1) # the caller _genframe = sys._getframe(1) # the caller
fn, lineno = _genframe.f_code.co_filename, _genframe.f_lineno fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1 self.__class__._compilecounter += 1
if not filename: if not filename:
@@ -183,7 +180,7 @@ def compile(self, filename=None, mode='exec',
# re-represent syntax errors from parsing python strings # re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno] msglines = self.lines[:ex.lineno]
if ex.offset: if ex.offset:
msglines.append(" " * ex.offset + '^') msglines.append(" "*ex.offset + '^')
msglines.append("(code was compiled probably from here: %s)" % filename) msglines.append("(code was compiled probably from here: %s)" % filename)
newex = SyntaxError('\n'.join(msglines)) newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset newex.offset = ex.offset
@@ -201,8 +198,8 @@ def compile(self, filename=None, mode='exec',
# public API shortcut functions # public API shortcut functions
# #
def compile_(source, filename=None, mode='exec', flags=
def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag, dont_inherit=0): generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object, """ compile the given source to a raw code object,
and maintain an internal cache which allows later and maintain an internal cache which allows later
retrieval of the source code for the code object retrieval of the source code for the code object
@@ -211,7 +208,7 @@ def compile_(source, filename=None, mode='exec', flags=generators.compiler_flag,
if _ast is not None and isinstance(source, _ast.AST): if _ast is not None and isinstance(source, _ast.AST):
# XXX should Source support having AST? # XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit) return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller _genframe = sys._getframe(1) # the caller
s = Source(source) s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe) co = s.compile(filename, mode, flags, _genframe=_genframe)
return co return co
@@ -248,13 +245,12 @@ def getfslineno(obj):
# helper functions # helper functions
# #
def findsource(obj): def findsource(obj):
try: try:
sourcelines, lineno = py.std.inspect.findsource(obj) sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex: except py.builtin._sysex:
raise raise
except: # noqa except:
return None, -1 return None, -1
source = Source() source = Source()
source.lines = [line.rstrip() for line in sourcelines] source.lines = [line.rstrip() for line in sourcelines]
@@ -278,7 +274,7 @@ def deindent(lines, offset=None):
line = line.expandtabs() line = line.expandtabs()
s = line.lstrip() s = line.lstrip()
if s: if s:
offset = len(line) - len(s) offset = len(line)-len(s)
break break
else: else:
offset = 0 offset = 0
@@ -297,11 +293,11 @@ def readline_generator(lines):
try: try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)): for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
if sline > len(lines): if sline > len(lines):
break # End of input reached break # End of input reached
if sline > len(newlines): if sline > len(newlines):
line = lines[sline - 1].expandtabs() line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace(): if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent line = line[offset:] # Deindent
newlines.append(line) newlines.append(line)
for i in range(sline, eline): for i in range(sline, eline):
@@ -319,29 +315,29 @@ def get_statement_startend2(lineno, node):
import ast import ast
# flatten all statements and except handlers into one lineno-list # flatten all statements and except handlers into one lineno-list
# AST's line numbers start indexing at 1 # AST's line numbers start indexing at 1
values = [] l = []
for x in ast.walk(node): for x in ast.walk(node):
if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler): if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
values.append(x.lineno - 1) l.append(x.lineno - 1)
for name in "finalbody", "orelse": for name in "finalbody", "orelse":
val = getattr(x, name, None) val = getattr(x, name, None)
if val: if val:
# treat the finally/orelse part as its own statement # treat the finally/orelse part as its own statement
values.append(val[0].lineno - 1 - 1) l.append(val[0].lineno - 1 - 1)
values.sort() l.sort()
insert_index = bisect_right(values, lineno) insert_index = bisect_right(l, lineno)
start = values[insert_index - 1] start = l[insert_index - 1]
if insert_index >= len(values): if insert_index >= len(l):
end = None end = None
else: else:
end = values[insert_index] end = l[insert_index]
return start, end return start, end
def getstatementrange_ast(lineno, source, assertion=False, astnode=None): def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
if astnode is None: if astnode is None:
content = str(source) content = str(source)
if sys.version_info < (2, 7): if sys.version_info < (2,7):
content += "\n" content += "\n"
try: try:
astnode = compile(content, "source", "exec", 1024) # 1024 for AST astnode = compile(content, "source", "exec", 1024) # 1024 for AST
@@ -397,7 +393,7 @@ def getstatementrange_old(lineno, source, assertion=False):
raise IndexError("likely a subclass") raise IndexError("likely a subclass")
if "assert" not in line and "raise" not in line: if "assert" not in line and "raise" not in line:
continue continue
trylines = source.lines[start:lineno + 1] trylines = source.lines[start:lineno+1]
# quick hack to prepare parsing an indented line with # quick hack to prepare parsing an indented line with
# compile_command() (which errors on "return" outside defs) # compile_command() (which errors on "return" outside defs)
trylines.insert(0, 'def xxx():') trylines.insert(0, 'def xxx():')
@@ -409,8 +405,10 @@ def getstatementrange_old(lineno, source, assertion=False):
continue continue
# 2. find the end of the statement # 2. find the end of the statement
for end in range(lineno + 1, len(source) + 1): for end in range(lineno+1, len(source)+1):
trysource = source[start:end] trysource = source[start:end]
if trysource.isparseable(): if trysource.isparseable():
return start, end return start, end
raise SyntaxError("no valid source range around line %d " % (lineno,)) raise SyntaxError("no valid source range around line %d " % (lineno,))

View File

@@ -2,7 +2,7 @@
imports symbols from vendored "pluggy" if available, otherwise imports symbols from vendored "pluggy" if available, otherwise
falls back to importing "pluggy" from the default namespace. falls back to importing "pluggy" from the default namespace.
""" """
from __future__ import absolute_import, division, print_function
try: try:
from _pytest.vendored_packages.pluggy import * # noqa from _pytest.vendored_packages.pluggy import * # noqa
from _pytest.vendored_packages.pluggy import __version__ # noqa from _pytest.vendored_packages.pluggy import __version__ # noqa

View File

@@ -1,13 +1,12 @@
""" """
support for presenting detailed information in failing assertions. support for presenting detailed information in failing assertions.
""" """
from __future__ import absolute_import, division, print_function
import py import py
import os
import sys import sys
from _pytest.assertion import util from _pytest.assertion import util
from _pytest.assertion import rewrite from _pytest.assertion import rewrite
from _pytest.assertion import truncate
def pytest_addoption(parser): def pytest_addoption(parser):
@@ -25,6 +24,10 @@ def pytest_addoption(parser):
expression information.""") expression information.""")
def pytest_namespace():
return {'register_assert_rewrite': register_assert_rewrite}
def register_assert_rewrite(*names): def register_assert_rewrite(*names):
"""Register one or more module names to be rewritten on import. """Register one or more module names to be rewritten on import.
@@ -97,6 +100,12 @@ def pytest_collection(session):
assertstate.hook.set_session(session) assertstate.hook.set_session(session)
def _running_on_ci():
"""Check if we're currently running on a CI system."""
env_vars = ['CI', 'BUILD_NUMBER']
return any(var in os.environ for var in env_vars)
def pytest_runtest_setup(item): def pytest_runtest_setup(item):
"""Setup the pytest_assertrepr_compare hook """Setup the pytest_assertrepr_compare hook
@@ -110,8 +119,8 @@ def callbinrepr(op, left, right):
This uses the first result from the hook and then ensures the This uses the first result from the hook and then ensures the
following: following:
* Overly verbose explanations are truncated unless configured otherwise * Overly verbose explanations are dropped unless -vv was used or
(eg. if running in verbose mode). running on a CI.
* Embedded newlines are escaped to help util.format_explanation() * Embedded newlines are escaped to help util.format_explanation()
later. later.
* If the rewrite mode is used embedded %-characters are replaced * If the rewrite mode is used embedded %-characters are replaced
@@ -124,7 +133,14 @@ def callbinrepr(op, left, right):
config=item.config, op=op, left=left, right=right) config=item.config, op=op, left=left, right=right)
for new_expl in hook_result: for new_expl in hook_result:
if new_expl: if new_expl:
new_expl = truncate.truncate_if_required(new_expl, item) if (sum(len(p) for p in new_expl[1:]) > 80*8 and
item.config.option.verbose < 2 and
not _running_on_ci()):
show_max = 10
truncated_lines = len(new_expl) - show_max
new_expl[show_max:] = [py.builtin._totext(
'Detailed information truncated (%d more lines)'
', use "-vv" to show' % truncated_lines)]
new_expl = [line.replace("\n", "\\n") for line in new_expl] new_expl = [line.replace("\n", "\\n") for line in new_expl]
res = py.builtin._totext("\n~").join(new_expl) res = py.builtin._totext("\n~").join(new_expl)
if item.config.getvalue("assertmode") == "rewrite": if item.config.getvalue("assertmode") == "rewrite":

View File

@@ -1,5 +1,5 @@
"""Rewrite assertion AST to produce nice error messages""" """Rewrite assertion AST to produce nice error messages"""
from __future__ import absolute_import, division, print_function
import ast import ast
import _ast import _ast
import errno import errno
@@ -11,6 +11,7 @@
import struct import struct
import sys import sys
import types import types
from fnmatch import fnmatch
import py import py
from _pytest.assertion import util from _pytest.assertion import util
@@ -36,11 +37,10 @@
REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2) REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3 ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
if sys.version_info >= (3, 5): if sys.version_info >= (3,5):
ast_Call = ast.Call ast_Call = ast.Call
else: else:
def ast_Call(a, b, c): ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None)
return ast.Call(a, b, c, None, None)
class AssertionRewritingHook(object): class AssertionRewritingHook(object):
@@ -163,7 +163,11 @@ def _should_rewrite(self, name, fn_pypath, state):
# modules not passed explicitly on the command line are only # modules not passed explicitly on the command line are only
# rewritten if they match the naming convention for test files # rewritten if they match the naming convention for test files
for pat in self.fnpats: for pat in self.fnpats:
if fn_pypath.fnmatch(pat): # use fnmatch instead of fn_pypath.fnmatch because the
# latter might trigger an import to fnmatch.fnmatch
# internally, which would cause this method to be
# called recursively
if fnmatch(fn_pypath.basename, pat):
state.trace("matched test file %r" % (fn,)) state.trace("matched test file %r" % (fn,))
return True return True
@@ -210,12 +214,13 @@ def load_module(self, name):
mod.__cached__ = pyc mod.__cached__ = pyc
mod.__loader__ = self mod.__loader__ = self
py.builtin.exec_(co, mod.__dict__) py.builtin.exec_(co, mod.__dict__)
except: # noqa except:
if name in sys.modules: del sys.modules[name]
del sys.modules[name]
raise raise
return sys.modules[name] return sys.modules[name]
def is_package(self, name): def is_package(self, name):
try: try:
fd, fn, desc = imp.find_module(name) fd, fn, desc = imp.find_module(name)
@@ -260,7 +265,7 @@ def _write_pyc(state, co, source_stat, pyc):
fp = open(pyc, "wb") fp = open(pyc, "wb")
except IOError: except IOError:
err = sys.exc_info()[1].errno err = sys.exc_info()[1].errno
state.trace("error writing pyc file at %s: errno=%s" % (pyc, err)) state.trace("error writing pyc file at %s: errno=%s" %(pyc, err))
# we ignore any failure to write the cache file # we ignore any failure to write the cache file
# there are many reasons, permission-denied, __pycache__ being a # there are many reasons, permission-denied, __pycache__ being a
# file etc. # file etc.
@@ -282,7 +287,6 @@ def _write_pyc(state, co, source_stat, pyc):
cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+") cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
BOM_UTF8 = '\xef\xbb\xbf' BOM_UTF8 = '\xef\xbb\xbf'
def _rewrite_test(config, fn): def _rewrite_test(config, fn):
"""Try to read and rewrite *fn* and return the code object.""" """Try to read and rewrite *fn* and return the code object."""
state = config._assertstate state = config._assertstate
@@ -307,7 +311,7 @@ def _rewrite_test(config, fn):
end2 = source.find("\n", end1 + 1) end2 = source.find("\n", end1 + 1)
if (not source.startswith(BOM_UTF8) and if (not source.startswith(BOM_UTF8) and
cookie_re.match(source[0:end1]) is None and cookie_re.match(source[0:end1]) is None and
cookie_re.match(source[end1 + 1:end2]) is None): cookie_re.match(source[end1 + 1:end2]) is None):
if hasattr(state, "_indecode"): if hasattr(state, "_indecode"):
# encodings imported us again, so don't rewrite. # encodings imported us again, so don't rewrite.
return None, None return None, None
@@ -332,7 +336,7 @@ def _rewrite_test(config, fn):
return None, None return None, None
rewrite_asserts(tree, fn, config) rewrite_asserts(tree, fn, config)
try: try:
co = compile(tree, fn.strpath, "exec", dont_inherit=True) co = compile(tree, fn.strpath, "exec")
except SyntaxError: except SyntaxError:
# It's possible that this error is from some bug in the # It's possible that this error is from some bug in the
# assertion rewriting, but I don't know of a fast way to tell. # assertion rewriting, but I don't know of a fast way to tell.
@@ -340,7 +344,6 @@ def _rewrite_test(config, fn):
return None, None return None, None
return stat, co return stat, co
def _make_rewritten_pyc(state, source_stat, pyc, co): def _make_rewritten_pyc(state, source_stat, pyc, co):
"""Try to dump rewritten code to *pyc*.""" """Try to dump rewritten code to *pyc*."""
if sys.platform.startswith("win"): if sys.platform.startswith("win"):
@@ -354,7 +357,6 @@ def _make_rewritten_pyc(state, source_stat, pyc, co):
if _write_pyc(state, co, source_stat, proc_pyc): if _write_pyc(state, co, source_stat, proc_pyc):
os.rename(proc_pyc, pyc) os.rename(proc_pyc, pyc)
def _read_pyc(source, pyc, trace=lambda x: None): def _read_pyc(source, pyc, trace=lambda x: None):
"""Possibly read a pytest pyc containing rewritten code. """Possibly read a pytest pyc containing rewritten code.
@@ -412,8 +414,7 @@ def _saferepr(obj):
return repr.replace(t("\n"), t("\\n")) return repr.replace(t("\n"), t("\\n"))
from _pytest.assertion.util import format_explanation as _format_explanation # noqa from _pytest.assertion.util import format_explanation as _format_explanation # noqa
def _format_assertmsg(obj): def _format_assertmsg(obj):
"""Format the custom assertion message given. """Format the custom assertion message given.
@@ -442,11 +443,9 @@ def _format_assertmsg(obj):
s = s.replace(t("\\n"), t("\n~")) s = s.replace(t("\\n"), t("\n~"))
return s return s
def _should_repr_global_name(obj): def _should_repr_global_name(obj):
return not hasattr(obj, "__name__") and not py.builtin.callable(obj) return not hasattr(obj, "__name__") and not py.builtin.callable(obj)
def _format_boolop(explanations, is_or): def _format_boolop(explanations, is_or):
explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")" explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
if py.builtin._istext(explanation): if py.builtin._istext(explanation):
@@ -455,7 +454,6 @@ def _format_boolop(explanations, is_or):
t = py.builtin.bytes t = py.builtin.bytes
return explanation.replace(t('%'), t('%%')) return explanation.replace(t('%'), t('%%'))
def _call_reprcompare(ops, results, expls, each_obj): def _call_reprcompare(ops, results, expls, each_obj):
for i, res, expl in zip(range(len(ops)), results, expls): for i, res, expl in zip(range(len(ops)), results, expls):
try: try:
@@ -489,7 +487,7 @@ def _call_reprcompare(ops, results, expls, each_obj):
ast.Mult: "*", ast.Mult: "*",
ast.Div: "/", ast.Div: "/",
ast.FloorDiv: "//", ast.FloorDiv: "//",
ast.Mod: "%%", # escaped for string formatting ast.Mod: "%%", # escaped for string formatting
ast.Eq: "==", ast.Eq: "==",
ast.NotEq: "!=", ast.NotEq: "!=",
ast.Lt: "<", ast.Lt: "<",
@@ -595,26 +593,23 @@ def run(self, mod):
# docstrings and __future__ imports. # docstrings and __future__ imports.
aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"), aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
ast.alias("_pytest.assertion.rewrite", "@pytest_ar")] ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
doc = getattr(mod, "docstring", None) expect_docstring = True
expect_docstring = doc is None
if doc is not None and self.is_rewrite_disabled(doc):
return
pos = 0 pos = 0
lineno = 1 lineno = 0
for item in mod.body: for item in mod.body:
if (expect_docstring and isinstance(item, ast.Expr) and if (expect_docstring and isinstance(item, ast.Expr) and
isinstance(item.value, ast.Str)): isinstance(item.value, ast.Str)):
doc = item.value.s doc = item.value.s
if self.is_rewrite_disabled(doc): if "PYTEST_DONT_REWRITE" in doc:
# The module has disabled assertion rewriting.
return return
lineno += len(doc) - 1
expect_docstring = False expect_docstring = False
elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
item.module != "__future__"): item.module != "__future__"):
lineno = item.lineno lineno = item.lineno
break break
pos += 1 pos += 1
else:
lineno = item.lineno
imports = [ast.Import([alias], lineno=lineno, col_offset=0) imports = [ast.Import([alias], lineno=lineno, col_offset=0)
for alias in aliases] for alias in aliases]
mod.body[pos:pos] = imports mod.body[pos:pos] = imports
@@ -640,9 +635,6 @@ def run(self, mod):
not isinstance(field, ast.expr)): not isinstance(field, ast.expr)):
nodes.append(field) nodes.append(field)
def is_rewrite_disabled(self, docstring):
return "PYTEST_DONT_REWRITE" in docstring
def variable(self): def variable(self):
"""Get a new variable.""" """Get a new variable."""
# Use a character invalid in python identifiers to avoid clashing. # Use a character invalid in python identifiers to avoid clashing.
@@ -735,7 +727,7 @@ def visit_Assert(self, assert_):
if isinstance(assert_.test, ast.Tuple) and self.config is not None: if isinstance(assert_.test, ast.Tuple) and self.config is not None:
fslocation = (self.module_path, assert_.lineno) fslocation = (self.module_path, assert_.lineno)
self.config.warn('R1', 'assertion is always true, perhaps ' self.config.warn('R1', 'assertion is always true, perhaps '
'remove parentheses?', fslocation=fslocation) 'remove parentheses?', fslocation=fslocation)
self.statements = [] self.statements = []
self.variables = [] self.variables = []
self.variable_counter = itertools.count() self.variable_counter = itertools.count()
@@ -799,7 +791,7 @@ def visit_BoolOp(self, boolop):
if i: if i:
fail_inner = [] fail_inner = []
# cond is set in a prior loop iteration below # cond is set in a prior loop iteration below
self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa
self.on_failure = fail_inner self.on_failure = fail_inner
self.push_format_context() self.push_format_context()
res, expl = self.visit(v) res, expl = self.visit(v)
@@ -851,7 +843,7 @@ def visit_Call_35(self, call):
new_kwargs.append(ast.keyword(keyword.arg, res)) new_kwargs.append(ast.keyword(keyword.arg, res))
if keyword.arg: if keyword.arg:
arg_expls.append(keyword.arg + "=" + expl) arg_expls.append(keyword.arg + "=" + expl)
else: # **args have `arg` keywords with an .arg of None else: ## **args have `arg` keywords with an .arg of None
arg_expls.append("**" + expl) arg_expls.append("**" + expl)
expl = "%s(%s)" % (func_expl, ', '.join(arg_expls)) expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
@@ -905,6 +897,7 @@ def visit_Call_legacy(self, call):
else: else:
visit_Call = visit_Call_legacy visit_Call = visit_Call_legacy
def visit_Attribute(self, attr): def visit_Attribute(self, attr):
if not isinstance(attr.ctx, ast.Load): if not isinstance(attr.ctx, ast.Load):
return self.generic_visit(attr) return self.generic_visit(attr)

View File

@@ -1,102 +0,0 @@
"""
Utilities for truncating assertion output.
Current default behaviour is to truncate assertion explanations at
~8 terminal lines, unless running in "-vv" mode or running on CI.
"""
from __future__ import absolute_import, division, print_function
import os
import py
DEFAULT_MAX_LINES = 8
DEFAULT_MAX_CHARS = 8 * 80
USAGE_MSG = "use '-vv' to show"
def truncate_if_required(explanation, item, max_length=None):
"""
Truncate this assertion explanation if the given test item is eligible.
"""
if _should_truncate_item(item):
return _truncate_explanation(explanation)
return explanation
def _should_truncate_item(item):
"""
Whether or not this test item is eligible for truncation.
"""
verbose = item.config.option.verbose
return verbose < 2 and not _running_on_ci()
def _running_on_ci():
"""Check if we're currently running on a CI system."""
env_vars = ['CI', 'BUILD_NUMBER']
return any(var in os.environ for var in env_vars)
def _truncate_explanation(input_lines, max_lines=None, max_chars=None):
"""
Truncate given list of strings that makes up the assertion explanation.
Truncates to either 8 lines, or 640 characters - whichever the input reaches
first. The remaining lines will be replaced by a usage message.
"""
if max_lines is None:
max_lines = DEFAULT_MAX_LINES
if max_chars is None:
max_chars = DEFAULT_MAX_CHARS
# Check if truncation required
input_char_count = len("".join(input_lines))
if len(input_lines) <= max_lines and input_char_count <= max_chars:
return input_lines
# Truncate first to max_lines, and then truncate to max_chars if max_chars
# is exceeded.
truncated_explanation = input_lines[:max_lines]
truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars)
# Add ellipsis to final line
truncated_explanation[-1] = truncated_explanation[-1] + "..."
# Append useful message to explanation
truncated_line_count = len(input_lines) - len(truncated_explanation)
truncated_line_count += 1 # Account for the part-truncated final line
msg = '...Full output truncated'
if truncated_line_count == 1:
msg += ' ({0} line hidden)'.format(truncated_line_count)
else:
msg += ' ({0} lines hidden)'.format(truncated_line_count)
msg += ", {0}" .format(USAGE_MSG)
truncated_explanation.extend([
py.builtin._totext(""),
py.builtin._totext(msg),
])
return truncated_explanation
def _truncate_by_char_count(input_lines, max_chars):
# Check if truncation required
if len("".join(input_lines)) <= max_chars:
return input_lines
# Find point at which input length exceeds total allowed length
iterated_char_count = 0
for iterated_index, input_line in enumerate(input_lines):
if iterated_char_count + len(input_line) > max_chars:
break
iterated_char_count += len(input_line)
# Create truncated explanation with modified final line
truncated_result = input_lines[:iterated_index]
final_line = input_lines[iterated_index]
if final_line:
final_line_truncate_point = max_chars - iterated_char_count
final_line = final_line[:final_line_truncate_point]
truncated_result.append(final_line)
return truncated_result

View File

@@ -1,5 +1,4 @@
"""Utilities for assertion debugging""" """Utilities for assertion debugging"""
from __future__ import absolute_import, division, print_function
import pprint import pprint
import _pytest._code import _pytest._code
@@ -9,7 +8,7 @@
except ImportError: except ImportError:
Sequence = list Sequence = list
BuiltinAssertionError = py.builtin.builtins.AssertionError
u = py.builtin._totext u = py.builtin._totext
# The _reprcompare attribute on the util module is used by the new assertion # The _reprcompare attribute on the util module is used by the new assertion
@@ -53,11 +52,11 @@ def _split_explanation(explanation):
""" """
raw_lines = (explanation or u('')).split('\n') raw_lines = (explanation or u('')).split('\n')
lines = [raw_lines[0]] lines = [raw_lines[0]]
for values in raw_lines[1:]: for l in raw_lines[1:]:
if values and values[0] in ['{', '}', '~', '>']: if l and l[0] in ['{', '}', '~', '>']:
lines.append(values) lines.append(l)
else: else:
lines[-1] += '\\n' + values lines[-1] += '\\n' + l
return lines return lines
@@ -82,7 +81,7 @@ def _format_lines(lines):
stack.append(len(result)) stack.append(len(result))
stackcnt[-1] += 1 stackcnt[-1] += 1
stackcnt.append(0) stackcnt.append(0)
result.append(u(' +') + u(' ') * (len(stack) - 1) + s + line[1:]) result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:])
elif line.startswith('}'): elif line.startswith('}'):
stack.pop() stack.pop()
stackcnt.pop() stackcnt.pop()
@@ -91,7 +90,7 @@ def _format_lines(lines):
assert line[0] in ['~', '>'] assert line[0] in ['~', '>']
stack[-1] += 1 stack[-1] += 1
indent = len(stack) if line.startswith('~') else len(stack) - 1 indent = len(stack) if line.startswith('~') else len(stack) - 1
result.append(u(' ') * indent + line[1:]) result.append(u(' ')*indent + line[1:])
assert len(stack) == 1 assert len(stack) == 1
return result return result
@@ -106,22 +105,16 @@ def _format_lines(lines):
def assertrepr_compare(config, op, left, right): def assertrepr_compare(config, op, left, right):
"""Return specialised explanations for some operators/operands""" """Return specialised explanations for some operators/operands"""
width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op
left_repr = py.io.saferepr(left, maxsize=int(width // 2)) left_repr = py.io.saferepr(left, maxsize=int(width//2))
right_repr = py.io.saferepr(right, maxsize=width - len(left_repr)) right_repr = py.io.saferepr(right, maxsize=width-len(left_repr))
summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr)) summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr))
def issequence(x): issequence = lambda x: (isinstance(x, (list, tuple, Sequence)) and
return (isinstance(x, (list, tuple, Sequence)) and not isinstance(x, basestring)) not isinstance(x, basestring))
istext = lambda x: isinstance(x, basestring)
def istext(x): isdict = lambda x: isinstance(x, dict)
return isinstance(x, basestring) isset = lambda x: isinstance(x, (set, frozenset))
def isdict(x):
return isinstance(x, dict)
def isset(x):
return isinstance(x, (set, frozenset))
def isiterable(obj): def isiterable(obj):
try: try:
@@ -263,8 +256,8 @@ def _compare_eq_dict(left, right, verbose=False):
explanation = [] explanation = []
common = set(left).intersection(set(right)) common = set(left).intersection(set(right))
same = dict((k, left[k]) for k in common if left[k] == right[k]) same = dict((k, left[k]) for k in common if left[k] == right[k])
if same and verbose < 2: if same and not verbose:
explanation += [u('Omitting %s identical items, use -vv to show') % explanation += [u('Omitting %s identical items, use -v to show') %
len(same)] len(same)]
elif same: elif same:
explanation += [u('Common items:')] explanation += [u('Common items:')]
@@ -291,7 +284,7 @@ def _compare_eq_dict(left, right, verbose=False):
def _notin_text(term, text, verbose=False): def _notin_text(term, text, verbose=False):
index = text.find(term) index = text.find(term)
head = text[:index] head = text[:index]
tail = text[index + len(term):] tail = text[index+len(term):]
correct_text = head + tail correct_text = head + tail
diff = _diff_text(correct_text, text, verbose) diff = _diff_text(correct_text, text, verbose)
newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)] newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)]

73
lib/spack/external/_pytest/cacheprovider.py vendored Executable file → Normal file
View File

@@ -1,21 +1,20 @@
""" """
merged implementation of the cache provider merged implementation of the cache provider
the name cache was not chosen to ensure pluggy automatically the name cache was not choosen to ensure pluggy automatically
ignores the external pytest-cache ignores the external pytest-cache
""" """
from __future__ import absolute_import, division, print_function
import py import py
import pytest import pytest
import json import json
import os
from os.path import sep as _sep, altsep as _altsep from os.path import sep as _sep, altsep as _altsep
class Cache(object): class Cache(object):
def __init__(self, config): def __init__(self, config):
self.config = config self.config = config
self._cachedir = Cache.cache_dir_from_config(config) self._cachedir = config.rootdir.join(".cache")
self.trace = config.trace.root.get("cache") self.trace = config.trace.root.get("cache")
if config.getvalue("cacheclear"): if config.getvalue("cacheclear"):
self.trace("clearing cachedir") self.trace("clearing cachedir")
@@ -23,16 +22,6 @@ def __init__(self, config):
self._cachedir.remove() self._cachedir.remove()
self._cachedir.mkdir() self._cachedir.mkdir()
@staticmethod
def cache_dir_from_config(config):
cache_dir = config.getini("cache_dir")
cache_dir = os.path.expanduser(cache_dir)
cache_dir = os.path.expandvars(cache_dir)
if os.path.isabs(cache_dir):
return py.path.local(cache_dir)
else:
return config.rootdir.join(cache_dir)
def makedir(self, name): def makedir(self, name):
""" return a directory path object with the given name. If the """ return a directory path object with the given name. If the
directory does not yet exist, it will be created. You can use it directory does not yet exist, it will be created. You can use it
@@ -100,31 +89,31 @@ def set(self, key, value):
class LFPlugin: class LFPlugin:
""" Plugin which implements the --lf (run last-failing) option """ """ Plugin which implements the --lf (run last-failing) option """
def __init__(self, config): def __init__(self, config):
self.config = config self.config = config
active_keys = 'lf', 'failedfirst' active_keys = 'lf', 'failedfirst'
self.active = any(config.getvalue(key) for key in active_keys) self.active = any(config.getvalue(key) for key in active_keys)
self.lastfailed = config.cache.get("cache/lastfailed", {})
self._previously_failed_count = None
def pytest_report_collectionfinish(self):
if self.active: if self.active:
if not self._previously_failed_count: self.lastfailed = config.cache.get("cache/lastfailed", {})
else:
self.lastfailed = {}
def pytest_report_header(self):
if self.active:
if not self.lastfailed:
mode = "run all (no recorded failures)" mode = "run all (no recorded failures)"
else: else:
noun = 'failure' if self._previously_failed_count == 1 else 'failures' mode = "rerun last %d failures%s" % (
suffix = " first" if self.config.getvalue("failedfirst") else "" len(self.lastfailed),
mode = "rerun previous {count} {noun}{suffix}".format( " first" if self.config.getvalue("failedfirst") else "")
count=self._previously_failed_count, suffix=suffix, noun=noun
)
return "run-last-failure: %s" % mode return "run-last-failure: %s" % mode
def pytest_runtest_logreport(self, report): def pytest_runtest_logreport(self, report):
if (report.when == 'call' and report.passed) or report.skipped: if report.failed and "xfail" not in report.keywords:
self.lastfailed.pop(report.nodeid, None)
elif report.failed:
self.lastfailed[report.nodeid] = True self.lastfailed[report.nodeid] = True
elif not report.failed:
if report.when == "call":
self.lastfailed.pop(report.nodeid, None)
def pytest_collectreport(self, report): def pytest_collectreport(self, report):
passed = report.outcome in ('passed', 'skipped') passed = report.outcome in ('passed', 'skipped')
@@ -146,24 +135,22 @@ def pytest_collection_modifyitems(self, session, config, items):
previously_failed.append(item) previously_failed.append(item)
else: else:
previously_passed.append(item) previously_passed.append(item)
self._previously_failed_count = len(previously_failed) if not previously_failed and previously_passed:
if not previously_failed:
# running a subset of all tests with recorded failures outside # running a subset of all tests with recorded failures outside
# of the set of tests currently executing # of the set of tests currently executing
return pass
if self.config.getvalue("lf"): elif self.config.getvalue("failedfirst"):
items[:] = previously_failed + previously_passed
else:
items[:] = previously_failed items[:] = previously_failed
config.hook.pytest_deselected(items=previously_passed) config.hook.pytest_deselected(items=previously_passed)
else:
items[:] = previously_failed + previously_passed
def pytest_sessionfinish(self, session): def pytest_sessionfinish(self, session):
config = self.config config = self.config
if config.getvalue("cacheshow") or hasattr(config, "slaveinput"): if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
return return
prev_failed = config.cache.get("cache/lastfailed", None) is not None
saved_lastfailed = config.cache.get("cache/lastfailed", {}) if (session.testscollected and prev_failed) or self.lastfailed:
if saved_lastfailed != self.lastfailed:
config.cache.set("cache/lastfailed", self.lastfailed) config.cache.set("cache/lastfailed", self.lastfailed)
@@ -184,9 +171,6 @@ def pytest_addoption(parser):
group.addoption( group.addoption(
'--cache-clear', action='store_true', dest="cacheclear", '--cache-clear', action='store_true', dest="cacheclear",
help="remove all cache contents at start of test run.") help="remove all cache contents at start of test run.")
parser.addini(
"cache_dir", default='.cache',
help="cache directory path.")
def pytest_cmdline_main(config): def pytest_cmdline_main(config):
@@ -195,6 +179,7 @@ def pytest_cmdline_main(config):
return wrap_session(config, cacheshow) return wrap_session(config, cacheshow)
@pytest.hookimpl(tryfirst=True) @pytest.hookimpl(tryfirst=True)
def pytest_configure(config): def pytest_configure(config):
config.cache = Cache(config) config.cache = Cache(config)
@@ -234,12 +219,12 @@ def cacheshow(config, session):
basedir = config.cache._cachedir basedir = config.cache._cachedir
vdir = basedir.join("v") vdir = basedir.join("v")
tw.sep("-", "cache values") tw.sep("-", "cache values")
for valpath in sorted(vdir.visit(lambda x: x.isfile())): for valpath in vdir.visit(lambda x: x.isfile()):
key = valpath.relto(vdir).replace(valpath.sep, "/") key = valpath.relto(vdir).replace(valpath.sep, "/")
val = config.cache.get(key, dummy) val = config.cache.get(key, dummy)
if val is dummy: if val is dummy:
tw.line("%s contains unreadable content, " tw.line("%s contains unreadable content, "
"will be ignored" % key) "will be ignored" % key)
else: else:
tw.line("%s contains:" % key) tw.line("%s contains:" % key)
stream = py.io.TextIO() stream = py.io.TextIO()
@@ -250,8 +235,8 @@ def cacheshow(config, session):
ddir = basedir.join("d") ddir = basedir.join("d")
if ddir.isdir() and ddir.listdir(): if ddir.isdir() and ddir.listdir():
tw.sep("-", "cache directories") tw.sep("-", "cache directories")
for p in sorted(basedir.join("d").visit()): for p in basedir.join("d").visit():
# if p.check(dir=1): #if p.check(dir=1):
# print("%s/" % p.relto(basedir)) # print("%s/" % p.relto(basedir))
if p.isfile(): if p.isfile():
key = p.relto(basedir) key = p.relto(basedir)

View File

@@ -2,19 +2,17 @@
per-test stdout/stderr capturing mechanism. per-test stdout/stderr capturing mechanism.
""" """
from __future__ import absolute_import, division, print_function from __future__ import with_statement
import contextlib import contextlib
import sys import sys
import os import os
import io
from io import UnsupportedOperation
from tempfile import TemporaryFile from tempfile import TemporaryFile
import py import py
import pytest import pytest
from _pytest.compat import CaptureIO
from py.io import TextIO
unicode = py.builtin.text unicode = py.builtin.text
patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'} patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
@@ -34,11 +32,8 @@ def pytest_addoption(parser):
@pytest.hookimpl(hookwrapper=True) @pytest.hookimpl(hookwrapper=True)
def pytest_load_initial_conftests(early_config, parser, args): def pytest_load_initial_conftests(early_config, parser, args):
ns = early_config.known_args_namespace
if ns.capture == "fd":
_py36_windowsconsoleio_workaround(sys.stdout)
_colorama_workaround()
_readline_workaround() _readline_workaround()
ns = early_config.known_args_namespace
pluginmanager = early_config.pluginmanager pluginmanager = early_config.pluginmanager
capman = CaptureManager(ns.capture) capman = CaptureManager(ns.capture)
pluginmanager.register(capman, "capturemanager") pluginmanager.register(capman, "capturemanager")
@@ -135,7 +130,7 @@ def pytest_runtest_call(self, item):
self.resumecapture() self.resumecapture()
self.activate_funcargs(item) self.activate_funcargs(item)
yield yield
# self.deactivate_funcargs() called from suspendcapture() #self.deactivate_funcargs() called from suspendcapture()
self.suspendcapture_item(item, "call") self.suspendcapture_item(item, "call")
@pytest.hookimpl(hookwrapper=True) @pytest.hookimpl(hookwrapper=True)
@@ -172,7 +167,6 @@ def capsys(request):
request.node._capfuncarg = c = CaptureFixture(SysCapture, request) request.node._capfuncarg = c = CaptureFixture(SysCapture, request)
return c return c
@pytest.fixture @pytest.fixture
def capfd(request): def capfd(request):
"""Enable capturing of writes to file descriptors 1 and 2 and make """Enable capturing of writes to file descriptors 1 and 2 and make
@@ -240,7 +234,6 @@ def safe_text_dupfile(f, mode, default_encoding="UTF8"):
class EncodedFile(object): class EncodedFile(object):
errors = "strict" # possibly needed by py3 code (issue555) errors = "strict" # possibly needed by py3 code (issue555)
def __init__(self, buffer, encoding): def __init__(self, buffer, encoding):
self.buffer = buffer self.buffer = buffer
self.encoding = encoding self.encoding = encoding
@@ -254,11 +247,6 @@ def writelines(self, linelist):
data = ''.join(linelist) data = ''.join(linelist)
self.write(data) self.write(data)
@property
def name(self):
"""Ensure that file.name is a string."""
return repr(self.buffer)
def __getattr__(self, name): def __getattr__(self, name):
return getattr(object.__getattribute__(self, "buffer"), name) return getattr(object.__getattribute__(self, "buffer"), name)
@@ -326,11 +314,9 @@ def readouterr(self):
return (self.out.snap() if self.out is not None else "", return (self.out.snap() if self.out is not None else "",
self.err.snap() if self.err is not None else "") self.err.snap() if self.err is not None else "")
class NoCapture: class NoCapture:
__init__ = start = done = suspend = resume = lambda *args: None __init__ = start = done = suspend = resume = lambda *args: None
class FDCapture: class FDCapture:
""" Capture IO to/from a given os-level filedescriptor. """ """ Capture IO to/from a given os-level filedescriptor. """
@@ -403,7 +389,7 @@ def resume(self):
def writeorg(self, data): def writeorg(self, data):
""" write to original file descriptor. """ """ write to original file descriptor. """
if py.builtin._istext(data): if py.builtin._istext(data):
data = data.encode("utf8") # XXX use encoding of original stream data = data.encode("utf8") # XXX use encoding of original stream
os.write(self.targetfd_save, data) os.write(self.targetfd_save, data)
@@ -416,7 +402,7 @@ def __init__(self, fd, tmpfile=None):
if name == "stdin": if name == "stdin":
tmpfile = DontReadFromInput() tmpfile = DontReadFromInput()
else: else:
tmpfile = CaptureIO() tmpfile = TextIO()
self.tmpfile = tmpfile self.tmpfile = tmpfile
def start(self): def start(self):
@@ -462,8 +448,7 @@ def read(self, *args):
__iter__ = read __iter__ = read
def fileno(self): def fileno(self):
raise UnsupportedOperation("redirected stdin is pseudofile, " raise ValueError("redirected Stdin is pseudofile, has no fileno()")
"has no fileno()")
def isatty(self): def isatty(self):
return False return False
@@ -473,30 +458,12 @@ def close(self):
@property @property
def buffer(self): def buffer(self):
if sys.version_info >= (3, 0): if sys.version_info >= (3,0):
return self return self
else: else:
raise AttributeError('redirected stdin has no attribute buffer') raise AttributeError('redirected stdin has no attribute buffer')
def _colorama_workaround():
"""
Ensure colorama is imported so that it attaches to the correct stdio
handles on Windows.
colorama uses the terminal on import time. So if something does the
first import of colorama while I/O capture is active, colorama will
fail in various ways.
"""
if not sys.platform.startswith('win32'):
return
try:
import colorama # noqa
except ImportError:
pass
def _readline_workaround(): def _readline_workaround():
""" """
Ensure readline is imported so that it attaches to the correct stdio Ensure readline is imported so that it attaches to the correct stdio
@@ -522,56 +489,3 @@ def _readline_workaround():
import readline # noqa import readline # noqa
except ImportError: except ImportError:
pass pass
def _py36_windowsconsoleio_workaround(stream):
"""
Python 3.6 implemented unicode console handling for Windows. This works
by reading/writing to the raw console handle using
``{Read,Write}ConsoleW``.
The problem is that we are going to ``dup2`` over the stdio file
descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the
handles used by Python to write to the console. Though there is still some
weirdness and the console handle seems to only be closed randomly and not
on the first call to ``CloseHandle``, or maybe it gets reopened with the
same handle value when we suspend capturing.
The workaround in this case will reopen stdio with a different fd which
also means a different handle by replicating the logic in
"Py_lifecycle.c:initstdio/create_stdio".
:param stream: in practice ``sys.stdout`` or ``sys.stderr``, but given
here as parameter for unittesting purposes.
See https://github.com/pytest-dev/py/issues/103
"""
if not sys.platform.startswith('win32') or sys.version_info[:2] < (3, 6):
return
# bail out if ``stream`` doesn't seem like a proper ``io`` stream (#2666)
if not hasattr(stream, 'buffer'):
return
buffered = hasattr(stream.buffer, 'raw')
raw_stdout = stream.buffer.raw if buffered else stream.buffer
if not isinstance(raw_stdout, io._WindowsConsoleIO):
return
def _reopen_stdio(f, mode):
if not buffered and mode[0] == 'w':
buffering = 0
else:
buffering = -1
return io.TextIOWrapper(
open(os.dup(f.fileno()), mode, buffering),
f.encoding,
f.errors,
f.newlines,
f.line_buffering)
sys.__stdin__ = sys.stdin = _reopen_stdio(sys.stdin, 'rb')
sys.__stdout__ = sys.stdout = _reopen_stdio(sys.stdout, 'wb')
sys.__stderr__ = sys.stderr = _reopen_stdio(sys.stderr, 'wb')

View File

@@ -1,7 +1,6 @@
""" """
python version compatibility code python version compatibility code
""" """
from __future__ import absolute_import, division, print_function
import sys import sys
import inspect import inspect
import types import types
@@ -10,8 +9,8 @@
import py import py
import _pytest import _pytest
from _pytest.outcomes import TEST_OUTCOME
try: try:
@@ -20,7 +19,6 @@
# Only available in Python 3.4+ or as a backport # Only available in Python 3.4+ or as a backport
enum = None enum = None
_PY3 = sys.version_info > (3, 0) _PY3 = sys.version_info > (3, 0)
_PY2 = not _PY3 _PY2 = not _PY3
@@ -28,10 +26,6 @@
NoneType = type(None) NoneType = type(None)
NOTSET = object() NOTSET = object()
PY35 = sys.version_info[:2] >= (3, 5)
PY36 = sys.version_info[:2] >= (3, 6)
MODULE_NOT_FOUND_ERROR = 'ModuleNotFoundError' if PY36 else 'ImportError'
if hasattr(inspect, 'signature'): if hasattr(inspect, 'signature'):
def _format_args(func): def _format_args(func):
return str(inspect.signature(func)) return str(inspect.signature(func))
@@ -48,18 +42,11 @@ def _format_args(func):
def is_generator(func): def is_generator(func):
genfunc = inspect.isgeneratorfunction(func) try:
return genfunc and not iscoroutinefunction(func) return _pytest._code.getrawcode(func).co_flags & 32 # generator function
except AttributeError: # builtin functions have no bytecode
# assume them to not be generators
def iscoroutinefunction(func): return False
"""Return True if func is a decorated coroutine function.
Note: copied and modified from Python 3.5's builtin couroutines.py to avoid import asyncio directly,
which in turns also initializes the "logging" module as side-effect (see issue #8).
"""
return (getattr(func, '_is_coroutine', False) or
(hasattr(inspect, 'iscoroutinefunction') and inspect.iscoroutinefunction(func)))
def getlocation(function, curdir): def getlocation(function, curdir):
@@ -68,7 +55,7 @@ def getlocation(function, curdir):
lineno = py.builtin._getcode(function).co_firstlineno lineno = py.builtin._getcode(function).co_firstlineno
if fn.relto(curdir): if fn.relto(curdir):
fn = fn.relto(curdir) fn = fn.relto(curdir)
return "%s:%d" % (fn, lineno + 1) return "%s:%d" %(fn, lineno+1)
def num_mock_patch_args(function): def num_mock_patch_args(function):
@@ -79,21 +66,13 @@ def num_mock_patch_args(function):
mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None)) mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None))
if mock is not None: if mock is not None:
return len([p for p in patchings return len([p for p in patchings
if not p.attribute_name and p.new is mock.DEFAULT]) if not p.attribute_name and p.new is mock.DEFAULT])
return len(patchings) return len(patchings)
def getfuncargnames(function, startindex=None, cls=None): def getfuncargnames(function, startindex=None):
"""
@RonnyPfannschmidt: This function should be refactored when we revisit fixtures. The
fixture mechanism should ask the node for the fixture names, and not try to obtain
directly from the function object well after collection has occurred.
"""
if startindex is None and cls is not None:
is_staticmethod = isinstance(cls.__dict__.get(function.__name__, None), staticmethod)
startindex = 0 if is_staticmethod else 1
# XXX merge with main.py's varnames # XXX merge with main.py's varnames
# assert not isclass(function) #assert not isclass(function)
realfunction = function realfunction = function
while hasattr(realfunction, "__wrapped__"): while hasattr(realfunction, "__wrapped__"):
realfunction = realfunction.__wrapped__ realfunction = realfunction.__wrapped__
@@ -119,7 +98,8 @@ def getfuncargnames(function, startindex=None, cls=None):
return tuple(argnames[startindex:]) return tuple(argnames[startindex:])
if sys.version_info[:2] == (2, 6):
if sys.version_info[:2] == (2, 6):
def isclass(object): def isclass(object):
""" Return true if the object is a class. Overrides inspect.isclass for """ Return true if the object is a class. Overrides inspect.isclass for
python 2.6 because it will return True for objects which always return python 2.6 because it will return True for objects which always return
@@ -131,12 +111,10 @@ def isclass(object):
if _PY3: if _PY3:
import codecs import codecs
imap = map
izip = zip
STRING_TYPES = bytes, str
UNICODE_TYPES = str,
def _ascii_escaped(val): STRING_TYPES = bytes, str
def _escape_strings(val):
"""If val is pure ascii, returns it as a str(). Otherwise, escapes """If val is pure ascii, returns it as a str(). Otherwise, escapes
bytes objects into a sequence of escaped bytes: bytes objects into a sequence of escaped bytes:
@@ -166,11 +144,8 @@ def _ascii_escaped(val):
return val.encode('unicode_escape').decode('ascii') return val.encode('unicode_escape').decode('ascii')
else: else:
STRING_TYPES = bytes, str, unicode STRING_TYPES = bytes, str, unicode
UNICODE_TYPES = unicode,
from itertools import imap, izip # NOQA def _escape_strings(val):
def _ascii_escaped(val):
"""In py2 bytes and str are the same type, so return if it's a bytes """In py2 bytes and str are the same type, so return if it's a bytes
object, return it unchanged if it is a full ascii string, object, return it unchanged if it is a full ascii string,
otherwise escape it into its binary form. otherwise escape it into its binary form.
@@ -192,18 +167,8 @@ def get_real_func(obj):
""" gets the real function object of the (possibly) wrapped object by """ gets the real function object of the (possibly) wrapped object by
functools.wraps or functools.partial. functools.wraps or functools.partial.
""" """
start_obj = obj while hasattr(obj, "__wrapped__"):
for i in range(100): obj = obj.__wrapped__
new_obj = getattr(obj, '__wrapped__', None)
if new_obj is None:
break
obj = new_obj
else:
raise ValueError(
("could not find real function of {start}"
"\nstopped at {current}").format(
start=py.io.saferepr(start_obj),
current=py.io.saferepr(obj)))
if isinstance(obj, functools.partial): if isinstance(obj, functools.partial):
obj = obj.func obj = obj.func
return obj return obj
@@ -230,16 +195,14 @@ def getimfunc(func):
def safe_getattr(object, name, default): def safe_getattr(object, name, default):
""" Like getattr but return default upon any Exception or any OutcomeException. """ Like getattr but return default upon any Exception.
Attribute access can potentially fail for 'evil' Python objects. Attribute access can potentially fail for 'evil' Python objects.
See issue #214. See issue214
It catches OutcomeException because of #2490 (issue #580), new outcomes are derived from BaseException
instead of Exception (for more details check #2707)
""" """
try: try:
return getattr(object, name, default) return getattr(object, name, default)
except TEST_OUTCOME: except Exception:
return default return default
@@ -263,64 +226,5 @@ def safe_str(v):
try: try:
return str(v) return str(v)
except UnicodeError: except UnicodeError:
if not isinstance(v, unicode):
v = unicode(v)
errors = 'replace' errors = 'replace'
return v.encode('utf-8', errors) return v.encode('ascii', errors)
COLLECT_FAKEMODULE_ATTRIBUTES = (
'Collector',
'Module',
'Generator',
'Function',
'Instance',
'Session',
'Item',
'Class',
'File',
'_fillfuncargs',
)
def _setup_collect_fakemodule():
from types import ModuleType
import pytest
pytest.collect = ModuleType('pytest.collect')
pytest.collect.__all__ = [] # used for setns
for attr in COLLECT_FAKEMODULE_ATTRIBUTES:
setattr(pytest.collect, attr, getattr(pytest, attr))
if _PY2:
# Without this the test_dupfile_on_textio will fail, otherwise CaptureIO could directly inherit from StringIO.
from py.io import TextIO
class CaptureIO(TextIO):
@property
def encoding(self):
return getattr(self, '_encoding', 'UTF-8')
else:
import io
class CaptureIO(io.TextIOWrapper):
def __init__(self):
super(CaptureIO, self).__init__(
io.BytesIO(),
encoding='UTF-8', newline='', write_through=True,
)
def getvalue(self):
return self.buffer.getvalue().decode('UTF-8')
class FuncargnamesCompatAttr(object):
""" helper class so that Metafunc, Function and FixtureRequest
don't need to each define the "funcargnames" compatibility attribute.
"""
@property
def funcargnames(self):
""" alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
return self.fixturenames

View File

@@ -1,5 +1,4 @@
""" command line options, ini-file and conftest.py processing. """ """ command line options, ini-file and conftest.py processing. """
from __future__ import absolute_import, division, print_function
import argparse import argparse
import shlex import shlex
import traceback import traceback
@@ -8,8 +7,7 @@
import py import py
# DON't import pytest here because it causes import cycle troubles # DON't import pytest here because it causes import cycle troubles
import sys import sys, os
import os
import _pytest._code import _pytest._code
import _pytest.hookspec # the extension point definitions import _pytest.hookspec # the extension point definitions
import _pytest.assertion import _pytest.assertion
@@ -55,15 +53,15 @@ def main(args=None, plugins=None):
return 4 return 4
else: else:
try: try:
config.pluginmanager.check_pending()
return config.hook.pytest_cmdline_main(config=config) return config.hook.pytest_cmdline_main(config=config)
finally: finally:
config._ensure_unconfigure() config._ensure_unconfigure()
except UsageError as e: except UsageError as e:
for msg in e.args: for msg in e.args:
sys.stderr.write("ERROR: %s\n" % (msg,)) sys.stderr.write("ERROR: %s\n" %(msg,))
return 4 return 4
class cmdline: # compatibility namespace class cmdline: # compatibility namespace
main = staticmethod(main) main = staticmethod(main)
@@ -72,12 +70,6 @@ class UsageError(Exception):
""" error in pytest usage or invocation""" """ error in pytest usage or invocation"""
class PrintHelp(Exception):
"""Raised when pytest should print it's help to skip the rest of the
argument parsing and validation."""
pass
def filename_arg(path, optname): def filename_arg(path, optname):
""" Argparse type validator for filename arguments. """ Argparse type validator for filename arguments.
@@ -103,11 +95,10 @@ def directory_arg(path, optname):
_preinit = [] _preinit = []
default_plugins = ( default_plugins = (
"mark main terminal runner python fixtures debugging unittest capture skipping " "mark main terminal runner python fixtures debugging unittest capture skipping "
"tmpdir monkeypatch recwarn pastebin helpconfig nose assertion " "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion "
"junitxml resultlog doctest cacheprovider freeze_support " "junitxml resultlog doctest cacheprovider freeze_support "
"setuponly setupplan warnings").split() "setuponly setupplan").split()
builtin_plugins = set(default_plugins) builtin_plugins = set(default_plugins)
builtin_plugins.add("pytester") builtin_plugins.add("pytester")
@@ -117,7 +108,6 @@ def _preloadplugins():
assert not _preinit assert not _preinit
_preinit.append(get_config()) _preinit.append(get_config())
def get_config(): def get_config():
if _preinit: if _preinit:
return _preinit.pop(0) return _preinit.pop(0)
@@ -128,7 +118,6 @@ def get_config():
pluginmanager.import_plugin(spec) pluginmanager.import_plugin(spec)
return config return config
def get_plugin_manager(): def get_plugin_manager():
""" """
Obtain a new instance of the Obtain a new instance of the
@@ -140,7 +129,6 @@ def get_plugin_manager():
""" """
return get_config().pluginmanager return get_config().pluginmanager
def _prepareconfig(args=None, plugins=None): def _prepareconfig(args=None, plugins=None):
warning = None warning = None
if args is None: if args is None:
@@ -165,7 +153,7 @@ def _prepareconfig(args=None, plugins=None):
if warning: if warning:
config.warn('C1', warning) config.warn('C1', warning)
return pluginmanager.hook.pytest_cmdline_parse( return pluginmanager.hook.pytest_cmdline_parse(
pluginmanager=pluginmanager, args=args) pluginmanager=pluginmanager, args=args)
except BaseException: except BaseException:
config._ensure_unconfigure() config._ensure_unconfigure()
raise raise
@@ -173,14 +161,13 @@ def _prepareconfig(args=None, plugins=None):
class PytestPluginManager(PluginManager): class PytestPluginManager(PluginManager):
""" """
Overwrites :py:class:`pluggy.PluginManager <_pytest.vendored_packages.pluggy.PluginManager>` to add pytest-specific Overwrites :py:class:`pluggy.PluginManager` to add pytest-specific
functionality: functionality:
* loading plugins from the command line, ``PYTEST_PLUGIN`` env variable and * loading plugins from the command line, ``PYTEST_PLUGIN`` env variable and
``pytest_plugins`` global variables found in plugins being loaded; ``pytest_plugins`` global variables found in plugins being loaded;
* ``conftest.py`` loading during start-up; * ``conftest.py`` loading during start-up;
""" """
def __init__(self): def __init__(self):
super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_") super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_")
self._conftest_plugins = set() self._conftest_plugins = set()
@@ -211,8 +198,7 @@ def addhooks(self, module_or_class):
""" """
.. deprecated:: 2.8 .. deprecated:: 2.8
Use :py:meth:`pluggy.PluginManager.add_hookspecs <_pytest.vendored_packages.pluggy.PluginManager.add_hookspecs>` Use :py:meth:`pluggy.PluginManager.add_hookspecs` instead.
instead.
""" """
warning = dict(code="I2", warning = dict(code="I2",
fslocation=_pytest._code.getfslineno(sys._getframe(1)), fslocation=_pytest._code.getfslineno(sys._getframe(1)),
@@ -241,7 +227,7 @@ def parse_hookimpl_opts(self, plugin, name):
def parse_hookspec_opts(self, module_or_class, name): def parse_hookspec_opts(self, module_or_class, name):
opts = super(PytestPluginManager, self).parse_hookspec_opts( opts = super(PytestPluginManager, self).parse_hookspec_opts(
module_or_class, name) module_or_class, name)
if opts is None: if opts is None:
method = getattr(module_or_class, name) method = getattr(module_or_class, name)
if name.startswith("pytest_"): if name.startswith("pytest_"):
@@ -264,10 +250,7 @@ def register(self, plugin, name=None):
ret = super(PytestPluginManager, self).register(plugin, name) ret = super(PytestPluginManager, self).register(plugin, name)
if ret: if ret:
self.hook.pytest_plugin_registered.call_historic( self.hook.pytest_plugin_registered.call_historic(
kwargs=dict(plugin=plugin, manager=self)) kwargs=dict(plugin=plugin, manager=self))
if isinstance(plugin, types.ModuleType):
self.consider_module(plugin)
return ret return ret
def getplugin(self, name): def getplugin(self, name):
@@ -282,11 +265,11 @@ def pytest_configure(self, config):
# XXX now that the pluginmanager exposes hookimpl(tryfirst...) # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
# we should remove tryfirst/trylast as markers # we should remove tryfirst/trylast as markers
config.addinivalue_line("markers", config.addinivalue_line("markers",
"tryfirst: mark a hook implementation function such that the " "tryfirst: mark a hook implementation function such that the "
"plugin machinery will try to call it first/as early as possible.") "plugin machinery will try to call it first/as early as possible.")
config.addinivalue_line("markers", config.addinivalue_line("markers",
"trylast: mark a hook implementation function such that the " "trylast: mark a hook implementation function such that the "
"plugin machinery will try to call it last/as late as possible.") "plugin machinery will try to call it last/as late as possible.")
def _warn(self, message): def _warn(self, message):
kwargs = message if isinstance(message, dict) else { kwargs = message if isinstance(message, dict) else {
@@ -310,7 +293,7 @@ def _set_initial_conftests(self, namespace):
""" """
current = py.path.local() current = py.path.local()
self._confcutdir = current.join(namespace.confcutdir, abs=True) \ self._confcutdir = current.join(namespace.confcutdir, abs=True) \
if namespace.confcutdir else None if namespace.confcutdir else None
self._noconftest = namespace.noconftest self._noconftest = namespace.noconftest
testpaths = namespace.file_or_dir testpaths = namespace.file_or_dir
foundanchor = False foundanchor = False
@@ -321,7 +304,7 @@ def _set_initial_conftests(self, namespace):
if i != -1: if i != -1:
path = path[:i] path = path[:i]
anchor = current.join(path, abs=1) anchor = current.join(path, abs=1)
if exists(anchor): # we found some file object if exists(anchor): # we found some file object
self._try_load_conftest(anchor) self._try_load_conftest(anchor)
foundanchor = True foundanchor = True
if not foundanchor: if not foundanchor:
@@ -388,7 +371,7 @@ def _importconftest(self, conftestpath):
if path and path.relto(dirpath) or path == dirpath: if path and path.relto(dirpath) or path == dirpath:
assert mod not in mods assert mod not in mods
mods.append(mod) mods.append(mod)
self.trace("loaded conftestmodule %r" % (mod)) self.trace("loaded conftestmodule %r" %(mod))
self.consider_conftest(mod) self.consider_conftest(mod)
return mod return mod
@@ -398,7 +381,7 @@ def _importconftest(self, conftestpath):
# #
def consider_preparse(self, args): def consider_preparse(self, args):
for opt1, opt2 in zip(args, args[1:]): for opt1,opt2 in zip(args, args[1:]):
if opt1 == "-p": if opt1 == "-p":
self.consider_pluginarg(opt2) self.consider_pluginarg(opt2)
@@ -412,33 +395,38 @@ def consider_pluginarg(self, arg):
self.import_plugin(arg) self.import_plugin(arg)
def consider_conftest(self, conftestmodule): def consider_conftest(self, conftestmodule):
self.register(conftestmodule, name=conftestmodule.__file__) if self.register(conftestmodule, name=conftestmodule.__file__):
self.consider_module(conftestmodule)
def consider_env(self): def consider_env(self):
self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
def consider_module(self, mod): def consider_module(self, mod):
self._import_plugin_specs(getattr(mod, 'pytest_plugins', [])) plugins = getattr(mod, 'pytest_plugins', [])
if isinstance(plugins, str):
plugins = [plugins]
self.rewrite_hook.mark_rewrite(*plugins)
self._import_plugin_specs(plugins)
def _import_plugin_specs(self, spec): def _import_plugin_specs(self, spec):
plugins = _get_plugin_specs_as_list(spec) if spec:
for import_spec in plugins: if isinstance(spec, str):
self.import_plugin(import_spec) spec = spec.split(",")
for import_spec in spec:
self.import_plugin(import_spec)
def import_plugin(self, modname): def import_plugin(self, modname):
# most often modname refers to builtin modules, e.g. "pytester", # most often modname refers to builtin modules, e.g. "pytester",
# "terminal" or "capture". Those plugins are registered under their # "terminal" or "capture". Those plugins are registered under their
# basename for historic purposes but must be imported with the # basename for historic purposes but must be imported with the
# _pytest prefix. # _pytest prefix.
assert isinstance(modname, (py.builtin.text, str)), "module name as text required, got %r" % modname assert isinstance(modname, str)
modname = str(modname)
if self.get_plugin(modname) is not None: if self.get_plugin(modname) is not None:
return return
if modname in builtin_plugins: if modname in builtin_plugins:
importspec = "_pytest." + modname importspec = "_pytest." + modname
else: else:
importspec = modname importspec = modname
self.rewrite_hook.mark_rewrite(importspec)
try: try:
__import__(importspec) __import__(importspec)
except ImportError as e: except ImportError as e:
@@ -452,28 +440,11 @@ def import_plugin(self, modname):
import pytest import pytest
if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception): if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception):
raise raise
self._warn("skipped plugin %r: %s" % ((modname, e.msg))) self._warn("skipped plugin %r: %s" %((modname, e.msg)))
else: else:
mod = sys.modules[importspec] mod = sys.modules[importspec]
self.register(mod, modname) self.register(mod, modname)
self.consider_module(mod)
def _get_plugin_specs_as_list(specs):
"""
Parses a list of "plugin specs" and returns a list of plugin names.
Plugin specs can be given as a list of strings separated by "," or already as a list/tuple in
which case it is returned as a list. Specs can also be `None` in which case an
empty list is returned.
"""
if specs is not None:
if isinstance(specs, str):
specs = specs.split(',') if specs else []
if not isinstance(specs, (list, tuple)):
raise UsageError("Plugin specs must be a ','-separated string or a "
"list/tuple of strings for plugin names. Given: %r" % specs)
return list(specs)
return []
class Parser: class Parser:
@@ -517,7 +488,7 @@ def getgroup(self, name, description="", after=None):
for i, grp in enumerate(self._groups): for i, grp in enumerate(self._groups):
if grp.name == after: if grp.name == after:
break break
self._groups.insert(i + 1, group) self._groups.insert(i+1, group)
return group return group
def addoption(self, *opts, **attrs): def addoption(self, *opts, **attrs):
@@ -555,7 +526,7 @@ def _getparser(self):
a = option.attrs() a = option.attrs()
arggroup.add_argument(*n, **a) arggroup.add_argument(*n, **a)
# bash like autocompletion for dirs (appending '/') # bash like autocompletion for dirs (appending '/')
optparser.add_argument(FILE_OR_DIR, nargs='*').completer = filescompleter optparser.add_argument(FILE_OR_DIR, nargs='*').completer=filescompleter
return optparser return optparser
def parse_setoption(self, args, option, namespace=None): def parse_setoption(self, args, option, namespace=None):
@@ -699,7 +670,7 @@ def attrs(self):
if self._attrs.get('help'): if self._attrs.get('help'):
a = self._attrs['help'] a = self._attrs['help']
a = a.replace('%default', '%(default)s') a = a.replace('%default', '%(default)s')
# a = a.replace('%prog', '%(prog)s') #a = a.replace('%prog', '%(prog)s')
self._attrs['help'] = a self._attrs['help'] = a
return self._attrs return self._attrs
@@ -783,7 +754,7 @@ def __init__(self, parser, extra_info=None):
extra_info = {} extra_info = {}
self._parser = parser self._parser = parser
argparse.ArgumentParser.__init__(self, usage=parser._usage, argparse.ArgumentParser.__init__(self, usage=parser._usage,
add_help=False, formatter_class=DropShorterLongHelpFormatter) add_help=False, formatter_class=DropShorterLongHelpFormatter)
# extra_info is a dict of (param -> value) to display if there's # extra_info is a dict of (param -> value) to display if there's
# an usage error to provide more contextual information to the user # an usage error to provide more contextual information to the user
self.extra_info = extra_info self.extra_info = extra_info
@@ -811,10 +782,9 @@ class DropShorterLongHelpFormatter(argparse.HelpFormatter):
- shortcut if there are only two options and one of them is a short one - shortcut if there are only two options and one of them is a short one
- cache result on action object as this is called at least 2 times - cache result on action object as this is called at least 2 times
""" """
def _format_action_invocation(self, action): def _format_action_invocation(self, action):
orgstr = argparse.HelpFormatter._format_action_invocation(self, action) orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
if orgstr and orgstr[0] != '-': # only optional arguments if orgstr and orgstr[0] != '-': # only optional arguments
return orgstr return orgstr
res = getattr(action, '_formatted_action_invocation', None) res = getattr(action, '_formatted_action_invocation', None)
if res: if res:
@@ -825,7 +795,7 @@ def _format_action_invocation(self, action):
action._formatted_action_invocation = orgstr action._formatted_action_invocation = orgstr
return orgstr return orgstr
return_list = [] return_list = []
option_map = getattr(action, 'map_long_option', {}) option_map = getattr(action, 'map_long_option', {})
if option_map is None: if option_map is None:
option_map = {} option_map = {}
short_long = {} short_long = {}
@@ -843,7 +813,7 @@ def _format_action_invocation(self, action):
short_long[shortened] = xxoption short_long[shortened] = xxoption
# now short_long has been filled out to the longest with dashes # now short_long has been filled out to the longest with dashes
# **and** we keep the right option ordering from add_argument # **and** we keep the right option ordering from add_argument
for option in options: for option in options: #
if len(option) == 2 or option[2] == ' ': if len(option) == 2 or option[2] == ' ':
return_list.append(option) return_list.append(option)
if option[2:] == short_long.get(option.replace('-', '')): if option[2:] == short_long.get(option.replace('-', '')):
@@ -852,26 +822,22 @@ def _format_action_invocation(self, action):
return action._formatted_action_invocation return action._formatted_action_invocation
def _ensure_removed_sysmodule(modname): def _ensure_removed_sysmodule(modname):
try: try:
del sys.modules[modname] del sys.modules[modname]
except KeyError: except KeyError:
pass pass
class CmdOptions(object): class CmdOptions(object):
""" holds cmdline options as attributes.""" """ holds cmdline options as attributes."""
def __init__(self, values=()): def __init__(self, values=()):
self.__dict__.update(values) self.__dict__.update(values)
def __repr__(self): def __repr__(self):
return "<CmdOptions %r>" % (self.__dict__,) return "<CmdOptions %r>" %(self.__dict__,)
def copy(self): def copy(self):
return CmdOptions(self.__dict__) return CmdOptions(self.__dict__)
class Notset: class Notset:
def __repr__(self): def __repr__(self):
return "<NOTSET>" return "<NOTSET>"
@@ -881,18 +847,6 @@ def __repr__(self):
FILE_OR_DIR = 'file_or_dir' FILE_OR_DIR = 'file_or_dir'
def _iter_rewritable_modules(package_files):
for fn in package_files:
is_simple_module = '/' not in fn and fn.endswith('.py')
is_package = fn.count('/') == 1 and fn.endswith('__init__.py')
if is_simple_module:
module_name, _ = os.path.splitext(fn)
yield module_name
elif is_package:
package_name = os.path.dirname(fn)
yield package_name
class Config(object): class Config(object):
""" access to configuration values, pluginmanager and plugin hooks. """ """ access to configuration values, pluginmanager and plugin hooks. """
@@ -910,7 +864,6 @@ def __init__(self, pluginmanager):
self.trace = self.pluginmanager.trace.root.get("config") self.trace = self.pluginmanager.trace.root.get("config")
self.hook = self.pluginmanager.hook self.hook = self.pluginmanager.hook
self._inicache = {} self._inicache = {}
self._override_ini = ()
self._opt2dest = {} self._opt2dest = {}
self._cleanup = [] self._cleanup = []
self._warn = self.pluginmanager._warn self._warn = self.pluginmanager._warn
@@ -943,11 +896,11 @@ def _ensure_unconfigure(self):
fin = self._cleanup.pop() fin = self._cleanup.pop()
fin() fin()
def warn(self, code, message, fslocation=None, nodeid=None): def warn(self, code, message, fslocation=None):
""" generate a warning for this test session. """ """ generate a warning for this test session. """
self.hook.pytest_logwarning.call_historic(kwargs=dict( self.hook.pytest_logwarning.call_historic(kwargs=dict(
code=code, message=message, code=code, message=message,
fslocation=fslocation, nodeid=nodeid)) fslocation=fslocation, nodeid=None))
def get_terminal_writer(self): def get_terminal_writer(self):
return self.pluginmanager.get_plugin("terminalreporter")._tw return self.pluginmanager.get_plugin("terminalreporter")._tw
@@ -963,14 +916,14 @@ def notify_exception(self, excinfo, option=None):
else: else:
style = "native" style = "native"
excrepr = excinfo.getrepr(funcargs=True, excrepr = excinfo.getrepr(funcargs=True,
showlocals=getattr(option, 'showlocals', False), showlocals=getattr(option, 'showlocals', False),
style=style, style=style,
) )
res = self.hook.pytest_internalerror(excrepr=excrepr, res = self.hook.pytest_internalerror(excrepr=excrepr,
excinfo=excinfo) excinfo=excinfo)
if not py.builtin.any(res): if not py.builtin.any(res):
for line in str(excrepr).split("\n"): for line in str(excrepr).split("\n"):
sys.stderr.write("INTERNALERROR> %s\n" % line) sys.stderr.write("INTERNALERROR> %s\n" %line)
sys.stderr.flush() sys.stderr.flush()
def cwd_relative_nodeid(self, nodeid): def cwd_relative_nodeid(self, nodeid):
@@ -1011,9 +964,8 @@ def _initini(self, args):
self.invocation_dir = py.path.local() self.invocation_dir = py.path.local()
self._parser.addini('addopts', 'extra command line options', 'args') self._parser.addini('addopts', 'extra command line options', 'args')
self._parser.addini('minversion', 'minimally required pytest version') self._parser.addini('minversion', 'minimally required pytest version')
self._override_ini = ns.override_ini or ()
def _consider_importhook(self, args): def _consider_importhook(self, args, entrypoint_name):
"""Install the PEP 302 import hook if using assertion re-writing. """Install the PEP 302 import hook if using assertion re-writing.
Needs to parse the --assert=<mode> option from the commandline Needs to parse the --assert=<mode> option from the commandline
@@ -1028,34 +980,26 @@ def _consider_importhook(self, args):
except SystemError: except SystemError:
mode = 'plain' mode = 'plain'
else: else:
self._mark_plugins_for_rewrite(hook) import pkg_resources
self.pluginmanager.rewrite_hook = hook
for entrypoint in pkg_resources.iter_entry_points('pytest11'):
# 'RECORD' available for plugins installed normally (pip install)
# 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
# for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
# so it shouldn't be an issue
for metadata in ('RECORD', 'SOURCES.txt'):
for entry in entrypoint.dist._get_metadata(metadata):
fn = entry.split(',')[0]
is_simple_module = os.sep not in fn and fn.endswith('.py')
is_package = fn.count(os.sep) == 1 and fn.endswith('__init__.py')
if is_simple_module:
module_name, ext = os.path.splitext(fn)
hook.mark_rewrite(module_name)
elif is_package:
package_name = os.path.dirname(fn)
hook.mark_rewrite(package_name)
self._warn_about_missing_assertion(mode) self._warn_about_missing_assertion(mode)
def _mark_plugins_for_rewrite(self, hook):
"""
Given an importhook, mark for rewrite any top-level
modules or packages in the distribution package for
all pytest plugins.
"""
import pkg_resources
self.pluginmanager.rewrite_hook = hook
# 'RECORD' available for plugins installed normally (pip install)
# 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
# for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
# so it shouldn't be an issue
metadata_files = 'RECORD', 'SOURCES.txt'
package_files = (
entry.split(',')[0]
for entrypoint in pkg_resources.iter_entry_points('pytest11')
for metadata in metadata_files
for entry in entrypoint.dist._get_metadata(metadata)
)
for name in _iter_rewritable_modules(package_files):
hook.mark_rewrite(name)
def _warn_about_missing_assertion(self, mode): def _warn_about_missing_assertion(self, mode):
try: try:
assert False assert False
@@ -1079,17 +1023,19 @@ def _preparse(self, args, addopts=True):
args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args
args[:] = self.getini("addopts") + args args[:] = self.getini("addopts") + args
self._checkversion() self._checkversion()
self._consider_importhook(args) entrypoint_name = 'pytest11'
self._consider_importhook(args, entrypoint_name)
self.pluginmanager.consider_preparse(args) self.pluginmanager.consider_preparse(args)
self.pluginmanager.load_setuptools_entrypoints('pytest11') self.pluginmanager.load_setuptools_entrypoints(entrypoint_name)
self.pluginmanager.consider_env() self.pluginmanager.consider_env()
self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy()) self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy())
confcutdir = self.known_args_namespace.confcutdir
if self.known_args_namespace.confcutdir is None and self.inifile: if self.known_args_namespace.confcutdir is None and self.inifile:
confcutdir = py.path.local(self.inifile).dirname confcutdir = py.path.local(self.inifile).dirname
self.known_args_namespace.confcutdir = confcutdir self.known_args_namespace.confcutdir = confcutdir
try: try:
self.hook.pytest_load_initial_conftests(early_config=self, self.hook.pytest_load_initial_conftests(early_config=self,
args=args, parser=self._parser) args=args, parser=self._parser)
except ConftestImportFailure: except ConftestImportFailure:
e = sys.exc_info()[1] e = sys.exc_info()[1]
if ns.help or ns.version: if ns.help or ns.version:
@@ -1107,32 +1053,28 @@ def _checkversion(self):
myver = pytest.__version__.split(".") myver = pytest.__version__.split(".")
if myver < ver: if myver < ver:
raise pytest.UsageError( raise pytest.UsageError(
"%s:%d: requires pytest-%s, actual pytest-%s'" % ( "%s:%d: requires pytest-%s, actual pytest-%s'" %(
self.inicfg.config.path, self.inicfg.lineof('minversion'), self.inicfg.config.path, self.inicfg.lineof('minversion'),
minver, pytest.__version__)) minver, pytest.__version__))
def parse(self, args, addopts=True): def parse(self, args, addopts=True):
# parse given cmdline arguments into this config object. # parse given cmdline arguments into this config object.
assert not hasattr(self, 'args'), ( assert not hasattr(self, 'args'), (
"can only parse cmdline args at most once per Config object") "can only parse cmdline args at most once per Config object")
self._origargs = args self._origargs = args
self.hook.pytest_addhooks.call_historic( self.hook.pytest_addhooks.call_historic(
kwargs=dict(pluginmanager=self.pluginmanager)) kwargs=dict(pluginmanager=self.pluginmanager))
self._preparse(args, addopts=addopts) self._preparse(args, addopts=addopts)
# XXX deprecated hook: # XXX deprecated hook:
self.hook.pytest_cmdline_preparse(config=self, args=args) self.hook.pytest_cmdline_preparse(config=self, args=args)
self._parser.after_preparse = True args = self._parser.parse_setoption(args, self.option, namespace=self.option)
try: if not args:
args = self._parser.parse_setoption(args, self.option, namespace=self.option) cwd = os.getcwd()
if cwd == self.rootdir:
args = self.getini('testpaths')
if not args: if not args:
cwd = os.getcwd() args = [cwd]
if cwd == self.rootdir: self.args = args
args = self.getini('testpaths')
if not args:
args = [cwd]
self.args = args
except PrintHelp:
pass
def addinivalue_line(self, name, line): def addinivalue_line(self, name, line):
""" add a line to an ini-file option. The option must have been """ add a line to an ini-file option. The option must have been
@@ -1140,12 +1082,12 @@ def addinivalue_line(self, name, line):
the first line in its value. """ the first line in its value. """
x = self.getini(name) x = self.getini(name)
assert isinstance(x, list) assert isinstance(x, list)
x.append(line) # modifies the cached list inline x.append(line) # modifies the cached list inline
def getini(self, name): def getini(self, name):
""" return configuration value from an :ref:`ini file <inifiles>`. If the """ return configuration value from an :ref:`ini file <inifiles>`. If the
specified name hasn't been registered through a prior specified name hasn't been registered through a prior
:py:func:`parser.addini <_pytest.config.Parser.addini>` :py:func:`parser.addini <pytest.config.Parser.addini>`
call (usually from a plugin), a ValueError is raised. """ call (usually from a plugin), a ValueError is raised. """
try: try:
return self._inicache[name] return self._inicache[name]
@@ -1157,7 +1099,7 @@ def _getini(self, name):
try: try:
description, type, default = self._parser._inidict[name] description, type, default = self._parser._inidict[name]
except KeyError: except KeyError:
raise ValueError("unknown configuration value: %r" % (name,)) raise ValueError("unknown configuration value: %r" %(name,))
value = self._get_override_ini_value(name) value = self._get_override_ini_value(name)
if value is None: if value is None:
try: try:
@@ -1170,10 +1112,10 @@ def _getini(self, name):
return [] return []
if type == "pathlist": if type == "pathlist":
dp = py.path.local(self.inicfg.config.path).dirpath() dp = py.path.local(self.inicfg.config.path).dirpath()
values = [] l = []
for relpath in shlex.split(value): for relpath in shlex.split(value):
values.append(dp.join(relpath, abs=True)) l.append(dp.join(relpath, abs=True))
return values return l
elif type == "args": elif type == "args":
return shlex.split(value) return shlex.split(value)
elif type == "linelist": elif type == "linelist":
@@ -1190,13 +1132,13 @@ def _getconftest_pathlist(self, name, path):
except KeyError: except KeyError:
return None return None
modpath = py.path.local(mod.__file__).dirpath() modpath = py.path.local(mod.__file__).dirpath()
values = [] l = []
for relroot in relroots: for relroot in relroots:
if not isinstance(relroot, py.path.local): if not isinstance(relroot, py.path.local):
relroot = relroot.replace("/", py.path.local.sep) relroot = relroot.replace("/", py.path.local.sep)
relroot = modpath.join(relroot, abs=True) relroot = modpath.join(relroot, abs=True)
values.append(relroot) l.append(relroot)
return values return l
def _get_override_ini_value(self, name): def _get_override_ini_value(self, name):
value = None value = None
@@ -1204,14 +1146,15 @@ def _get_override_ini_value(self, name):
# and -o foo1=bar1 -o foo2=bar2 options # and -o foo1=bar1 -o foo2=bar2 options
# always use the last item if multiple value set for same ini-name, # always use the last item if multiple value set for same ini-name,
# e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2 # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
for ini_config_list in self._override_ini: if self.getoption("override_ini", None):
for ini_config in ini_config_list: for ini_config_list in self.option.override_ini:
try: for ini_config in ini_config_list:
(key, user_ini_value) = ini_config.split("=", 1) try:
except ValueError: (key, user_ini_value) = ini_config.split("=", 1)
raise UsageError("-o/--override-ini expects option=value style.") except ValueError:
if key == name: raise UsageError("-o/--override-ini expects option=value style.")
value = user_ini_value if key == name:
value = user_ini_value
return value return value
def getoption(self, name, default=notset, skip=False): def getoption(self, name, default=notset, skip=False):
@@ -1234,7 +1177,7 @@ def getoption(self, name, default=notset, skip=False):
return default return default
if skip: if skip:
import pytest import pytest
pytest.skip("no %r option found" % (name,)) pytest.skip("no %r option found" %(name,))
raise ValueError("no option named %r" % (name,)) raise ValueError("no option named %r" % (name,))
def getvalue(self, name, path=None): def getvalue(self, name, path=None):
@@ -1245,14 +1188,12 @@ def getvalueorskip(self, name, path=None):
""" (deprecated, use getoption(skip=True)) """ """ (deprecated, use getoption(skip=True)) """
return self.getoption(name, skip=True) return self.getoption(name, skip=True)
def exists(path, ignore=EnvironmentError): def exists(path, ignore=EnvironmentError):
try: try:
return path.check() return path.check()
except ignore: except ignore:
return False return False
def getcfg(args, warnfunc=None): def getcfg(args, warnfunc=None):
""" """
Search the list of arguments for a valid ini-file for pytest, Search the list of arguments for a valid ini-file for pytest,
@@ -1287,20 +1228,25 @@ def getcfg(args, warnfunc=None):
return None, None, None return None, None, None
def get_common_ancestor(paths): def get_common_ancestor(args):
# args are what we get after early command line parsing (usually
# strings, but can be py.path.local objects as well)
common_ancestor = None common_ancestor = None
for path in paths: for arg in args:
if not path.exists(): if str(arg)[0] == "-":
continue
p = py.path.local(arg)
if not p.exists():
continue continue
if common_ancestor is None: if common_ancestor is None:
common_ancestor = path common_ancestor = p
else: else:
if path.relto(common_ancestor) or path == common_ancestor: if p.relto(common_ancestor) or p == common_ancestor:
continue continue
elif common_ancestor.relto(path): elif common_ancestor.relto(p):
common_ancestor = path common_ancestor = p
else: else:
shared = path.common(common_ancestor) shared = p.common(common_ancestor)
if shared is not None: if shared is not None:
common_ancestor = shared common_ancestor = shared
if common_ancestor is None: if common_ancestor is None:
@@ -1311,29 +1257,9 @@ def get_common_ancestor(paths):
def get_dirs_from_args(args): def get_dirs_from_args(args):
def is_option(x): return [d for d in (py.path.local(x) for x in args
return str(x).startswith('-') if not str(x).startswith("-"))
if d.exists()]
def get_file_part_from_node_id(x):
return str(x).split('::')[0]
def get_dir_from_path(path):
if path.isdir():
return path
return py.path.local(path.dirname)
# These look like paths but may not exist
possible_paths = (
py.path.local(get_file_part_from_node_id(arg))
for arg in args
if not is_option(arg)
)
return [
get_dir_from_path(path)
for path in possible_paths
if path.exists()
]
def determine_setup(inifile, args, warnfunc=None): def determine_setup(inifile, args, warnfunc=None):
@@ -1356,7 +1282,7 @@ def determine_setup(inifile, args, warnfunc=None):
rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc) rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc)
if rootdir is None: if rootdir is None:
rootdir = get_common_ancestor([py.path.local(), ancestor]) rootdir = get_common_ancestor([py.path.local(), ancestor])
is_fs_root = os.path.splitdrive(str(rootdir))[1] == '/' is_fs_root = os.path.splitdrive(str(rootdir))[1] == os.sep
if is_fs_root: if is_fs_root:
rootdir = ancestor rootdir = ancestor
return rootdir, inifile, inicfg or {} return rootdir, inifile, inicfg or {}
@@ -1378,7 +1304,7 @@ def setns(obj, dic):
else: else:
setattr(obj, name, value) setattr(obj, name, value)
obj.__all__.append(name) obj.__all__.append(name)
# if obj != pytest: #if obj != pytest:
# pytest.__all__.append(name) # pytest.__all__.append(name)
setattr(pytest, name, value) setattr(pytest, name, value)

View File

@@ -1,8 +1,10 @@
""" interactive debugging with PDB, the Python Debugger. """ """ interactive debugging with PDB, the Python Debugger. """
from __future__ import absolute_import, division, print_function from __future__ import absolute_import
import pdb import pdb
import sys import sys
import pytest
def pytest_addoption(parser): def pytest_addoption(parser):
group = parser.getgroup("general") group = parser.getgroup("general")
@@ -14,17 +16,19 @@ def pytest_addoption(parser):
help="start a custom interactive Python debugger on errors. " help="start a custom interactive Python debugger on errors. "
"For example: --pdbcls=IPython.terminal.debugger:TerminalPdb") "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb")
def pytest_namespace():
return {'set_trace': pytestPDB().set_trace}
def pytest_configure(config): def pytest_configure(config):
if config.getvalue("usepdb_cls"): if config.getvalue("usepdb") or config.getvalue("usepdb_cls"):
modname, classname = config.getvalue("usepdb_cls").split(":")
__import__(modname)
pdb_cls = getattr(sys.modules[modname], classname)
else:
pdb_cls = pdb.Pdb
if config.getvalue("usepdb"):
config.pluginmanager.register(PdbInvoke(), 'pdbinvoke') config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
if config.getvalue("usepdb_cls"):
modname, classname = config.getvalue("usepdb_cls").split(":")
__import__(modname)
pdb_cls = getattr(sys.modules[modname], classname)
else:
pdb_cls = pdb.Pdb
pytestPDB._pdb_cls = pdb_cls
old = (pdb.set_trace, pytestPDB._pluginmanager) old = (pdb.set_trace, pytestPDB._pluginmanager)
@@ -33,33 +37,30 @@ def fin():
pytestPDB._config = None pytestPDB._config = None
pytestPDB._pdb_cls = pdb.Pdb pytestPDB._pdb_cls = pdb.Pdb
pdb.set_trace = pytestPDB.set_trace pdb.set_trace = pytest.set_trace
pytestPDB._pluginmanager = config.pluginmanager pytestPDB._pluginmanager = config.pluginmanager
pytestPDB._config = config pytestPDB._config = config
pytestPDB._pdb_cls = pdb_cls
config._cleanup.append(fin) config._cleanup.append(fin)
class pytestPDB: class pytestPDB:
""" Pseudo PDB that defers to the real pdb. """ """ Pseudo PDB that defers to the real pdb. """
_pluginmanager = None _pluginmanager = None
_config = None _config = None
_pdb_cls = pdb.Pdb _pdb_cls = pdb.Pdb
@classmethod def set_trace(self):
def set_trace(cls):
""" invoke PDB set_trace debugging, dropping any IO capturing. """ """ invoke PDB set_trace debugging, dropping any IO capturing. """
import _pytest.config import _pytest.config
frame = sys._getframe().f_back frame = sys._getframe().f_back
if cls._pluginmanager is not None: if self._pluginmanager is not None:
capman = cls._pluginmanager.getplugin("capturemanager") capman = self._pluginmanager.getplugin("capturemanager")
if capman: if capman:
capman.suspendcapture(in_=True) capman.suspendcapture(in_=True)
tw = _pytest.config.create_terminal_writer(cls._config) tw = _pytest.config.create_terminal_writer(self._config)
tw.line() tw.line()
tw.sep(">", "PDB set_trace (IO-capturing turned off)") tw.sep(">", "PDB set_trace (IO-capturing turned off)")
cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config) self._pluginmanager.hook.pytest_enter_pdb(config=self._config)
cls._pdb_cls().set_trace(frame) self._pdb_cls().set_trace(frame)
class PdbInvoke: class PdbInvoke:
@@ -73,7 +74,7 @@ def pytest_exception_interact(self, node, call, report):
def pytest_internalerror(self, excrepr, excinfo): def pytest_internalerror(self, excrepr, excinfo):
for line in str(excrepr).split("\n"): for line in str(excrepr).split("\n"):
sys.stderr.write("INTERNALERROR> %s\n" % line) sys.stderr.write("INTERNALERROR> %s\n" %line)
sys.stderr.flush() sys.stderr.flush()
tb = _postmortem_traceback(excinfo) tb = _postmortem_traceback(excinfo)
post_mortem(tb) post_mortem(tb)

View File

@@ -5,15 +5,10 @@
Keeping it in a central location makes it easy to track what is deprecated and should Keeping it in a central location makes it easy to track what is deprecated and should
be removed when the time comes. be removed when the time comes.
""" """
from __future__ import absolute_import, division, print_function
class RemovedInPytest4Warning(DeprecationWarning):
"""warning class for features removed in pytest 4.0"""
MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \ MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \
'pass a list of arguments instead.' 'pass a list of arguments instead.'
YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0' YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0'
@@ -26,17 +21,4 @@ class RemovedInPytest4Warning(DeprecationWarning):
GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue" GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue"
RESULT_LOG = ( RESULT_LOG = '--result-log is deprecated and scheduled for removal in pytest 4.0'
'--result-log is deprecated and scheduled for removal in pytest 4.0.\n'
'See https://docs.pytest.org/en/latest/usage.html#creating-resultlog-format-files for more information.'
)
MARK_INFO_ATTRIBUTE = RemovedInPytest4Warning(
"MarkInfo objects are deprecated as they contain the merged marks"
)
MARK_PARAMETERSET_UNPACKING = RemovedInPytest4Warning(
"Applying marks directly to parameters is deprecated,"
" please use pytest.param(..., marks=...) instead.\n"
"For more details, see: https://docs.pytest.org/en/latest/parametrize.html"
)

View File

@@ -1,5 +1,5 @@
""" discover and run doctests in modules and test files.""" """ discover and run doctests in modules and test files."""
from __future__ import absolute_import, division, print_function from __future__ import absolute_import
import traceback import traceback
@@ -22,29 +22,27 @@
DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
) )
def pytest_addoption(parser): def pytest_addoption(parser):
parser.addini('doctest_optionflags', 'option flags for doctests', parser.addini('doctest_optionflags', 'option flags for doctests',
type="args", default=["ELLIPSIS"]) type="args", default=["ELLIPSIS"])
parser.addini("doctest_encoding", 'encoding used for doctest files', default="utf-8")
group = parser.getgroup("collect") group = parser.getgroup("collect")
group.addoption("--doctest-modules", group.addoption("--doctest-modules",
action="store_true", default=False, action="store_true", default=False,
help="run doctests in all .py modules", help="run doctests in all .py modules",
dest="doctestmodules") dest="doctestmodules")
group.addoption("--doctest-report", group.addoption("--doctest-report",
type=str.lower, default="udiff", type=str.lower, default="udiff",
help="choose another output format for diffs on doctest failure", help="choose another output format for diffs on doctest failure",
choices=DOCTEST_REPORT_CHOICES, choices=DOCTEST_REPORT_CHOICES,
dest="doctestreport") dest="doctestreport")
group.addoption("--doctest-glob", group.addoption("--doctest-glob",
action="append", default=[], metavar="pat", action="append", default=[], metavar="pat",
help="doctests file matching pattern, default: test*.txt", help="doctests file matching pattern, default: test*.txt",
dest="doctestglob") dest="doctestglob")
group.addoption("--doctest-ignore-import-errors", group.addoption("--doctest-ignore-import-errors",
action="store_true", default=False, action="store_true", default=False,
help="ignore doctest ImportErrors", help="ignore doctest ImportErrors",
dest="doctest_ignore_import_errors") dest="doctest_ignore_import_errors")
def pytest_collect_file(path, parent): def pytest_collect_file(path, parent):
@@ -120,7 +118,7 @@ def repr_failure(self, excinfo):
lines = ["%03d %s" % (i + test.lineno + 1, x) lines = ["%03d %s" % (i + test.lineno + 1, x)
for (i, x) in enumerate(lines)] for (i, x) in enumerate(lines)]
# trim docstring error lines to 10 # trim docstring error lines to 10
lines = lines[max(example.lineno - 9, 0):example.lineno + 1] lines = lines[example.lineno - 9:example.lineno + 1]
else: else:
lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example'] lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example']
indent = '>>>' indent = '>>>'
@@ -129,18 +127,18 @@ def repr_failure(self, excinfo):
indent = '...' indent = '...'
if excinfo.errisinstance(doctest.DocTestFailure): if excinfo.errisinstance(doctest.DocTestFailure):
lines += checker.output_difference(example, lines += checker.output_difference(example,
doctestfailure.got, report_choice).split("\n") doctestfailure.got, report_choice).split("\n")
else: else:
inner_excinfo = ExceptionInfo(excinfo.value.exc_info) inner_excinfo = ExceptionInfo(excinfo.value.exc_info)
lines += ["UNEXPECTED EXCEPTION: %s" % lines += ["UNEXPECTED EXCEPTION: %s" %
repr(inner_excinfo.value)] repr(inner_excinfo.value)]
lines += traceback.format_exception(*excinfo.value.exc_info) lines += traceback.format_exception(*excinfo.value.exc_info)
return ReprFailDoctest(reprlocation, lines) return ReprFailDoctest(reprlocation, lines)
else: else:
return super(DoctestItem, self).repr_failure(excinfo) return super(DoctestItem, self).repr_failure(excinfo)
def reportinfo(self): def reportinfo(self):
return self.fspath, self.dtest.lineno, "[doctest] %s" % self.name return self.fspath, None, "[doctest] %s" % self.name
def _get_flag_lookup(): def _get_flag_lookup():
@@ -173,16 +171,15 @@ def collect(self):
# inspired by doctest.testfile; ideally we would use it directly, # inspired by doctest.testfile; ideally we would use it directly,
# but it doesn't support passing a custom checker # but it doesn't support passing a custom checker
encoding = self.config.getini("doctest_encoding") text = self.fspath.read()
text = self.fspath.read_text(encoding)
filename = str(self.fspath) filename = str(self.fspath)
name = self.fspath.basename name = self.fspath.basename
globs = {'__name__': '__main__'} globs = {'__name__': '__main__'}
optionflags = get_optionflags(self) optionflags = get_optionflags(self)
runner = doctest.DebugRunner(verbose=0, optionflags=optionflags, runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
checker=_get_checker()) checker=_get_checker())
_fix_spoof_python2(runner, encoding)
parser = doctest.DocTestParser() parser = doctest.DocTestParser()
test = parser.get_doctest(text, globs, name, filename, 0) test = parser.get_doctest(text, globs, name, filename, 0)
@@ -218,7 +215,6 @@ def collect(self):
optionflags = get_optionflags(self) optionflags = get_optionflags(self)
runner = doctest.DebugRunner(verbose=0, optionflags=optionflags, runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
checker=_get_checker()) checker=_get_checker())
for test in finder.find(module, module.__name__): for test in finder.find(module, module.__name__):
if test.examples: # skip empty doctests if test.examples: # skip empty doctests
yield DoctestItem(test.name, self, runner, test) yield DoctestItem(test.name, self, runner, test)
@@ -327,33 +323,6 @@ def _get_report_choice(key):
DOCTEST_REPORT_CHOICE_NONE: 0, DOCTEST_REPORT_CHOICE_NONE: 0,
}[key] }[key]
def _fix_spoof_python2(runner, encoding):
"""
Installs a "SpoofOut" into the given DebugRunner so it properly deals with unicode output. This
should patch only doctests for text files because they don't have a way to declare their
encoding. Doctests in docstrings from Python modules don't have the same problem given that
Python already decoded the strings.
This fixes the problem related in issue #2434.
"""
from _pytest.compat import _PY2
if not _PY2:
return
from doctest import _SpoofOut
class UnicodeSpoof(_SpoofOut):
def getvalue(self):
result = _SpoofOut.getvalue(self)
if encoding:
result = result.decode(encoding)
return result
runner._fakeout = UnicodeSpoof()
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def doctest_namespace(): def doctest_namespace():
""" """

View File

@@ -1,39 +1,22 @@
from __future__ import absolute_import, division, print_function
import inspect
import sys import sys
import warnings
import py
from py._code.code import FormattedExcinfo from py._code.code import FormattedExcinfo
import py
import pytest
import warnings
import inspect
import _pytest import _pytest
from _pytest import nodes
from _pytest._code.code import TerminalRepr from _pytest._code.code import TerminalRepr
from _pytest.compat import ( from _pytest.compat import (
NOTSET, exc_clear, _format_args, NOTSET, exc_clear, _format_args,
getfslineno, get_real_func, getfslineno, get_real_func,
is_generator, isclass, getimfunc, is_generator, isclass, getimfunc,
getlocation, getfuncargnames, getlocation, getfuncargnames,
safe_getattr,
FuncargnamesCompatAttr,
) )
from _pytest.outcomes import fail, TEST_OUTCOME
if sys.version_info[:2] == (2, 6):
from ordereddict import OrderedDict
else:
from collections import OrderedDict # nopyqver
def pytest_sessionstart(session): def pytest_sessionstart(session):
import _pytest.python
scopename2class.update({
'class': _pytest.python.Class,
'module': _pytest.python.Module,
'function': _pytest.main.Item,
})
session._fixturemanager = FixtureManager(session) session._fixturemanager = FixtureManager(session)
@@ -46,7 +29,6 @@ def pytest_sessionstart(session):
scope2props["instance"] = scope2props["class"] + ("instance", ) scope2props["instance"] = scope2props["class"] + ("instance", )
scope2props["function"] = scope2props["instance"] + ("function", "keywords") scope2props["function"] = scope2props["instance"] + ("function", "keywords")
def scopeproperty(name=None, doc=None): def scopeproperty(name=None, doc=None):
def decoratescope(func): def decoratescope(func):
scopename = name or func.__name__ scopename = name or func.__name__
@@ -61,6 +43,19 @@ def provide(self):
return decoratescope return decoratescope
def pytest_namespace():
scopename2class.update({
'class': pytest.Class,
'module': pytest.Module,
'function': pytest.Item,
})
return {
'fixture': fixture,
'yield_fixture': yield_fixture,
'collect': {'_fillfuncargs': fillfixtures}
}
def get_scope_node(node, scope): def get_scope_node(node, scope):
cls = scopename2class.get(scope) cls = scopename2class.get(scope)
if cls is None: if cls is None:
@@ -78,7 +73,7 @@ def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
# XXX we can probably avoid this algorithm if we modify CallSpec2 # XXX we can probably avoid this algorithm if we modify CallSpec2
# to directly care for creating the fixturedefs within its methods. # to directly care for creating the fixturedefs within its methods.
if not metafunc._calls[0].funcargs: if not metafunc._calls[0].funcargs:
return # this function call does not have direct parametrization return # this function call does not have direct parametrization
# collect funcargs of all callspecs into a list of values # collect funcargs of all callspecs into a list of values
arg2params = {} arg2params = {}
arg2scope = {} arg2scope = {}
@@ -108,32 +103,36 @@ def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
if scope != "function": if scope != "function":
node = get_scope_node(collector, scope) node = get_scope_node(collector, scope)
if node is None: if node is None:
assert scope == "class" and isinstance(collector, _pytest.python.Module) assert scope == "class" and isinstance(collector, pytest.Module)
# use module-level collector for class-scope (for now) # use module-level collector for class-scope (for now)
node = collector node = collector
if node and argname in node._name2pseudofixturedef: if node and argname in node._name2pseudofixturedef:
arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]] arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
else: else:
fixturedef = FixtureDef(fixturemanager, '', argname, fixturedef = FixtureDef(fixturemanager, '', argname,
get_direct_param_fixture_func, get_direct_param_fixture_func,
arg2scope[argname], arg2scope[argname],
valuelist, False, False) valuelist, False, False)
arg2fixturedefs[argname] = [fixturedef] arg2fixturedefs[argname] = [fixturedef]
if node is not None: if node is not None:
node._name2pseudofixturedef[argname] = fixturedef node._name2pseudofixturedef[argname] = fixturedef
def getfixturemarker(obj): def getfixturemarker(obj):
""" return fixturemarker or None if it doesn't exist or raised """ return fixturemarker or None if it doesn't exist or raised
exceptions.""" exceptions."""
try: try:
return getattr(obj, "_pytestfixturefunction", None) return getattr(obj, "_pytestfixturefunction", None)
except TEST_OUTCOME: except KeyboardInterrupt:
raise
except Exception:
# some objects raise errors like request (from flask import request) # some objects raise errors like request (from flask import request)
# we don't expect them to be fixture functions # we don't expect them to be fixture functions
return None return None
def get_parametrized_fixture_keys(item, scopenum): def get_parametrized_fixture_keys(item, scopenum):
""" return list of keys for all parametrized arguments which match """ return list of keys for all parametrized arguments which match
the specified scope. """ the specified scope. """
@@ -143,10 +142,10 @@ def get_parametrized_fixture_keys(item, scopenum):
except AttributeError: except AttributeError:
pass pass
else: else:
# cs.indices.items() is random order of argnames. Need to # cs.indictes.items() is random order of argnames but
# sort this so that different calls to # then again different functions (items) can change order of
# get_parametrized_fixture_keys will be deterministic. # arguments so it doesn't matter much probably
for argname, param_index in sorted(cs.indices.items()): for argname, param_index in cs.indices.items():
if cs._arg2scopenum[argname] != scopenum: if cs._arg2scopenum[argname] != scopenum:
continue continue
if scopenum == 0: # session if scopenum == 0: # session
@@ -168,21 +167,20 @@ def reorder_items(items):
for scopenum in range(0, scopenum_function): for scopenum in range(0, scopenum_function):
argkeys_cache[scopenum] = d = {} argkeys_cache[scopenum] = d = {}
for item in items: for item in items:
keys = OrderedDict.fromkeys(get_parametrized_fixture_keys(item, scopenum)) keys = set(get_parametrized_fixture_keys(item, scopenum))
if keys: if keys:
d[item] = keys d[item] = keys
return reorder_items_atscope(items, set(), argkeys_cache, 0) return reorder_items_atscope(items, set(), argkeys_cache, 0)
def reorder_items_atscope(items, ignore, argkeys_cache, scopenum): def reorder_items_atscope(items, ignore, argkeys_cache, scopenum):
if scopenum >= scopenum_function or len(items) < 3: if scopenum >= scopenum_function or len(items) < 3:
return items return items
items_done = [] items_done = []
while 1: while 1:
items_before, items_same, items_other, newignore = \ items_before, items_same, items_other, newignore = \
slice_items(items, ignore, argkeys_cache[scopenum]) slice_items(items, ignore, argkeys_cache[scopenum])
items_before = reorder_items_atscope( items_before = reorder_items_atscope(
items_before, ignore, argkeys_cache, scopenum + 1) items_before, ignore, argkeys_cache,scopenum+1)
if items_same is None: if items_same is None:
# nothing to reorder in this scope # nothing to reorder in this scope
assert items_other is None assert items_other is None
@@ -203,9 +201,9 @@ def slice_items(items, ignore, scoped_argkeys_cache):
for i, item in enumerate(it): for i, item in enumerate(it):
argkeys = scoped_argkeys_cache.get(item) argkeys = scoped_argkeys_cache.get(item)
if argkeys is not None: if argkeys is not None:
newargkeys = OrderedDict.fromkeys(k for k in argkeys if k not in ignore) argkeys = argkeys.difference(ignore)
if newargkeys: # found a slicing key if argkeys: # found a slicing key
slicing_argkey, _ = newargkeys.popitem() slicing_argkey = argkeys.pop()
items_before = items[:i] items_before = items[:i]
items_same = [item] items_same = [item]
items_other = [] items_other = []
@@ -213,7 +211,7 @@ def slice_items(items, ignore, scoped_argkeys_cache):
for item in it: for item in it:
argkeys = scoped_argkeys_cache.get(item) argkeys = scoped_argkeys_cache.get(item)
if argkeys and slicing_argkey in argkeys and \ if argkeys and slicing_argkey in argkeys and \
slicing_argkey not in ignore: slicing_argkey not in ignore:
items_same.append(item) items_same.append(item)
else: else:
items_other.append(item) items_other.append(item)
@@ -223,6 +221,17 @@ def slice_items(items, ignore, scoped_argkeys_cache):
return items, None, None, None return items, None, None, None
class FuncargnamesCompatAttr:
""" helper class so that Metafunc, Function and FixtureRequest
don't need to each define the "funcargnames" compatibility attribute.
"""
@property
def funcargnames(self):
""" alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
return self.fixturenames
def fillfixtures(function): def fillfixtures(function):
""" fill missing funcargs for a test function. """ """ fill missing funcargs for a test function. """
try: try:
@@ -245,10 +254,10 @@ def fillfixtures(function):
request._fillfixtures() request._fillfixtures()
def get_direct_param_fixture_func(request): def get_direct_param_fixture_func(request):
return request.param return request.param
class FuncFixtureInfo: class FuncFixtureInfo:
def __init__(self, argnames, names_closure, name2fixturedefs): def __init__(self, argnames, names_closure, name2fixturedefs):
self.argnames = argnames self.argnames = argnames
@@ -287,6 +296,7 @@ def node(self):
""" underlying collection node (depends on current request scope)""" """ underlying collection node (depends on current request scope)"""
return self._getscopeitem(self.scope) return self._getscopeitem(self.scope)
def _getnextfixturedef(self, argname): def _getnextfixturedef(self, argname):
fixturedefs = self._arg2fixturedefs.get(argname, None) fixturedefs = self._arg2fixturedefs.get(argname, None)
if fixturedefs is None: if fixturedefs is None:
@@ -308,6 +318,7 @@ def config(self):
""" the pytest config object associated with this request. """ """ the pytest config object associated with this request. """
return self._pyfuncitem.config return self._pyfuncitem.config
@scopeproperty() @scopeproperty()
def function(self): def function(self):
""" test function object if the request has a per-function scope. """ """ test function object if the request has a per-function scope. """
@@ -316,7 +327,7 @@ def function(self):
@scopeproperty("class") @scopeproperty("class")
def cls(self): def cls(self):
""" class (can be None) where the test function was collected. """ """ class (can be None) where the test function was collected. """
clscol = self._pyfuncitem.getparent(_pytest.python.Class) clscol = self._pyfuncitem.getparent(pytest.Class)
if clscol: if clscol:
return clscol.obj return clscol.obj
@@ -334,7 +345,7 @@ def instance(self):
@scopeproperty() @scopeproperty()
def module(self): def module(self):
""" python module object where the test function was collected. """ """ python module object where the test function was collected. """
return self._pyfuncitem.getparent(_pytest.python.Module).obj return self._pyfuncitem.getparent(pytest.Module).obj
@scopeproperty() @scopeproperty()
def fspath(self): def fspath(self):
@@ -403,7 +414,7 @@ def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
:arg extrakey: added to internal caching key of (funcargname, scope). :arg extrakey: added to internal caching key of (funcargname, scope).
""" """
if not hasattr(self.config, '_setupcache'): if not hasattr(self.config, '_setupcache'):
self.config._setupcache = {} # XXX weakref? self.config._setupcache = {} # XXX weakref?
cachekey = (self.fixturename, self._getscopeitem(scope), extrakey) cachekey = (self.fixturename, self._getscopeitem(scope), extrakey)
cache = self.config._setupcache cache = self.config._setupcache
try: try:
@@ -434,8 +445,7 @@ def getfuncargvalue(self, argname):
from _pytest import deprecated from _pytest import deprecated
warnings.warn( warnings.warn(
deprecated.GETFUNCARGVALUE, deprecated.GETFUNCARGVALUE,
DeprecationWarning, DeprecationWarning)
stacklevel=2)
return self.getfixturevalue(argname) return self.getfixturevalue(argname)
def _get_active_fixturedef(self, argname): def _get_active_fixturedef(self, argname):
@@ -460,13 +470,13 @@ class PseudoFixtureDef:
def _get_fixturestack(self): def _get_fixturestack(self):
current = self current = self
values = [] l = []
while 1: while 1:
fixturedef = getattr(current, "_fixturedef", None) fixturedef = getattr(current, "_fixturedef", None)
if fixturedef is None: if fixturedef is None:
values.reverse() l.reverse()
return values return l
values.append(fixturedef) l.append(fixturedef)
current = current._parent_request current = current._parent_request
def _getfixturevalue(self, fixturedef): def _getfixturevalue(self, fixturedef):
@@ -498,7 +508,7 @@ def _getfixturevalue(self, fixturedef):
source_lineno, source_lineno,
) )
) )
fail(msg) pytest.fail(msg)
else: else:
# indices might not be set if old-style metafunc.addcall() was used # indices might not be set if old-style metafunc.addcall() was used
param_index = funcitem.callspec.indices.get(argname, 0) param_index = funcitem.callspec.indices.get(argname, 0)
@@ -531,11 +541,11 @@ def _check_scope(self, argname, invoking_scope, requested_scope):
if scopemismatch(invoking_scope, requested_scope): if scopemismatch(invoking_scope, requested_scope):
# try to report something helpful # try to report something helpful
lines = self._factorytraceback() lines = self._factorytraceback()
fail("ScopeMismatch: You tried to access the %r scoped " pytest.fail("ScopeMismatch: You tried to access the %r scoped "
"fixture %r with a %r scoped request object, " "fixture %r with a %r scoped request object, "
"involved factories\n%s" % ( "involved factories\n%s" %(
(requested_scope, argname, invoking_scope, "\n".join(lines))), (requested_scope, argname, invoking_scope, "\n".join(lines))),
pytrace=False) pytrace=False)
def _factorytraceback(self): def _factorytraceback(self):
lines = [] lines = []
@@ -544,7 +554,7 @@ def _factorytraceback(self):
fs, lineno = getfslineno(factory) fs, lineno = getfslineno(factory)
p = self._pyfuncitem.session.fspath.bestrelpath(fs) p = self._pyfuncitem.session.fspath.bestrelpath(fs)
args = _format_args(factory) args = _format_args(factory)
lines.append("%s:%d: def %s%s" % ( lines.append("%s:%d: def %s%s" %(
p, lineno, factory.__name__, args)) p, lineno, factory.__name__, args))
return lines return lines
@@ -560,13 +570,12 @@ def _getscopeitem(self, scope):
return node return node
def __repr__(self): def __repr__(self):
return "<FixtureRequest for %r>" % (self.node) return "<FixtureRequest for %r>" %(self.node)
class SubRequest(FixtureRequest): class SubRequest(FixtureRequest):
""" a sub request for handling getting a fixture from a """ a sub request for handling getting a fixture from a
test function/fixture. """ test function/fixture. """
def __init__(self, request, scope, param, param_index, fixturedef): def __init__(self, request, scope, param, param_index, fixturedef):
self._parent_request = request self._parent_request = request
self.fixturename = fixturedef.argname self.fixturename = fixturedef.argname
@@ -575,8 +584,9 @@ def __init__(self, request, scope, param, param_index, fixturedef):
self.param_index = param_index self.param_index = param_index
self.scope = scope self.scope = scope
self._fixturedef = fixturedef self._fixturedef = fixturedef
self.addfinalizer = fixturedef.addfinalizer
self._pyfuncitem = request._pyfuncitem self._pyfuncitem = request._pyfuncitem
self._fixture_values = request._fixture_values self._fixture_values = request._fixture_values
self._fixture_defs = request._fixture_defs self._fixture_defs = request._fixture_defs
self._arg2fixturedefs = request._arg2fixturedefs self._arg2fixturedefs = request._arg2fixturedefs
self._arg2index = request._arg2index self._arg2index = request._arg2index
@@ -585,9 +595,6 @@ def __init__(self, request, scope, param, param_index, fixturedef):
def __repr__(self): def __repr__(self):
return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem) return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
def addfinalizer(self, finalizer):
self._fixturedef.addfinalizer(finalizer)
class ScopeMismatchError(Exception): class ScopeMismatchError(Exception):
""" A fixture function tries to use a different fixture function which """ A fixture function tries to use a different fixture function which
@@ -619,7 +626,6 @@ def scope2index(scope, descr, where=None):
class FixtureLookupError(LookupError): class FixtureLookupError(LookupError):
""" could not return a requested Fixture (missing or invalid). """ """ could not return a requested Fixture (missing or invalid). """
def __init__(self, argname, request, msg=None): def __init__(self, argname, request, msg=None):
self.argname = argname self.argname = argname
self.request = request self.request = request
@@ -642,9 +648,9 @@ def formatrepr(self):
lines, _ = inspect.getsourcelines(get_real_func(function)) lines, _ = inspect.getsourcelines(get_real_func(function))
except (IOError, IndexError, TypeError): except (IOError, IndexError, TypeError):
error_msg = "file %s, line %s: source code not available" error_msg = "file %s, line %s: source code not available"
addline(error_msg % (fspath, lineno + 1)) addline(error_msg % (fspath, lineno+1))
else: else:
addline("file %s, line %s" % (fspath, lineno + 1)) addline("file %s, line %s" % (fspath, lineno+1))
for i, line in enumerate(lines): for i, line in enumerate(lines):
line = line.rstrip() line = line.rstrip()
addline(" " + line) addline(" " + line)
@@ -660,7 +666,7 @@ def formatrepr(self):
if faclist and name not in available: if faclist and name not in available:
available.append(name) available.append(name)
msg = "fixture %r not found" % (self.argname,) msg = "fixture %r not found" % (self.argname,)
msg += "\n available fixtures: %s" % (", ".join(sorted(available)),) msg += "\n available fixtures: %s" %(", ".join(sorted(available)),)
msg += "\n use 'pytest --fixtures [testpath]' for help on them." msg += "\n use 'pytest --fixtures [testpath]' for help on them."
return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
@@ -686,16 +692,15 @@ def toterminal(self, tw):
tw.line('{0} {1}'.format(FormattedExcinfo.flow_marker, tw.line('{0} {1}'.format(FormattedExcinfo.flow_marker,
line.strip()), red=True) line.strip()), red=True)
tw.line() tw.line()
tw.line("%s:%d" % (self.filename, self.firstlineno + 1)) tw.line("%s:%d" % (self.filename, self.firstlineno+1))
def fail_fixturefunc(fixturefunc, msg): def fail_fixturefunc(fixturefunc, msg):
fs, lineno = getfslineno(fixturefunc) fs, lineno = getfslineno(fixturefunc)
location = "%s:%s" % (fs, lineno + 1) location = "%s:%s" % (fs, lineno+1)
source = _pytest._code.Source(fixturefunc) source = _pytest._code.Source(fixturefunc)
fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, pytest.fail(msg + ":\n\n" + str(source.indent()) + "\n" + location,
pytrace=False) pytrace=False)
def call_fixture_func(fixturefunc, request, kwargs): def call_fixture_func(fixturefunc, request, kwargs):
yieldctx = is_generator(fixturefunc) yieldctx = is_generator(fixturefunc)
@@ -710,7 +715,7 @@ def teardown():
pass pass
else: else:
fail_fixturefunc(fixturefunc, fail_fixturefunc(fixturefunc,
"yield_fixture function has more than one 'yield'") "yield_fixture function has more than one 'yield'")
request.addfinalizer(teardown) request.addfinalizer(teardown)
else: else:
@@ -720,7 +725,6 @@ def teardown():
class FixtureDef: class FixtureDef:
""" A container for a factory definition. """ """ A container for a factory definition. """
def __init__(self, fixturemanager, baseid, argname, func, scope, params, def __init__(self, fixturemanager, baseid, argname, func, scope, params,
unittest=False, ids=None): unittest=False, ids=None):
self._fixturemanager = fixturemanager self._fixturemanager = fixturemanager
@@ -745,19 +749,10 @@ def addfinalizer(self, finalizer):
self._finalizer.append(finalizer) self._finalizer.append(finalizer)
def finish(self): def finish(self):
exceptions = []
try: try:
while self._finalizer: while self._finalizer:
try: func = self._finalizer.pop()
func = self._finalizer.pop() func()
func()
except: # noqa
exceptions.append(sys.exc_info())
if exceptions:
e = exceptions[0]
del exceptions # ensure we don't keep all frames alive because of the traceback
py.builtin._reraise(*e)
finally: finally:
ihook = self._fixturemanager.session.ihook ihook = self._fixturemanager.session.ihook
ihook.pytest_fixture_post_finalizer(fixturedef=self) ihook.pytest_fixture_post_finalizer(fixturedef=self)
@@ -795,7 +790,6 @@ def __repr__(self):
return ("<FixtureDef name=%r scope=%r baseid=%r >" % return ("<FixtureDef name=%r scope=%r baseid=%r >" %
(self.argname, self.scope, self.baseid)) (self.argname, self.scope, self.baseid))
def pytest_fixture_setup(fixturedef, request): def pytest_fixture_setup(fixturedef, request):
""" Execution of fixture setup. """ """ Execution of fixture setup. """
kwargs = {} kwargs = {}
@@ -821,7 +815,7 @@ def pytest_fixture_setup(fixturedef, request):
my_cache_key = request.param_index my_cache_key = request.param_index
try: try:
result = call_fixture_func(fixturefunc, request, kwargs) result = call_fixture_func(fixturefunc, request, kwargs)
except TEST_OUTCOME: except Exception:
fixturedef.cached_result = (None, my_cache_key, sys.exc_info()) fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
raise raise
fixturedef.cached_result = (result, my_cache_key, None) fixturedef.cached_result = (result, my_cache_key, None)
@@ -839,16 +833,17 @@ def __init__(self, scope, params, autouse=False, ids=None, name=None):
def __call__(self, function): def __call__(self, function):
if isclass(function): if isclass(function):
raise ValueError( raise ValueError(
"class fixtures not supported (may be in the future)") "class fixtures not supported (may be in the future)")
function._pytestfixturefunction = self function._pytestfixturefunction = self
return function return function
def fixture(scope="function", params=None, autouse=False, ids=None, name=None): def fixture(scope="function", params=None, autouse=False, ids=None, name=None):
""" (return a) decorator to mark a fixture factory function. """ (return a) decorator to mark a fixture factory function.
This decorator can be used (with or without parameters) to define a This decorator can be used (with or or without parameters) to define
fixture function. The name of the fixture function can later be a fixture function. The name of the fixture function can later be
referenced to cause its invocation ahead of running tests: test referenced to cause its invocation ahead of running tests: test
modules or classes can use the pytest.mark.usefixtures(fixturename) modules or classes can use the pytest.mark.usefixtures(fixturename)
marker. Test functions can directly use fixture names as input marker. Test functions can directly use fixture names as input
@@ -867,25 +862,25 @@ def fixture(scope="function", params=None, autouse=False, ids=None, name=None):
reference is needed to activate the fixture. reference is needed to activate the fixture.
:arg ids: list of string ids each corresponding to the params :arg ids: list of string ids each corresponding to the params
so that they are part of the test id. If no ids are provided so that they are part of the test id. If no ids are provided
they will be generated automatically from the params. they will be generated automatically from the params.
:arg name: the name of the fixture. This defaults to the name of the :arg name: the name of the fixture. This defaults to the name of the
decorated function. If a fixture is used in the same module in decorated function. If a fixture is used in the same module in
which it is defined, the function name of the fixture will be which it is defined, the function name of the fixture will be
shadowed by the function arg that requests the fixture; one way shadowed by the function arg that requests the fixture; one way
to resolve this is to name the decorated function to resolve this is to name the decorated function
``fixture_<fixturename>`` and then use ``fixture_<fixturename>`` and then use
``@pytest.fixture(name='<fixturename>')``. ``@pytest.fixture(name='<fixturename>')``.
Fixtures can optionally provide their values to test functions using a ``yield`` statement, Fixtures can optionally provide their values to test functions using a ``yield`` statement,
instead of ``return``. In this case, the code block after the ``yield`` statement is executed instead of ``return``. In this case, the code block after the ``yield`` statement is executed
as teardown code regardless of the test outcome. A fixture function must yield exactly once. as teardown code regardless of the test outcome. A fixture function must yield exactly once.
""" """
if callable(scope) and params is None and autouse is False: if callable(scope) and params is None and autouse == False:
# direct decoration # direct decoration
return FixtureFunctionMarker( return FixtureFunctionMarker(
"function", params, autouse, name=name)(scope) "function", params, autouse, name=name)(scope)
if params is not None and not isinstance(params, (list, tuple)): if params is not None and not isinstance(params, (list, tuple)):
params = list(params) params = list(params)
return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
@@ -900,7 +895,7 @@ def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=N
if callable(scope) and params is None and not autouse: if callable(scope) and params is None and not autouse:
# direct decoration # direct decoration
return FixtureFunctionMarker( return FixtureFunctionMarker(
"function", params, autouse, ids=ids, name=name)(scope) "function", params, autouse, ids=ids, name=name)(scope)
else: else:
return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
@@ -959,9 +954,14 @@ def __init__(self, session):
self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))] self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
session.config.pluginmanager.register(self, "funcmanage") session.config.pluginmanager.register(self, "funcmanage")
def getfixtureinfo(self, node, func, cls, funcargs=True): def getfixtureinfo(self, node, func, cls, funcargs=True):
if funcargs and not hasattr(node, "nofuncargs"): if funcargs and not hasattr(node, "nofuncargs"):
argnames = getfuncargnames(func, cls=cls) if cls is not None:
startindex = 1
else:
startindex = None
argnames = getfuncargnames(func, startindex)
else: else:
argnames = () argnames = ()
usefixtures = getattr(func, "usefixtures", None) usefixtures = getattr(func, "usefixtures", None)
@@ -985,8 +985,8 @@ def pytest_plugin_registered(self, plugin):
# by their test id) # by their test id)
if p.basename.startswith("conftest.py"): if p.basename.startswith("conftest.py"):
nodeid = p.dirpath().relto(self.config.rootdir) nodeid = p.dirpath().relto(self.config.rootdir)
if p.sep != nodes.SEP: if p.sep != "/":
nodeid = nodeid.replace(p.sep, nodes.SEP) nodeid = nodeid.replace(p.sep, "/")
self.parsefactories(plugin, nodeid) self.parsefactories(plugin, nodeid)
def _getautousenames(self, nodeid): def _getautousenames(self, nodeid):
@@ -996,7 +996,7 @@ def _getautousenames(self, nodeid):
if nodeid.startswith(baseid): if nodeid.startswith(baseid):
if baseid: if baseid:
i = len(baseid) i = len(baseid)
nextchar = nodeid[i:i + 1] nextchar = nodeid[i:i+1]
if nextchar and nextchar not in ":/": if nextchar and nextchar not in ":/":
continue continue
autousenames.extend(basenames) autousenames.extend(basenames)
@@ -1041,14 +1041,9 @@ def pytest_generate_tests(self, metafunc):
if faclist: if faclist:
fixturedef = faclist[-1] fixturedef = faclist[-1]
if fixturedef.params is not None: if fixturedef.params is not None:
parametrize_func = getattr(metafunc.function, 'parametrize', None) func_params = getattr(getattr(metafunc.function, 'parametrize', None), 'args', [[None]])
func_params = getattr(parametrize_func, 'args', [[None]])
func_kwargs = getattr(parametrize_func, 'kwargs', {})
# skip directly parametrized arguments # skip directly parametrized arguments
if "argnames" in func_kwargs: argnames = func_params[0]
argnames = parametrize_func.kwargs["argnames"]
else:
argnames = func_params[0]
if not isinstance(argnames, (tuple, list)): if not isinstance(argnames, (tuple, list)):
argnames = [x.strip() for x in argnames.split(",") if x.strip()] argnames = [x.strip() for x in argnames.split(",") if x.strip()]
if argname not in func_params and argname not in argnames: if argname not in func_params and argname not in argnames:
@@ -1073,9 +1068,7 @@ def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
self._holderobjseen.add(holderobj) self._holderobjseen.add(holderobj)
autousenames = [] autousenames = []
for name in dir(holderobj): for name in dir(holderobj):
# The attribute can be an arbitrary descriptor, so the attribute obj = getattr(holderobj, name, None)
# access below can raise. safe_getatt() ignores such exceptions.
obj = safe_getattr(holderobj, name, None)
# fixture functions have a pytest_funcarg__ prefix (pre-2.3 style) # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
# or are "@pytest.fixture" marked # or are "@pytest.fixture" marked
marker = getfixturemarker(obj) marker = getfixturemarker(obj)
@@ -1086,7 +1079,7 @@ def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
continue continue
marker = defaultfuncargprefixmarker marker = defaultfuncargprefixmarker
from _pytest import deprecated from _pytest import deprecated
self.config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name), nodeid=nodeid) self.config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name))
name = name[len(self._argprefix):] name = name[len(self._argprefix):]
elif not isinstance(marker, FixtureFunctionMarker): elif not isinstance(marker, FixtureFunctionMarker):
# magic globals with __getattr__ might have got us a wrong # magic globals with __getattr__ might have got us a wrong
@@ -1136,5 +1129,6 @@ def getfixturedefs(self, argname, nodeid):
def _matchfactories(self, fixturedefs, nodeid): def _matchfactories(self, fixturedefs, nodeid):
for fixturedef in fixturedefs: for fixturedef in fixturedefs:
if nodes.ischildnode(fixturedef.baseid, nodeid): if nodeid.startswith(fixturedef.baseid):
yield fixturedef yield fixturedef

View File

@@ -2,7 +2,9 @@
Provides a function to report all internal modules for using freezing tools Provides a function to report all internal modules for using freezing tools
pytest pytest
""" """
from __future__ import absolute_import, division, print_function
def pytest_namespace():
return {'freeze_includes': freeze_includes}
def freeze_includes(): def freeze_includes():

View File

@@ -1,61 +1,25 @@
""" version info, help messages, tracing configuration. """ """ version info, help messages, tracing configuration. """
from __future__ import absolute_import, division, print_function
import py import py
import pytest import pytest
from _pytest.config import PrintHelp import os, sys
import os
import sys
from argparse import Action
class HelpAction(Action):
"""This is an argparse Action that will raise an exception in
order to skip the rest of the argument parsing when --help is passed.
This prevents argparse from quitting due to missing required arguments
when any are defined, for example by ``pytest_addoption``.
This is similar to the way that the builtin argparse --help option is
implemented by raising SystemExit.
"""
def __init__(self,
option_strings,
dest=None,
default=False,
help=None):
super(HelpAction, self).__init__(
option_strings=option_strings,
dest=dest,
const=True,
default=default,
nargs=0,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, self.const)
# We should only skip the rest of the parsing after preparse is done
if getattr(parser._parser, 'after_preparse', False):
raise PrintHelp
def pytest_addoption(parser): def pytest_addoption(parser):
group = parser.getgroup('debugconfig') group = parser.getgroup('debugconfig')
group.addoption('--version', action="store_true", group.addoption('--version', action="store_true",
help="display pytest lib version and import information.") help="display pytest lib version and import information.")
group._addoption("-h", "--help", action=HelpAction, dest="help", group._addoption("-h", "--help", action="store_true", dest="help",
help="show help message and configuration info") help="show help message and configuration info")
group._addoption('-p', action="append", dest="plugins", default=[], group._addoption('-p', action="append", dest="plugins", default = [],
metavar="name", metavar="name",
help="early-load given plugin (multi-allowed). " help="early-load given plugin (multi-allowed). "
"To avoid loading of plugins, use the `no:` prefix, e.g. " "To avoid loading of plugins, use the `no:` prefix, e.g. "
"`no:doctest`.") "`no:doctest`.")
group.addoption('--traceconfig', '--trace-config', group.addoption('--traceconfig', '--trace-config',
action="store_true", default=False, action="store_true", default=False,
help="trace considerations of conftest.py files."), help="trace considerations of conftest.py files."),
group.addoption('--debug', group.addoption('--debug',
action="store_true", dest="debug", default=False, action="store_true", dest="debug", default=False,
help="store internal tracing debug information in 'pytestdebug.log'.") help="store internal tracing debug information in 'pytestdebug.log'.")
group._addoption( group._addoption(
'-o', '--override-ini', nargs='*', dest="override_ini", '-o', '--override-ini', nargs='*', dest="override_ini",
action="append", action="append",
@@ -70,10 +34,10 @@ def pytest_cmdline_parse():
path = os.path.abspath("pytestdebug.log") path = os.path.abspath("pytestdebug.log")
debugfile = open(path, 'w') debugfile = open(path, 'w')
debugfile.write("versions pytest-%s, py-%s, " debugfile.write("versions pytest-%s, py-%s, "
"python-%s\ncwd=%s\nargs=%s\n\n" % ( "python-%s\ncwd=%s\nargs=%s\n\n" %(
pytest.__version__, py.__version__, pytest.__version__, py.__version__,
".".join(map(str, sys.version_info)), ".".join(map(str, sys.version_info)),
os.getcwd(), config._origargs)) os.getcwd(), config._origargs))
config.trace.root.setwriter(debugfile.write) config.trace.root.setwriter(debugfile.write)
undo_tracing = config.pluginmanager.enable_tracing() undo_tracing = config.pluginmanager.enable_tracing()
sys.stderr.write("writing pytestdebug information to %s\n" % path) sys.stderr.write("writing pytestdebug information to %s\n" % path)
@@ -87,12 +51,11 @@ def unset_tracing():
config.add_cleanup(unset_tracing) config.add_cleanup(unset_tracing)
def pytest_cmdline_main(config): def pytest_cmdline_main(config):
if config.option.version: if config.option.version:
p = py.path.local(pytest.__file__) p = py.path.local(pytest.__file__)
sys.stderr.write("This is pytest version %s, imported from %s\n" % sys.stderr.write("This is pytest version %s, imported from %s\n" %
(pytest.__version__, p)) (pytest.__version__, p))
plugininfo = getpluginversioninfo(config) plugininfo = getpluginversioninfo(config)
if plugininfo: if plugininfo:
for line in plugininfo: for line in plugininfo:
@@ -104,7 +67,6 @@ def pytest_cmdline_main(config):
config._ensure_unconfigure() config._ensure_unconfigure()
return 0 return 0
def showhelp(config): def showhelp(config):
reporter = config.pluginmanager.get_plugin('terminalreporter') reporter = config.pluginmanager.get_plugin('terminalreporter')
tw = reporter._tw tw = reporter._tw
@@ -120,7 +82,7 @@ def showhelp(config):
if type is None: if type is None:
type = "string" type = "string"
spec = "%s (%s)" % (name, type) spec = "%s (%s)" % (name, type)
line = " %-24s %s" % (spec, help) line = " %-24s %s" %(spec, help)
tw.line(line[:tw.fullwidth]) tw.line(line[:tw.fullwidth])
tw.line() tw.line()
@@ -149,7 +111,6 @@ def showhelp(config):
('pytest_plugins', 'list of plugin names to load'), ('pytest_plugins', 'list of plugin names to load'),
] ]
def getpluginversioninfo(config): def getpluginversioninfo(config):
lines = [] lines = []
plugininfo = config.pluginmanager.list_plugin_distinfo() plugininfo = config.pluginmanager.list_plugin_distinfo()
@@ -161,12 +122,11 @@ def getpluginversioninfo(config):
lines.append(" " + content) lines.append(" " + content)
return lines return lines
def pytest_report_header(config): def pytest_report_header(config):
lines = [] lines = []
if config.option.debug or config.option.traceconfig: if config.option.debug or config.option.traceconfig:
lines.append("using: pytest-%s pylib-%s" % lines.append("using: pytest-%s pylib-%s" %
(pytest.__version__, py.__version__)) (pytest.__version__,py.__version__))
verinfo = getpluginversioninfo(config) verinfo = getpluginversioninfo(config)
if verinfo: if verinfo:
@@ -180,5 +140,5 @@ def pytest_report_header(config):
r = plugin.__file__ r = plugin.__file__
else: else:
r = repr(plugin) r = repr(plugin)
lines.append(" %-20s: %s" % (name, r)) lines.append(" %-20s: %s" %(name, r))
return lines return lines

Some files were not shown because too many files have changed in this diff Show More